improve rcpsp_sat.py; add optional model to knapsack_2d_sat.py; minor change to hidato_sat.py

This commit is contained in:
Laurent Perron
2021-12-13 10:46:45 +01:00
parent 6f5052f668
commit 93934e3660
3 changed files with 128 additions and 36 deletions

View File

@@ -13,6 +13,7 @@
# limitations under the License.
"""Solves the Hidato problem with the CP-SAT solver."""
from absl import app
from ortools.sat.python import visualization
from ortools.sat.python import cp_model
@@ -188,5 +189,10 @@ def solve_hidato(puzzle, index):
print(' - wall time : %f s' % solver.WallTime())
for pb in range(1, 7):
solve_hidato(build_puzzle(pb), pb)
def main(_):
    """Builds and solves each of the six Hidato puzzles in turn."""
    for puzzle_index in range(1, 7):
        solve_hidato(build_puzzle(puzzle_index), puzzle_index)


if __name__ == '__main__':
    app.run(main)

View File

@@ -34,7 +34,8 @@ flags.DEFINE_string('output_proto', '',
'Output file to write the cp_model proto to.')
flags.DEFINE_string('params', 'num_search_workers:16,log_search_progress:true',
'Sat solver parameters.')
flags.DEFINE_string('model', 'rotation', '\'duplicate\' or \'rotation\'')
flags.DEFINE_string('model', 'rotation',
'\'duplicate\' or \'rotation\' or \'optional\'')
def build_data():
@@ -156,6 +157,98 @@ def solve_with_duplicate_items(data, max_height, max_width):
print(data)
def solve_with_duplicate_optional_items(data, max_height, max_width):
    """Solves the 2D knapsack with two optional items (rotated or not) per item.

    Each physical item is expanded into two optional model items (original and
    90-degree rotated orientation); at most one of the pair may be selected.

    Args:
      data: pandas DataFrame with 'width', 'height', 'available' and 'value'
        columns, one row per item type.
      max_height: height of the container.
      max_width: width of the container.
    """
    # Derived data (expanded to individual items).
    data_widths = data['width'].to_numpy()
    data_heights = data['height'].to_numpy()
    data_availability = data['available'].to_numpy()
    data_values = data['value'].to_numpy()

    # Non duplicated items data: one entry per available copy of each type.
    base_item_widths = np.repeat(data_widths, data_availability)
    base_item_heights = np.repeat(data_heights, data_availability)
    base_item_values = np.repeat(data_values, data_availability)
    num_data_items = len(base_item_values)

    # Create rotated items by duplicating: index i and i + num_data_items
    # are the two orientations of the same physical item.
    item_widths = np.concatenate((base_item_widths, base_item_heights))
    item_heights = np.concatenate((base_item_heights, base_item_widths))
    item_values = np.concatenate((base_item_values, base_item_values))
    num_items = len(item_values)

    # OR-Tools model.
    model = cp_model.CpModel()

    # Variables.
    x_starts = []
    y_starts = []
    is_used = []
    x_intervals = []
    y_intervals = []

    for i in range(num_items):
        ## Is the item used?
        is_used.append(model.NewBoolVar(f'is_used{i}'))

        ## Item coordinates. Bounds keep the item fully inside the container.
        x_starts.append(
            model.NewIntVar(0, max_width - int(item_widths[i]), f'x_start{i}'))
        y_starts.append(
            model.NewIntVar(0, max_height - int(item_heights[i]),
                            f'y_start{i}'))

        ## Interval variables, only enforced when the item is used.
        ## Cast sizes to int for consistency with the bounds above
        ## (numpy scalars are not plain Python ints).
        x_intervals.append(
            model.NewOptionalFixedSizeIntervalVar(x_starts[i],
                                                  int(item_widths[i]),
                                                  is_used[i],
                                                  f'x_interval{i}'))
        y_intervals.append(
            model.NewOptionalFixedSizeIntervalVar(y_starts[i],
                                                  int(item_heights[i]),
                                                  is_used[i],
                                                  f'y_interval{i}'))

    # Constraints.

    ## Only one of non-rotated/rotated pair can be used.
    for i in range(num_data_items):
        model.Add(is_used[i] + is_used[i + num_data_items] <= 1)

    ## 2D no overlap.
    model.AddNoOverlap2D(x_intervals, y_intervals)

    ## Objective: maximize the total value of the selected items.
    model.Maximize(cp_model.DoubleLinearExpr.ScalProd(is_used, item_values))

    # Output proto to file.
    if FLAGS.output_proto:
        print('Writing proto to %s' % FLAGS.output_proto)
        with open(FLAGS.output_proto, 'w') as text_file:
            text_file.write(str(model))

    # Solve model.
    solver = cp_model.CpSolver()
    if FLAGS.params:
        text_format.Parse(FLAGS.params, solver.parameters)
    status = solver.Solve(model)

    # Report solution. Also accept FEASIBLE: with a time limit in the solver
    # parameters the best solution found may not be proven optimal, and the
    # original OPTIMAL-only check would silently print nothing.
    if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
        used = {i for i in range(num_items) if solver.BooleanValue(is_used[i])}
        # Local name 'result' avoids shadowing the 'data' parameter.
        result = pd.DataFrame({
            'x_start': [solver.Value(x_starts[i]) for i in used],
            'y_start': [solver.Value(y_starts[i]) for i in used],
            'item_width': [item_widths[i] for i in used],
            'item_height': [item_heights[i] for i in used],
            'x_end': [solver.Value(x_starts[i]) + item_widths[i] for i in used],
            'y_end': [
                solver.Value(y_starts[i]) + item_heights[i] for i in used
            ],
            'item_value': [item_values[i] for i in used]
        })
        print(result)
def solve_with_rotations(data, max_height, max_width):
"""Solve the problem by rotating items."""
# Derived data (expanded to individual items).
@@ -272,6 +365,8 @@ def main(_):
data, max_height, max_width = build_data()
if FLAGS.model == 'duplicate':
solve_with_duplicate_items(data, max_height, max_width)
elif FLAGS.model == 'optional':
solve_with_duplicate_optional_items(data, max_height, max_width)
else:
solve_with_rotations(data, max_height, max_width)

View File

@@ -18,7 +18,6 @@ import collections
from absl import app
from absl import flags
from google.protobuf import text_format
from ortools.scheduling import pywraprcpsp
from ortools.sat.python import cp_model
FLAGS = flags.FLAGS
@@ -131,13 +130,18 @@ def SolveRcpsp(problem, proto_file, params):
start_var = model.NewIntVar(0, horizon, f'start_of_task_{t}')
end_var = model.NewIntVar(0, horizon, f'end_of_task_{t}')
# Create one literal per recipe.
literals = [
model.NewBoolVar(f'is_present_{t}_{r}') for r in all_recipes
]
literals = []
if num_recipes > 1:
# Create one literal per recipe.
literals = [
model.NewBoolVar(f'is_present_{t}_{r}') for r in all_recipes
]
# Exactly one recipe must be performed.
model.Add(cp_model.LinearExpr.Sum(literals) == 1)
# Exactly one recipe must be performed.
model.Add(cp_model.LinearExpr.Sum(literals) == 1)
else:
literals = [1]
# Temporary data structure to fill in 0 demands.
demand_matrix = collections.defaultdict(int)
@@ -153,11 +157,11 @@ def SolveRcpsp(problem, proto_file, params):
cp_model.Domain.FromValues(task_to_recipe_durations[t]),
f'duration_of_task_{t}')
# linear encoding of the duration (link recipe literals and duration).
min_duration = min(task_to_recipe_durations[t])
shifted = [x - min_duration for x in task_to_recipe_durations[t]]
model.Add(duration_var == min_duration +
cp_model.LinearExpr.ScalProd(literals, shifted))
# Link the recipe literals and the duration_var.
for r in range(num_recipes):
model.Add(
duration_var == task_to_recipe_durations[t][r]).OnlyEnforceIf(
literals[r])
# Create the interval of the task.
task_interval = model.NewIntervalVar(start_var, duration_var, end_var,
@@ -180,11 +184,12 @@ def SolveRcpsp(problem, proto_file, params):
cp_model.Domain.FromValues(demands), f'demand_{t}_{resource}')
task_to_resource_demands[t].append(demand_var)
# linear encoding of the demand per resource.
min_demand = min(demands)
shifted = [x - min_demand for x in demands]
model.Add(demand_var == min_demand +
cp_model.LinearExpr.ScalProd(literals, shifted))
# Link the recipe literals and the demand_var.
for r in all_recipes:
model.Add(demand_var == demand_matrix[(resource,
r)]).OnlyEnforceIf(
literals[r])
resource_to_sum_of_demand_max[resource] += max(demands)
# Create makespan variable
@@ -251,25 +256,11 @@ def SolveRcpsp(problem, proto_file, params):
capacities.append(capacity)
max_cost += c * resource.unit_cost
else: # Standard renewable resource.
energies = []
for t in all_active_tasks:
literals = task_to_presence_literals[t]
fixed_energies = [
task_resource_to_fixed_demands[(t, r)][index] *
task_to_recipe_durations[t][index]
for index in range(len(literals))
]
min_energy = min(fixed_energies)
scaled_energies = [x - min_energy for x in fixed_energies]
energies.append(
min_energy +
cp_model.LinearExpr.ScalProd(literals, scaled_energies))
if FLAGS.use_interval_makespan:
intervals.append(interval_makespan)
demands.append(c)
energies.append(c * makespan_size)
model.AddCumulativeWithEnergy(intervals, demands, energies, c)
model.AddCumulative(intervals, demands, c)
else: # Non empty non renewable resource. (single mode only)
if problem.is_consumer_producer:
reservoir_starts = []