2021-05-03 12:11:39 +02:00
|
|
|
#!/usr/bin/env python3
|
2025-01-10 11:35:44 +01:00
|
|
|
# Copyright 2010-2025 Google LLC
|
2018-05-31 10:54:36 -07:00
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
|
#
|
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
#
|
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
|
# limitations under the License.
|
2023-06-30 22:49:35 +02:00
|
|
|
|
2022-08-01 17:42:31 +02:00
|
|
|
"""Sat based solver for the RCPSP problems (see rcpsp.proto).
|
|
|
|
|
|
|
|
|
|
Introduction to the problem:
|
|
|
|
|
https://www.projectmanagement.ugent.be/research/project_scheduling/rcpsp
|
|
|
|
|
|
|
|
|
|
Data used in flags:
|
|
|
|
|
http://www.om-db.wi.tum.de/psplib/data.html
|
|
|
|
|
"""
|
2018-05-31 10:54:36 -07:00
|
|
|
|
2018-09-12 15:07:23 +02:00
|
|
|
import collections
|
2018-09-08 17:56:07 +02:00
|
|
|
|
2020-11-18 10:50:14 +01:00
|
|
|
from absl import app
|
|
|
|
|
from absl import flags
|
2023-06-30 22:49:35 +02:00
|
|
|
|
2018-09-12 15:07:23 +02:00
|
|
|
from ortools.sat.python import cp_model
|
2022-08-19 09:24:27 +02:00
|
|
|
from ortools.scheduling import rcpsp_pb2
|
2023-07-03 12:46:51 +02:00
|
|
|
from ortools.scheduling.python import rcpsp
|
2022-06-16 07:39:30 +02:00
|
|
|
|
2023-06-30 22:49:35 +02:00
|
|
|
# Command-line flags: input problem file, optional CpModel proto dump,
# raw sat-solver parameters, makespan encoding choice, and a horizon override.
_INPUT = flags.DEFINE_string("input", "", "Input file to parse and solve.")
_OUTPUT_PROTO = flags.DEFINE_string(
    "output_proto", "", "Output file to write the cp_model proto to."
)
_PARAMS = flags.DEFINE_string("params", "", "Sat solver parameters.")
# When true, the makespan is enforced through a dummy interval added to every
# cumulative constraint instead of plain precedence inequalities.
_USE_INTERVAL_MAKESPAN = flags.DEFINE_bool(
    "use_interval_makespan",
    True,
    "Whether we encode the makespan using an interval or not.",
)
# -1 means "not forced": the horizon is then read from the problem or computed.
_HORIZON = flags.DEFINE_integer("horizon", -1, "Force horizon.")
|
2017-11-04 23:26:01 +01:00
|
|
|
|
|
|
|
|
|
2023-11-16 19:46:56 +01:00
|
|
|
def print_problem_statistics(problem: rcpsp_pb2.RcpspProblem):
    """Display various statistics on the problem."""
    # Determine problem type.
    if problem.is_resource_investment:
        problem_type = "Resource Investment Problem"
    else:
        problem_type = "RCPSP"
    if problem.is_rcpsp_max:
        problem_type += "/Max delay"

    num_resources = len(problem.resources)
    # We print 2 less tasks as these are sentinel tasks that are not counted in
    # the description of the rcpsp models.
    num_tasks = len(problem.tasks) - 2  # 2 sentinels.

    # Scan all tasks once and classify them.
    tasks_with_alternatives = 0
    variable_duration_tasks = 0
    tasks_with_delay = 0
    for task in problem.tasks:
        if len(task.recipes) > 1:
            tasks_with_alternatives += 1
        duration_0 = task.recipes[0].duration
        if any(recipe.duration != duration_0 for recipe in task.recipes):
            variable_duration_tasks += 1
        if task.successor_delays:
            tasks_with_delay += 1

    if problem.is_consumer_producer:
        print(f"Solving {problem_type} with:")
        print(f" - {num_resources} reservoir resources")
        print(f" - {num_tasks} tasks")
    else:
        print(f"Solving {problem_type} with:")
        print(f" - {num_resources} renewable resources")
        print(f" - {num_tasks} tasks")
    if tasks_with_alternatives:
        print(f" - {tasks_with_alternatives} tasks with alternative resources")
    if variable_duration_tasks:
        print(f" - {variable_duration_tasks} tasks with variable durations")
    if tasks_with_delay:
        print(f" - {tasks_with_delay} tasks with successor delays")
|
2021-04-13 11:59:41 +02:00
|
|
|
|
|
|
|
|
|
2023-11-16 19:46:56 +01:00
|
|
|
def solve_rcpsp(
    problem: rcpsp_pb2.RcpspProblem,
    proto_file: str,
    params: str,
    active_tasks: set[int],
    source: int,
    sink: int,
) -> None:
    """Builds and solves a given RCPSP problem in proto format.

    The model will only look at the tasks {source} + {sink} + active_tasks, and
    ignore all others.

    Args:
      problem: the description of the model to solve in protobuf format
      proto_file: the name of the file to export the CpModel proto to.
      params: the string representation of the parameters to pass to the sat
        solver.
      active_tasks: the set of active tasks to consider.
      source: the source task in the graph. Its end will be forced to 0.
      sink: the sink task of the graph. Its start is the makespan of the problem.

    Returns:
      None. Results are reported through the solver's search log.
    """
    # Create the model.
    model = cp_model.CpModel()
    model.name = problem.name

    num_resources = len(problem.resources)

    all_active_tasks = list(active_tasks)
    all_active_tasks.sort()
    all_resources = range(num_resources)

    # Horizon: deadline if given, else problem horizon, else flag override,
    # else a naive upper bound (sum of max recipe durations, plus delay slack
    # for RCPSP/Max since delays can push tasks further out).
    horizon = problem.deadline if problem.deadline != -1 else problem.horizon
    if _HORIZON.value > 0:
        horizon = _HORIZON.value
    elif horizon == -1:  # Naive computation.
        horizon = sum(max(r.duration for r in t.recipes) for t in problem.tasks)
        if problem.is_rcpsp_max:
            for t in problem.tasks:
                for sd in t.successor_delays:
                    for rd in sd.recipe_delays:
                        for d in rd.min_delays:
                            horizon += abs(d)
    print(f"Horizon = {horizon}", flush=True)

    # Containers indexed by task id (and resource index where relevant).
    task_starts = {}
    task_ends = {}
    task_durations = {}
    task_intervals = {}
    task_resource_to_energy = {}
    task_to_resource_demands = collections.defaultdict(list)

    task_to_presence_literals = collections.defaultdict(list)
    task_to_recipe_durations = collections.defaultdict(list)
    task_resource_to_fixed_demands = collections.defaultdict(dict)
    task_resource_to_max_energy = collections.defaultdict(int)

    resource_to_sum_of_demand_max = collections.defaultdict(int)

    # Create task variables.
    for t in all_active_tasks:
        task = problem.tasks[t]
        num_recipes = len(task.recipes)
        all_recipes = range(num_recipes)

        start_var = model.new_int_var(0, horizon, f"start_of_task_{t}")
        end_var = model.new_int_var(0, horizon, f"end_of_task_{t}")

        if num_recipes > 1:
            # Create one literal per recipe.
            literals = [model.new_bool_var(f"is_present_{t}_{r}") for r in all_recipes]

            # Exactly one recipe must be performed.
            model.add_exactly_one(literals)
        else:
            # Single recipe: the task is unconditionally present.
            literals = [1]

        # Temporary data structure to fill in 0 demands.
        demand_matrix = collections.defaultdict(int)

        # Scan recipes and build the demand matrix and the vector of durations.
        for recipe_index, recipe in enumerate(task.recipes):
            task_to_recipe_durations[t].append(recipe.duration)
            for demand, resource in zip(recipe.demands, recipe.resources):
                demand_matrix[(resource, recipe_index)] = demand

        # Create the duration variable from the accumulated durations.
        duration_var = model.new_int_var_from_domain(
            cp_model.Domain.from_values(task_to_recipe_durations[t]),
            f"duration_of_task_{t}",
        )

        # Link the recipe literals and the duration_var.
        for r in range(num_recipes):
            model.add(duration_var == task_to_recipe_durations[t][r]).only_enforce_if(
                literals[r]
            )

        # Create the interval of the task.
        task_interval = model.new_interval_var(
            start_var, duration_var, end_var, f"task_interval_{t}"
        )

        # Store task variables.
        task_starts[t] = start_var
        task_ends[t] = end_var
        task_durations[t] = duration_var
        task_intervals[t] = task_interval
        task_to_presence_literals[t] = literals

        # Create the demand variable of the task for each resource.
        for res in all_resources:
            demands = [demand_matrix[(res, recipe)] for recipe in all_recipes]
            task_resource_to_fixed_demands[(t, res)] = demands
            demand_var = model.new_int_var_from_domain(
                cp_model.Domain.from_values(demands), f"demand_{t}_{res}"
            )
            task_to_resource_demands[t].append(demand_var)

            # Link the recipe literals and the demand_var.
            for r in all_recipes:
                model.add(demand_var == demand_matrix[(res, r)]).only_enforce_if(
                    literals[r]
                )

            resource_to_sum_of_demand_max[res] += max(demands)

        # Create the energy expression for (task, resource).
        # NOTE(review): these two maps are filled but never read in this
        # function — presumably kept for energetic-reasoning extensions;
        # confirm before removing.
        for res in all_resources:
            task_resource_to_energy[(t, res)] = sum(
                literals[r]
                * task_to_recipe_durations[t][r]
                * task_resource_to_fixed_demands[(t, res)][r]
                for r in all_recipes
            )
            task_resource_to_max_energy[(t, res)] = max(
                task_to_recipe_durations[t][r]
                * task_resource_to_fixed_demands[(t, res)][r]
                for r in all_recipes
            )

    # Create makespan variable. The interval variant pins a dummy interval
    # [makespan, horizon + 1] that is added to cumulative constraints so that
    # no task can be scheduled after the makespan.
    makespan = model.new_int_var(0, horizon, "makespan")
    makespan_size = model.new_int_var(1, horizon, "interval_makespan_size")
    interval_makespan = model.new_interval_var(
        makespan,
        makespan_size,
        model.new_constant(horizon + 1),
        "interval_makespan",
    )

    # Add precedences.
    if problem.is_rcpsp_max:
        # In RCPSP/Max problem, precedences are given and max delay (possible
        # negative) between the starts of two tasks.
        for task_id in all_active_tasks:
            task = problem.tasks[task_id]
            num_modes = len(task.recipes)

            for successor_index, next_id in enumerate(task.successors):
                delay_matrix = task.successor_delays[successor_index]
                num_next_modes = len(problem.tasks[next_id].recipes)
                for m1 in range(num_modes):
                    s1 = task_starts[task_id]
                    p1 = task_to_presence_literals[task_id][m1]
                    if next_id == sink:
                        delay = delay_matrix.recipe_delays[m1].min_delays[0]
                        model.add(s1 + delay <= makespan).only_enforce_if(p1)
                    else:
                        for m2 in range(num_next_modes):
                            delay = delay_matrix.recipe_delays[m1].min_delays[m2]
                            s2 = task_starts[next_id]
                            p2 = task_to_presence_literals[next_id][m2]
                            model.add(s1 + delay <= s2).only_enforce_if([p1, p2])
    else:
        # Normal dependencies (task ends before the start of successors).
        for t in all_active_tasks:
            for n in problem.tasks[t].successors:
                if n == sink:
                    model.add(task_ends[t] <= makespan)
                elif n in active_tasks:
                    model.add(task_ends[t] <= task_starts[n])

    # Containers for resource investment problems.
    capacities = []  # Capacity variables for all resources.
    max_cost = 0  # Upper bound on the investment cost.

    # Create resources.
    for res in all_resources:
        resource = problem.resources[res]
        c = resource.max_capacity
        if c == -1:
            # Unspecified capacity: fall back to a trivially sufficient one.
            print(f"No capacity: {resource}")
            c = resource_to_sum_of_demand_max[res]

        # RIP problems have only renewable resources, and no makespan.
        if problem.is_resource_investment or resource.renewable:
            intervals = [task_intervals[t] for t in all_active_tasks]
            demands = [task_to_resource_demands[t][res] for t in all_active_tasks]

            if problem.is_resource_investment:
                capacity = model.new_int_var(0, c, f"capacity_of_{res}")
                model.add_cumulative(intervals, demands, capacity)
                capacities.append(capacity)
                max_cost += c * resource.unit_cost
            else:  # Standard renewable resource.
                if _USE_INTERVAL_MAKESPAN.value:
                    intervals.append(interval_makespan)
                    demands.append(c)

                model.add_cumulative(intervals, demands, c)
        else:  # Non empty non renewable resource. (single mode only)
            if problem.is_consumer_producer:
                # Reservoir semantics: tasks produce or consume at their start.
                reservoir_starts = []
                reservoir_demands = []
                for t in all_active_tasks:
                    if task_resource_to_fixed_demands[(t, res)][0]:
                        reservoir_starts.append(task_starts[t])
                        reservoir_demands.append(
                            task_resource_to_fixed_demands[(t, res)][0]
                        )
                model.add_reservoir_constraint(
                    reservoir_starts,
                    reservoir_demands,
                    resource.min_capacity,
                    resource.max_capacity,
                )
            else:  # No producer-consumer. We just sum the demands.
                model.add(
                    cp_model.LinearExpr.sum(
                        [task_to_resource_demands[t][res] for t in all_active_tasks]
                    )
                    <= c
                )

    # Objective: investment cost for RIP problems, makespan otherwise.
    if problem.is_resource_investment:
        objective = model.new_int_var(0, max_cost, "capacity_costs")
        model.add(
            objective
            == sum(
                problem.resources[i].unit_cost * capacities[i]
                for i in range(len(capacities))
            )
        )
    else:
        objective = makespan

    model.minimize(objective)

    # Add sentinels. The source is fixed at time 0 and the sink start equals
    # the makespan. (Fixed: the presence literal of the source was appended to
    # the hard-coded key 0 instead of `source`.)
    task_starts[source] = 0
    task_ends[source] = 0
    task_to_presence_literals[source].append(True)
    task_starts[sink] = makespan
    task_to_presence_literals[sink].append(True)

    # Write model to file.
    if proto_file:
        print(f"Writing proto to {proto_file}")
        model.export_to_file(proto_file)

    # Solve model.
    solver = cp_model.CpSolver()

    # Parse user specified parameters.
    if params:
        solver.parameters.parse_text_format(params)

    # Favor objective_shaving over objective_lb_search.
    if 16 <= solver.parameters.num_workers < 24:
        solver.parameters.ignore_subsolvers.append("objective_lb_search")
        solver.parameters.extra_subsolvers.append("objective_shaving")

    # Experimental: Specify the fact that the objective is a makespan
    solver.parameters.push_all_tasks_toward_start = True

    # Enable logging in the main solve.
    solver.parameters.log_search_progress = True

    # Solve the model.
    solver.solve(model)
|
2017-11-03 23:36:21 +01:00
|
|
|
|
|
|
|
|
|
2022-06-16 07:39:30 +02:00
|
|
|
def main(_):
    """Parses the flag-selected input file and solves the problem it holds."""
    parser = rcpsp.RcpspParser()
    parser.parse_file(_INPUT.value)

    problem = parser.problem()
    print_problem_statistics(problem)

    # The last task is the sink sentinel; task 0 is the source sentinel.
    sink_task = len(problem.tasks) - 1

    solve_rcpsp(
        problem=problem,
        proto_file=_OUTPUT_PROTO.value,
        params=_PARAMS.value,
        active_tasks=set(range(1, sink_task)),
        source=0,
        sink=sink_task,
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Entry point: absl parses the command-line flags, then calls main.
if __name__ == "__main__":
    app.run(main)
|