2025-01-10 11:35:44 +01:00
|
|
|
// Copyright 2010-2025 Google LLC
|
2016-12-13 15:48:17 +01:00
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
|
//
|
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
//
|
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
2017-04-26 17:30:25 +02:00
|
|
|
#include "ortools/sat/cumulative.h"
|
2017-03-28 16:11:06 +02:00
|
|
|
|
2016-12-13 15:48:17 +01:00
|
|
|
#include <algorithm>
|
2022-02-15 18:00:11 +01:00
|
|
|
#include <functional>
|
|
|
|
|
#include <vector>
|
2016-12-13 15:48:17 +01:00
|
|
|
|
2023-05-24 11:42:11 +02:00
|
|
|
#include "absl/log/check.h"
|
2022-06-29 17:04:58 +02:00
|
|
|
#include "absl/strings/str_join.h"
|
2024-11-28 15:34:01 +01:00
|
|
|
#include "absl/types/span.h"
|
2018-06-08 16:40:43 +02:00
|
|
|
#include "ortools/base/logging.h"
|
2020-02-05 19:00:26 +01:00
|
|
|
#include "ortools/sat/cumulative_energy.h"
|
2017-04-26 17:30:25 +02:00
|
|
|
#include "ortools/sat/disjunctive.h"
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "ortools/sat/integer.h"
|
2024-12-04 17:47:10 +01:00
|
|
|
#include "ortools/sat/integer_base.h"
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "ortools/sat/integer_expr.h"
|
|
|
|
|
#include "ortools/sat/intervals.h"
|
2020-02-05 11:27:02 +01:00
|
|
|
#include "ortools/sat/linear_constraint.h"
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "ortools/sat/model.h"
|
2017-07-27 11:28:55 -07:00
|
|
|
#include "ortools/sat/pb_constraint.h"
|
|
|
|
|
#include "ortools/sat/precedences.h"
|
|
|
|
|
#include "ortools/sat/sat_base.h"
|
|
|
|
|
#include "ortools/sat/sat_parameters.pb.h"
|
2017-04-26 17:30:25 +02:00
|
|
|
#include "ortools/sat/sat_solver.h"
|
2025-09-16 16:25:04 +02:00
|
|
|
#include "ortools/sat/scheduling_helpers.h"
|
2017-04-26 17:30:25 +02:00
|
|
|
#include "ortools/sat/timetable.h"
|
|
|
|
|
#include "ortools/sat/timetable_edgefinding.h"
|
2022-02-07 14:31:18 +01:00
|
|
|
#include "ortools/util/strong_integers.h"
|
2016-12-13 15:48:17 +01:00
|
|
|
|
|
|
|
|
namespace operations_research {
|
|
|
|
|
namespace sat {
|
|
|
|
|
|
2020-10-28 13:42:36 +01:00
|
|
|
// Returns a lambda that loads the cumulative constraint into the given Model:
// at any point in time, the sum of the demands of the tasks (intervals in
// `vars`) overlapping that point must not exceed `capacity`. The constraint
// may be guarded by `enforcement_literals` (it only has to hold when all of
// them are true). If `helper` is null, a SchedulingConstraintHelper is
// created (or reused) from the repository.
//
// The lambda registers several propagators of increasing strength
// (timetabling, overload checking, timetable edge-finding) depending on the
// SatParameters, and also adds redundant linear constraints to help the
// search filter the capacity variable.
std::function<void(Model*)> Cumulative(
    const std::vector<Literal>& enforcement_literals,
    const std::vector<IntervalVariable>& vars,
    absl::Span<const AffineExpression> demands, AffineExpression capacity,
    SchedulingConstraintHelper* helper) {
  // The span `demands` is copied into the closure so the returned lambda does
  // not dangle if the caller's storage goes away before the model is loaded.
  return [=, demands = std::vector<AffineExpression>(
                 demands.begin(), demands.end())](Model* model) mutable {
    auto* intervals = model->GetOrCreate<IntervalsRepository>();
    auto* encoder = model->GetOrCreate<IntegerEncoder>();
    auto* integer_trail = model->GetOrCreate<IntegerTrail>();
    auto* watcher = model->GetOrCreate<GenericLiteralWatcher>();
    SatSolver* sat_solver = model->GetOrCreate<SatSolver>();

    // The capacity must be non-negative. When the constraint is always
    // enforced we can enqueue capacity >= 0 directly; otherwise we add the
    // conditional linear constraint capacity >= 0 under the enforcement
    // literals.
    if (enforcement_literals.empty()) {
      if (!integer_trail->SafeEnqueue(capacity.GreaterOrEqual(0), {})) {
        sat_solver->NotifyThatModelIsUnsat();
      }
    } else {
      LinearConstraintBuilder builder(model, IntegerValue(0), kMaxIntegerValue);
      builder.AddTerm(capacity, IntegerValue(1));
      LoadConditionalLinearConstraint(enforcement_literals, builder.Build(),
                                      model);
    }
    if (demands.empty()) {
      // If there is no demand, since we already added a constraint that the
      // capacity is not negative above, we can stop here.
      return;
    }

    // Redundant constraints to ensure that the resource capacity is high enough
    // for each task. Also ensure that no task consumes more resource than what
    // is available. This is useful because the subsequent propagators do not
    // filter the capacity variable very well.
    for (int i = 0; i < demands.size(); ++i) {
      if (intervals->MaxSize(vars[i]) == 0) continue;

      // demand[i] - capacity <= 0, enforced when task i counts.
      LinearConstraintBuilder builder(model, kMinIntegerValue, IntegerValue(0));
      builder.AddTerm(demands[i], IntegerValue(1));
      builder.AddTerm(capacity, IntegerValue(-1));
      LinearConstraint ct = builder.Build();

      std::vector<Literal> task_enforcement_literals = enforcement_literals;
      if (intervals->IsOptional(vars[i])) {
        task_enforcement_literals.push_back(
            intervals->PresenceLiteral(vars[i]));
      }

      // If the interval can be of size zero, it currently do not count towards
      // the capacity. TODO(user): Change that since we have optional interval
      // for this.
      if (intervals->MinSize(vars[i]) <= 0) {
        task_enforcement_literals.push_back(
            encoder->GetOrCreateAssociatedLiteral(
                intervals->Size(vars[i]).GreaterOrEqual(IntegerValue(1))));
      }

      if (task_enforcement_literals.empty()) {
        LoadLinearConstraint(ct, model);
      } else {
        LoadConditionalLinearConstraint(task_enforcement_literals, ct, model);
      }
    }

    // With a single interval the redundant constraints above are sufficient.
    if (vars.size() == 1) return;

    const SatParameters& parameters = *(model->GetOrCreate<SatParameters>());

    // Detect a subset of intervals that needs to be in disjunction and add a
    // Disjunctive() constraint over them.
    if (parameters.use_disjunctive_constraint_in_cumulative()) {
      // TODO(user): We need to exclude intervals that can be of size zero
      // because the disjunctive do not "ignore" them like the cumulative
      // does. That is, the interval [2,2) will be assumed to be in
      // disjunction with [1, 3) for instance. We need to uniformize the
      // handling of interval with size zero.
      std::vector<IntervalVariable> in_disjunction;
      IntegerValue min_of_demands = kMaxIntegerValue;
      const IntegerValue capa_max = integer_trail->UpperBound(capacity);
      // Any two tasks with demand strictly greater than capa_max / 2 cannot
      // overlap, so they all pairwise conflict.
      for (int i = 0; i < vars.size(); ++i) {
        const IntegerValue size_min = intervals->MinSize(vars[i]);
        if (size_min == 0) continue;
        const IntegerValue demand_min = integer_trail->LowerBound(demands[i]);
        if (2 * demand_min > capa_max) {
          in_disjunction.push_back(vars[i]);
          min_of_demands = std::min(min_of_demands, demand_min);
        }
      }

      // Liftable? We might be able to add one more interval!
      // A task whose demand plus the smallest demand already selected exceeds
      // capa_max conflicts with every selected task; among such candidates we
      // pick the one with the largest minimum size.
      if (!in_disjunction.empty()) {
        IntervalVariable lift_var;
        IntegerValue lift_size(0);
        for (int i = 0; i < vars.size(); ++i) {
          const IntegerValue size_min = intervals->MinSize(vars[i]);
          if (size_min == 0) continue;
          const IntegerValue demand_min = integer_trail->LowerBound(demands[i]);
          if (2 * demand_min > capa_max) continue;
          if (min_of_demands + demand_min > capa_max && size_min > lift_size) {
            lift_var = vars[i];
            lift_size = size_min;
          }
        }
        if (lift_size > 0) {
          in_disjunction.push_back(lift_var);
        }
      }

      // Add a disjunctive constraint on the intervals in in_disjunction. Do not
      // create the cumulative at all when all intervals must be in disjunction.
      //
      // TODO(user): Do proper experiments to see how beneficial this is, the
      // disjunctive will propagate more but is also using slower algorithms.
      // That said, this is more a question of optimizing the disjunctive
      // propagation code.
      //
      // TODO(user): Another "known" idea is to detect pair of tasks that must
      // be in disjunction and to create a Boolean to indicate which one is
      // before the other. It shouldn't change the propagation, but may result
      // in a faster one with smaller explanations, and the solver can also take
      // decision on such Boolean.
      //
      // TODO(user): A better place for stuff like this could be in the
      // presolver so that it is easier to disable and play with alternatives.
      if (in_disjunction.size() > 1)
        AddDisjunctive(enforcement_literals, in_disjunction, model);
      if (in_disjunction.size() == vars.size()) return;
    }

    if (helper == nullptr) {
      helper = intervals->GetOrCreateHelper(enforcement_literals, vars);
    }
    SchedulingDemandHelper* demands_helper =
        intervals->GetOrCreateDemandHelper(helper, demands);
    intervals->RegisterCumulative({capacity, helper, demands_helper});

    // For each variables that is after a subset of task ends (i.e. like a
    // makespan objective), we detect it and add a special constraint to
    // propagate it.
    //
    // TODO(user): Models that include the makespan as a special interval might
    // be better, but then not everyone does that. In particular this code
    // allows to have decent lower bound on the large cumulative minizinc
    // instances.
    //
    // TODO(user): this require the precedence constraints to be already loaded,
    // and there is no guarantee of that currently. Find a more robust way.
    //
    // TODO(user): There is a bit of code duplication with the disjunctive
    // precedence propagator. Abstract more?
    if (parameters.use_hard_precedences_in_cumulative()) {
      // The CumulativeIsAfterSubsetConstraint() always reset the helper to the
      // forward time direction, so it is important to also precompute the
      // precedence relation using the same direction! This is needed in case
      // the helper has already been used and set in the other direction.
      if (!helper->SynchronizeAndSetTimeDirection(true)) {
        model->GetOrCreate<SatSolver>()->NotifyThatModelIsUnsat();
        return;
      }

      // Map tasks whose end is a plain (coeff == 1) integer variable to that
      // variable, so precedences can be computed on the variables directly.
      std::vector<IntegerVariable> index_to_end_vars;
      std::vector<int> index_to_task;
      index_to_end_vars.clear();
      for (int t = 0; t < helper->NumTasks(); ++t) {
        const AffineExpression& end_exp = helper->Ends()[t];

        // TODO(user): Handle generic affine relation?
        if (end_exp.var == kNoIntegerVariable || end_exp.coeff != 1) continue;
        index_to_end_vars.push_back(end_exp.var);
        index_to_task.push_back(t);
      }

      // TODO(user): This can lead to many constraints. By analyzing a bit more
      // the precedences, we could restrict that. In particular for cases were
      // the cumulative is always (bunch of tasks B), T, (bunch of tasks A) and
      // task T always in the middle, we never need to explicit list the
      // precedence of a task in B with a task in A.
      //
      // TODO(user): If more than one variable are after the same set of
      // intervals, we should regroup them in a single constraint rather than
      // having two independent constraint doing the same propagation.
      std::vector<FullIntegerPrecedence> full_precedences;
      if (parameters.exploit_all_precedences()) {
        model->GetOrCreate<TransitivePrecedencesEvaluator>()
            ->ComputeFullPrecedences(index_to_end_vars, &full_precedences);
      }
      for (const FullIntegerPrecedence& data : full_precedences) {
        const int size = data.indices.size();
        if (size <= 1) continue;

        const IntegerVariable var = data.var;
        std::vector<int> subtasks;
        std::vector<IntegerValue> offsets;
        IntegerValue sum_of_demand_max(0);
        for (int i = 0; i < size; ++i) {
          const int t = index_to_task[data.indices[i]];
          subtasks.push_back(t);
          sum_of_demand_max += integer_trail->LevelZeroUpperBound(demands[t]);

          // We have var >= end_exp.var + offset, so
          // var >= (end_exp.var + end_exp.cte) + (offset - end_exp.cte)
          // var >= task end + new_offset.
          const AffineExpression& end_exp = helper->Ends()[t];
          offsets.push_back(data.offsets[i] - end_exp.constant);
        }
        // Only create the constraint when the subset can actually overload the
        // resource; otherwise the plain precedences already propagate as much.
        if (sum_of_demand_max > integer_trail->LevelZeroLowerBound(capacity)) {
          VLOG(2) << "Cumulative precedence constraint! var= " << var
                  << " #task: " << absl::StrJoin(subtasks, ",");
          CumulativeIsAfterSubsetConstraint* constraint =
              new CumulativeIsAfterSubsetConstraint(var, capacity, subtasks,
                                                    offsets, helper,
                                                    demands_helper, model);
          constraint->RegisterWith(watcher);
          model->TakeOwnership(constraint);
        }
      }
    }

    // Propagator responsible for applying Timetabling filtering rule. It
    // increases the minimum of the start variables, decrease the maximum of the
    // end variables, and increase the minimum of the capacity variable.
    TimeTablingPerTask* time_tabling =
        new TimeTablingPerTask(capacity, helper, demands_helper, model);
    time_tabling->RegisterWith(watcher);
    model->TakeOwnership(time_tabling);

    // Propagator responsible for applying the Overload Checking filtering rule.
    // It increases the minimum of the capacity variable.
    if (parameters.use_overload_checker_in_cumulative()) {
      AddCumulativeOverloadChecker(capacity, helper, demands_helper, model);
    }
    if (parameters.use_conservative_scale_overload_checker()) {
      // Since we use the potential DFF conflict on demands to apply the
      // heuristic, only do so if any demand is greater than 1.
      bool any_demand_greater_than_one = false;
      for (int i = 0; i < vars.size(); ++i) {
        const IntegerValue demand_min = integer_trail->LowerBound(demands[i]);
        if (demand_min > 1) {
          any_demand_greater_than_one = true;
          break;
        }
      }
      if (any_demand_greater_than_one) {
        AddCumulativeOverloadCheckerDff(capacity, helper, demands_helper,
                                        model);
      }
    }

    // Propagator responsible for applying the Timetable Edge finding filtering
    // rule. It increases the minimum of the start variables and decreases the
    // maximum of the end variables,
    if (parameters.use_timetable_edge_finding_in_cumulative() &&
        helper->NumTasks() <=
            parameters.max_num_intervals_for_timetable_edge_finding()) {
      TimeTableEdgeFinding* time_table_edge_finding =
          new TimeTableEdgeFinding(capacity, helper, demands_helper, model);
      time_table_edge_finding->RegisterWith(watcher);
      model->TakeOwnership(time_table_edge_finding);
    }
  };
}
|
|
|
|
|
|
2020-10-28 13:42:36 +01:00
|
|
|
// Returns a lambda that loads a time-point decomposition of the cumulative
// constraint: for every integer time point in [min start, max end), a
// pseudo-Boolean constraint enforces that the demands of the tasks consuming
// the resource at that point sum to at most the capacity.
//
// Requirements (CHECKed): no enforcement literals, a fixed capacity and fixed
// demands. This is quadratic-ish in the time horizon, so it is only suitable
// for small ranges; presumably used for testing/reference — TODO confirm.
std::function<void(Model*)> CumulativeTimeDecomposition(
    absl::Span<const Literal> enforcement_literals,
    absl::Span<const IntervalVariable> vars,
    absl::Span<const AffineExpression> demands, AffineExpression capacity,
    SchedulingConstraintHelper* /*helper*/) {
  CHECK(enforcement_literals.empty());
  // Copy the spans into the closure so the lambda owns its data.
  return [=, vars = std::vector<IntervalVariable>(vars.begin(), vars.end()),
          demands = std::vector<AffineExpression>(
              demands.begin(), demands.end())](Model* model) {
    if (vars.empty()) return;

    IntegerTrail* integer_trail = model->GetOrCreate<IntegerTrail>();
    CHECK(integer_trail->IsFixed(capacity));
    const Coefficient fixed_capacity(
        integer_trail->UpperBound(capacity).value());

    const int num_tasks = vars.size();
    SatSolver* sat_solver = model->GetOrCreate<SatSolver>();
    IntegerEncoder* encoder = model->GetOrCreate<IntegerEncoder>();
    IntervalsRepository* repository = model->GetOrCreate<IntervalsRepository>();

    std::vector<AffineExpression> start_exprs;
    std::vector<AffineExpression> end_exprs;
    std::vector<IntegerValue> fixed_demands;

    for (int t = 0; t < num_tasks; ++t) {
      start_exprs.push_back(repository->Start(vars[t]));
      end_exprs.push_back(repository->End(vars[t]));
      CHECK(integer_trail->IsFixed(demands[t]));
      fixed_demands.push_back(integer_trail->LowerBound(demands[t]));
    }

    // Compute time range.
    IntegerValue min_start = kMaxIntegerValue;
    IntegerValue max_end = kMinIntegerValue;
    for (int t = 0; t < num_tasks; ++t) {
      min_start =
          std::min(min_start, integer_trail->LowerBound(start_exprs[t]));
      max_end = std::max(max_end, integer_trail->UpperBound(end_exprs[t]));
    }

    // One capacity constraint per time point.
    for (IntegerValue time = min_start; time < max_end; ++time) {
      std::vector<LiteralWithCoeff> literals_with_coeff;
      for (int t = 0; t < num_tasks; ++t) {
        if (!sat_solver->Propagate()) return;
        const IntegerValue start_min =
            integer_trail->LowerBound(start_exprs[t]);
        const IntegerValue end_max = integer_trail->UpperBound(end_exprs[t]);
        // Skip tasks that can never overlap `time` or consume nothing.
        if (end_max <= time || time < start_min || fixed_demands[t] == 0) {
          continue;
        }

        // Task t consumes the resource at time if consume_condition is true.
        std::vector<Literal> consume_condition;
        const Literal consume = Literal(model->Add(NewBooleanVariable()), true);

        // Task t consumes the resource at time if it is present.
        if (repository->IsOptional(vars[t])) {
          consume_condition.push_back(repository->PresenceLiteral(vars[t]));
        }

        // Task t overlaps time.
        consume_condition.push_back(encoder->GetOrCreateAssociatedLiteral(
            start_exprs[t].LowerOrEqual(IntegerValue(time))));
        consume_condition.push_back(encoder->GetOrCreateAssociatedLiteral(
            end_exprs[t].GreaterOrEqual(IntegerValue(time + 1))));

        model->Add(ReifiedBoolAnd(consume_condition, consume));

        // this is needed because we currently can't create a boolean variable
        // if the model is unsat.
        if (sat_solver->ModelIsUnsat()) return;

        literals_with_coeff.push_back(
            LiteralWithCoeff(consume, Coefficient(fixed_demands[t].value())));
      }
      // The profile cannot exceed the capacity at time.
      sat_solver->AddLinearConstraint(false, Coefficient(0), true,
                                      fixed_capacity, &literals_with_coeff);

      // Abort if UNSAT.
      if (sat_solver->ModelIsUnsat()) return;
    }
  };
}
|
|
|
|
|
|
2020-12-24 06:41:44 +01:00
|
|
|
std::function<void(Model*)> CumulativeUsingReservoir(
|
2025-09-16 16:25:04 +02:00
|
|
|
absl::Span<const Literal> enforcement_literals,
|
2024-11-28 15:34:01 +01:00
|
|
|
absl::Span<const IntervalVariable> vars,
|
|
|
|
|
absl::Span<const AffineExpression> demands, AffineExpression capacity,
|
2025-09-16 16:25:04 +02:00
|
|
|
SchedulingConstraintHelper* /*helper*/) {
|
2024-11-28 15:34:01 +01:00
|
|
|
return [=, vars = std::vector<IntervalVariable>(vars.begin(), vars.end()),
|
|
|
|
|
demands = std::vector<AffineExpression>(
|
|
|
|
|
demands.begin(), demands.end())](Model* model) {
|
2020-12-24 06:41:44 +01:00
|
|
|
if (vars.empty()) return;
|
|
|
|
|
|
|
|
|
|
auto* integer_trail = model->GetOrCreate<IntegerTrail>();
|
|
|
|
|
auto* encoder = model->GetOrCreate<IntegerEncoder>();
|
2024-01-12 16:31:18 +01:00
|
|
|
auto* repository = model->GetOrCreate<IntervalsRepository>();
|
2020-12-24 06:41:44 +01:00
|
|
|
|
|
|
|
|
CHECK(integer_trail->IsFixed(capacity));
|
|
|
|
|
const IntegerValue fixed_capacity(
|
|
|
|
|
integer_trail->UpperBound(capacity).value());
|
|
|
|
|
|
|
|
|
|
std::vector<AffineExpression> times;
|
2022-09-09 16:48:39 +02:00
|
|
|
std::vector<AffineExpression> deltas;
|
2020-12-24 06:41:44 +01:00
|
|
|
std::vector<Literal> presences;
|
|
|
|
|
|
|
|
|
|
const int num_tasks = vars.size();
|
|
|
|
|
for (int t = 0; t < num_tasks; ++t) {
|
|
|
|
|
CHECK(integer_trail->IsFixed(demands[t]));
|
2024-01-12 16:31:18 +01:00
|
|
|
times.push_back(repository->Start(vars[t]));
|
2022-09-09 16:48:39 +02:00
|
|
|
deltas.push_back(demands[t]);
|
2024-01-12 16:31:18 +01:00
|
|
|
times.push_back(repository->End(vars[t]));
|
2022-09-09 16:48:39 +02:00
|
|
|
deltas.push_back(demands[t].Negated());
|
2024-01-12 16:31:18 +01:00
|
|
|
if (repository->IsOptional(vars[t])) {
|
|
|
|
|
presences.push_back(repository->PresenceLiteral(vars[t]));
|
|
|
|
|
presences.push_back(repository->PresenceLiteral(vars[t]));
|
2020-12-24 06:41:44 +01:00
|
|
|
} else {
|
|
|
|
|
presences.push_back(encoder->GetTrueLiteral());
|
|
|
|
|
presences.push_back(encoder->GetTrueLiteral());
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-09-16 16:25:04 +02:00
|
|
|
AddReservoirConstraint(enforcement_literals, times, deltas, presences, 0,
|
|
|
|
|
fixed_capacity.value(), model);
|
2020-12-24 06:41:44 +01:00
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
2020-10-22 23:36:58 +02:00
|
|
|
} // namespace sat
|
|
|
|
|
} // namespace operations_research
|