OR-Tools  9.1
linear_relaxation.cc
Go to the documentation of this file.
1// Copyright 2010-2021 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
15
16#include <algorithm>
17#include <cstdint>
18#include <limits>
19#include <vector>
20
21#include "absl/container/flat_hash_set.h"
24#include "ortools/sat/circuit.h" // for ReindexArcs.
27#include "ortools/sat/cuts.h"
29#include "ortools/sat/integer.h"
37
38namespace operations_research {
39namespace sat {
40
41bool AppendFullEncodingRelaxation(IntegerVariable var, const Model& model,
42 LinearRelaxation* relaxation) {
43 const auto* encoder = model.Get<IntegerEncoder>();
44 if (encoder == nullptr) return false;
45 if (!encoder->VariableIsFullyEncoded(var)) return false;
46
47 const auto& encoding = encoder->FullDomainEncoding(var);
48 const IntegerValue var_min = model.Get<IntegerTrail>()->LowerBound(var);
49
50 LinearConstraintBuilder at_least_one(&model, IntegerValue(1),
52 LinearConstraintBuilder encoding_ct(&model, var_min, var_min);
53 encoding_ct.AddTerm(var, IntegerValue(1));
54
55 // Create the constraint if all literal have a view.
56 std::vector<Literal> at_most_one;
57
58 for (const auto value_literal : encoding) {
59 const Literal lit = value_literal.literal;
60 const IntegerValue delta = value_literal.value - var_min;
61 DCHECK_GE(delta, IntegerValue(0));
62 at_most_one.push_back(lit);
63 if (!at_least_one.AddLiteralTerm(lit, IntegerValue(1))) return false;
64 if (delta != IntegerValue(0)) {
65 if (!encoding_ct.AddLiteralTerm(lit, -delta)) return false;
66 }
67 }
68
69 relaxation->linear_constraints.push_back(at_least_one.Build());
70 relaxation->linear_constraints.push_back(encoding_ct.Build());
71 relaxation->at_most_ones.push_back(at_most_one);
72 return true;
73}
74
75namespace {
76
77// TODO(user): Not super efficient.
78std::pair<IntegerValue, IntegerValue> GetMinAndMaxNotEncoded(
79 IntegerVariable var,
80 const absl::flat_hash_set<IntegerValue>& encoded_values,
81 const Model& model) {
82 const auto* domains = model.Get<IntegerDomains>();
83 if (domains == nullptr || var >= domains->size()) {
85 }
86
87 // The domain can be large, but the list of values shouldn't, so this
88 // runs in O(encoded_values.size());
89 IntegerValue min = kMaxIntegerValue;
90 for (const int64_t v : (*domains)[var].Values()) {
91 if (!encoded_values.contains(IntegerValue(v))) {
92 min = IntegerValue(v);
93 break;
94 }
95 }
96
97 IntegerValue max = kMinIntegerValue;
98 const auto& domain = (*domains)[var];
99 for (int i = domain.NumIntervals() - 1; i >= 0; --i) {
100 const ClosedInterval interval = domain[i];
101 for (IntegerValue v(interval.end); v >= interval.start; --v) {
102 if (!encoded_values.contains(v)) {
103 max = v;
104 break;
105 }
106 }
107 if (max != kMinIntegerValue) break;
108 }
109
110 return {min, max};
111}
112
113bool LinMaxContainsOnlyOneVarInExpressions(const ConstraintProto& ct) {
114 CHECK_EQ(ct.constraint_case(), ConstraintProto::ConstraintCase::kLinMax);
115 int current_var = -1;
116 for (const LinearExpressionProto& expr : ct.lin_max().exprs()) {
117 if (expr.vars().empty()) continue;
118 if (expr.vars().size() > 1) return false;
119 const int var = PositiveRef(expr.vars(0));
120 if (current_var == -1) {
121 current_var = var;
122 } else if (var != current_var) {
123 return false;
124 }
125 }
126 return true;
127}
128
129bool IntMaxIsIntAbs(const ConstraintProto& ct) {
130 if (ct.constraint_case() != ConstraintProto::ConstraintCase::kIntMax ||
131 ct.int_max().vars_size() != 2) {
132 return false;
133 }
134 return ct.int_max().vars(0) == NegatedRef(ct.int_max().vars(1));
135}
136
137// Collect all the affines expressions in a LinMax constraint.
138// It checks that these are indeed affine expressions, and that they all share
139// the same variable.
140// It returns the shared variable, as well as a vector of pairs
141// (coefficient, offset) when each affine is coefficient * shared_var + offset.
142void CollectAffineExpressionWithSingleVariable(
143 const ConstraintProto& ct, CpModelMapping* mapping, IntegerVariable* var,
144 std::vector<std::pair<IntegerValue, IntegerValue>>* affines) {
145 DCHECK(LinMaxContainsOnlyOneVarInExpressions(ct));
146 CHECK_EQ(ct.constraint_case(), ConstraintProto::ConstraintCase::kLinMax);
148 affines->clear();
149 for (const LinearExpressionProto& expr : ct.lin_max().exprs()) {
150 if (expr.vars().empty()) {
151 affines->push_back({IntegerValue(0), IntegerValue(expr.offset())});
152 } else {
153 CHECK_EQ(expr.vars().size(), 1);
154 const IntegerVariable affine_var = mapping->Integer(expr.vars(0));
155 if (*var == kNoIntegerVariable) {
156 *var = PositiveVariable(affine_var);
157 }
158 if (VariableIsPositive(affine_var)) {
159 CHECK_EQ(affine_var, *var);
160 affines->push_back(
161 {IntegerValue(expr.coeffs(0)), IntegerValue(expr.offset())});
162 } else {
163 CHECK_EQ(NegationOf(affine_var), *var);
164 affines->push_back(
165 {IntegerValue(-expr.coeffs(0)), IntegerValue(expr.offset())});
166 }
167 }
168 }
169}
170
171} // namespace
172
// Adds a linear relaxation of the partial value encoding of `var`.
//
// If every domain value turns out to be encoded, this adds the exact
// encoding:
//   Sum_i l_i == 1   and   var == Sum_i value_i * l_i.
// Otherwise it adds an at-most-one over the viewed encoding literals plus two
// bounding constraints using min/max, the smallest/largest domain values that
// are NOT encoded:
//   var >= min + Sum_i l_i * (value_i - min)
//   var <= max + Sum_i l_i * (value_i - max)
void AppendPartialEncodingRelaxation(IntegerVariable var, const Model& model,
                                     LinearRelaxation* relaxation) {
  const auto* encoder = model.Get<IntegerEncoder>();
  const auto* integer_trail = model.Get<IntegerTrail>();
  if (encoder == nullptr || integer_trail == nullptr) return;

  const std::vector<IntegerEncoder::ValueLiteralPair>& encoding =
      encoder->PartialDomainEncoding(var);
  if (encoding.empty()) return;

  std::vector<Literal> at_most_one_ct;
  absl::flat_hash_set<IntegerValue> encoded_values;
  for (const auto value_literal : encoding) {
    const Literal literal = value_literal.literal;

    // Note that we skip pairs that do not have an Integer view.
    if (encoder->GetLiteralView(literal) == kNoIntegerVariable &&
        encoder->GetLiteralView(literal.Negated()) == kNoIntegerVariable) {
      continue;
    }

    at_most_one_ct.push_back(literal);
    encoded_values.insert(value_literal.value);
  }
  if (encoded_values.empty()) return;

  // TODO(user): The PartialDomainEncoding() function automatically exclude
  // values that are no longer in the initial domain, so we could be a bit
  // tighter here. That said, this is supposed to be called just after the
  // presolve, so it shouldn't really matter.
  const auto pair = GetMinAndMaxNotEncoded(var, encoded_values, model);
  if (pair.first == kMaxIntegerValue) {
    // No un-encoded value remains: the encoding is full, so we can add the
    // exact exactly-one + channeling constraints.
    // TODO(user): try to remove the duplication with
    // AppendFullEncodingRelaxation()? actually I am not sure we need the other
    // function since this one is just more general.
    LinearConstraintBuilder exactly_one_ct(&model, IntegerValue(1),
                                           IntegerValue(1));
    LinearConstraintBuilder encoding_ct(&model, IntegerValue(0),
                                        IntegerValue(0));
    encoding_ct.AddTerm(var, IntegerValue(1));
    // NOTE(review): these CHECKs assume every encoding literal has an integer
    // view here, even though some were skipped above for lacking one --
    // confirm the encoder guarantees this at the time of this call.
    for (const auto value_literal : encoding) {
      const Literal lit = value_literal.literal;
      CHECK(exactly_one_ct.AddLiteralTerm(lit, IntegerValue(1)));
      CHECK(
          encoding_ct.AddLiteralTerm(lit, IntegerValue(-value_literal.value)));
    }
    relaxation->linear_constraints.push_back(exactly_one_ct.Build());
    relaxation->linear_constraints.push_back(encoding_ct.Build());
    return;
  }

  // min + sum li * (xi - min) <= var.
  const IntegerValue d_min = pair.first;
  LinearConstraintBuilder lower_bound_ct(&model, d_min, kMaxIntegerValue);
  lower_bound_ct.AddTerm(var, IntegerValue(1));
  for (const auto value_literal : encoding) {
    CHECK(lower_bound_ct.AddLiteralTerm(value_literal.literal,
                                        d_min - value_literal.value));
  }

  // var <= max + sum li * (xi - max).
  const IntegerValue d_max = pair.second;
  LinearConstraintBuilder upper_bound_ct(&model, kMinIntegerValue, d_max);
  upper_bound_ct.AddTerm(var, IntegerValue(1));
  for (const auto value_literal : encoding) {
    CHECK(upper_bound_ct.AddLiteralTerm(value_literal.literal,
                                        d_max - value_literal.value));
  }

  // Note that empty/trivial constraints will be filtered later.
  relaxation->at_most_ones.push_back(at_most_one_ct);
  relaxation->linear_constraints.push_back(lower_bound_ct.Build());
  relaxation->linear_constraints.push_back(upper_bound_ct.Build());
}
247
249 const Model& model,
250 LinearRelaxation* relaxation) {
251 const auto* integer_trail = model.Get<IntegerTrail>();
252 const auto* encoder = model.Get<IntegerEncoder>();
253 if (integer_trail == nullptr || encoder == nullptr) return;
254
255 const std::map<IntegerValue, Literal>& greater_than_encoding =
256 encoder->PartialGreaterThanEncoding(var);
257 if (greater_than_encoding.empty()) return;
258
259 // Start by the var >= side.
260 // And also add the implications between used literals.
261 {
262 IntegerValue prev_used_bound = integer_trail->LowerBound(var);
263 LinearConstraintBuilder lb_constraint(&model, prev_used_bound,
265 lb_constraint.AddTerm(var, IntegerValue(1));
266 LiteralIndex prev_literal_index = kNoLiteralIndex;
267 for (const auto entry : greater_than_encoding) {
268 if (entry.first <= prev_used_bound) continue;
269
270 const LiteralIndex literal_index = entry.second.Index();
271 const IntegerValue diff = prev_used_bound - entry.first;
272
273 // Skip the entry if the literal doesn't have a view.
274 if (!lb_constraint.AddLiteralTerm(entry.second, diff)) continue;
275 if (prev_literal_index != kNoLiteralIndex) {
276 // Add var <= prev_var, which is the same as var + not(prev_var) <= 1
277 relaxation->at_most_ones.push_back(
278 {Literal(literal_index), Literal(prev_literal_index).Negated()});
279 }
280 prev_used_bound = entry.first;
281 prev_literal_index = literal_index;
282 }
283 relaxation->linear_constraints.push_back(lb_constraint.Build());
284 }
285
286 // Do the same for the var <= side by using NegationOfVar().
287 // Note that we do not need to add the implications between literals again.
288 {
289 IntegerValue prev_used_bound = integer_trail->LowerBound(NegationOf(var));
290 LinearConstraintBuilder lb_constraint(&model, prev_used_bound,
292 lb_constraint.AddTerm(var, IntegerValue(-1));
293 for (const auto entry :
294 encoder->PartialGreaterThanEncoding(NegationOf(var))) {
295 if (entry.first <= prev_used_bound) continue;
296 const IntegerValue diff = prev_used_bound - entry.first;
297
298 // Skip the entry if the literal doesn't have a view.
299 if (!lb_constraint.AddLiteralTerm(entry.second, diff)) continue;
300 prev_used_bound = entry.first;
301 }
302 relaxation->linear_constraints.push_back(lb_constraint.Build());
303 }
304}
305
306namespace {
307// Adds enforcing_lit => target <= bounding_var to relaxation.
308void AppendEnforcedUpperBound(const Literal enforcing_lit,
309 const IntegerVariable target,
310 const IntegerVariable bounding_var, Model* model,
311 LinearRelaxation* relaxation) {
312 IntegerTrail* integer_trail = model->GetOrCreate<IntegerTrail>();
313 const IntegerValue max_target_value = integer_trail->UpperBound(target);
314 const IntegerValue min_var_value = integer_trail->LowerBound(bounding_var);
315 const IntegerValue max_term_value = max_target_value - min_var_value;
316 LinearConstraintBuilder lc(model, kMinIntegerValue, max_term_value);
317 lc.AddTerm(target, IntegerValue(1));
318 lc.AddTerm(bounding_var, IntegerValue(-1));
319 CHECK(lc.AddLiteralTerm(enforcing_lit, max_term_value));
320 relaxation->linear_constraints.push_back(lc.Build());
321}
322
// Adds {enforcing_lits} => rhs_domain_min <= expr <= rhs_domain_max, using
// one big-M style linear constraint per non-trivial side.
// Requires expr offset to be 0.
void AppendEnforcedLinearExpression(
    const std::vector<Literal>& enforcing_literals,
    const LinearExpression& expr, const IntegerValue rhs_domain_min,
    const IntegerValue rhs_domain_max, const Model& model,
    LinearRelaxation* relaxation) {
  CHECK_EQ(expr.offset, IntegerValue(0));
  const LinearExpression canonical_expr = CanonicalizeExpr(expr);
  const IntegerTrail* integer_trail = model.Get<IntegerTrail>();
  const IntegerValue min_expr_value =
      LinExprLowerBound(canonical_expr, *integer_trail);

  // Only add the >= side if it is not trivially satisfied already.
  if (rhs_domain_min > min_expr_value) {
    // And(ei) => terms >= rhs_domain_min
    // <=> Sum_i (~ei * (rhs_domain_min - min_expr_value)) + terms >=
    // rhs_domain_min
    // i.e. any false enforcement literal alone contributes enough slack.
    LinearConstraintBuilder lc(&model, rhs_domain_min, kMaxIntegerValue);
    for (const Literal& literal : enforcing_literals) {
      // NOTE(review): the CHECK assumes every enforcing literal has an
      // integer view -- confirm with the callers.
      CHECK(lc.AddLiteralTerm(literal.Negated(),
                              rhs_domain_min - min_expr_value));
    }
    for (int i = 0; i < canonical_expr.vars.size(); i++) {
      lc.AddTerm(canonical_expr.vars[i], canonical_expr.coeffs[i]);
    }
    relaxation->linear_constraints.push_back(lc.Build());
  }
  const IntegerValue max_expr_value =
      LinExprUpperBound(canonical_expr, *integer_trail);
  // Symmetric construction for the <= side.
  if (rhs_domain_max < max_expr_value) {
    // And(ei) => terms <= rhs_domain_max
    // <=> Sum_i (~ei * (rhs_domain_max - max_expr_value)) + terms <=
    // rhs_domain_max
    LinearConstraintBuilder lc(&model, kMinIntegerValue, rhs_domain_max);
    for (const Literal& literal : enforcing_literals) {
      CHECK(lc.AddLiteralTerm(literal.Negated(),
                              rhs_domain_max - max_expr_value));
    }
    for (int i = 0; i < canonical_expr.vars.size(); i++) {
      lc.AddTerm(canonical_expr.vars[i], canonical_expr.coeffs[i]);
    }
    relaxation->linear_constraints.push_back(lc.Build());
  }
}
367
368bool AllLiteralsHaveViews(const IntegerEncoder& encoder,
369 const std::vector<Literal>& literals) {
370 for (const Literal lit : literals) {
371 if (!encoder.LiteralOrNegationHasView(lit)) return false;
372 }
373 return true;
374}
375
376} // namespace
377
379 LinearRelaxation* relaxation) {
380 auto* mapping = model->GetOrCreate<CpModelMapping>();
381 LinearConstraintBuilder lc(model, IntegerValue(1), kMaxIntegerValue);
382 for (const int enforcement_ref : ct.enforcement_literal()) {
383 CHECK(lc.AddLiteralTerm(mapping->Literal(NegatedRef(enforcement_ref)),
384 IntegerValue(1)));
385 }
386 for (const int ref : ct.bool_or().literals()) {
387 CHECK(lc.AddLiteralTerm(mapping->Literal(ref), IntegerValue(1)));
388 }
389 relaxation->linear_constraints.push_back(lc.Build());
390}
391
393 LinearRelaxation* relaxation) {
394 // TODO(user): These constraints can be many, and if they are not regrouped
395 // in big at most ones, then they should probably only added lazily as cuts.
396 // Regroup this with future clique-cut separation logic.
397 if (!HasEnforcementLiteral(ct)) return;
398
399 auto* mapping = model->GetOrCreate<CpModelMapping>();
400 if (ct.enforcement_literal().size() == 1) {
401 const Literal enforcement = mapping->Literal(ct.enforcement_literal(0));
402 for (const int ref : ct.bool_and().literals()) {
403 relaxation->at_most_ones.push_back(
404 {enforcement, mapping->Literal(ref).Negated()});
405 }
406 return;
407 }
408
409 // Andi(e_i) => Andj(x_j)
410 // <=> num_rhs_terms <= Sum_j(x_j) + num_rhs_terms * Sum_i(~e_i)
411 int num_literals = ct.bool_and().literals_size();
412 LinearConstraintBuilder lc(model, IntegerValue(num_literals),
414 for (const int ref : ct.bool_and().literals()) {
415 CHECK(lc.AddLiteralTerm(mapping->Literal(ref), IntegerValue(1)));
416 }
417 for (const int enforcement_ref : ct.enforcement_literal()) {
418 CHECK(lc.AddLiteralTerm(mapping->Literal(NegatedRef(enforcement_ref)),
419 IntegerValue(num_literals)));
420 }
421 relaxation->linear_constraints.push_back(lc.Build());
422}
423
425 LinearRelaxation* relaxation) {
426 if (HasEnforcementLiteral(ct)) return;
427
428 auto* mapping = model->GetOrCreate<CpModelMapping>();
429 relaxation->at_most_ones.push_back(
430 mapping->Literals(ct.at_most_one().literals()));
431}
432
434 LinearRelaxation* relaxation) {
435 if (HasEnforcementLiteral(ct)) return;
436 auto* mapping = model->GetOrCreate<CpModelMapping>();
437 auto* encoder = model->GetOrCreate<IntegerEncoder>();
438
439 const std::vector<Literal> literals =
440 mapping->Literals(ct.exactly_one().literals());
441 if (AllLiteralsHaveViews(*encoder, literals)) {
442 LinearConstraintBuilder lc(model, IntegerValue(1), IntegerValue(1));
443 for (const Literal lit : literals) {
444 CHECK(lc.AddLiteralTerm(lit, IntegerValue(1)));
445 }
446 relaxation->linear_constraints.push_back(lc.Build());
447 } else {
448 // We just encode the at most one part that might be partially linearized
449 // later.
450 relaxation->at_most_ones.push_back(literals);
451 }
452}
453
455 int num_literals, Model* model, LinearRelaxation* relaxation) {
456 auto* encoder = model->GetOrCreate<IntegerEncoder>();
457
458 if (num_literals == 1) {
459 // This is not supposed to happen, but it is easy enough to cover, just
460 // in case. We might however want to use encoder->GetTrueLiteral().
461 const IntegerVariable var = model->Add(NewIntegerVariable(1, 1));
462 const Literal lit =
463 encoder->GetOrCreateLiteralAssociatedToEquality(var, IntegerValue(1));
464 return {lit};
465 }
466
467 if (num_literals == 2) {
468 const IntegerVariable var = model->Add(NewIntegerVariable(0, 1));
469 const Literal lit =
470 encoder->GetOrCreateLiteralAssociatedToEquality(var, IntegerValue(1));
471
472 // TODO(user): We shouldn't need to create this view ideally. Even better,
473 // we should be able to handle Literal natively in the linear relaxation,
474 // but that is a lot of work.
475 const IntegerVariable var2 = model->Add(NewIntegerVariable(0, 1));
476 encoder->AssociateToIntegerEqualValue(lit.Negated(), var2, IntegerValue(1));
477
478 return {lit, lit.Negated()};
479 }
480
481 std::vector<Literal> literals;
482 LinearConstraintBuilder lc_builder(model, IntegerValue(1), IntegerValue(1));
483 for (int i = 0; i < num_literals; ++i) {
484 const IntegerVariable var = model->Add(NewIntegerVariable(0, 1));
485 const Literal lit =
486 encoder->GetOrCreateLiteralAssociatedToEquality(var, IntegerValue(1));
487 literals.push_back(lit);
488 CHECK(lc_builder.AddLiteralTerm(lit, IntegerValue(1)));
489 }
490 model->Add(ExactlyOneConstraint(literals));
491 relaxation->linear_constraints.push_back(lc_builder.Build());
492 return literals;
493}
494
495namespace {
496
497void AddIntMaxLowerRelaxation(IntegerVariable target,
498 const std::vector<IntegerVariable>& vars,
499 Model* model, LinearRelaxation* relaxation) {
500 // Case X = max(X_1, X_2, ..., X_N)
501 // Part 1: Encode X >= max(X_1, X_2, ..., X_N)
502 for (const IntegerVariable var : vars) {
503 // This deal with the corner case X = max(X, Y, Z, ..) !
504 // Note that this can be presolved into X >= Y, X >= Z, ...
505 if (target == var) continue;
506 LinearConstraintBuilder lc(model, kMinIntegerValue, IntegerValue(0));
507 lc.AddTerm(var, IntegerValue(1));
508 lc.AddTerm(target, IntegerValue(-1));
509 relaxation->linear_constraints.push_back(lc.Build());
510 }
511}
512
513void AddIntAbsUpperRelaxation(IntegerVariable target, IntegerVariable var,
514 Model* model, LinearRelaxation* relaxation) {
515 LinearExpression target_expr;
516 target_expr.vars.push_back(target);
517 target_expr.coeffs.push_back(IntegerValue(1));
518 const std::vector<std::pair<IntegerValue, IntegerValue>> affines = {
519 {IntegerValue(1), IntegerValue(0)}, {IntegerValue(-1), IntegerValue(0)}};
520 relaxation->linear_constraints.push_back(
521 BuildMaxAffineUpConstraint(target_expr, var, affines, model));
522}
523
// Encodes the "upper" half of X = max(X_1, ..., X_N): a fresh exactly-one set
// of selection literals l_i (with integer views, created below) such that
// l_i => X <= X_i, added both as a linear relaxation and as an IntegerSumLE
// propagator.
void AddIntMaxUpperRelaxation(IntegerVariable target,
                              const std::vector<IntegerVariable>& vars,
                              Model* model, LinearRelaxation* relaxation) {
  // For each X_i, we encode l_i => X <= X_i. And at least one of the l_i is
  // true. Note that the correct y_i will be chosen because of the first part in
  // linearlization (X >= X_i).
  GenericLiteralWatcher* watcher = model->GetOrCreate<GenericLiteralWatcher>();
  std::vector<Literal> literals =
      CreateAlternativeLiteralsWithView(vars.size(), model, relaxation);
  for (int i = 0; i < vars.size(); ++i) {
    // TODO(user): Only lower bound is needed, experiment.
    //
    // TODO(user): It makes more sense to use ConditionalLowerOrEqual()
    // here since only the lower bounding is needed, but that degrades perf on
    // the road*.fzn problem. Understand why.
    AppendEnforcedUpperBound(literals[i], target, vars[i], model, relaxation);
    // CP-side propagator for l_i => target - X_i <= 0, mirroring the linear
    // relaxation above.
    IntegerSumLE* upper_bound_constraint = new IntegerSumLE(
        {literals[i]}, {target, vars[i]}, {IntegerValue(1), IntegerValue(-1)},
        IntegerValue(0), model);
    upper_bound_constraint->RegisterWith(watcher);
    model->TakeOwnership(upper_bound_constraint);
  }
}
547
548} // namespace
549
550// Adds linearization of int max constraints. This can also be used to linearize
551// int min with negated variables.
552void AppendIntMaxRelaxation(const ConstraintProto& ct, int linearization_level,
553 Model* model, LinearRelaxation* relaxation) {
554 if (HasEnforcementLiteral(ct)) return;
555
556 auto* mapping = model->GetOrCreate<CpModelMapping>();
557 const IntegerVariable target = mapping->Integer(ct.int_max().target());
558 const std::vector<IntegerVariable> vars =
559 mapping->Integers(ct.int_max().vars());
560
561 AddIntMaxLowerRelaxation(target, vars, model, relaxation);
562 if (IntMaxIsIntAbs(ct)) {
563 // TODO(user): consider support for int_abs encoded using int_min.
564 AddIntAbsUpperRelaxation(target, PositiveVariable(vars[0]), model,
565 relaxation);
566 } else if (linearization_level > 1) {
567 AddIntMaxUpperRelaxation(target, vars, model, relaxation);
568 }
569}
570
572 LinearRelaxation* relaxation) {
573 if (HasEnforcementLiteral(ct)) return;
574 auto* mapping = model->GetOrCreate<CpModelMapping>();
575 const int num_arcs = ct.circuit().literals_size();
576 CHECK_EQ(num_arcs, ct.circuit().tails_size());
577 CHECK_EQ(num_arcs, ct.circuit().heads_size());
578
579 // Each node must have exactly one incoming and one outgoing arc (note
580 // that it can be the unique self-arc of this node too).
581 std::map<int, std::vector<Literal>> incoming_arc_constraints;
582 std::map<int, std::vector<Literal>> outgoing_arc_constraints;
583 for (int i = 0; i < num_arcs; i++) {
584 const Literal arc = mapping->Literal(ct.circuit().literals(i));
585 const int tail = ct.circuit().tails(i);
586 const int head = ct.circuit().heads(i);
587
588 // Make sure this literal has a view.
590 outgoing_arc_constraints[tail].push_back(arc);
591 incoming_arc_constraints[head].push_back(arc);
592 }
593 for (const auto* node_map :
594 {&outgoing_arc_constraints, &incoming_arc_constraints}) {
595 for (const auto& entry : *node_map) {
596 const std::vector<Literal>& exactly_one = entry.second;
597 if (exactly_one.size() > 1) {
598 LinearConstraintBuilder at_least_one_lc(model, IntegerValue(1),
600 for (const Literal l : exactly_one) {
601 CHECK(at_least_one_lc.AddLiteralTerm(l, IntegerValue(1)));
602 }
603
604 // We separate the two constraints.
605 relaxation->at_most_ones.push_back(exactly_one);
606 relaxation->linear_constraints.push_back(at_least_one_lc.Build());
607 }
608 }
609 }
610}
611
613 LinearRelaxation* relaxation) {
614 if (HasEnforcementLiteral(ct)) return;
615 auto* mapping = model->GetOrCreate<CpModelMapping>();
616 const int num_arcs = ct.routes().literals_size();
617 CHECK_EQ(num_arcs, ct.routes().tails_size());
618 CHECK_EQ(num_arcs, ct.routes().heads_size());
619
620 // Each node except node zero must have exactly one incoming and one outgoing
621 // arc (note that it can be the unique self-arc of this node too). For node
622 // zero, the number of incoming arcs should be the same as the number of
623 // outgoing arcs.
624 std::map<int, std::vector<Literal>> incoming_arc_constraints;
625 std::map<int, std::vector<Literal>> outgoing_arc_constraints;
626 for (int i = 0; i < num_arcs; i++) {
627 const Literal arc = mapping->Literal(ct.routes().literals(i));
628 const int tail = ct.routes().tails(i);
629 const int head = ct.routes().heads(i);
630
631 // Make sure this literal has a view.
633 outgoing_arc_constraints[tail].push_back(arc);
634 incoming_arc_constraints[head].push_back(arc);
635 }
636 for (const auto* node_map :
637 {&outgoing_arc_constraints, &incoming_arc_constraints}) {
638 for (const auto& entry : *node_map) {
639 if (entry.first == 0) continue;
640 const std::vector<Literal>& exactly_one = entry.second;
641 if (exactly_one.size() > 1) {
642 LinearConstraintBuilder at_least_one_lc(model, IntegerValue(1),
644 for (const Literal l : exactly_one) {
645 CHECK(at_least_one_lc.AddLiteralTerm(l, IntegerValue(1)));
646 }
647
648 // We separate the two constraints.
649 relaxation->at_most_ones.push_back(exactly_one);
650 relaxation->linear_constraints.push_back(at_least_one_lc.Build());
651 }
652 }
653 }
654 LinearConstraintBuilder zero_node_balance_lc(model, IntegerValue(0),
655 IntegerValue(0));
656 for (const Literal& incoming_arc : incoming_arc_constraints[0]) {
657 CHECK(zero_node_balance_lc.AddLiteralTerm(incoming_arc, IntegerValue(1)));
658 }
659 for (const Literal& outgoing_arc : outgoing_arc_constraints[0]) {
660 CHECK(zero_node_balance_lc.AddLiteralTerm(outgoing_arc, IntegerValue(-1)));
661 }
662 relaxation->linear_constraints.push_back(zero_node_balance_lc.Build());
663}
664
666 LinearRelaxation* relaxation) {
667 // If the interval is using views, then the linear equation is already
668 // present in the model.
669 if (ct.interval().has_start_view()) return;
670
671 auto* mapping = model->GetOrCreate<CpModelMapping>();
672 const IntegerVariable start = mapping->Integer(ct.interval().start());
673 const IntegerVariable size = mapping->Integer(ct.interval().size());
674 const IntegerVariable end = mapping->Integer(ct.interval().end());
675 IntegerTrail* integer_trail = model->GetOrCreate<IntegerTrail>();
676 const bool size_is_fixed = integer_trail->IsFixed(size);
677 const IntegerValue rhs =
678 size_is_fixed ? -integer_trail->LowerBound(size) : IntegerValue(0);
679 LinearConstraintBuilder lc(model, rhs, rhs);
680 lc.AddTerm(start, IntegerValue(1));
681 if (!size_is_fixed) {
682 lc.AddTerm(size, IntegerValue(1));
683 }
684 lc.AddTerm(end, IntegerValue(-1));
686 LinearConstraint tmp_lc = lc.Build();
687 LinearExpression expr;
688 expr.coeffs = tmp_lc.coeffs;
689 expr.vars = tmp_lc.vars;
690 AppendEnforcedLinearExpression(mapping->Literals(ct.enforcement_literal()),
691 expr, tmp_lc.ub, tmp_lc.ub, *model,
692 relaxation);
693 } else {
694 relaxation->linear_constraints.push_back(lc.Build());
695 }
696}
697
// TODO(user): Use affine demand.
//
// Adds an energetic relaxation of a cumulative constraint over `intervals`:
//   Sum_i energy_i <= capacity_upper_bound * span_size
// where the span is a new interval covering all the tasks. For no_overlap,
// `demands` is empty and each demand is implicitly 1. Nothing is added when
// all sizes/demands are fixed and no interval is optional.
void AddCumulativeRelaxation(const std::vector<IntervalVariable>& intervals,
                             const std::vector<IntegerVariable>& demands,
                             const std::vector<LinearExpression>& energies,
                             IntegerValue capacity_upper_bound, Model* model,
                             LinearRelaxation* relaxation) {
  // TODO(user): Keep a map intervals -> helper, or ct_index->helper to avoid
  // creating many helpers for the same constraint.
  auto* helper = new SchedulingConstraintHelper(intervals, model);
  model->TakeOwnership(helper);
  const int num_intervals = helper->NumTasks();

  IntegerTrail* integer_trail = model->GetOrCreate<IntegerTrail>();

  IntegerValue min_of_starts = kMaxIntegerValue;
  IntegerValue max_of_ends = kMinIntegerValue;

  int num_variable_sizes = 0;
  int num_optionals = 0;

  for (int index = 0; index < num_intervals; ++index) {
    min_of_starts = std::min(min_of_starts, helper->StartMin(index));
    max_of_ends = std::max(max_of_ends, helper->EndMax(index));

    if (helper->IsOptional(index)) {
      num_optionals++;
    }

    // A task counts as "variable size" if its size or its demand is not
    // fixed yet.
    if (!helper->SizeIsFixed(index) ||
        (!demands.empty() && !integer_trail->IsFixed(demands[index]))) {
      num_variable_sizes++;
    }
  }

  VLOG(2) << "Span [" << min_of_starts << ".." << max_of_ends << "] with "
          << num_optionals << " optional intervals, and " << num_variable_sizes
          << " variable size intervals out of " << num_intervals
          << " intervals";

  // Everything fixed and mandatory: the relaxation would be statically
  // true/false, so skip it.
  if (num_variable_sizes + num_optionals == 0) return;

  const IntegerVariable span_start =
      integer_trail->AddIntegerVariable(min_of_starts, max_of_ends);
  const IntegerVariable span_size = integer_trail->AddIntegerVariable(
      IntegerValue(0), max_of_ends - min_of_starts);
  const IntegerVariable span_end =
      integer_trail->AddIntegerVariable(min_of_starts, max_of_ends);

  IntervalVariable span_var;
  if (num_optionals < num_intervals) {
    // At least one mandatory task: the span is always present.
    span_var = model->Add(NewInterval(span_start, span_end, span_size));
  } else {
    // All tasks optional: the span itself may be absent.
    const Literal span_lit = Literal(model->Add(NewBooleanVariable()), true);
    span_var = model->Add(
        NewOptionalInterval(span_start, span_end, span_size, span_lit));
  }

  model->Add(SpanOfIntervals(span_var, intervals));

  // Sum of per-task energies - capacity_upper_bound * span_size <= 0.
  LinearConstraintBuilder lc(model, kMinIntegerValue, IntegerValue(0));
  lc.AddTerm(span_size, -capacity_upper_bound);
  for (int i = 0; i < num_intervals; ++i) {
    const IntegerValue demand_lower_bound =
        demands.empty() ? IntegerValue(1)
                        : integer_trail->LowerBound(demands[i]);
    const bool demand_is_fixed =
        demands.empty() || integer_trail->IsFixed(demands[i]);
    if (!helper->IsOptional(i)) {
      if (demand_is_fixed) {
        lc.AddTerm(helper->Sizes()[i], demand_lower_bound);
      } else if (!helper->SizeIsFixed(i) && !energies.empty()) {
        // We prefer the energy additional info instead of the McCormick
        // relaxation.
        lc.AddLinearExpression(energies[i]);
      } else {
        lc.AddQuadraticLowerBound(helper->Sizes()[i], demands[i],
                                  integer_trail);
      }
    } else {
      // Optional task: contribute presence_literal * min possible energy.
      // If the presence literal has no integer view, give up on the whole
      // relaxation (adding a partial constraint would be unsound).
      if (!lc.AddLiteralTerm(helper->PresenceLiteral(i),
                             helper->SizeMin(i) * demand_lower_bound)) {
        return;
      }
    }
  }
  relaxation->linear_constraints.push_back(lc.Build());
}
785
787 const ConstraintProto& ct, Model* model,
788 LinearRelaxation* relaxation) {
789 CHECK(ct.has_cumulative());
790 if (HasEnforcementLiteral(ct)) return;
791
792 auto* mapping = model->GetOrCreate<CpModelMapping>();
793 const std::vector<IntegerVariable> demands =
794 mapping->Integers(ct.cumulative().demands());
795 std::vector<IntervalVariable> intervals =
796 mapping->Intervals(ct.cumulative().intervals());
797 const IntegerValue capacity_upper_bound =
798 model->GetOrCreate<IntegerTrail>()->UpperBound(
799 mapping->Integer(ct.cumulative().capacity()));
800 std::vector<LinearExpression> energies;
801 energies.reserve(ct.cumulative().energies_size());
802 for (int i = 0; i < ct.cumulative().energies_size(); ++i) {
803 // Note: Cut generator requires all expressions to contain only positive
804 // vars.
805 energies.push_back(mapping->GetExprFromProto(ct.cumulative().energies(i)));
806 }
807
808 AddCumulativeRelaxation(intervals, demands, energies, capacity_upper_bound,
809 model, relaxation);
810}
811
813 const ConstraintProto& ct, Model* model,
814 LinearRelaxation* relaxation) {
815 CHECK(ct.has_no_overlap());
816 if (HasEnforcementLiteral(ct)) return;
817
818 auto* mapping = model->GetOrCreate<CpModelMapping>();
819 std::vector<IntervalVariable> intervals =
820 mapping->Intervals(ct.no_overlap().intervals());
821 AddCumulativeRelaxation(intervals, /*demands=*/{}, /*energies=*/{},
822 /*capacity_upper_bound=*/IntegerValue(1), model,
823 relaxation);
824}
825
827 LinearRelaxation* relaxation) {
828 auto* mapping = model->GetOrCreate<CpModelMapping>();
829
830 // We want to linearize target = max(exprs[1], exprs[2], ..., exprs[d]).
831 // Part 1: Encode target >= max(exprs[1], exprs[2], ..., exprs[d])
832 const LinearExpression negated_target =
833 NegationOf(mapping->GetExprFromProto(ct.lin_max().target()));
834 for (int i = 0; i < ct.lin_max().exprs_size(); ++i) {
835 const LinearExpression expr =
836 mapping->GetExprFromProto(ct.lin_max().exprs(i));
837 LinearConstraintBuilder lc(model, kMinIntegerValue, IntegerValue(0));
838 lc.AddLinearExpression(negated_target);
839 lc.AddLinearExpression(expr);
840 relaxation->linear_constraints.push_back(lc.Build());
841 }
842}
843
844// TODO(user): experiment with:
845// 1) remove this code
846// 2) keep this code
847// 3) remove this code and create the cut generator at level 1.
849 LinearRelaxation* relaxation) {
850 IntegerVariable var;
851 std::vector<std::pair<IntegerValue, IntegerValue>> affines;
852 auto* mapping = model->GetOrCreate<CpModelMapping>();
853 CollectAffineExpressionWithSingleVariable(ct, mapping, &var, &affines);
854 if (var == kNoIntegerVariable ||
855 model->GetOrCreate<IntegerTrail>()->IsFixed(var)) {
856 return;
857 }
858
860 const LinearExpression target_expr =
861 PositiveVarExpr(mapping->GetExprFromProto(ct.lin_max().target()));
862 relaxation->linear_constraints.push_back(
863 BuildMaxAffineUpConstraint(target_expr, var, affines, model));
864}
865
867 LinearRelaxation* relaxation) {
868 IntegerVariable var;
869 std::vector<std::pair<IntegerValue, IntegerValue>> affines;
870 auto* mapping = model->GetOrCreate<CpModelMapping>();
871 CollectAffineExpressionWithSingleVariable(ct, mapping, &var, &affines);
872 if (var == kNoIntegerVariable ||
873 model->GetOrCreate<IntegerTrail>()->IsFixed(var)) {
874 return;
875 }
876
877 CHECK_EQ(1, ct.lin_max().target().vars_size());
878 const LinearExpression target_expr =
879 PositiveVarExpr(mapping->GetExprFromProto(ct.lin_max().target()));
880 relaxation->cut_generators.push_back(CreateMaxAffineCutGenerator(
881 target_expr, var, affines, "AffineMax", model));
882}
883
885 LinearRelaxation* relaxation) {
886 auto* mapping = model->GetOrCreate<CpModelMapping>();
887 const IntegerVariable var =
888 PositiveVariable(mapping->Integer(ct.int_max().vars(0)));
889
890 LinearExpression target_expr;
891 target_expr.vars.push_back(mapping->Integer(ct.int_max().target()));
892 target_expr.coeffs.push_back(IntegerValue(1));
893 const std::vector<std::pair<IntegerValue, IntegerValue>> affines = {
894 {IntegerValue(1), IntegerValue(0)}, {IntegerValue(-1), IntegerValue(0)}};
895
896 relaxation->cut_generators.push_back(
897 CreateMaxAffineCutGenerator(target_expr, var, affines, "IntAbs", model));
898}
899
900// Part 2: Encode upper bound on X.
901//
902// Add linking constraint to the CP solver
903// sum zi = 1 and for all i, zi => max = expr_i.
905 IntegerVariable target, const std::vector<Literal>& alternative_literals,
906 const std::vector<LinearExpression>& exprs, Model* model,
907 LinearRelaxation* relaxation) {
908 const int num_exprs = exprs.size();
909 GenericLiteralWatcher* watcher = model->GetOrCreate<GenericLiteralWatcher>();
910
911 // First add the CP constraints.
912 for (int i = 0; i < num_exprs; ++i) {
913 LinearExpression local_expr;
914 local_expr.vars = NegationOf(exprs[i].vars);
915 local_expr.vars.push_back(target);
916 local_expr.coeffs = exprs[i].coeffs;
917 local_expr.coeffs.push_back(IntegerValue(1));
919 new IntegerSumLE({alternative_literals[i]}, local_expr.vars,
920 local_expr.coeffs, exprs[i].offset, model);
921 upper_bound->RegisterWith(watcher);
922 model->TakeOwnership(upper_bound);
923 }
924
925 // For the relaxation, we use different constraints with a stronger linear
926 // relaxation as explained in the .h
927 //
928 // TODO(user): Consider passing the x_vars to this method instead of
929 // computing it here.
930 std::vector<IntegerVariable> x_vars;
931 for (int i = 0; i < num_exprs; ++i) {
932 x_vars.insert(x_vars.end(), exprs[i].vars.begin(), exprs[i].vars.end());
933 }
935
936 // All expressions should only contain positive variables.
937 DCHECK(std::all_of(x_vars.begin(), x_vars.end(), [](IntegerVariable var) {
938 return VariableIsPositive(var);
939 }));
940
941 std::vector<std::vector<IntegerValue>> sum_of_max_corner_diff(
942 num_exprs, std::vector<IntegerValue>(num_exprs, IntegerValue(0)));
943
944 IntegerTrail* integer_trail = model->GetOrCreate<IntegerTrail>();
945 for (int i = 0; i < num_exprs; ++i) {
946 for (int j = 0; j < num_exprs; ++j) {
947 if (i == j) continue;
948 for (const IntegerVariable x_var : x_vars) {
949 const IntegerValue lb = integer_trail->LevelZeroLowerBound(x_var);
950 const IntegerValue ub = integer_trail->LevelZeroUpperBound(x_var);
951 const IntegerValue diff =
952 GetCoefficient(x_var, exprs[j]) - GetCoefficient(x_var, exprs[i]);
953 sum_of_max_corner_diff[i][j] += std::max(diff * lb, diff * ub);
954 }
955 }
956 }
957 for (int i = 0; i < num_exprs; ++i) {
958 LinearConstraintBuilder lc(model, kMinIntegerValue, IntegerValue(0));
959 lc.AddTerm(target, IntegerValue(1));
960 for (int j = 0; j < exprs[i].vars.size(); ++j) {
961 lc.AddTerm(exprs[i].vars[j], -exprs[i].coeffs[j]);
962 }
963 for (int j = 0; j < num_exprs; ++j) {
964 CHECK(lc.AddLiteralTerm(alternative_literals[j],
965 -exprs[j].offset - sum_of_max_corner_diff[i][j]));
966 }
967 relaxation->linear_constraints.push_back(lc.Build());
968 }
969}
970
972 bool linearize_enforced_constraints,
973 Model* model,
974 LinearRelaxation* relaxation) {
975 auto* mapping = model->Get<CpModelMapping>();
976
977 // Note that we ignore the holes in the domain.
978 //
979 // TODO(user): In LoadLinearConstraint() we already created intermediate
980 // Booleans for each disjoint interval, we should reuse them here if
981 // possible.
982 //
983 // TODO(user): process the "at most one" part of a == 1 separately?
984 const IntegerValue rhs_domain_min = IntegerValue(ct.linear().domain(0));
985 const IntegerValue rhs_domain_max =
986 IntegerValue(ct.linear().domain(ct.linear().domain_size() - 1));
987 if (rhs_domain_min == std::numeric_limits<int64_t>::min() &&
988 rhs_domain_max == std::numeric_limits<int64_t>::max())
989 return;
990
992 LinearConstraintBuilder lc(model, rhs_domain_min, rhs_domain_max);
993 for (int i = 0; i < ct.linear().vars_size(); i++) {
994 const int ref = ct.linear().vars(i);
995 const int64_t coeff = ct.linear().coeffs(i);
996 lc.AddTerm(mapping->Integer(ref), IntegerValue(coeff));
997 }
998 relaxation->linear_constraints.push_back(lc.Build());
999 return;
1000 }
1001
1002 // Reified version.
1003 if (!linearize_enforced_constraints) return;
1004
1005 // We linearize fully reified constraints of size 1 all together for a given
1006 // variable. But we need to process half-reified ones.
1007 if (!mapping->IsHalfEncodingConstraint(&ct) && ct.linear().vars_size() <= 1) {
1008 return;
1009 }
1010
1011 std::vector<Literal> enforcing_literals;
1012 enforcing_literals.reserve(ct.enforcement_literal_size());
1013 for (const int enforcement_ref : ct.enforcement_literal()) {
1014 enforcing_literals.push_back(mapping->Literal(enforcement_ref));
1015 }
1016 LinearExpression expr;
1017 expr.vars.reserve(ct.linear().vars_size());
1018 expr.coeffs.reserve(ct.linear().vars_size());
1019 for (int i = 0; i < ct.linear().vars_size(); i++) {
1020 int ref = ct.linear().vars(i);
1021 IntegerValue coeff(ct.linear().coeffs(i));
1022 if (!RefIsPositive(ref)) {
1023 ref = PositiveRef(ref);
1024 coeff = -coeff;
1025 }
1026 const IntegerVariable int_var = mapping->Integer(ref);
1027 expr.vars.push_back(int_var);
1028 expr.coeffs.push_back(coeff);
1029 }
1030 AppendEnforcedLinearExpression(enforcing_literals, expr, rhs_domain_min,
1031 rhs_domain_max, *model, relaxation);
1032}
1033
1034// Add a linear relaxation of the CP constraint to the set of linear
// constraints. The higher the linearization_level is, the more types of
// constraints we encode. This method should be called only for
// linearization_level > 0.
1037//
1038// Note: IntProd is linearized dynamically using the cut generators.
1039//
1040// TODO(user): In full generality, we could encode all the constraint as an LP.
1041// TODO(user): Add unit tests for this method.
1043 const ConstraintProto& ct,
1044 int linearization_level, Model* model,
1045 LinearRelaxation* relaxation) {
1046 CHECK_EQ(model->GetOrCreate<SatSolver>()->CurrentDecisionLevel(), 0);
1047 DCHECK_GT(linearization_level, 0);
1048
1049 switch (ct.constraint_case()) {
1050 case ConstraintProto::ConstraintCase::kBoolOr: {
1051 if (linearization_level > 1) {
1052 AppendBoolOrRelaxation(ct, model, relaxation);
1053 }
1054 break;
1055 }
1056 case ConstraintProto::ConstraintCase::kBoolAnd: {
1057 if (linearization_level > 1) {
1058 AppendBoolAndRelaxation(ct, model, relaxation);
1059 }
1060 break;
1061 }
1062 case ConstraintProto::ConstraintCase::kAtMostOne: {
1063 AppendAtMostOneRelaxation(ct, model, relaxation);
1064 break;
1065 }
1066 case ConstraintProto::ConstraintCase::kExactlyOne: {
1067 AppendExactlyOneRelaxation(ct, model, relaxation);
1068 break;
1069 }
1070 case ConstraintProto::ConstraintCase::kIntMax: {
1071 AppendIntMaxRelaxation(ct, linearization_level, model, relaxation);
1072 break;
1073 }
1074 case ConstraintProto::ConstraintCase::kLinMax: {
1075 AppendLinMaxRelaxationPart1(ct, model, relaxation);
1076 if (LinMaxContainsOnlyOneVarInExpressions(ct)) {
1077 AppendMaxAffineRelaxation(ct, model, relaxation);
1078 }
1079 break;
1080 }
1081 case ConstraintProto::ConstraintCase::kLinear: {
1083 ct, /*linearize_enforced_constraints=*/linearization_level > 1, model,
1084 relaxation);
1085 break;
1086 }
1087 case ConstraintProto::ConstraintCase::kCircuit: {
1088 AppendCircuitRelaxation(ct, model, relaxation);
1089 break;
1090 }
1091 case ConstraintProto::ConstraintCase::kRoutes: {
1092 AppendRoutesRelaxation(ct, model, relaxation);
1093 break;
1094 }
1095 case ConstraintProto::ConstraintCase::kInterval: {
1096 if (linearization_level > 1) {
1097 AppendIntervalRelaxation(ct, model, relaxation);
1098 }
1099 break;
1100 }
1101 case ConstraintProto::ConstraintCase::kNoOverlap: {
1102 if (linearization_level > 1) {
1104 }
1105 break;
1106 }
1107 case ConstraintProto::ConstraintCase::kCumulative: {
1108 if (linearization_level > 1) {
1110 }
1111 break;
1112 }
1113 default: {
1114 }
1115 }
1116}
1117
1118// Cut generators.
1119
1121 LinearRelaxation* relaxation) {
1122 std::vector<int> tails(ct.circuit().tails().begin(),
1123 ct.circuit().tails().end());
1124 std::vector<int> heads(ct.circuit().heads().begin(),
1125 ct.circuit().heads().end());
1126 auto* mapping = m->GetOrCreate<CpModelMapping>();
1127 std::vector<Literal> literals = mapping->Literals(ct.circuit().literals());
1128 const int num_nodes = ReindexArcs(&tails, &heads);
1129
1131 num_nodes, tails, heads, literals, m));
1132}
1133
1135 LinearRelaxation* relaxation) {
1136 std::vector<int> tails(ct.routes().tails().begin(),
1137 ct.routes().tails().end());
1138 std::vector<int> heads(ct.routes().heads().begin(),
1139 ct.routes().heads().end());
1140 auto* mapping = m->GetOrCreate<CpModelMapping>();
1141 std::vector<Literal> literals = mapping->Literals(ct.routes().literals());
1142
1143 int num_nodes = 0;
1144 for (int i = 0; i < ct.routes().tails_size(); ++i) {
1145 num_nodes = std::max(num_nodes, 1 + ct.routes().tails(i));
1146 num_nodes = std::max(num_nodes, 1 + ct.routes().heads(i));
1147 }
1148 if (ct.routes().demands().empty() || ct.routes().capacity() == 0) {
1149 relaxation->cut_generators.push_back(
1150 CreateStronglyConnectedGraphCutGenerator(num_nodes, tails, heads,
1151 literals, m));
1152 } else {
1153 const std::vector<int64_t> demands(ct.routes().demands().begin(),
1154 ct.routes().demands().end());
1155 relaxation->cut_generators.push_back(CreateCVRPCutGenerator(
1156 num_nodes, tails, heads, literals, demands, ct.routes().capacity(), m));
1157 }
1158}
1159
1160void AddIntProdCutGenerator(const ConstraintProto& ct, int linearization_level,
1161 Model* m, LinearRelaxation* relaxation) {
1162 if (HasEnforcementLiteral(ct)) return;
1163 if (ct.int_prod().vars_size() != 2) return;
1164 auto* mapping = m->GetOrCreate<CpModelMapping>();
1165
1166 // Constraint is z == x * y.
1167
1168 IntegerVariable z = mapping->Integer(ct.int_prod().target());
1169 IntegerVariable x = mapping->Integer(ct.int_prod().vars(0));
1170 IntegerVariable y = mapping->Integer(ct.int_prod().vars(1));
1171
1172 IntegerTrail* const integer_trail = m->GetOrCreate<IntegerTrail>();
1173 IntegerValue x_lb = integer_trail->LowerBound(x);
1174 IntegerValue x_ub = integer_trail->UpperBound(x);
1175 IntegerValue y_lb = integer_trail->LowerBound(y);
1176 IntegerValue y_ub = integer_trail->UpperBound(y);
1177
1178 if (x == y) {
1179 // We currently only support variables with non-negative domains.
1180 if (x_lb < 0 && x_ub > 0) return;
1181
1182 // Change the sigh of x if its domain is non-positive.
1183 if (x_ub <= 0) {
1184 x = NegationOf(x);
1185 }
1186
1187 relaxation->cut_generators.push_back(
1188 CreateSquareCutGenerator(z, x, linearization_level, m));
1189 } else {
1190 // We currently only support variables with non-negative domains.
1191 if (x_lb < 0 && x_ub > 0) return;
1192 if (y_lb < 0 && y_ub > 0) return;
1193
1194 // Change signs to return to the case where all variables are a domain
1195 // with non negative values only.
1196 if (x_ub <= 0) {
1197 x = NegationOf(x);
1198 z = NegationOf(z);
1199 }
1200 if (y_ub <= 0) {
1201 y = NegationOf(y);
1202 z = NegationOf(z);
1203 }
1204
1205 relaxation->cut_generators.push_back(
1206 CreatePositiveMultiplicationCutGenerator(z, x, y, linearization_level,
1207 m));
1208 }
1209}
1210
1212 LinearRelaxation* relaxation) {
1213 if (HasEnforcementLiteral(ct)) return;
1214 auto* mapping = m->GetOrCreate<CpModelMapping>();
1215 const int num_vars = ct.all_diff().vars_size();
1216 if (num_vars <= m->GetOrCreate<SatParameters>()->max_all_diff_cut_size()) {
1217 std::vector<IntegerVariable> vars = mapping->Integers(ct.all_diff().vars());
1218 relaxation->cut_generators.push_back(
1220 }
1221}
1222
1224 LinearRelaxation* relaxation) {
1225 if (HasEnforcementLiteral(ct)) return;
1226 auto* mapping = m->GetOrCreate<CpModelMapping>();
1227
1228 const std::vector<IntegerVariable> demands =
1229 mapping->Integers(ct.cumulative().demands());
1230 const std::vector<IntervalVariable> intervals =
1231 mapping->Intervals(ct.cumulative().intervals());
1232 const IntegerVariable capacity = mapping->Integer(ct.cumulative().capacity());
1233 std::vector<LinearExpression> energies;
1234 energies.reserve(ct.cumulative().energies_size());
1235 for (int i = 0; i < ct.cumulative().energies_size(); ++i) {
1236 // Note: Cut generator requires all expressions to contain only positive
1237 // vars.
1238 energies.push_back(mapping->GetExprFromProto(ct.cumulative().energies(i)));
1239 }
1240
1241 relaxation->cut_generators.push_back(
1242 CreateCumulativeTimeTableCutGenerator(intervals, capacity, demands, m));
1244 intervals, capacity, demands, energies, m));
1245 relaxation->cut_generators.push_back(
1247 energies, m));
1248 relaxation->cut_generators.push_back(
1249 CreateCumulativePrecedenceCutGenerator(intervals, capacity, demands, m));
1250}
1251
1253 LinearRelaxation* relaxation) {
1254 if (HasEnforcementLiteral(ct)) return;
1255
1256 auto* mapping = m->GetOrCreate<CpModelMapping>();
1257 std::vector<IntervalVariable> intervals =
1258 mapping->Intervals(ct.no_overlap().intervals());
1259 relaxation->cut_generators.push_back(
1260 CreateNoOverlapEnergyCutGenerator(intervals, m));
1261 relaxation->cut_generators.push_back(
1263 relaxation->cut_generators.push_back(
1265}
1266
1268 LinearRelaxation* relaxation) {
1269 if (HasEnforcementLiteral(ct)) return;
1270
1271 auto* mapping = m->GetOrCreate<CpModelMapping>();
1272 std::vector<IntervalVariable> x_intervals =
1273 mapping->Intervals(ct.no_overlap_2d().x_intervals());
1274 std::vector<IntervalVariable> y_intervals =
1275 mapping->Intervals(ct.no_overlap_2d().y_intervals());
1276 // TODO(user): We can add CumulativeEnergyCuts for no_overlap_2d if boxes
1277 // do not have a fixed size.
1278 relaxation->cut_generators.push_back(
1279 CreateNoOverlap2dCompletionTimeCutGenerator(x_intervals, y_intervals, m));
1280}
1281
1283 LinearRelaxation* relaxation) {
1284 if (!m->GetOrCreate<SatParameters>()->add_lin_max_cuts()) return;
1285 if (HasEnforcementLiteral(ct)) return;
1286
1287 // TODO(user): Support linearization of general target expression.
1288 auto* mapping = m->GetOrCreate<CpModelMapping>();
1289 if (ct.lin_max().target().vars_size() != 1) return;
1290 if (ct.lin_max().target().coeffs(0) != 1) return;
1291 if (ct.lin_max().target().offset() != 0) return;
1292
1293 const IntegerVariable target =
1294 mapping->Integer(ct.lin_max().target().vars(0));
1295 std::vector<LinearExpression> exprs;
1296 exprs.reserve(ct.lin_max().exprs_size());
1297 for (int i = 0; i < ct.lin_max().exprs_size(); ++i) {
1298 // Note: Cut generator requires all expressions to contain only positive
1299 // vars.
1300 exprs.push_back(
1301 PositiveVarExpr(mapping->GetExprFromProto(ct.lin_max().exprs(i))));
1302 }
1303
1304 const std::vector<Literal> alternative_literals =
1305 CreateAlternativeLiteralsWithView(exprs.size(), m, relaxation);
1306
1307 // TODO(user): Move this out of here.
1308 //
1309 // Add initial big-M linear relaxation.
1310 // z_vars[i] == 1 <=> target = exprs[i].
1311 AppendLinMaxRelaxationPart2(target, alternative_literals, exprs, m,
1312 relaxation);
1313
1314 std::vector<IntegerVariable> z_vars;
1315 auto* encoder = m->GetOrCreate<IntegerEncoder>();
1316 for (const Literal lit : alternative_literals) {
1317 z_vars.push_back(encoder->GetLiteralView(lit));
1318 CHECK_NE(z_vars.back(), kNoIntegerVariable);
1319 }
1320 relaxation->cut_generators.push_back(
1321 CreateLinMaxCutGenerator(target, exprs, z_vars, m));
1322}
1323
1324// TODO(user): Remove and merge with model loading.
1325void TryToAddCutGenerators(const ConstraintProto& ct, int linearization_level,
1326 Model* m, LinearRelaxation* relaxation) {
1327 switch (ct.constraint_case()) {
1328 case ConstraintProto::ConstraintCase::kCircuit: {
1329 if (linearization_level > 1) {
1330 AddCircuitCutGenerator(ct, m, relaxation);
1331 }
1332 break;
1333 }
1334 case ConstraintProto::ConstraintCase::kRoutes: {
1335 if (linearization_level > 1) {
1336 AddRoutesCutGenerator(ct, m, relaxation);
1337 }
1338 break;
1339 }
1340 case ConstraintProto::ConstraintCase::kIntProd: {
1341 AddIntProdCutGenerator(ct, linearization_level, m, relaxation);
1342 break;
1343 }
1344 case ConstraintProto::ConstraintCase::kAllDiff: {
1345 if (linearization_level > 1) {
1346 AddAllDiffCutGenerator(ct, m, relaxation);
1347 }
1348 break;
1349 }
1350 case ConstraintProto::ConstraintCase::kCumulative: {
1351 if (linearization_level > 1) {
1352 AddCumulativeCutGenerator(ct, m, relaxation);
1353 }
1354 break;
1355 }
1356 case ConstraintProto::ConstraintCase::kNoOverlap: {
1357 if (linearization_level > 1) {
1358 AddNoOverlapCutGenerator(ct, m, relaxation);
1359 }
1360 break;
1361 }
1362 case ConstraintProto::ConstraintCase::kNoOverlap2D: {
1363 if (linearization_level > 1) {
1364 AddNoOverlap2dCutGenerator(ct, m, relaxation);
1365 }
1366 break;
1367 }
1368 case ConstraintProto::ConstraintCase::kIntMax: {
1369 if (linearization_level > 1 && IntMaxIsIntAbs(ct)) {
1370 AddIntAbsCutGenerator(ct, m, relaxation);
1371 }
1372 break;
1373 }
1374 case ConstraintProto::ConstraintCase::kLinMax: {
1375 if (linearization_level > 1) {
1376 if (LinMaxContainsOnlyOneVarInExpressions(ct)) {
1377 AddMaxAffineCutGenerator(ct, m, relaxation);
1378 } else {
1379 AddLinMaxCutGenerator(ct, m, relaxation);
1380 }
1381 }
1382 break;
1383 }
1384 default: {
1385 }
1386 }
1387}
1388
1389// If we have an exactly one between literals l_i, and each l_i => var ==
1390// value_i, then we can add a strong linear relaxation: var = sum l_i * value_i.
1391//
// This code detects this and adds the corresponding linear equations.
1394 LinearRelaxation* relaxation) {
1395 auto* implied_bounds = m->GetOrCreate<ImpliedBounds>();
1396 auto* mapping = m->GetOrCreate<CpModelMapping>();
1397
1398 for (const ConstraintProto& ct : model_proto.constraints()) {
1399 if (ct.constraint_case() != ConstraintProto::ConstraintCase::kExactlyOne) {
1400 continue;
1401 }
1402
1403 // Project the implied values onto each integer variable.
1404 absl::flat_hash_map<IntegerVariable,
1405 std::vector<std::pair<Literal, IntegerValue>>>
1406 var_to_literal_value_list;
1407 for (const int l : ct.exactly_one().literals()) {
1408 const Literal literal = mapping->Literal(l);
1409 for (const auto& var_value : implied_bounds->GetImpliedValues(literal)) {
1410 var_to_literal_value_list[var_value.first].push_back(
1411 std::make_pair(literal, var_value.second));
1412 }
1413 }
1414
1415 // Search for variable fully covered by the literals of the exactly_one.
1416 for (const auto& var_encoding : var_to_literal_value_list) {
1417 if (var_encoding.second.size() < ct.exactly_one().literals_size()) {
1418 continue;
1419 }
1420
1421 // We only want to deal with the case with duplicate values, because
1422 // otherwise, the target will be fully encoded, and this is already
1423 // covered by another function.
1424 IntegerValue min_value = kMaxIntegerValue;
1425 {
1426 absl::flat_hash_set<IntegerValue> values;
1427 for (const auto& literal_value : var_encoding.second) {
1428 min_value = std::min(min_value, literal_value.second);
1429 values.insert(literal_value.second);
1430 }
1431 if (values.size() == ct.exactly_one().literals_size()) continue;
1432 }
1433
1434 LinearConstraintBuilder linear_encoding(m, -min_value, -min_value);
1435 linear_encoding.AddTerm(var_encoding.first, IntegerValue(-1));
1436 for (const auto& literal_value : var_encoding.second) {
1437 const IntegerValue delta_min = literal_value.second - min_value;
1438 if (delta_min != 0) {
1439 if (!linear_encoding.AddLiteralTerm(literal_value.first, delta_min)) {
1440 return;
1441 }
1442 }
1443 }
1444 relaxation->linear_constraints.push_back(linear_encoding.Build());
1445 }
1446 }
1447}
1448
1450 int linearization_level, Model* m,
1451 LinearRelaxation* relaxation) {
1452 CHECK(relaxation != nullptr);
1453
1454 // Linearize the constraints.
1455 absl::flat_hash_set<int> used_integer_variable;
1456
1457 auto* mapping = m->GetOrCreate<CpModelMapping>();
1458 auto* encoder = m->GetOrCreate<IntegerEncoder>();
1459 for (const auto& ct : model_proto.constraints()) {
1460 TryToLinearizeConstraint(model_proto, ct, linearization_level, m,
1461 relaxation);
1462 TryToAddCutGenerators(ct, linearization_level, m, relaxation);
1463 }
1464
1465 // Linearize the encoding of variable that are fully encoded.
1466 int num_full_encoding_relaxations = 0;
1467 int num_partial_encoding_relaxations = 0;
1468 for (int i = 0; i < model_proto.variables_size(); ++i) {
1469 if (mapping->IsBoolean(i)) continue;
1470
1471 const IntegerVariable var = mapping->Integer(i);
1472 if (m->Get(IsFixed(var))) continue;
1473
1474 // TODO(user): This different encoding for the partial variable might be
1475 // better (less LP constraints), but we do need more investigation to
1476 // decide.
1477 if (/* DISABLES CODE */ (false)) {
1478 AppendPartialEncodingRelaxation(var, *m, relaxation);
1479 continue;
1480 }
1481
1482 if (encoder->VariableIsFullyEncoded(var)) {
1483 if (AppendFullEncodingRelaxation(var, *m, relaxation)) {
1484 ++num_full_encoding_relaxations;
1485 continue;
1486 }
1487 }
1488
1489 // Even if the variable is fully encoded, sometimes not all its associated
1490 // literal have a view (if they are not part of the original model for
1491 // instance).
1492 //
1493 // TODO(user): Should we add them to the LP anyway? this isn't clear as
1494 // we can sometimes create a lot of Booleans like this.
1495 const int old = relaxation->linear_constraints.size();
1497 if (relaxation->linear_constraints.size() > old) {
1498 ++num_partial_encoding_relaxations;
1499 }
1500 }
1501
1502 // TODO(user): This is really similar to the AppendFullEncodingRelaxation()
1503 // above. Investigate if we can merge the code.
1504 if (linearization_level >= 2) {
1506 }
1507
1508 if (!m->GetOrCreate<SatSolver>()->FinishPropagation()) return;
1509
1510 // Linearize the at most one constraints. Note that we transform them
  // into maximal "at most one" constraints first and remove redundant ones.
1512 m->GetOrCreate<BinaryImplicationGraph>()->TransformIntoMaxCliques(
1513 &relaxation->at_most_ones);
1514 for (const std::vector<Literal>& at_most_one : relaxation->at_most_ones) {
1515 if (at_most_one.empty()) continue;
1516
1517 LinearConstraintBuilder lc(m, kMinIntegerValue, IntegerValue(1));
1518 for (const Literal literal : at_most_one) {
1519 // Note that it is okay to simply ignore the literal if it has no
1520 // integer view.
1521 const bool unused ABSL_ATTRIBUTE_UNUSED =
1522 lc.AddLiteralTerm(literal, IntegerValue(1));
1523 }
1524 relaxation->linear_constraints.push_back(lc.Build());
1525 }
1526
1527 // We converted all at_most_one to LP constraints, so we need to clear them
1528 // so that we don't do extra work in the connected component computation.
1529 relaxation->at_most_ones.clear();
1530
1531 // Remove size one LP constraints, they are not useful.
1532 relaxation->linear_constraints.erase(
1533 std::remove_if(
1534 relaxation->linear_constraints.begin(),
1535 relaxation->linear_constraints.end(),
1536 [](const LinearConstraint& lc) { return lc.vars.size() <= 1; }),
1537 relaxation->linear_constraints.end());
1538
1539 VLOG(3) << "num_full_encoding_relaxations: " << num_full_encoding_relaxations;
1540 VLOG(3) << "num_partial_encoding_relaxations: "
1541 << num_partial_encoding_relaxations;
1542 VLOG(3) << relaxation->linear_constraints.size()
1543 << " constraints in the LP relaxation.";
1544 VLOG(3) << relaxation->cut_generators.size() << " cuts generators.";
1545}
1546
1547} // namespace sat
1548} // namespace operations_research
int64_t max
Definition: alldiff_cst.cc:140
int64_t min
Definition: alldiff_cst.cc:139
#define CHECK(condition)
Definition: base/logging.h:491
#define CHECK_EQ(val1, val2)
Definition: base/logging.h:698
#define DCHECK_GE(val1, val2)
Definition: base/logging.h:890
#define CHECK_NE(val1, val2)
Definition: base/logging.h:699
#define DCHECK_GT(val1, val2)
Definition: base/logging.h:891
#define DCHECK(condition)
Definition: base/logging.h:885
#define VLOG(verboselevel)
Definition: base/logging.h:979
std::vector< IntegerVariable > Integers(const List &list) const
std::vector< IntervalVariable > Intervals(const ProtoIndices &indices) const
IntegerVariable Integer(int ref) const
std::vector< sat::Literal > Literals(const ProtoIndices &indices) const
const ::operations_research::sat::ConstraintProto & constraints(int index) const
std::vector< ValueLiteralPair > FullDomainEncoding(IntegerVariable var) const
Definition: integer.cc:133
bool IsFixed(IntegerVariable i) const
Definition: integer.h:1353
IntegerValue UpperBound(IntegerVariable i) const
Definition: integer.h:1349
IntegerValue LevelZeroUpperBound(IntegerVariable var) const
Definition: integer.h:1412
IntegerVariable AddIntegerVariable(IntegerValue lower_bound, IntegerValue upper_bound)
Definition: integer.cc:640
IntegerValue LevelZeroLowerBound(IntegerVariable var) const
Definition: integer.h:1407
IntegerValue LowerBound(IntegerVariable i) const
Definition: integer.h:1345
ABSL_MUST_USE_RESULT bool AddLiteralTerm(Literal lit, IntegerValue coeff)
void AddLinearExpression(const LinearExpression &expr)
void AddQuadraticLowerBound(AffineExpression left, AffineExpression right, IntegerTrail *integer_trail)
void AddTerm(IntegerVariable var, IntegerValue coeff)
Literal(int signed_value)
Definition: sat_base.h:69
Class that owns everything related to a particular optimization model.
Definition: sat/model.h:38
T Get(std::function< T(const Model &)> f) const
Similar to Add() but this is const.
Definition: sat/model.h:87
T * GetOrCreate()
Returns an object of type T that is unique to this model (like a "local" singleton).
Definition: sat/model.h:106
CpModelProto const * model_proto
const Constraint * ct
IntVar * var
Definition: expr_array.cc:1874
double upper_bound
GRBmodel * model
void STLSortAndRemoveDuplicates(T *v, const LessFunc &less_func)
Definition: stl_util.h:58
CutGenerator CreateCumulativePrecedenceCutGenerator(const std::vector< IntervalVariable > &intervals, IntegerVariable capacity, const std::vector< IntegerVariable > &demands, Model *model)
void AddCumulativeCutGenerator(const ConstraintProto &ct, Model *m, LinearRelaxation *relaxation)
CutGenerator CreateCumulativeEnergyCutGenerator(const std::vector< IntervalVariable > &intervals, const IntegerVariable capacity, const std::vector< IntegerVariable > &demands, const std::vector< LinearExpression > &energies, Model *model)
std::function< IntegerVariable(Model *)> NewIntegerVariableFromLiteral(Literal lit)
Definition: integer.h:1501
CutGenerator CreateCVRPCutGenerator(int num_nodes, const std::vector< int > &tails, const std::vector< int > &heads, const std::vector< Literal > &literals, const std::vector< int64_t > &demands, int64_t capacity, Model *model)
CutGenerator CreateCumulativeCompletionTimeCutGenerator(const std::vector< IntervalVariable > &intervals, const IntegerVariable capacity, const std::vector< IntegerVariable > &demands, const std::vector< LinearExpression > &energies, Model *model)
constexpr IntegerValue kMaxIntegerValue(std::numeric_limits< IntegerValue::ValueType >::max() - 1)
void AppendLinMaxRelaxationPart1(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void AppendBoolOrRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
IntegerValue LinExprLowerBound(const LinearExpression &expr, const IntegerTrail &integer_trail)
CutGenerator CreateNoOverlapCompletionTimeCutGenerator(const std::vector< IntervalVariable > &intervals, Model *model)
CutGenerator CreateSquareCutGenerator(IntegerVariable y, IntegerVariable x, int linearization_level, Model *model)
Definition: cuts.cc:1428
void TryToLinearizeConstraint(const CpModelProto &model_proto, const ConstraintProto &ct, int linearization_level, Model *model, LinearRelaxation *relaxation)
bool RefIsPositive(int ref)
CutGenerator CreateNoOverlapPrecedenceCutGenerator(const std::vector< IntervalVariable > &intervals, Model *model)
void AppendNoOverlapRelaxation(const CpModelProto &model_proto, const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void ComputeLinearRelaxation(const CpModelProto &model_proto, int linearization_level, Model *m, LinearRelaxation *relaxation)
const LiteralIndex kNoLiteralIndex(-1)
void AddMaxAffineCutGenerator(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void AppendAtMostOneRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void AppendCumulativeRelaxation(const CpModelProto &model_proto, const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void TryToAddCutGenerators(const ConstraintProto &ct, int linearization_level, Model *m, LinearRelaxation *relaxation)
constexpr IntegerValue kMinIntegerValue(-kMaxIntegerValue)
std::function< int64_t(const Model &)> LowerBound(IntegerVariable v)
Definition: integer.h:1524
void AddNoOverlapCutGenerator(const ConstraintProto &ct, Model *m, LinearRelaxation *relaxation)
std::function< BooleanVariable(Model *)> NewBooleanVariable()
Definition: integer.h:1469
bool HasEnforcementLiteral(const ConstraintProto &ct)
std::function< bool(const Model &)> IsFixed(IntegerVariable v)
Definition: integer.h:1536
void AddIntProdCutGenerator(const ConstraintProto &ct, int linearization_level, Model *m, LinearRelaxation *relaxation)
LinearExpression PositiveVarExpr(const LinearExpression &expr)
std::function< IntervalVariable(Model *)> NewInterval(int64_t min_start, int64_t max_end, int64_t size)
Definition: intervals.h:666
void AppendBoolAndRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
bool AppendFullEncodingRelaxation(IntegerVariable var, const Model &model, LinearRelaxation *relaxation)
const IntegerVariable kNoIntegerVariable(-1)
void AppendLinearConstraintRelaxation(const ConstraintProto &ct, bool linearize_enforced_constraints, Model *model, LinearRelaxation *relaxation)
LinearExpression CanonicalizeExpr(const LinearExpression &expr)
void AppendIntervalRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void AddNoOverlap2dCutGenerator(const ConstraintProto &ct, Model *m, LinearRelaxation *relaxation)
void AddCircuitCutGenerator(const ConstraintProto &ct, Model *m, LinearRelaxation *relaxation)
CutGenerator CreateNoOverlap2dCompletionTimeCutGenerator(const std::vector< IntervalVariable > &x_intervals, const std::vector< IntervalVariable > &y_intervals, Model *model)
std::function< IntervalVariable(Model *)> NewOptionalInterval(int64_t min_start, int64_t max_end, int64_t size, Literal is_present)
Definition: intervals.h:696
IntegerVariable PositiveVariable(IntegerVariable i)
Definition: integer.h:142
CutGenerator CreateLinMaxCutGenerator(const IntegerVariable target, const std::vector< LinearExpression > &exprs, const std::vector< IntegerVariable > &z_vars, Model *model)
Definition: cuts.cc:1917
CutGenerator CreateAllDifferentCutGenerator(const std::vector< IntegerVariable > &vars, Model *model)
Definition: cuts.cc:1819
void AppendMaxAffineRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
LinearConstraint BuildMaxAffineUpConstraint(const LinearExpression &target, IntegerVariable var, const std::vector< std::pair< IntegerValue, IntegerValue > > &affines, Model *model)
Definition: cuts.cc:2001
void AppendExactlyOneRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
IntegerValue GetCoefficient(const IntegerVariable var, const LinearExpression &expr)
int ReindexArcs(IntContainer *tails, IntContainer *heads)
Definition: circuit.h:168
std::vector< Literal > CreateAlternativeLiteralsWithView(int num_literals, Model *model, LinearRelaxation *relaxation)
std::function< int64_t(const Model &)> UpperBound(IntegerVariable v)
Definition: integer.h:1530
void AppendElementEncodingRelaxation(const CpModelProto &model_proto, Model *m, LinearRelaxation *relaxation)
void AppendCircuitRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void AppendPartialEncodingRelaxation(IntegerVariable var, const Model &model, LinearRelaxation *relaxation)
std::vector< IntegerVariable > NegationOf(const std::vector< IntegerVariable > &vars)
Definition: integer.cc:29
std::function< IntegerVariable(Model *)> NewIntegerVariable(int64_t lb, int64_t ub)
Definition: integer.h:1483
void AddAllDiffCutGenerator(const ConstraintProto &ct, Model *m, LinearRelaxation *relaxation)
CutGenerator CreateCumulativeTimeTableCutGenerator(const std::vector< IntervalVariable > &intervals, const IntegerVariable capacity, const std::vector< IntegerVariable > &demands, Model *model)
CutGenerator CreateNoOverlapEnergyCutGenerator(const std::vector< IntervalVariable > &intervals, Model *model)
CutGenerator CreateMaxAffineCutGenerator(LinearExpression target, IntegerVariable var, std::vector< std::pair< IntegerValue, IntegerValue > > affines, const std::string cut_name, Model *model)
Definition: cuts.cc:2037
std::function< void(Model *)> SpanOfIntervals(IntervalVariable span, const std::vector< IntervalVariable > &intervals)
void AddIntAbsCutGenerator(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void AddLinMaxCutGenerator(const ConstraintProto &ct, Model *m, LinearRelaxation *relaxation)
void AppendRoutesRelaxation(const ConstraintProto &ct, Model *model, LinearRelaxation *relaxation)
void AppendIntMaxRelaxation(const ConstraintProto &ct, int linearization_level, Model *model, LinearRelaxation *relaxation)
std::function< void(Model *)> ExactlyOneConstraint(const std::vector< Literal > &literals)
Definition: sat_solver.h:878
IntegerValue LinExprUpperBound(const LinearExpression &expr, const IntegerTrail &integer_trail)
void AddCumulativeRelaxation(const std::vector< IntervalVariable > &x_intervals, SchedulingConstraintHelper *x, SchedulingConstraintHelper *y, Model *model)
Definition: sat/diffn.cc:80
bool VariableIsPositive(IntegerVariable i)
Definition: integer.h:138
void AddRoutesCutGenerator(const ConstraintProto &ct, Model *m, LinearRelaxation *relaxation)
CutGenerator CreateStronglyConnectedGraphCutGenerator(int num_nodes, const std::vector< int > &tails, const std::vector< int > &heads, const std::vector< Literal > &literals, Model *model)
CutGenerator CreatePositiveMultiplicationCutGenerator(IntegerVariable z, IntegerVariable x, IntegerVariable y, int linearization_level, Model *model)
Definition: cuts.cc:1341
void AppendLinMaxRelaxationPart2(IntegerVariable target, const std::vector< Literal > &alternative_literals, const std::vector< LinearExpression > &exprs, Model *model, LinearRelaxation *relaxation)
void AppendPartialGreaterThanEncodingRelaxation(IntegerVariable var, const Model &model, LinearRelaxation *relaxation)
Collection of objects used to extend the Constraint Solver library.
Literal literal
Definition: optimization.cc:85
int index
Definition: pack.cc:509
int64_t delta
Definition: resource.cc:1692
IntervalVar * interval
Definition: resource.cc:100
int64_t capacity
int64_t tail
int64_t head
std::vector< std::vector< Literal > > at_most_ones
std::vector< LinearConstraint > linear_constraints
std::vector< CutGenerator > cut_generators