[CP-SAT] more cleanups; routing cuts experiments

This commit is contained in:
Laurent Perron
2025-03-26 15:12:12 -07:00
parent 58513d0cc0
commit d0e75d47e5
14 changed files with 446 additions and 168 deletions

View File

@@ -2757,6 +2757,7 @@ cc_library(
"@abseil-cpp//absl/log:check",
"@abseil-cpp//absl/log:vlog_is_on",
"@abseil-cpp//absl/numeric:bits",
"@abseil-cpp//absl/numeric:int128",
"@abseil-cpp//absl/random:distributions",
"@abseil-cpp//absl/strings",
"@abseil-cpp//absl/types:span",

View File

@@ -889,15 +889,33 @@ std::string ValidateObjective(const CpModelProto& model,
ProtobufShortDebugString(obj));
}
for (const int v : obj.vars()) {
if (!VariableReferenceIsValid(model, v)) {
if (!VariableIndexIsValid(model, v)) {
return absl::StrCat("Out of bound integer variable ", v,
" in objective: ", ProtobufShortDebugString(obj));
}
}
if (PossibleIntegerOverflow(model, obj.vars(), obj.coeffs())) {
std::pair<int64_t, int64_t> bounds;
if (PossibleIntegerOverflow(model, obj.vars(), obj.coeffs(), 0, &bounds)) {
return "Possible integer overflow in objective: " +
ProtobufDebugString(obj);
}
if (!std::isfinite(model.objective().offset())) {
return "Objective offset must be finite: " + ProtobufDebugString(obj);
}
if (model.objective().scaling_factor() != 0 &&
model.objective().scaling_factor() != 1 &&
model.objective().scaling_factor() != -1) {
if (!std::isfinite(
std::abs(model.objective().scaling_factor() * bounds.first) +
std::abs(model.objective().offset())) ||
!std::isfinite(
std::abs(model.objective().scaling_factor() * bounds.second) +
std::abs(model.objective().offset()))) {
return "Possible floating point overflow in objective when multiplied by "
"the scaling factor: " +
ProtobufDebugString(obj);
}
}
return "";
}
@@ -930,6 +948,27 @@ std::string ValidateFloatingPointObjective(double max_valid_magnitude,
return absl::StrCat("Offset must be finite in objective: ",
ProtobufShortDebugString(obj));
}
double sum_min = obj.offset();
double sum_max = obj.offset();
for (int i = 0; i < obj.vars().size(); ++i) {
const int ref = obj.vars(i);
const auto& var_proto = model.variables(PositiveRef(ref));
const int64_t min_domain = var_proto.domain(0);
const int64_t max_domain = var_proto.domain(var_proto.domain_size() - 1);
const double coeff = RefIsPositive(ref) ? obj.coeffs(i) : -obj.coeffs(i);
const double prod1 = min_domain * coeff;
const double prod2 = max_domain * coeff;
// Note that we use min/max with zero to disallow "alternative" terms and
// be sure that we cannot have an overflow if we do the computation in a
// different order.
sum_min += std::min(0.0, std::min(prod1, prod2));
sum_max += std::max(0.0, std::max(prod1, prod2));
}
if (!std::isfinite(2.0 * sum_min) || !std::isfinite(2.0 * sum_max)) {
return absl::StrCat("Possible floating point overflow in objective: ",
ProtobufShortDebugString(obj));
}
return "";
}
@@ -1036,7 +1075,8 @@ std::string ValidateSolutionHint(const CpModelProto& model) {
bool PossibleIntegerOverflow(const CpModelProto& model,
absl::Span<const int> vars,
absl::Span<const int64_t> coeffs, int64_t offset) {
absl::Span<const int64_t> coeffs, int64_t offset,
std::pair<int64_t, int64_t>* implied_domain) {
if (offset == std::numeric_limits<int64_t>::min()) return true;
int64_t sum_min = -std::abs(offset);
int64_t sum_max = +std::abs(offset);
@@ -1068,6 +1108,9 @@ bool PossibleIntegerOverflow(const CpModelProto& model,
// pass but not -expr!
if (sum_min < -std::numeric_limits<int64_t>::max() / 2) return true;
if (sum_max > std::numeric_limits<int64_t>::max() / 2) return true;
if (implied_domain) {
*implied_domain = {sum_min, sum_max};
}
return false;
}
@@ -1223,6 +1266,12 @@ std::string ValidateCpModel(const CpModelProto& model, bool after_presolve) {
"objective.";
}
if (model.has_objective()) {
if (model.objective().scaling_factor() != 0 &&
!std::isnormal(model.objective().scaling_factor())) {
return "A model cannot have an objective with a nan, inf or subnormal "
"scaling factor";
}
RETURN_IF_NOT_EMPTY(ValidateObjective(model, model.objective()));
if (model.objective().integer_scaling_factor() != 0 ||

View File

@@ -16,6 +16,7 @@
#include <cstdint>
#include <string>
#include <utility>
#include <vector>
#include "absl/types/span.h"
@@ -48,12 +49,13 @@ std::string ValidateCpModel(const CpModelProto& model,
std::string ValidateInputCpModel(const SatParameters& params,
const CpModelProto& model);
// Check if a given linear expression can create overflow. It is exposed to test
// new constraints created during the presolve.
bool PossibleIntegerOverflow(const CpModelProto& model,
absl::Span<const int> vars,
absl::Span<const int64_t> coeffs,
int64_t offset = 0);
// Check if a given linear expression can create overflow. If it doesn't,
// sets `implied_domain` to the implied domain of the expression. It is exposed
// to test new constraints created during the presolve.
bool PossibleIntegerOverflow(
const CpModelProto& model, absl::Span<const int> vars,
absl::Span<const int64_t> coeffs, int64_t offset = 0,
std::pair<int64_t, int64_t>* implied_domain = nullptr);
// Verifies that the given variable assignment is a feasible solution of the
// given model. The values vector should be in one to one correspondence with

View File

@@ -103,26 +103,50 @@ void AddDiffnCumulativeRelationOnX(SchedulingConstraintHelper* x,
// We want something <= max_end - min_start
//
// TODO(user): Use conditional affine min/max !!
const IntegerVariable min_start_var =
CreateVariableAtOrAboveMinOf(y->Starts(), model);
const IntegerVariable max_end_var =
CreateVariableAtOrBelowMaxOf(y->Ends(), model);
auto* integer_trail = model->GetOrCreate<IntegerTrail>();
if (integer_trail->UpperBound(max_end_var) <
integer_trail->LowerBound(min_start_var)) {
// Trivial infeasible case, will be handled by the linear constraint
// from the interval.
return;
bool all_optional = true;
for (int i = 0; i < y->NumTasks(); ++i) {
if (!y->IsOptional(i)) {
all_optional = false;
break;
}
}
AffineExpression capacity;
if (all_optional) {
IntegerValue min_start = kMaxIntegerValue;
IntegerValue max_end = kMinIntegerValue;
for (int i = 0; i < y->NumTasks(); ++i) {
min_start = std::min(min_start, y->LevelZeroStartMin(i));
max_end = std::max(max_end, y->LevelZeroEndMax(i));
}
if (max_end < min_start) {
return;
}
capacity = AffineExpression(CapSubI(max_end, min_start).value());
} else {
// This might not work if all task are optional, since the min could be
// greater than the max.
const IntegerVariable min_start_var =
CreateVariableAtOrAboveMinOf(y->Starts(), model);
const IntegerVariable max_end_var =
CreateVariableAtOrBelowMaxOf(y->Ends(), model);
auto* integer_trail = model->GetOrCreate<IntegerTrail>();
if (integer_trail->UpperBound(max_end_var) <
integer_trail->LowerBound(min_start_var)) {
// Trivial infeasible case, will be handled by the linear constraint
// from the interval.
return;
}
// (max_end - min_start) >= capacity.
capacity = model->Add(NewIntegerVariable(
0, CapSub(integer_trail->UpperBound(max_end_var).value(),
integer_trail->LowerBound(min_start_var).value())));
const std::vector<int64_t> coeffs = {-capacity.coeff.value(), -1, 1};
model->Add(
WeightedSumGreaterOrEqual({capacity.var, min_start_var, max_end_var},
coeffs, capacity.constant.value()));
}
// (max_end - min_start) >= capacity.
const AffineExpression capacity(model->Add(NewIntegerVariable(
0, CapSub(integer_trail->UpperBound(max_end_var).value(),
integer_trail->LowerBound(min_start_var).value()))));
const std::vector<int64_t> coeffs = {-capacity.coeff.value(), -1, 1};
model->Add(
WeightedSumGreaterOrEqual({capacity.var, min_start_var, max_end_var},
coeffs, capacity.constant.value()));
SchedulingDemandHelper* demands =
model->GetOrCreate<IntervalsRepository>()->GetOrCreateDemandHelper(

View File

@@ -58,13 +58,13 @@ constraints {
}
}
objective {
vars: -1
vars: -2
vars: -3
vars: -4
vars: 0
vars: 1
vars: 2
vars: 3
scaling_factor: -1
coeffs: 1
coeffs: 2
coeffs: 3
coeffs: 4
coeffs: -1
coeffs: -2
coeffs: -3
coeffs: -4
}

View File

@@ -38,13 +38,13 @@ constraints {
}
}
objective {
vars: -1
vars: -2
vars: -3
vars: -4
vars: 0
vars: 1
vars: 2
vars: 3
scaling_factor: -1
coeffs: 1
coeffs: 2
coeffs: 3
coeffs: 4
coeffs: -1
coeffs: -2
coeffs: -3
coeffs: -4
}

View File

@@ -283,6 +283,29 @@ class LinearConstraintManager {
// does not violate the loaded solution.
bool DebugCheckConstraint(const LinearConstraint& cut);
// Getter "ReducedCosts" API for cuts.
//
// It is not possible to set together to true a set of literals 'l' such that
// sum_l GetLiteralReducedCost(l) > ReducedCostsGap(). Note that we only
// return non-negative "reduced costs" here.
// Returns the maximum total "reduced cost" that a set of true literals may
// reach: no set of literals 'l' with sum_l GetLiteralReducedCost(l) >
// ReducedCostsGap() can all be true at the same time.
absl::int128 ReducedCostsGap() const { return reduced_costs_gap_; }

// Returns the non-negative reduced cost recorded for `l` by
// SetLiteralReducedCost(), or zero if none was recorded.
absl::int128 GetLiteralReducedCost(Literal l) const {
const auto it = reduced_costs_map_.find(l);
if (it == reduced_costs_map_.end()) return 0;
return absl::int128(it->second.value());
}

// Reduced cost API for cuts.
// These functions allow to clear the data and set the reduced cost info.
// Resets both the gap and the per-literal reduced costs.
void ClearReducedCostsInfo() {
reduced_costs_gap_ = 0;
reduced_costs_map_.clear();
}
void SetReducedCostsGap(absl::int128 gap) { reduced_costs_gap_ = gap; }
void SetLiteralReducedCost(Literal l, IntegerValue coeff) {
reduced_costs_map_[l] = coeff;
}
private:
// Heuristic that decide which constraints we should remove from the current
// LP. Note that such constraints can be added back later by the heuristic
@@ -339,6 +362,10 @@ class LinearConstraintManager {
// constraints.
absl::flat_hash_map<size_t, ConstraintIndex> equiv_constraints_;
// Reduced costs data used by some routing cuts.
absl::int128 reduced_costs_gap_ = 0;
absl::flat_hash_map<Literal, IntegerValue> reduced_costs_map_;
int64_t num_constraint_updates_ = 0;
int64_t num_simplifications_ = 0;
int64_t num_merged_constraints_ = 0;

View File

@@ -2561,6 +2561,37 @@ void LinearProgrammingConstraint::AdjustNewLinearConstraint(
if (adjusted) ++num_adjusts_;
}
// Extracts "reduced cost" information from the exact optimality constraint
// `ct` (only its upper bound is used here) and stores it in the constraint
// manager so that routing cuts can exploit it. For each non-fixed term, if a
// literal meaning "var > level_zero_lb(var)" exists, setting it to true costs
// at least `coeff` with respect to this constraint.
void LinearProgrammingConstraint::SetReducedCostsInConstraintManager(
    const LinearConstraint& ct) {
  constraint_manager_.ClearReducedCostsInfo();
  const absl::int128 ub = ct.ub.value();
  absl::int128 level_zero_lb = 0;
  for (int i = 0; i < ct.num_terms; ++i) {
    // Normalize the term so that its coefficient is non-negative.
    IntegerVariable var = ct.vars[i];
    IntegerValue coeff = ct.coeffs[i];
    if (coeff < 0) {
      coeff = -coeff;
      var = NegationOf(var);
    }
    const IntegerValue lb = integer_trail_->LevelZeroLowerBound(var);
    level_zero_lb += absl::int128(coeff.value()) * absl::int128(lb.value());
    // Fixed variables only contribute to the level zero activity bound.
    if (lb == integer_trail_->LevelZeroUpperBound(var)) continue;
    const LiteralIndex lit = integer_encoder_->GetAssociatedLiteral(
        IntegerLiteral::GreaterOrEqual(var, lb + 1));
    if (lit != kNoLiteralIndex) {
      constraint_manager_.SetLiteralReducedCost(Literal(lit), coeff);
    }
  }
  // The gap is the slack of the constraint when all variables are at their
  // level zero lower bound. A non-positive gap carries no usable information,
  // so we clear everything in that case. Note: `ub` was previously computed
  // but unused (the gap re-read ct.ub.value()); we now use it directly.
  const absl::int128 gap = ub - level_zero_lb;
  if (gap > 0) {
    constraint_manager_.SetReducedCostsGap(gap);
  } else {
    constraint_manager_.ClearReducedCostsInfo();
  }
}
bool LinearProgrammingConstraint::PropagateLpConstraint(LinearConstraint ct) {
DCHECK(constraint_manager_.DebugCheckConstraint(ct));
@@ -2700,6 +2731,7 @@ bool LinearProgrammingConstraint::PropagateExactLpReason() {
return explanation.ub >= 0;
}
SetReducedCostsInConstraintManager(explanation);
return PropagateLpConstraint(std::move(explanation));
}

View File

@@ -330,6 +330,11 @@ class LinearProgrammingConstraint : public PropagatorInterface,
// propagation.
bool PropagateLpConstraint(LinearConstraint ct);
// Some routing cuts might use reduced costs in order to derive tighter bounds
// on the possible route. This stores the information inside the constraint
// manager so it can be used there.
void SetReducedCostsInConstraintManager(const LinearConstraint& ct);
// Returns number of non basic variables with zero reduced costs.
int64_t CalculateDegeneracy();

View File

@@ -108,8 +108,8 @@ class CpModelHelperTest(absltest.TestCase):
}
}
objective {
vars: -3
coeffs: 1
vars: 2
coeffs: -1
scaling_factor: -1
}"""
model = cp_model_pb2.CpModelProto()
@@ -149,8 +149,8 @@ class CpModelHelperTest(absltest.TestCase):
}
}
objective {
vars: -3
coeffs: 1
vars: 2
coeffs: -1
scaling_factor: -1
}"""
model = cp_model_pb2.CpModelProto()
@@ -177,8 +177,8 @@ class CpModelHelperTest(absltest.TestCase):
ct.linear.vars.extend([0, 1, 2])
ct.linear.coeffs.extend([1, 2, -1])
ct.linear.domain.extend([0, 0])
model.objective.vars.append(-3)
model.objective.coeffs.append(1)
model.objective.vars.append(2)
model.objective.coeffs.append(-1)
model.objective.scaling_factor = -1
solve_wrapper = cmh.SolveWrapper()
@@ -266,8 +266,8 @@ class CpModelHelperTest(absltest.TestCase):
}
}
objective {
vars: -3
coeffs: 1
vars: 2
coeffs: -1
scaling_factor: -1
}
name: 'testModelStats'

View File

@@ -38,6 +38,7 @@
#include "absl/log/log.h"
#include "absl/log/vlog_is_on.h"
#include "absl/numeric/bits.h"
#include "absl/numeric/int128.h"
#include "absl/random/distributions.h"
#include "absl/strings/str_cat.h"
#include "absl/types/span.h"
@@ -144,6 +145,7 @@ MinOutgoingFlowHelper::~MinOutgoingFlowHelper() {
{"RoutingDp/num_full_dp_early_abort", num_full_dp_early_abort_});
stats.push_back(
{"RoutingDp/num_full_dp_work_abort", num_full_dp_work_abort_});
stats.push_back({"RoutingDp/num_full_dp_rc_skip", num_full_dp_rc_skip_});
for (const auto& [name, count] : num_by_type_) {
stats.push_back({absl::StrCat("RoutingDp/num_bounds_", name), count});
}
@@ -751,7 +753,7 @@ int MinOutgoingFlowHelper::ComputeTightMinOutgoingFlow(
bool MinOutgoingFlowHelper::SubsetMightBeServedWithKRoutes(
int k, absl::Span<const int> subset, RouteRelationsHelper* helper,
int special_node, bool use_outgoing) {
LinearConstraintManager* manager, int special_node, bool use_outgoing) {
cannot_be_first_.clear();
cannot_be_last_.clear();
if (k >= subset.size()) return true;
@@ -828,6 +830,13 @@ bool MinOutgoingFlowHelper::SubsetMightBeServedWithKRoutes(
// Hopefully the DFS order limit the number of entry to O(n^2 * k), so still
// somewhat reasonable for small values.
absl::flat_hash_map<IntegerVariable, IntegerValue> lbs;
// The sum of the reduced costs of all the literals selected to form the
// route(s) encoded by this state. If this becomes too large we know that
// this state can never be used to get to a better solution than the current
// best one and is thus "infeasible" for the problem of finding a better
// solution.
absl::int128 sum_of_reduced_costs = 0;
};
const int size = subset.size();
@@ -904,17 +913,33 @@ bool MinOutgoingFlowHelper::SubsetMightBeServedWithKRoutes(
if (from_state.node_set & head_mask) continue;
State to_state;
to_state.lbs = from_state.lbs; // keep old bounds
bool ok;
if (helper != nullptr) {
ok = helper->PropagateLocalBoundsUsingShortestPaths(
integer_trail_, tail, head, from_state.lbs, &to_state.lbs);
} else {
ok = binary_relation_repository_.PropagateLocalBounds(
integer_trail_, literal, from_state.lbs, &to_state.lbs);
// Use reduced costs to exclude solutions that cannot improve our
// current best solution.
if (manager != nullptr) {
DCHECK_EQ(helper, nullptr);
to_state.sum_of_reduced_costs =
from_state.sum_of_reduced_costs +
manager->GetLiteralReducedCost(literal);
if (to_state.sum_of_reduced_costs > manager->ReducedCostsGap()) {
++num_full_dp_rc_skip_;
continue;
}
}
// We start from the old bounds and update them.
to_state.lbs = from_state.lbs;
if (helper != nullptr) {
if (!helper->PropagateLocalBoundsUsingShortestPaths(
integer_trail_, tail, head, from_state.lbs, &to_state.lbs)) {
continue;
}
} else {
if (!binary_relation_repository_.PropagateLocalBounds(
integer_trail_, literal, from_state.lbs, &to_state.lbs)) {
continue;
}
}
if (!ok) continue;
to_state.node_set = from_state.node_set | head_mask;
to_state.last_nodes_set = from_state.last_nodes_set | head_mask;
@@ -1099,33 +1124,6 @@ class RouteRelationsBuilder {
}
private:
// A coeff * var + offset affine expression, where `var` is always a positive
// reference (contrary to AffineExpression, where the coefficient is always
// positive).
struct NodeExpression {
IntegerVariable var;
IntegerValue coeff;
IntegerValue offset;
NodeExpression() : var(kNoIntegerVariable), coeff(0), offset(0) {}
NodeExpression(IntegerVariable var, IntegerValue coeff, IntegerValue offset)
: var(var), coeff(coeff), offset(offset) {}
explicit NodeExpression(const AffineExpression& expr) {
if (expr.var == kNoIntegerVariable || VariableIsPositive(expr.var)) {
var = expr.var;
coeff = expr.coeff;
} else {
var = PositiveVariable(expr.var);
coeff = -expr.coeff;
}
offset = expr.constant;
}
bool IsEmpty() const { return var == kNoIntegerVariable; }
};
AffineExpression& node_expression(int node, int dimension) {
return flat_node_dim_expressions_[node * num_dimensions_ + dimension];
};
@@ -1459,82 +1457,33 @@ class RouteRelationsBuilder {
// given relation `r` between the variables used in these expressions.
void ComputeArcRelation(int arc_index, int dimension,
const NodeExpression& tail_expr,
const NodeExpression& head_expr,
const sat::Relation& r,
const NodeExpression& head_expr, sat::Relation r,
const IntegerTrail& integer_trail) {
DCHECK((r.a.var == tail_expr.var && r.b.var == head_expr.var) ||
(r.a.var == head_expr.var && r.b.var == tail_expr.var));
int64_t a = r.a.coeff.value();
int64_t b = r.b.coeff.value();
if (r.a.var != tail_expr.var) std::swap(a, b);
if (a == 0 || tail_expr.coeff == 0) {
LocalRelation result =
ComputeArcUnaryRelation(head_expr, tail_expr, b, r.lhs, r.rhs);
if (r.a.var != tail_expr.var) std::swap(r.a, r.b);
if (r.a.coeff == 0 || tail_expr.coeff == 0) {
LocalRelation result = ComputeArcUnaryRelation(head_expr, tail_expr,
r.b.coeff, r.lhs, r.rhs);
std::swap(result.tail_coeff, result.head_coeff);
ProcessNewArcRelation(arc_index, dimension, result);
return;
}
if (b == 0 || head_expr.coeff == 0) {
ProcessNewArcRelation(
arc_index, dimension,
ComputeArcUnaryRelation(tail_expr, head_expr, a, r.lhs, r.rhs));
if (r.b.coeff == 0 || head_expr.coeff == 0) {
ProcessNewArcRelation(arc_index, dimension,
ComputeArcUnaryRelation(tail_expr, head_expr,
r.a.coeff, r.lhs, r.rhs));
return;
}
// A relation a * X + b * Y \in [lhs, rhs] between the X and Y variables is
// equivalent to a (k.a/A) * (A.X+α) + (k.b/B) * (B.Y+β) \in [k.lhs+ɣ,
// k.rhs+ɣ] relation between the A.X+α and B.Y+β terms if the divisions are
// exact, where ɣ=(k.a/A)*α+(k.b/B)*β. The smallest k > 0 such that k.a/A is
// integer is |A|/gcd(a, A), and the smallest k > 0 such that k.b/B is
// integer is |B|/gcd(b, B). The least common multiple of the two is the
// smallest k ensuring the above equivalence.
const int64_t tail_k = std::abs(tail_expr.coeff.value()) /
std::gcd(tail_expr.coeff.value(), a);
const int64_t head_k = std::abs(head_expr.coeff.value()) /
std::gcd(head_expr.coeff.value(), b);
const int64_t k = std::lcm(tail_k, head_k);
// TODO(user): do not add the relation in case of overflow (this can
// happen if the expressions are provided by the user in the model proto).
const IntegerValue tail_coeff = (k * a) / tail_expr.coeff;
const IntegerValue head_coeff = (k * b) / head_expr.coeff;
const IntegerValue domain_offset =
tail_coeff * tail_expr.offset + head_coeff * head_expr.offset;
ProcessNewArcRelation(arc_index, dimension,
{tail_coeff, head_coeff, k * r.lhs + domain_offset,
k * r.rhs + domain_offset});
// Transforms the relation into the form b * Y - a * X \in [lhs, rhs], with
// b of the same sign as the head expression coefficient.
a = -a;
IntegerValue lhs = r.lhs;
IntegerValue rhs = r.rhs;
if ((b < 0) != (head_expr.coeff < 0)) {
a = -a;
b = -b;
lhs = -lhs;
rhs = -rhs;
std::swap(lhs, rhs);
}
// Transforms the relation into the form B * Y - A * X \in [lhs, rhs] by
// adding (B-b) * Y and (a-A) * X (using the bounds of X and Y to bound the
// new terms).
// TODO(user): do not add the relation in case of overflow (this can
// happen if the expressions are provided by the user in the model proto).
auto add_term = [&](IntegerVariable var, IntegerValue coeff) {
if (coeff > 0) {
lhs += coeff * integer_trail.LevelZeroLowerBound(var);
rhs += coeff * integer_trail.LevelZeroUpperBound(var);
} else {
lhs += coeff * integer_trail.LevelZeroUpperBound(var);
rhs += coeff * integer_trail.LevelZeroLowerBound(var);
}
};
add_term(r.b.var, head_expr.coeff.value() - b);
add_term(r.a.var, a - tail_expr.coeff.value());
// Transforms the relation into head_expr - tail_expr \in [lhs, rhs] by
// adding the offset values of the expressions.
lhs += head_expr.offset - tail_expr.offset;
rhs += head_expr.offset - tail_expr.offset;
ProcessNewArcRelation(arc_index, dimension,
{-1, 1, lhs.value(), rhs.value()});
const auto [lhs, rhs] =
GetDifferenceBounds(tail_expr, head_expr, r,
{integer_trail.LowerBound(tail_expr.var),
integer_trail.UpperBound(tail_expr.var)},
{integer_trail.LowerBound(head_expr.var),
integer_trail.UpperBound(head_expr.var)});
ProcessNewArcRelation(
arc_index, dimension,
{.tail_coeff = -1, .head_coeff = 1, .lhs = lhs, .rhs = rhs});
}
// Returns a relation between two given expressions which is equivalent to a
@@ -1607,6 +1556,73 @@ RoutingCumulExpressions DetectDimensionsAndCumulExpressions(
return result;
}
namespace {
// Returns a lower bound of y_expr - x_expr, valid whenever the relation `r`
// holds, x_expr.var lies in x_var_bounds and y_expr.var lies in y_var_bounds.
// The bound is generally not the tightest possible one.
IntegerValue GetDifferenceLowerBound(
const NodeExpression& x_expr, const NodeExpression& y_expr,
const sat::Relation& r,
const std::pair<IntegerValue, IntegerValue>& x_var_bounds,
const std::pair<IntegerValue, IntegerValue>& y_var_bounds) {
// Let's note x_expr = A.x+α, y_expr = B.y+β, and r = "a.x+b.y in [lhs, rhs]".
// We have x in [lb(x), ub(x)] and y in [lb(y), ub(y)], and we want to find
// a lower bound of y_expr - x_expr = lb(B.y - A.x) + β - α. We have:
//   B.y - A.x = (B-k.b).y + (-A-k.a).x + k.(a.x + b.y)
// for any k. A lower bound on the first term is (B-k.b).lb(y) if B-k.b >= 0,
// and (B-k.b).ub(y) otherwise. This can be written as:
//   (B-k.b).y >= max(0, B-k.b).lb(y) + min(0, B-k.b).ub(y)
// Similarly:
//   (-A-k.a).x >= max(0, -A-k.a).lb(x) + min(0, -A-k.a).ub(x)
// And:
//   k.(a.x + b.y) >= max(0, k).lhs + min(0, k).rhs
// Hence we get:
//   B.y - A.x >= max(0, B-k.b).lb(y) + min(0, B-k.b).ub(y) +
//                max(0, -A-k.a).lb(x) + min(0, -A-k.a).ub(x) +
//                max(0, k).lhs + min(0, k).rhs
// The derivative of this expression with respect to k is piecewise constant
// and can only change value around 0, -A/a, and B/b (where the coefficient of
// x, resp. y, resp. the relation term, changes sign). Hence we can compute an
// "interesting" lower bound by taking the maximum of this expression around
// these points, instead of over all k values.
// TODO(user): overflows could happen if the node expressions are
// provided by the user in the model proto.
auto lower_bound = [&](IntegerValue k) {
// Coefficient of y in the decomposition above: B - k.b.
const IntegerValue y_coeff = y_expr.coeff - k * r.b.coeff;
// Coefficient of x in the decomposition above: -A - k.a.
const IntegerValue x_coeff = k * (-r.a.coeff) - x_expr.coeff;
return y_coeff * (y_coeff >= 0 ? y_var_bounds.first : y_var_bounds.second) +
x_coeff * (x_coeff >= 0 ? x_var_bounds.first : x_var_bounds.second) +
k * (k >= 0 ? r.lhs : r.rhs);
};
// Integer points bracketing the (possibly fractional) breakpoints -A/a and
// B/b; the remaining breakpoint, 0, is an integer and is evaluated directly.
const IntegerValue k_x = MathUtil::FloorOfRatio(x_expr.coeff, -r.a.coeff);
const IntegerValue k_y = MathUtil::FloorOfRatio(y_expr.coeff, r.b.coeff);
IntegerValue result = lower_bound(0);
result = std::max(result, lower_bound(k_x));
result = std::max(result, lower_bound(k_x + 1));
result = std::max(result, lower_bound(k_y));
result = std::max(result, lower_bound(k_y + 1));
// Add back the constant offsets: y_expr - x_expr = (B.y - A.x) + β - α.
return result + y_expr.offset - x_expr.offset;
}
} // namespace
std::pair<IntegerValue, IntegerValue> GetDifferenceBounds(
    const NodeExpression& x_expr, const NodeExpression& y_expr,
    const sat::Relation& r,
    const std::pair<IntegerValue, IntegerValue>& x_var_bounds,
    const std::pair<IntegerValue, IntegerValue>& y_var_bounds) {
  // The relation must link exactly the variables of the two expressions, and
  // all the coefficients must be non-zero.
  DCHECK_EQ(x_expr.var, r.a.var);
  DCHECK_EQ(y_expr.var, r.b.var);
  DCHECK_NE(x_expr.var, kNoIntegerVariable);
  DCHECK_NE(y_expr.var, kNoIntegerVariable);
  DCHECK_NE(x_expr.coeff, 0);
  DCHECK_NE(y_expr.coeff, 0);
  DCHECK_NE(r.a.coeff, 0);
  DCHECK_NE(r.b.coeff, 0);
  // An upper bound of y_expr - x_expr is minus a lower bound of
  // (-y_expr) - (-x_expr), so we can reuse GetDifferenceLowerBound() twice.
  const NodeExpression negated_x = x_expr.Negated();
  const NodeExpression negated_y = y_expr.Negated();
  const IntegerValue lower =
      GetDifferenceLowerBound(x_expr, y_expr, r, x_var_bounds, y_var_bounds);
  const IntegerValue upper = -GetDifferenceLowerBound(
      negated_x, negated_y, r, x_var_bounds, y_var_bounds);
  return {lower, upper};
}
std::unique_ptr<RouteRelationsHelper> RouteRelationsHelper::Create(
int num_nodes, const std::vector<int>& tails, const std::vector<int>& heads,
const std::vector<Literal>& literals,
@@ -2508,19 +2524,24 @@ bool RoutingCutHelper::TrySubsetCut(int known_bound, std::string name,
// Note that we only look for violated cuts, but also in order to not try
// all nodes from the subset, we only look at large enough incoming/outgoing
// weight.
//
// TODO(user): We could easily look for an arc that cannot be used to enter
// or leave a subset rather than a node. This should lead to tighter bound,
// and more cuts (that would just ignore that arc rather than all the arcs
// entering/leaving from a node).
const double threshold = static_cast<double>(best_bound.bound()) - 1e-4;
for (const int n : subset) {
if (nodes_incoming_weight_[n] > 0.1 &&
total_incoming_weight - nodes_incoming_weight_[n] < threshold &&
!min_outgoing_flow_helper_.SubsetMightBeServedWithKRoutes(
best_bound.bound(), subset, nullptr, /*special_node=*/n,
best_bound.bound(), subset, nullptr, manager, /*special_node=*/n,
/*use_outgoing=*/true)) {
best_bound.Update(best_bound.bound(), "", {n}, {});
}
if (nodes_outgoing_weight_[n] > 0.1 &&
total_outgoing_weight - nodes_outgoing_weight_[n] < threshold &&
!min_outgoing_flow_helper_.SubsetMightBeServedWithKRoutes(
best_bound.bound(), subset, nullptr, /*special_node=*/n,
best_bound.bound(), subset, nullptr, manager, /*special_node=*/n,
/*use_outgoing=*/false)) {
best_bound.Update(best_bound.bound(), "", {}, {n});
}
@@ -2722,6 +2743,8 @@ void RoutingCutHelper::TryInfeasiblePathCuts(LinearConstraintManager* manager) {
top_n_cuts.TransferToManager(manager);
}
// Note that it makes little sense to use reduced cost here as the LP solution
// should likely satisfy the current LP optimality equation.
void RoutingCutHelper::GenerateCutsForInfeasiblePaths(
int start_node, int max_path_length,
const CompactVectorVector<int, std::pair<int, double>>&
@@ -2734,14 +2757,17 @@ void RoutingCutHelper::GenerateCutsForInfeasiblePaths(
struct State {
// The last node of the path.
int last_node;
// The sum of the lp values of the path's arcs.
double sum_of_lp_values;
double sum_of_lp_values = 0.0;
// Variable lower bounds that can be inferred by assuming that the path's
// arc literals are true (with the binary relation repository).
absl::flat_hash_map<IntegerVariable, IntegerValue> bounds;
// The index of the next outgoing arc of `last_node` to explore (in
// outgoing_arcs_with_lp_values[last_node]).
int iteration;
int iteration = 0;
};
std::stack<State> states;
// Whether each node is on the current path. The current path is the one
@@ -2751,7 +2777,7 @@ void RoutingCutHelper::GenerateCutsForInfeasiblePaths(
std::vector<Literal> path_literals;
path_nodes[start_node] = true;
states.push({start_node, /*sum_of_lp_values=*/0.0, /*bounds=*/{}, 0});
states.push({start_node});
while (!states.empty()) {
State& state = states.top();
DCHECK_EQ(states.size(), path_literals.size() + 1);

View File

@@ -56,6 +56,53 @@ RoutingCumulExpressions DetectDimensionsAndCumulExpressions(
absl::Span<const Literal> literals,
const BinaryRelationRepository& binary_relation_repository);
// A coeff * var + offset affine expression, where `var` is always a positive
// reference (contrary to AffineExpression, where the coefficient is always
// positive).
struct NodeExpression {
  IntegerVariable var = kNoIntegerVariable;
  IntegerValue coeff = IntegerValue(0);
  IntegerValue offset = IntegerValue(0);

  // Default: the "empty" expression (no variable, zero coeff and offset).
  NodeExpression() = default;
  NodeExpression(IntegerVariable var, IntegerValue coeff, IntegerValue offset)
      : var(var), coeff(coeff), offset(offset) {}
  // Converts an AffineExpression, normalizing the variable to its positive
  // reference (and negating the coefficient accordingly).
  explicit NodeExpression(const AffineExpression& expr)
      : offset(expr.constant) {
    const bool already_positive =
        expr.var == kNoIntegerVariable || VariableIsPositive(expr.var);
    var = already_positive ? expr.var : PositiveVariable(expr.var);
    coeff = already_positive ? expr.coeff : -expr.coeff;
  }

  // True iff no variable is attached to this expression.
  bool IsEmpty() const { return var == kNoIntegerVariable; }
  // Evaluates coeff * x + offset.
  IntegerValue ValueAt(IntegerValue x) const { return coeff * x + offset; }
  // Returns -coeff * var - offset, i.e. the negation of this expression.
  NodeExpression Negated() const {
    return NodeExpression(var, -coeff, -offset);
  }
};
// Returns some bounds on y_expr - x_expr, based on the given relation and the
// given variable bounds. r.a (resp. r.b) must have the same variable as x_expr
// (resp. y_expr), which must not be kNoIntegerVariable. Moreover, the
// coefficients of these variables in x_expr, y_expr and r must not be zero.
//
// The returned bounds are generally not the best possible ones (computing them
// is equivalent to solving a MIP -- min(y_expr - x_expr) subject to r, x.var in
// x_var_bounds and y.var in y_var_bounds).
std::pair<IntegerValue, IntegerValue> GetDifferenceBounds(
const NodeExpression& x_expr, const NodeExpression& y_expr,
const sat::Relation& r,
const std::pair<IntegerValue, IntegerValue>& x_var_bounds,
const std::pair<IntegerValue, IntegerValue>& y_var_bounds);
// Helper to store the result of DetectDimensionsAndCumulExpressions() and also
// recover and store bounds on (node_expr[head] - node_expr[tail]) for each arc
// (tail -> head) assuming the arc is taken.
@@ -410,10 +457,11 @@ class MinOutgoingFlowHelper {
//
// TODO(user): the complexity also depends on the longest route and improves
// if routes fail quickly. Give a better estimate?
bool SubsetMightBeServedWithKRoutes(int k, absl::Span<const int> subset,
RouteRelationsHelper* helper = nullptr,
int special_node = -1,
bool use_outgoing = true);
bool SubsetMightBeServedWithKRoutes(
int k, absl::Span<const int> subset,
RouteRelationsHelper* helper = nullptr,
LinearConstraintManager* manager = nullptr, int special_node = -1,
bool use_outgoing = true);
// Advanced. If non-empty, and one of the functions above proved that a subset
// needs at least k vehicles to serve it, then these vector list the nodes
@@ -558,6 +606,7 @@ class MinOutgoingFlowHelper {
int64_t num_full_dp_calls_ = 0;
int64_t num_full_dp_early_abort_ = 0;
int64_t num_full_dp_work_abort_ = 0;
int64_t num_full_dp_rc_skip_ = 0;
absl::flat_hash_map<std::string, int> num_by_type_;
};

View File

@@ -56,6 +56,65 @@ using ::testing::Pair;
using ::testing::UnorderedElementsAre;
using HeadMinusTailBounds = RouteRelationsHelper::HeadMinusTailBounds;
// Computes the exact min and max of y_expr - x_expr by brute-force
// enumeration of all the (x, y) integer pairs within the given bounds that
// satisfy the relation r. Used as a reference for GetDifferenceBounds().
std::pair<IntegerValue, IntegerValue> ExactDifferenceBounds(
    const NodeExpression& x_expr, const NodeExpression& y_expr,
    const sat::Relation& r,
    const std::pair<IntegerValue, IntegerValue>& x_bounds,
    const std::pair<IntegerValue, IntegerValue>& y_bounds) {
  IntegerValue min_diff = kMaxIntegerValue;
  IntegerValue max_diff = kMinIntegerValue;
  for (IntegerValue y = y_bounds.first; y <= y_bounds.second; ++y) {
    for (IntegerValue x = x_bounds.first; x <= x_bounds.second; ++x) {
      // Skip pairs violating the relation a.x + b.y in [lhs, rhs].
      const IntegerValue activity = x * r.a.coeff + y * r.b.coeff;
      if (activity < r.lhs || activity > r.rhs) continue;
      const IntegerValue diff = y_expr.ValueAt(y) - x_expr.ValueAt(x);
      min_diff = std::min(min_diff, diff);
      max_diff = std::max(max_diff, diff);
    }
  }
  return {min_diff, max_diff};
}
// Checks on random instances that GetDifferenceBounds() returns valid bounds,
// i.e. bounds at least as loose as the exact ones computed by exhaustive
// enumeration (they are not required to be tight).
TEST(GetDifferenceBounds, RandomTest) {
  absl::BitGen random;
  const IntegerVariable x(0);
  const IntegerVariable y(2);
  const Literal lit(BooleanVariable(0), true);
  const auto x_bounds = std::make_pair(IntegerValue(-5), IntegerValue(13));
  const auto y_bounds = std::make_pair(IntegerValue(-7), IntegerValue(17));
  for (int i = 0; i < 10000; ++i) {
    // Random coefficients for x_expr (A), y_expr (B) and the relation (a, b).
    // All of them must be non-zero, as required by GetDifferenceBounds().
    const IntegerValue A(absl::Uniform<int64_t>(random, -10, 10));
    const IntegerValue B(absl::Uniform<int64_t>(random, -10, 10));
    const IntegerValue a(absl::Uniform<int64_t>(random, -10, 10));
    const IntegerValue b(absl::Uniform<int64_t>(random, -10, 10));
    if (a == 0 || b == 0 || A == 0 || B == 0) continue;
    // Start from [lhs, rhs] = the full range of a.x + b.y over the variable
    // bounds, then shrink it to a random subinterval around its middle point,
    // so that the relation is feasible but not vacuous.
    IntegerValue lhs = a * (a >= 0 ? x_bounds.first : x_bounds.second) +
                       b * (b >= 0 ? y_bounds.first : y_bounds.second);
    IntegerValue rhs = a * (a >= 0 ? x_bounds.second : x_bounds.first) +
                       b * (b >= 0 ? y_bounds.second : y_bounds.first);
    IntegerValue middle = (lhs + rhs) / 2;
    lhs = IntegerValue(
        absl::Uniform<int64_t>(random, lhs.value(), middle.value()));
    rhs = IntegerValue(
        absl::Uniform<int64_t>(random, middle.value(), rhs.value()));
    const NodeExpression x_expr(x, A, absl::Uniform<int64_t>(random, -5, 5));
    const NodeExpression y_expr(y, B, absl::Uniform<int64_t>(random, -5, 5));
    const Relation r{
        .enforcement = lit,
        .a = LinearTerm(x, a),
        .b = LinearTerm(y, b),
        .lhs = lhs,
        .rhs = rhs,
    };
    const auto [lb, ub] =
        GetDifferenceBounds(x_expr, y_expr, r, x_bounds, y_bounds);
    const auto [exact_lb, exact_ub] =
        ExactDifferenceBounds(x_expr, y_expr, r, x_bounds, y_bounds);
    // Use EXPECT (not ASSERT) for both checks so that one failing instance
    // does not abort the loop and hide the remaining failures.
    EXPECT_LE(lb, exact_lb);
    EXPECT_GE(ub, exact_ub);
  }
}
TEST(MinOutgoingFlowHelperTest, TwoNodesWithoutConstraints) {
Model model;
const std::vector<int> tails = {0, 1};
@@ -1128,6 +1187,7 @@ TEST(MinOutgoingFlowHelperTest,
EXPECT_EQ(SolveTimeWindowProblemStartingFrom(i, optimal, tails, heads,
travel_times, time_windows),
helper.SubsetMightBeServedWithKRoutes(optimal, subset, nullptr,
nullptr,
/*special_node=*/i));
}
}
@@ -2317,6 +2377,9 @@ TEST(CreateCVRPCutGeneratorTest, InfeasiblePathCuts) {
1.0, 0.9, 0.1, 0.9, 0.1, 1.0};
Model model;
model.GetOrCreate<SatParameters>()
->set_routing_cut_subset_size_for_exact_binary_relation_bound(0);
std::vector<Literal> literals;
auto& lp_values = *model.GetOrCreate<ModelLpValues>();
lp_values.resize(32, 0.0);

View File

@@ -42,10 +42,10 @@ void SolveInterrupter::Interrupt() {
// of if this function has called it.
interrupted_ = true;
// We are holding the lock while calling callbacks. This make it impossible to
// call Interrupt(), AddInterruptionCallback(), or
// We are holding the lock while calling callbacks. This makes it impossible
// to call Interrupt(), AddInterruptionCallback(), or
// RemoveInterruptionCallback() from a callback but it ensures that external
// code that can modify callbacks_ will wait the end of Interrupt.
// code that can modify callbacks_ will wait until the end of Interrupt.
for (const auto& [callback_id, callback] : callbacks_) {
callback();
}