OR-Tools  9.3
encoding.h
Go to the documentation of this file.
1// Copyright 2010-2021 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14// Algorithms to encode constraints into their SAT representation. Currently,
15// this contains one possible encoding of a cardinality constraint as used by
16// the core-based optimization algorithm in optimization.h.
17//
18// This is also known as the incremental totalizer encoding in the literature.
19
20#ifndef OR_TOOLS_SAT_ENCODING_H_
21#define OR_TOOLS_SAT_ENCODING_H_
22
23#include <cstdint>
24#include <deque>
25#include <string>
26#include <vector>
27
30#include "ortools/base/macros.h"
31#include "ortools/sat/boolean_problem.pb.h"
36
37namespace operations_research {
38namespace sat {
39
40// This class represents a number in [0, ub]. The encoding uses ub binary
41// variables x_i with i in [0, ub) where x_i means that the number is > i. It is
42// called an EncodingNode, because it represents one node of the tree used to
43// encode a cardinality constraint.
44//
45// In practice, not all literals are explicitly created:
46// - Only the literals in [lb, current_ub) are "active" at a given time.
47// - The represented number is known to be >= lb.
48// - It may be greater than current_ub, but the extra literals will be only
 49 // created lazily. In all our solves, the literal current_ub - 1 will always
 50 // be assumed to be false (i.e. the number will be <= current_ub - 1).
51// - Note that lb may increase and ub decrease as more information is learned
52// about this node by the sat solver.
53//
54// This is roughly based on the cardinality constraint encoding described in:
55// Bailleux and Yacine Boufkhad, "Efficient CNF Encoding of Boolean Cardinality
56// Constraints", In Proc. of CP 2003, pages 108-122, 2003.
58 public:
60
61 // Constructs a EncodingNode of size one, just formed by the given literal.
62 explicit EncodingNode(Literal l);
63
64 // Creates a "full" encoding node on n new variables, the represented number
 65 // being in [lb, ub = lb + n). The variables are added to the given solver
66 // with the basic implications linking them:
67 // literal(0) >= ... >= literal(n-1)
69 SatSolver* solver);
70
71 // Creates a "lazy" encoding node representing the sum of a and b.
 72 // Only one literal will be created by this operation. Note that no clauses
73 // linking it with a or b are added by this function.
77
78 // Returns a literal with the meaning 'this node number is > i'.
79 // The given i must be in [lb_, current_ub).
80 Literal GreaterThan(int i) const { return literal(i - lb_); }
81
82 // Accessors to size() and literals in [lb, current_ub).
83 int size() const { return literals_.size(); }
84 Literal literal(int i) const {
85 CHECK_GE(i, 0);
86 CHECK_LT(i, literals_.size());
87 return literals_[i];
88 }
89
90 // Sort by decreasing depth first and then by increasing variable index.
91 // This is meant to be used by the priority queue in MergeAllNodesWithPQ().
92 bool operator<(const EncodingNode& other) const {
93 return depth_ > other.depth_ ||
94 (depth_ == other.depth_ && other.for_sorting_ > for_sorting_);
95 }
96
 97 // Creates a new literal and increases current_ub.
98 // Returns false if we were already at the upper bound for this node.
99 bool IncreaseCurrentUB(SatSolver* solver);
100
101 // Removes the left-side literals fixed to 1. Note that this increases lb_ and
102 // reduces the number of active literals. It also removes any right-side
103 // literals fixed to 0. If such a literal exists, ub is updated accordingly.
104 //
105 // Return the overall weight increase.
106 Coefficient Reduce(const SatSolver& solver);
107
108 // GetAssumption() might need to create new literals.
110 bool HasNoWeight() const;
111 void IncreaseWeightLb();
112
113 // Fix any literal that would cause the weight of this node to go over the
114 // gap.
116
118 weight_lb_ = lb_;
119 weight_ = w;
120 }
121 Coefficient weight() const { return weight_; }
122
123 // The depth is mainly used as an heuristic to decide which nodes to merge
124 // first. See the < operator.
125 void set_depth(int depth) { depth_ = depth; }
126 int depth() const { return depth_; }
127
128 int lb() const { return lb_; }
129 int current_ub() const { return lb_ + literals_.size(); }
130 int ub() const { return ub_; }
131 EncodingNode* child_a() const { return child_a_; }
132 EncodingNode* child_b() const { return child_b_; }
133
134 // We use the solver to display the current values of the literals.
135 std::string DebugString(const VariablesAssignment& assignment) const;
136
137 private:
138 int depth_ = 0;
139 int lb_ = 0;
140 int ub_ = 1;
141 BooleanVariable for_sorting_;
142
 143 // The weight only applies to literals >= this lb.
144 int weight_lb_ = 0;
145
146 Coefficient weight_;
147 EncodingNode* child_a_;
148 EncodingNode* child_b_;
149
150 // The literals of this node in order.
151 std::vector<Literal> literals_;
152};
153
154// Merges the two given EncodingNodes by creating a new node that corresponds to
155// the sum of the two given ones. Only the left-most binary variable is created
156// for the parent node, the other ones will be created later when needed.
157EncodingNode LazyMerge(EncodingNode* a, EncodingNode* b, SatSolver* solver);
158
159// Increases the size of the given node by one. To keep all the needed relations
160// with its children, we also need to increase their size by one, and so on
161// recursively. Also adds all the necessary clauses linking the newly added
162// literals.
163void IncreaseNodeSize(EncodingNode* node, SatSolver* solver);
164
165// Merges the two given EncodingNode by creating a new node that corresponds to
166// the sum of the two given ones. The given upper_bound is interpreted as a
167// bound on this sum, and allows creating fewer binary variables.
168EncodingNode FullMerge(Coefficient upper_bound, EncodingNode* a,
169 EncodingNode* b, SatSolver* solver);
170
171// Merges all the given nodes two by two until there is only one left. Returns
172// the final node which encodes the sum of all the given nodes.
174 const std::vector<EncodingNode*>& nodes,
175 SatSolver* solver,
176 std::deque<EncodingNode>* repository);
177
178// Same as MergeAllNodesWithDeque() but use a priority queue to merge in
 179 // priority nodes with smaller sizes. This also enforces that the sum of nodes
180// is greater than its lower bound.
182 Coefficient weight, const std::vector<EncodingNode*>& nodes,
183 SatSolver* solver, std::deque<EncodingNode>* repository);
184
185// Returns a vector with one new EncodingNode by variable in the given
186// objective. Sets the offset to the negated sum of the negative coefficient,
187// because in this case we negate the literals to have only positive
188// coefficients.
189std::vector<EncodingNode*> CreateInitialEncodingNodes(
190 const std::vector<Literal>& literals,
191 const std::vector<Coefficient>& coeffs, Coefficient* offset,
192 std::deque<EncodingNode>* repository);
193std::vector<EncodingNode*> CreateInitialEncodingNodes(
194 const LinearObjective& objective_proto, Coefficient* offset,
195 std::deque<EncodingNode>* repository);
196
197// Reduces the nodes using the now fixed literals, update the lower-bound, and
198// returns the set of assumptions for the next round of the core-based
199// algorithm. Returns an empty set of assumptions if everything is fixed.
200std::vector<Literal> ReduceNodesAndExtractAssumptions(
201 Coefficient upper_bound, Coefficient stratified_lower_bound,
202 Coefficient* lower_bound, std::vector<EncodingNode*>* nodes,
203 SatSolver* solver);
204
 205 // Returns the minimum weight of the nodes in the core. Note that the literals
 206 // in the core must appear in the same order as the ones in nodes.
207Coefficient ComputeCoreMinWeight(const std::vector<EncodingNode*>& nodes,
208 const std::vector<Literal>& core);
209
210// Returns the maximum node weight under the given upper_bound. Returns zero if
 211 // no such weight exists (note that a node weight is strictly positive, so this
 212 // makes sense).
213Coefficient MaxNodeWeightSmallerThan(const std::vector<EncodingNode*>& nodes,
215
216// Updates the encoding using the given core. The literals in the core must
 217 // match the order in nodes. Returns false if the model becomes infeasible.
218bool ProcessCore(const std::vector<Literal>& core, Coefficient min_weight,
219 std::deque<EncodingNode>* repository,
220 std::vector<EncodingNode*>* nodes, SatSolver* solver);
221
222// There is more than one way to create new assumptions and encode the
223// information from this core. This is slightly different from ProcessCore() and
 224 // follows the algorithm used by many of the top max-SAT solvers under the name
225// incremental OLL. This is described in:
226// António Morgado, Carmine Dodaro, Joao Marques-Silva. "Core-Guided MaxSAT
227// with Soft Cardinality Constraints". CP 2014. pp. 564-573.
228// António Morgado, Alexey Ignatiev, Joao Marques-Silva. "MSCG: Robust
229// Core-Guided MaxSAT Solving." JSAT 9. 2014. pp. 129-134.
230//
231// TODO(user): The last time this was tested, it was however not as good as the
232// ProcessCore() version. That might change as we code/change more heuristic, so
233// we keep it around.
234bool ProcessCoreWithAlternativeEncoding(const std::vector<Literal>& core,
235 Coefficient min_weight,
236 std::deque<EncodingNode>* repository,
237 std::vector<EncodingNode*>* nodes,
238 SatSolver* solver);
239
240} // namespace sat
241} // namespace operations_research
242
243#endif // OR_TOOLS_SAT_ENCODING_H_
#define CHECK_LT(val1, val2)
Definition: base/logging.h:706
#define CHECK_GE(val1, val2)
Definition: base/logging.h:707
void InitializeLazyCoreNode(Coefficient weight, EncodingNode *a, EncodingNode *b)
Definition: encoding.cc:75
Literal GetAssumption(SatSolver *solver)
Definition: encoding.cc:134
bool IncreaseCurrentUB(SatSolver *solver)
Definition: encoding.cc:90
void ApplyWeightUpperBound(Coefficient gap, SatSolver *solver)
Definition: encoding.cc:121
Coefficient Reduce(const SatSolver &solver)
Definition: encoding.cc:101
EncodingNode * child_b() const
Definition: encoding.h:132
void InitializeLazyNode(EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition: encoding.cc:59
void InitializeFullNode(int n, EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition: encoding.cc:39
Literal literal(int i) const
Definition: encoding.h:84
std::string DebugString(const VariablesAssignment &assignment) const
Definition: encoding.cc:154
EncodingNode * child_a() const
Definition: encoding.h:131
Literal GreaterThan(int i) const
Definition: encoding.h:80
bool operator<(const EncodingNode &other) const
Definition: encoding.h:92
int64_t b
int64_t a
double upper_bound
double lower_bound
std::tuple< int64_t, int64_t, const double > Coefficient
Coefficient ComputeCoreMinWeight(const std::vector< EncodingNode * > &nodes, const std::vector< Literal > &core)
Definition: encoding.cc:501
EncodingNode * MergeAllNodesWithDeque(Coefficient upper_bound, const std::vector< EncodingNode * > &nodes, SatSolver *solver, std::deque< EncodingNode > *repository)
Definition: encoding.cc:335
EncodingNode * LazyMergeAllNodeWithPQAndIncreaseLb(Coefficient weight, const std::vector< EncodingNode * > &nodes, SatSolver *solver, std::deque< EncodingNode > *repository)
Definition: encoding.cc:357
std::vector< Literal > ReduceNodesAndExtractAssumptions(Coefficient upper_bound, Coefficient stratified_lower_bound, Coefficient *lower_bound, std::vector< EncodingNode * > *nodes, SatSolver *solver)
Definition: encoding.cc:447
void IncreaseNodeSize(EncodingNode *node, SatSolver *solver)
Definition: encoding.cc:189
EncodingNode LazyMerge(EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition: encoding.cc:179
EncodingNode FullMerge(Coefficient upper_bound, EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition: encoding.cc:284
bool ProcessCore(const std::vector< Literal > &core, Coefficient min_weight, std::deque< EncodingNode > *repository, std::vector< EncodingNode * > *nodes, SatSolver *solver)
Definition: encoding.cc:528
Coefficient MaxNodeWeightSmallerThan(const std::vector< EncodingNode * > &nodes, Coefficient upper_bound)
Definition: encoding.cc:516
bool ProcessCoreWithAlternativeEncoding(const std::vector< Literal > &core, Coefficient min_weight, std::deque< EncodingNode > *repository, std::vector< EncodingNode * > *nodes, SatSolver *solver)
Definition: encoding.cc:576
std::vector< EncodingNode * > CreateInitialEncodingNodes(const std::vector< Literal > &literals, const std::vector< Coefficient > &coeffs, Coefficient *offset, std::deque< EncodingNode > *repository)
Definition: encoding.cc:385
Collection of objects used to extend the Constraint Solver library.
int64_t weight
Definition: pack.cc:510
int nodes