OR-Tools  9.1
sat/util.h
Go to the documentation of this file.
1 // Copyright 2010-2021 Google LLC
2 // Licensed under the Apache License, Version 2.0 (the "License");
3 // you may not use this file except in compliance with the License.
4 // You may obtain a copy of the License at
5 //
6 // http://www.apache.org/licenses/LICENSE-2.0
7 //
8 // Unless required by applicable law or agreed to in writing, software
9 // distributed under the License is distributed on an "AS IS" BASIS,
10 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 // See the License for the specific language governing permissions and
12 // limitations under the License.
13 
#ifndef OR_TOOLS_SAT_UTIL_H_
#define OR_TOOLS_SAT_UTIL_H_

#include <cstdint>
#include <deque>
#include <set>
#include <vector>

#include "absl/random/bit_gen_ref.h"
#include "absl/random/random.h"
#include "absl/types/span.h"
#include "ortools/sat/model.h"
#include "ortools/sat/sat_base.h"
#include "ortools/sat/sat_parameters.pb.h"
#include "ortools/util/time_limit.h"

#if !defined(__PORTABLE_PLATFORM__)
#include "google/protobuf/descriptor.h"
#endif  // __PORTABLE_PLATFORM__
31 
32 namespace operations_research {
33 namespace sat {
34 
35 // The model "singleton" random engine used in the solver.
36 //
37 // In test, we usually set use_absl_random() so that the sequence is changed at
38 // each invocation. This way, clients do not relly on the wrong assumption that
39 // a particular optimal solution will be returned if they are many equivalent
40 // ones.
41 class ModelRandomGenerator : public absl::BitGenRef {
42  public:
43  // We seed the strategy at creation only. This should be enough for our use
44  // case since the SatParameters is set first before the solver is created. We
45  // also never really need to change the seed afterwards, it is just used to
46  // diversify solves with identical parameters on different Model objects.
48  : absl::BitGenRef(deterministic_random_) {
49  const auto& params = *model->GetOrCreate<SatParameters>();
50  deterministic_random_.seed(params.random_seed());
51  if (params.use_absl_random()) {
52  absl_random_ = absl::BitGen(absl::SeedSeq({params.random_seed()}));
53  absl::BitGenRef::operator=(absl::BitGenRef(absl_random_));
54  }
55  }
56 
57  // This is just used to display ABSL_RANDOM_SALT_OVERRIDE in the log so that
58  // it is possible to reproduce a failure more easily while looking at a solver
59  // log.
60  //
61  // TODO(user): I didn't find a cleaner way to log this.
62  void LogSalt() const {}
63 
64  private:
65  random_engine_t deterministic_random_;
66  absl::BitGen absl_random_;
67 };
68 
69 // The model "singleton" shared time limit.
71  public:
73  : SharedTimeLimit(model->GetOrCreate<TimeLimit>()) {}
74 };
75 
76 // Randomizes the decision heuristic of the given SatParameters.
77 template <typename URBG>
78 void RandomizeDecisionHeuristic(URBG* random, SatParameters* parameters);
79 
80 // Context: this function is not really generic, but required to be unit-tested.
81 // It is used in a clause minimization algorithm when we try to detect if any of
82 // the clause literals can be propagated by a subset of the other literal being
83 // false. For that, we want to enqueue in the solver all the subset of size n-1.
84 //
85 // This moves one of the unprocessed literal from literals to the last position.
86 // The function tries to do that while preserving the longest possible prefix of
87 // literals "amortized" through the calls assuming that we want to move each
88 // literal to the last position once.
89 //
90 // For a vector of size n, if we want to call this n times so that each literal
91 // is last at least once, the sum of the size of the changed suffixes will be
92 // O(n log n). If we were to use a simpler algorithm (like moving the last
93 // unprocessed literal to the last position), this sum would be O(n^2).
94 //
95 // Returns the size of the common prefix of literals before and after the move,
96 // or -1 if all the literals are already processed. The argument
97 // relevant_prefix_size is used as a hint when keeping more that this prefix
98 // size do not matter. The returned value will always be lower or equal to
99 // relevant_prefix_size.
100 int MoveOneUnprocessedLiteralLast(const std::set<LiteralIndex>& processed,
101  int relevant_prefix_size,
102  std::vector<Literal>* literals);
103 
104 // ============================================================================
105 // Implementation.
106 // ============================================================================
107 
108 template <typename URBG>
109 inline void RandomizeDecisionHeuristic(URBG* random,
111 #if !defined(__PORTABLE_PLATFORM__)
112  // Random preferred variable order.
113  const google::protobuf::EnumDescriptor* order_d =
116  static_cast<SatParameters::VariableOrder>(
117  order_d->value(absl::Uniform(*random, 0, order_d->value_count()))
118  ->number()));
119 
120  // Random polarity initial value.
121  const google::protobuf::EnumDescriptor* polarity_d =
123  parameters->set_initial_polarity(static_cast<SatParameters::Polarity>(
124  polarity_d->value(absl::Uniform(*random, 0, polarity_d->value_count()))
125  ->number()));
126 #endif // __PORTABLE_PLATFORM__
127  // Other random parameters.
128  parameters->set_use_phase_saving(absl::Bernoulli(*random, 0.5));
129  parameters->set_random_polarity_ratio(absl::Bernoulli(*random, 0.5) ? 0.01
130  : 0.0);
131  parameters->set_random_branches_ratio(absl::Bernoulli(*random, 0.5) ? 0.01
132  : 0.0);
133 }
134 
// Manages incremental averages.
class IncrementalAverage {
 public:
  // Initializes the average with 'initial_average' and number of records to 0.
  explicit IncrementalAverage(double initial_average)
      : average_(initial_average) {}
  IncrementalAverage() {}

  // Sets the number of records to 0 and average to 'reset_value'.
  void Reset(double reset_value);

  double CurrentAverage() const { return average_; }
  int64_t NumRecords() const { return num_records_; }

  void AddData(double new_record);

 private:
  double average_ = 0.0;
  int64_t num_records_ = 0;
};
155 
156 // Manages exponential moving averages defined as
157 // new_average = decaying_factor * old_average
158 // + (1 - decaying_factor) * new_record.
159 // where 0 < decaying_factor < 1.
161  public:
162  explicit ExponentialMovingAverage(double decaying_factor)
163  : decaying_factor_(decaying_factor) {
164  DCHECK_GE(decaying_factor, 0.0);
165  DCHECK_LE(decaying_factor, 1.0);
166  }
167 
168  // Returns exponential moving average for all the added data so far.
169  double CurrentAverage() const { return average_; }
170 
171  // Returns the total number of added records so far.
172  int64_t NumRecords() const { return num_records_; }
173 
174  void AddData(double new_record);
175 
176  private:
177  double average_ = 0.0;
178  int64_t num_records_ = 0;
179  const double decaying_factor_;
180 };
181 
// Utility to calculate percentile (first variant) for a limited number of
// records. Reference: https://en.wikipedia.org/wiki/Percentile
//
// After the vector is sorted, we assume that the element with index i
// corresponds to the percentile 100*(i+0.5)/size. For percentiles before the
// first element (resp. after the last one) we return the first element (resp.
// the last). And otherwise we do a linear interpolation between the two
// elements around the asked percentile.
class Percentile {
 public:
  explicit Percentile(int record_limit) : record_limit_(record_limit) {}

  void AddRecord(double record);

  // Returns the number of stored records.
  int64_t NumRecords() const { return records_.size(); }

  // Note that this is not fast and runs in O(n log n) for n records.
  double GetPercentile(double percent);

 private:
  // Bounded buffer: AddRecord keeps at most record_limit_ entries.
  std::deque<double> records_;
  const int record_limit_;
};
206 
207 // This method tries to compress a list of tuples by merging complementary
208 // tuples, that is a set of tuples that only differ on one variable, and that
209 // cover the domain of the variable. In that case, it will keep only one tuple,
210 // and replace the value for variable by any_value, the equivalent of '*' in
211 // regexps.
212 //
213 // This method is exposed for testing purposes.
214 void CompressTuples(absl::Span<const int64_t> domain_sizes, int64_t any_value,
215  std::vector<std::vector<int64_t>>* tuples);
216 
217 } // namespace sat
218 } // namespace operations_research
219 
220 #endif // OR_TOOLS_SAT_UTIL_H_
ExponentialMovingAverage(double decaying_factor)
Definition: sat/util.h:162
A simple class to enforce both an elapsed time limit and a deterministic time limit in the same threa...
Definition: time_limit.h:105
void RandomizeDecisionHeuristic(URBG *random, SatParameters *parameters)
Definition: sat/util.h:109
Class that owns everything related to a particular optimization model.
Definition: sat/model.h:38
void AddRecord(double record)
Definition: sat/util.cc:81
GRBmodel * model
Definition: cleanup.h:22
std::mt19937 random_engine_t
Definition: random_engine.h:23
#define DCHECK_GE(val1, val2)
Definition: base/logging.h:890
void set_initial_polarity(::operations_research::sat::SatParameters_Polarity value)
static const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor * Polarity_descriptor()
#define DCHECK_LE(val1, val2)
Definition: base/logging.h:888
static const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor * VariableOrder_descriptor()
Collection of objects used to extend the Constraint Solver library.
SatParameters parameters
IncrementalAverage(double initial_average)
Definition: sat/util.h:139
void CompressTuples(absl::Span< const int64_t > domain_sizes, int64_t any_value, std::vector< std::vector< int64_t >> *tuples)
Definition: sat/util.cc:113
int MoveOneUnprocessedLiteralLast(const std::set< LiteralIndex > &processed, int relevant_prefix_size, std::vector< Literal > *literals)
Definition: sat/util.cc:25
double GetPercentile(double percent)
Definition: sat/util.cc:88
void set_preferred_variable_order(::operations_research::sat::SatParameters_VariableOrder value)