[CP-SAT] Improve assumptions algorithm; tweak lb_tree_search

This commit is contained in:
Laurent Perron
2022-02-17 16:20:21 +01:00
parent 9594f58c1e
commit 29662f2006
7 changed files with 280 additions and 114 deletions

View File

@@ -76,12 +76,13 @@ void EncodingNode::InitializeLazyNode(EncodingNode* a, EncodingNode* b,
}
bool EncodingNode::IncreaseCurrentUB(SatSolver* solver) {
CHECK(!literals_.empty());
if (current_ub() == ub_) return false;
literals_.emplace_back(BooleanVariable(solver->NumVariables()), true);
solver->SetNumVariables(solver->NumVariables() + 1);
solver->AddBinaryClause(literals_.back().Negated(),
literals_[literals_.size() - 2]);
if (literals_.size() > 1) {
solver->AddBinaryClause(literals_.back().Negated(),
literals_[literals_.size() - 2]);
}
return true;
}
@@ -110,6 +111,29 @@ void EncodingNode::ApplyUpperBound(int64_t upper_bound, SatSolver* solver) {
ub_ = lb_ + literals_.size();
}
std::string EncodingNode::DebugString(
const VariablesAssignment& assignment) const {
std::string result;
absl::StrAppend(&result, "depth:", depth_);
absl::StrAppend(&result, " [", lb_, ",", lb_ + literals_.size(), "]");
absl::StrAppend(&result, " ub:", ub_);
absl::StrAppend(&result, " values:");
const size_t limit = 20;
int value = 0;
for (int i = 0; i < std::min(literals_.size(), limit); ++i) {
char c = '?';
if (assignment.LiteralIsTrue(literals_[i])) {
c = '1';
value = i + 1;
} else if (assignment.LiteralIsFalse(literals_[i])) {
c = '0';
}
result += c;
}
absl::StrAppend(&result, " val:", lb_ + value);
return result;
}
EncodingNode LazyMerge(EncodingNode* a, EncodingNode* b, SatSolver* solver) {
EncodingNode n;
n.InitializeLazyNode(a, b, solver);
@@ -142,7 +166,6 @@ void IncreaseNodeSize(EncodingNode* node, SatSolver* solver) {
// n->GreaterThan(target) is the new literal of n.
CHECK(a != nullptr);
CHECK(b != nullptr);
CHECK_GE(n->size(), 2);
const int target = n->current_ub() - 1;
// Add a literal to a if needed.
@@ -380,6 +403,13 @@ std::vector<Literal> ReduceNodesAndExtractAssumptions(
solver->Backtrack(0);
for (EncodingNode* n : *nodes) {
*lower_bound += n->Reduce(*solver) * n->weight();
// Tricky: When we solve a partial set of assumptions, some unused nodes
// might have literals that get fixed to one. If that happens we do need
// to create new literals so we can fix these nodes to their lower bound.
if (n->size() == 0 && n->lb() < n->ub()) {
IncreaseNodeSize(n, solver);
}
}
// Fix the nodes right-most variables that are above the gap.
@@ -450,20 +480,21 @@ Coefficient MaxNodeWeightSmallerThan(const std::vector<EncodingNode*>& nodes,
return result;
}
void ProcessCore(const std::vector<Literal>& core, Coefficient min_weight,
bool ProcessCore(const std::vector<Literal>& core, Coefficient min_weight,
std::deque<EncodingNode>* repository,
std::vector<EncodingNode*>* nodes, SatSolver* solver) {
// Backtrack to be able to add new constraints.
solver->Backtrack(0);
solver->ResetToLevelZero();
if (core.size() == 1) {
if (!solver->AddUnitClause(core[0].Negated())) return false;
// The core will be reduced at the beginning of the next loop.
// Find the associated node, and call IncreaseNodeSize() on it.
CHECK(solver->Assignment().LiteralIsFalse(core[0]));
for (EncodingNode* n : *nodes) {
if (n->literal(0).Negated() == core[0]) {
IncreaseNodeSize(n, solver);
return;
return true;
}
}
LOG(FATAL) << "Node with literal " << core[0] << " not found!";
@@ -506,7 +537,7 @@ void ProcessCore(const std::vector<Literal>& core, Coefficient min_weight,
nodes->push_back(LazyMergeAllNodeWithPQ(to_merge, solver, repository));
IncreaseNodeSize(nodes->back(), solver);
nodes->back()->set_weight(min_weight);
CHECK(solver->AddUnitClause(nodes->back()->literal(0)));
return solver->AddUnitClause(nodes->back()->literal(0));
}
} // namespace sat

View File

@@ -20,6 +20,7 @@
#include <cstdint>
#include <deque>
#include <string>
#include <vector>
#include "ortools/base/integral_types.h"
@@ -113,6 +114,9 @@ class EncodingNode {
EncodingNode* child_a() const { return child_a_; }
EncodingNode* child_b() const { return child_b_; }
// We use the solver to display the current values of the literals.
std::string DebugString(const VariablesAssignment& assignment) const;
private:
int depth_;
int lb_;
@@ -203,8 +207,8 @@ Coefficient MaxNodeWeightSmallerThan(const std::vector<EncodingNode*>& nodes,
Coefficient upper_bound);
// Updates the encoding using the given core. The literals in the core must
// match the order in nodes.
void ProcessCore(const std::vector<Literal>& core, Coefficient min_weight,
// match the order in nodes. Returns false if the model becomes infeasible.
bool ProcessCore(const std::vector<Literal>& core, Coefficient min_weight,
std::deque<EncodingNode>* repository,
std::vector<EncodingNode*>* nodes, SatSolver* solver);

View File

@@ -888,9 +888,12 @@ bool IntegerSearchHelper::TakeDecision(Literal decision) {
}
const int old_level = sat_solver_->CurrentDecisionLevel();
// Note that kUnsatTrailIndex might also mean ASSUMPTIONS_UNSAT.
//
// TODO(user): on some problems, this function can be quite long. Expand
// so that we can check the time limit at each step?
sat_solver_->EnqueueDecisionAndBackjumpOnConflict(decision);
const int index = sat_solver_->EnqueueDecisionAndBackjumpOnConflict(decision);
if (index == kUnsatTrailIndex) return false;
// Update the implied bounds each time we enqueue a literal at level zero.
// This is "almost free", so we might as well do it.

View File

@@ -163,18 +163,26 @@ SatSolver::Status LbTreeSearch::Search(
return sat_solver_->UnsatStatus();
}
// We currently restart the search tree from scratch a few time. This is to
// allow our "pseudo-cost" to kick in and experimentally result in smaller
// trees down the road.
// We currently restart the search tree from scratch from time to time:
// - every kNumBranchesBeforeInitialRestarts branches explored for at most
// kMaxNumInitialRestarts times
// - Every time we backtrack to level zero, we count how many nodes are
// worse than the best known objective lower bound. If this is true for more
// than half of the existing nodes, we restart and clear all nodes.
//
// This has 2 advantages:
// - It allows our "pseudo-cost" to kick in and experimentally result in
// smaller trees down the road.
// - It removes large inefficient search trees.
//
// TODO(user): a strong branching initial start, or allowing a few decision
// per nodes might be a better approach.
//
// TODO(user): It would also be cool to exploit the reason for the LB increase
// even more.
int64_t restart = 100;
int64_t num_restart = 1;
const int kNumRestart = 10;
const int64_t kNumBranchesBeforeInitialRestarts = 1000;
int64_t num_restarts = 0;
const int kMaxNumInitialRestarts = 10;
while (!time_limit_->LimitReached() && !shared_response_->ProblemIsSolved()) {
// This is the current bound we try to improve. We cache it here to avoid
@@ -238,10 +246,11 @@ SatSolver::Status LbTreeSearch::Search(
if (bound > current_objective_lb_) {
shared_response_->UpdateInnerObjectiveBounds(
absl::StrCat("lb_tree_search #nodes:", nodes_.size(),
" #rc:", num_rc_detected_, " #imports:", num_imports_),
" #rc:", num_rc_detected_, " #imports:", num_imports_,
" #restarts:", num_restarts),
bound, integer_trail_->LevelZeroUpperBound(objective_var_));
current_objective_lb_ = bound;
if (VLOG_IS_ON(2)) DebugDisplayTree(current_branch_[0]);
if (VLOG_IS_ON(3)) DebugDisplayTree(current_branch_[0]);
}
}
@@ -264,16 +273,22 @@ SatSolver::Status LbTreeSearch::Search(
sat_decision_->UpdateVariableActivityIncrement();
}
// Forget the whole tree and restart?
if (nodes_.size() > num_restart * restart && num_restart < kNumRestart) {
// Forget the whole tree and restart.
// We will do it periodically at the beginning of the search, each time the
// number of explored branches crosses a multiple of
// kNumBranchesBeforeInitialRestarts.
// This will happen at most kMaxNumInitialRestarts times.
if (num_decisions_taken_ >=
(num_restarts + 1) * kNumBranchesBeforeInitialRestarts &&
num_restarts < kMaxNumInitialRestarts) {
++num_restarts;
VLOG(2) << "lb_tree_search initial_restart nodes: " << nodes_.size()
<< ", branches:" << num_decisions_taken_
<< ", restarts: " << num_restarts;
nodes_.clear();
num_decisions_taken_at_last_import_ = num_decisions_taken_;
num_imports_++;
current_branch_.clear();
if (!sat_solver_->RestoreSolverToAssumptionLevel()) {
return sat_solver_->UnsatStatus();
}
++num_restart;
}
// Backtrack if needed.
@@ -299,7 +314,6 @@ SatSolver::Status LbTreeSearch::Search(
// Periodic restart.
if (num_decisions_taken_ >= num_decisions_taken_at_last_import_ + 10000) {
backtrack_level = 0;
num_imports_++;
num_decisions_taken_at_last_import_ = num_decisions_taken_;
}
@@ -310,10 +324,42 @@ SatSolver::Status LbTreeSearch::Search(
}
// This will import other workers bound if we are back to level zero.
if (sat_solver_->CurrentDecisionLevel() == 0) {
++num_imports_;
num_decisions_taken_at_last_import_ = num_decisions_taken_;
}
if (!search_helper_->BeforeTakingDecision()) {
return sat_solver_->UnsatStatus();
}
// If the search has not just been restarted (in which case nodes_ would be
// empty), and if we are at level zero (either naturally, or if the
// backtrack level was set to zero in the above code), let's run a different
// heuristic to decide whether to restart the search from scratch or not.
//
// We ignore small search trees.
if (sat_solver_->CurrentDecisionLevel() == 0 && nodes_.size() > 50) {
// Let's count how many nodes have worse objective bounds than the best
// known external objective lower bound.
const IntegerValue latest_lb =
shared_response_->GetInnerObjectiveLowerBound();
int num_nodes_with_lower_objective = 0;
for (const Node& node : nodes_) {
if (node.MinObjective() < latest_lb) num_nodes_with_lower_objective++;
}
if (num_nodes_with_lower_objective * 2 > nodes_.size()) {
num_restarts++;
VLOG(2) << "lb_tree_search restart nodes: "
<< num_nodes_with_lower_objective << "/" << nodes_.size()
<< " : "
<< 100.0 * num_nodes_with_lower_objective / nodes_.size() << "%"
<< ", branches:" << num_decisions_taken_
<< ", restarts: " << num_restarts;
nodes_.clear();
current_branch_.clear();
}
}
// Dive: Follow the branch with lowest objective.
// Note that we do not create new nodes here.
while (current_branch_.size() == sat_solver_->CurrentDecisionLevel() + 1) {

View File

@@ -1239,6 +1239,11 @@ SatSolver::Status FindCores(std::vector<Literal> assumptions,
if (sat_solver->parameters().minimize_core()) {
MinimizeCoreWithPropagation(limit, sat_solver, &core);
}
if (core.size() == 1) {
if (!sat_solver->AddUnitClause(core[0].Negated())) {
return SatSolver::INFEASIBLE;
}
}
if (core.empty()) return sat_solver->UnsatStatus();
cores->push_back(core);
if (!sat_solver->parameters().find_multiple_cores()) break;

View File

@@ -531,12 +531,15 @@ bool ClauseSubsumption(const std::vector<Literal>& a, SatClause* b) {
int SatSolver::EnqueueDecisionAndBackjumpOnConflict(Literal true_literal) {
SCOPED_TIME_STAT(&stats_);
if (model_is_unsat_) return kUnsatTrailIndex;
CHECK(PropagationIsDone());
EnqueueNewDecision(true_literal);
while (!PropagateAndStopAfterOneConflictResolution()) {
if (model_is_unsat_) return kUnsatTrailIndex;
DCHECK(PropagationIsDone());
// We should never enqueue before the assumptions_.
if (DEBUG_MODE && !assumptions_.empty()) {
CHECK_GE(current_decision_level_, assumption_level_);
}
CHECK(PropagationIsDone());
EnqueueNewDecision(true_literal);
if (!FinishPropagation()) return kUnsatTrailIndex;
return last_decision_or_backtrack_trail_index_;
}
@@ -552,15 +555,26 @@ bool SatSolver::RestoreSolverToAssumptionLevel() {
bool SatSolver::FinishPropagation() {
if (model_is_unsat_) return false;
while (!PropagateAndStopAfterOneConflictResolution()) {
if (model_is_unsat_) return false;
while (true) {
const int old_decision_level = current_decision_level_;
if (!PropagateAndStopAfterOneConflictResolution()) {
if (model_is_unsat_) return false;
if (current_decision_level_ == old_decision_level) {
CHECK(!assumptions_.empty());
return false;
}
continue;
}
break;
}
CHECK(PropagationIsDone());
return true;
}
bool SatSolver::ResetToLevelZero() {
if (model_is_unsat_) return false;
assumption_level_ = 0;
assumptions_.clear();
Backtrack(0);
return FinishPropagation();
}
@@ -568,17 +582,22 @@ bool SatSolver::ResetToLevelZero() {
bool SatSolver::ResetWithGivenAssumptions(
const std::vector<Literal>& assumptions) {
if (!ResetToLevelZero()) return false;
if (assumptions.empty()) return true;
// Assuming there is no duplicate in assumptions, but they can be a literal
// and its negation (weird corner case), there will always be a conflict if we
// enqueue strictly more assumptions than the number of variables, so there is
// no point considering the end of the list. Note that there is no overflow
// since decisions_.size() == num_variables_ + 1;
assumption_level_ =
std::min<int>(assumptions.size(), num_variables_.value() + 1);
for (int i = 0; i < assumption_level_; ++i) {
decisions_[i].literal = assumptions[i];
// We do not use EnqueueNewDecision() here so we duplicate this.
//
// TODO(user): move somewhere in common?
const double kMinDeterministicTimeBetweenCleanups = 1.0;
if (num_processed_fixed_variables_ < trail_->Index() &&
deterministic_time() >
deterministic_time_of_last_fixed_variables_cleanup_ +
kMinDeterministicTimeBetweenCleanups) {
ProcessNewlyFixedVariables();
}
DCHECK(assumptions_.empty());
assumption_level_ = 1;
assumptions_ = assumptions;
return ReapplyAssumptionsIfNeeded();
}
@@ -587,6 +606,46 @@ bool SatSolver::ReapplyAssumptionsIfNeeded() {
if (model_is_unsat_) return false;
if (CurrentDecisionLevel() >= assumption_level_) return true;
if (CurrentDecisionLevel() == 0 && !assumptions_.empty()) {
// When assumptions_ is not empty, the first "decision" actually contains
// multiple ones, and we should never use its literal.
CHECK_EQ(current_decision_level_, 0);
last_decision_or_backtrack_trail_index_ = trail_->Index();
decisions_[0] = Decision(trail_->Index(), Literal());
++current_decision_level_;
trail_->SetDecisionLevel(current_decision_level_);
// We enqueue all assumptions at once at decision level 1.
int num_decisions = 0;
for (const Literal lit : assumptions_) {
if (Assignment().LiteralIsTrue(lit)) continue;
if (Assignment().LiteralIsFalse(lit)) {
// See GetLastIncompatibleDecisions().
*trail_->MutableConflict() = {lit.Negated(), lit};
if (num_decisions == 0) {
// This is needed to avoid an empty level that causes some CHECKs to fail.
current_decision_level_ = 0;
trail_->SetDecisionLevel(0);
}
return false;
}
++num_decisions;
trail_->EnqueueSearchDecision(lit);
}
// Corner case: all assumptions are fixed at level zero, we ignore them.
if (num_decisions == 0) {
current_decision_level_ = 0;
trail_->SetDecisionLevel(0);
return ResetToLevelZero();
}
// Now that everything is enqueued, we propagate.
return FinishPropagation();
}
DCHECK(assumptions_.empty());
int unused = 0;
const int64_t old_num_branches = counters_.num_branches;
const SatSolver::Status status =
@@ -607,6 +666,18 @@ bool SatSolver::PropagateAndStopAfterOneConflictResolution() {
// A conflict occurred, compute a nice reason for this failure.
same_reason_identifier_.Clear();
const int max_trail_index = ComputeMaxTrailIndex(trail_->FailingClause());
if (!assumptions_.empty()) {
// If the failing clause only contains literal at the assumptions level,
// we cannot use the ComputeFirstUIPConflict() code as we might have more
// than one decision.
//
// TODO(user): We might still want to "learn" the clause, especially if
// it reduces to only one literal in which case we can just fix it.
const int highest_level =
DecisionLevel((*trail_)[max_trail_index].Variable());
if (highest_level == 1) return false;
}
ComputeFirstUIPConflict(max_trail_index, &learned_conflict_,
&reason_used_to_infer_the_conflict_,
&subsumed_clauses_);
@@ -848,6 +919,7 @@ bool SatSolver::PropagateAndStopAfterOneConflictResolution() {
SatSolver::Status SatSolver::ReapplyDecisionsUpTo(
int max_level, int* first_propagation_index) {
SCOPED_TIME_STAT(&stats_);
DCHECK(assumptions_.empty());
int decision_index = current_decision_level_;
while (decision_index <= max_level) {
DCHECK_GE(decision_index, current_decision_level_);
@@ -860,8 +932,9 @@ SatSolver::Status SatSolver::ReapplyDecisionsUpTo(
continue;
}
if (Assignment().LiteralIsFalse(previous_decision)) {
// Update decision so that GetLastIncompatibleDecisions() works.
decisions_[current_decision_level_].literal = previous_decision;
// See GetLastIncompatibleDecisions().
*trail_->MutableConflict() = {previous_decision.Negated(),
previous_decision};
return ASSUMPTIONS_UNSAT;
}
@@ -891,6 +964,7 @@ SatSolver::Status SatSolver::ReapplyDecisionsUpTo(
int SatSolver::EnqueueDecisionAndBacktrackOnConflict(Literal true_literal) {
SCOPED_TIME_STAT(&stats_);
CHECK(PropagationIsDone());
CHECK(assumptions_.empty());
if (model_is_unsat_) return kUnsatTrailIndex;
DCHECK_LT(CurrentDecisionLevel(), decisions_.size());
@@ -941,6 +1015,7 @@ void SatSolver::Backtrack(int target_level) {
--current_decision_level_;
target_trail_index = decisions_[current_decision_level_].trail_index;
}
Untrail(target_trail_index);
last_decision_or_backtrack_trail_index_ = trail_->Index();
}
@@ -987,6 +1062,12 @@ void SatSolver::SetAssumptionLevel(int assumption_level) {
CHECK_GE(assumption_level, 0);
CHECK_LE(assumption_level, CurrentDecisionLevel());
assumption_level_ = assumption_level;
// New assumption code.
if (!assumptions_.empty()) {
CHECK_EQ(assumption_level, 0);
assumptions_.clear();
}
}
SatSolver::Status SatSolver::SolveWithTimeLimit(TimeLimit* time_limit) {
@@ -1223,9 +1304,14 @@ SatSolver::Status SatSolver::SolveInternal(TimeLimit* time_limit) {
next_display = NextMultipleOf(num_failures(), kDisplayFrequency);
}
const int old_level = current_decision_level_;
if (!PropagateAndStopAfterOneConflictResolution()) {
// A conflict occurred, continue the loop.
if (model_is_unsat_) return StatusWithLog(INFEASIBLE);
if (old_level == current_decision_level_) {
CHECK(!assumptions_.empty());
return StatusWithLog(ASSUMPTIONS_UNSAT);
}
} else {
// We need to reapply any assumptions that are not currently applied.
if (!ReapplyAssumptionsIfNeeded()) return StatusWithLog(UnsatStatus());
@@ -1289,42 +1375,35 @@ void SatSolver::MinimizeSomeClauses(int decisions_budget) {
std::vector<Literal> SatSolver::GetLastIncompatibleDecisions() {
SCOPED_TIME_STAT(&stats_);
const Literal false_assumption = decisions_[CurrentDecisionLevel()].literal;
std::vector<Literal> unsat_assumptions;
if (!trail_->Assignment().LiteralIsFalse(false_assumption)) {
// This can only happen in some corner cases where: we enqueued
// false_assumption, it leads to a conflict, but after re-enqueing the
// decisions that were backjumped over, there is no conflict anymore. This
// can only happen in the presence of propagators that are non-monotonic
// and do not propagate the same thing when there is more literal on the
// trail.
//
// In this case, we simply return all the decisions since we know that is
// a valid conflict. Since this should be rare, it is okay to not "minimize"
// what we return like we do below.
//
// TODO(user): unit-test this case with a mock propagator.
unsat_assumptions.reserve(CurrentDecisionLevel());
for (int i = 0; i < CurrentDecisionLevel(); ++i) {
unsat_assumptions.push_back(decisions_[i].literal);
}
return unsat_assumptions;
}
unsat_assumptions.push_back(false_assumption);
// This will be used to mark all the literals inspected while we process the
// false_assumption and the reasons behind each of its variable assignments.
is_marked_.ClearAndResize(num_variables_);
is_marked_.Set(false_assumption.Variable());
int trail_index = trail_->Info(false_assumption.Variable()).trail_index;
int trail_index = 0;
int num_true = 0;
for (const Literal lit : trail_->FailingClause()) {
CHECK(Assignment().LiteralIsAssigned(lit));
if (Assignment().LiteralIsTrue(lit)) {
// Literals at true in the conflict must be decisions/assumptions that
// could not be taken.
++num_true;
unsat_assumptions.push_back(lit.Negated());
continue;
}
trail_index =
std::max(trail_index, trail_->Info(lit.Variable()).trail_index);
is_marked_.Set(lit.Variable());
}
CHECK_LE(num_true, 1);
// We just expand the conflict until we only have decisions.
const int limit =
CurrentDecisionLevel() > 0 ? decisions_[0].trail_index : trail_->Index();
CHECK_LT(trail_index, trail_->Index());
while (true) {
// Find next marked literal to expand from the trail.
while (trail_index >= 0 && !is_marked_[(*trail_)[trail_index].Variable()]) {
while (trail_index >= limit &&
!is_marked_[(*trail_)[trail_index].Variable()]) {
--trail_index;
}
if (trail_index < limit) break;
@@ -2229,9 +2308,10 @@ void SatSolver::MinimizeConflictRecursively(std::vector<Literal>* conflict) {
int index = 1;
for (int i = 1; i < conflict->size(); ++i) {
const BooleanVariable var = (*conflict)[i].Variable();
const AssignmentInfo& info = trail_->Info(var);
if (time_limit_->LimitReached() ||
trail_->Info(var).trail_index <=
min_trail_index_per_level_[DecisionLevel(var)] ||
info.type == AssignmentType::kSearchDecision ||
info.trail_index <= min_trail_index_per_level_[info.level] ||
!CanBeInferedFromConflictVariables(var)) {
// Mark the conflict variable as independent. Note that is_marked_[var]
// will still be true.
@@ -2279,12 +2359,12 @@ bool SatSolver::CanBeInferedFromConflictVariables(BooleanVariable variable) {
variable_to_process_.push_back(variable);
// First we expand the reason for the given variable.
for (Literal literal : trail_->Reason(variable)) {
for (const Literal literal : trail_->Reason(variable)) {
const BooleanVariable var = literal.Variable();
DCHECK_NE(var, variable);
if (is_marked_[var]) continue;
const int level = DecisionLevel(var);
if (level == 0) {
const AssignmentInfo& info = trail_->Info(var);
if (info.level == 0) {
// Note that this is not needed if the solver is not configured to produce
// an unsat proof. However, the (level == 0) test should always be false
// in this case because there will never be literals of level zero in any
@@ -2292,8 +2372,8 @@ bool SatSolver::CanBeInferedFromConflictVariables(BooleanVariable variable) {
is_marked_.Set(var);
continue;
}
if (trail_->Info(var).trail_index <= min_trail_index_per_level_[level] ||
is_independent_[var]) {
if (info.trail_index <= min_trail_index_per_level_[info.level] ||
info.type == AssignmentType::kSearchDecision || is_independent_[var]) {
return false;
}
variable_to_process_.push_back(var);
@@ -2342,9 +2422,10 @@ bool SatSolver::CanBeInferedFromConflictVariables(BooleanVariable variable) {
for (Literal literal : trail_->Reason(current_var)) {
const BooleanVariable var = literal.Variable();
DCHECK_NE(var, current_var);
const int level = DecisionLevel(var);
if (level == 0 || is_marked_[var]) continue;
if (trail_->Info(var).trail_index <= min_trail_index_per_level_[level] ||
const AssignmentInfo& info = trail_->Info(var);
if (info.level == 0 || is_marked_[var]) continue;
if (info.trail_index <= min_trail_index_per_level_[info.level] ||
info.type == AssignmentType::kSearchDecision ||
is_independent_[var]) {
abort_early = true;
break;
@@ -2569,33 +2650,18 @@ std::string SatStatusString(SatSolver::Status status) {
}
void MinimizeCore(SatSolver* solver, std::vector<Literal>* core) {
std::vector<Literal> temp = *core;
std::reverse(temp.begin(), temp.end());
solver->Backtrack(0);
solver->SetAssumptionLevel(0);
std::vector<Literal> result;
// Note that this Solve() is really fast, since the solver should detect that
// the assumptions are unsat with unit propagation only. This is just a
// convenient way to remove assumptions that are propagated by the one before
// them.
const SatSolver::Status status =
solver->ResetAndSolveWithGivenAssumptions(temp);
if (status != SatSolver::ASSUMPTIONS_UNSAT) {
if (status != SatSolver::LIMIT_REACHED) {
CHECK_NE(status, SatSolver::FEASIBLE);
// This should almost never happen, but it is not impossible. The reason
// is that the solver may delete some learned clauses required by the unit
// propagation to show that the core is unsat.
LOG(WARNING) << "This should only happen rarely! otherwise, investigate. "
<< "Returned status is " << SatStatusString(status);
}
return;
solver->ResetToLevelZero();
for (const Literal lit : *core) {
if (solver->Assignment().LiteralIsTrue(lit)) continue;
result.push_back(lit);
if (solver->Assignment().LiteralIsFalse(lit)) break;
if (!solver->EnqueueDecisionIfNotConflicting(lit)) break;
}
temp = solver->GetLastIncompatibleDecisions();
if (temp.size() < core->size()) {
VLOG(1) << "minimization " << core->size() << " -> " << temp.size();
std::reverse(temp.begin(), temp.end());
*core = temp;
if (result.size() < core->size()) {
VLOG(1) << "minimization " << core->size() << " -> " << result.size();
*core = result;
}
}

View File

@@ -237,6 +237,10 @@ class SatSolver {
// literal. If there is a conflict and the problem is detected to be UNSAT,
// returns kUnsatTrailIndex.
//
// Important: In the presence of assumptions, this also returns
// kUnsatTrailIndex on ASSUMPTION_UNSAT. One can distinguish the two cases
// with IsModelUnsat().
//
// A client can determine if there is a conflict by checking if the
// CurrentDecisionLevel() was increased by 1 or not.
//
@@ -299,6 +303,15 @@ class SatSolver {
// Changes the assumptions level and the current solver assumptions. Returns
// false if the model is UNSAT or ASSUMPTION_UNSAT, true otherwise.
//
// This uses the "new" assumptions handling, where all assumptions are
// enqueued at once at decision level 1 before we start to propagate. This has
// many advantages. In particular, because we propagate with the binary
// implications first, if we ever have assumption => not(other_assumptions) we
// are guaranteed to find it and return a core of size 2.
//
// Paper: "Speeding Up Assumption-Based SAT", Randy Hickey and Fahiem Bacchus
// http://www.maxhs.org/docs/Hickey-Bacchus2019_Chapter_SpeedingUpAssumption-BasedSAT.pdf
bool ResetWithGivenAssumptions(const std::vector<Literal>& assumptions);
// Advanced usage. If the decision level is smaller than the assumption level,
@@ -448,6 +461,9 @@ class SatSolver {
// Calls Propagate() and returns true if no conflict occurred. Otherwise,
// learns the conflict, backtracks, enqueues the consequence of the learned
// conflict and returns false.
//
// When handling assumptions, this might return false without backtracking
// in case of ASSUMPTIONS_UNSAT.
bool PropagateAndStopAfterOneConflictResolution();
// All Solve() functions end up calling this one.
@@ -607,14 +623,6 @@ class SatSolver {
void ComputeUnionOfReasons(const std::vector<Literal>& input,
std::vector<Literal>* literals);
// Given an assumption (i.e. literal) currently assigned to false, this will
// returns the set of all assumptions that caused this particular assignment.
//
// This is useful to get a small set of assumptions that can't be all
// satisfied together.
void FillUnsatAssumptions(Literal false_assumption,
std::vector<Literal>* unsat_assumptions);
// Do the full pseudo-Boolean constraint analysis. This calls multiple
// time ResolvePBConflict() on the current conflict until we have a conflict
// that allow us to propagate more at a lower decision level. This level
@@ -744,6 +752,7 @@ class SatSolver {
// The assumption level. See SolveWithAssumptions().
int assumption_level_ = 0;
std::vector<Literal> assumptions_;
// The size of the trail when ProcessNewlyFixedVariables() was last called.
// Note that the trail contains only fixed literals (that is literals of
@@ -860,6 +869,8 @@ class SatSolver {
//
// Important: The given SatSolver must be the one that just produced the given
// core.
//
// TODO(user): One should use MinimizeCoreWithPropagation() instead.
void MinimizeCore(SatSolver* solver, std::vector<Literal>* core);
// ============================================================================