2025-01-10 11:35:44 +01:00
|
|
|
// Copyright 2010-2025 Google LLC
|
2014-01-27 15:05:30 +00:00
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
|
//
|
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
//
|
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
|
// limitations under the License.
|
2014-07-09 15:18:27 +00:00
|
|
|
|
2017-04-26 17:30:25 +02:00
|
|
|
#include "ortools/sat/clause.h"
|
2014-01-27 15:05:30 +00:00
|
|
|
|
2022-02-15 18:00:11 +01:00
|
|
|
#include <stddef.h>
|
|
|
|
|
|
2014-01-27 15:05:30 +00:00
|
|
|
#include <algorithm>
|
2021-03-04 18:26:01 +01:00
|
|
|
#include <cstdint>
|
2022-02-15 18:00:11 +01:00
|
|
|
#include <deque>
|
2024-12-04 17:47:10 +01:00
|
|
|
#include <new>
|
2023-07-09 13:45:48 +02:00
|
|
|
#include <queue>
|
2014-01-27 15:05:30 +00:00
|
|
|
#include <string>
|
2022-02-15 18:00:11 +01:00
|
|
|
#include <utility>
|
2014-01-27 15:05:30 +00:00
|
|
|
#include <vector>
|
|
|
|
|
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "absl/container/flat_hash_map.h"
|
|
|
|
|
#include "absl/container/flat_hash_set.h"
|
|
|
|
|
#include "absl/container/inlined_vector.h"
|
2023-05-24 11:42:11 +02:00
|
|
|
#include "absl/log/check.h"
|
2025-03-04 21:09:32 +01:00
|
|
|
#include "absl/log/log.h"
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "absl/random/bit_gen_ref.h"
|
|
|
|
|
#include "absl/random/distributions.h"
|
|
|
|
|
#include "absl/types/span.h"
|
2017-04-26 17:30:25 +02:00
|
|
|
#include "ortools/base/logging.h"
|
|
|
|
|
#include "ortools/base/stl_util.h"
|
2020-12-11 17:50:03 +01:00
|
|
|
#include "ortools/base/strong_vector.h"
|
2020-05-06 18:22:10 +02:00
|
|
|
#include "ortools/base/timer.h"
|
2018-11-05 16:24:47 +01:00
|
|
|
#include "ortools/graph/strongly_connected_components.h"
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "ortools/sat/drat_proof_handler.h"
|
2022-12-05 12:11:02 +01:00
|
|
|
#include "ortools/sat/inclusion.h"
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "ortools/sat/model.h"
|
|
|
|
|
#include "ortools/sat/sat_base.h"
|
2023-12-04 15:06:08 +01:00
|
|
|
#include "ortools/sat/util.h"
|
2022-02-15 18:00:11 +01:00
|
|
|
#include "ortools/util/bitset.h"
|
|
|
|
|
#include "ortools/util/stats.h"
|
|
|
|
|
#include "ortools/util/strong_integers.h"
|
|
|
|
|
#include "ortools/util/time_limit.h"
|
2014-01-27 15:05:30 +00:00
|
|
|
namespace operations_research {
|
|
|
|
|
namespace sat {
|
|
|
|
|
|
|
|
|
|
namespace {
|
|
|
|
|
|
|
|
|
|
// Returns true if the given watcher list contains the given clause.
|
|
|
|
|
template <typename Watcher>
|
2020-10-28 13:42:36 +01:00
|
|
|
bool WatcherListContains(const std::vector<Watcher>& list,
|
|
|
|
|
const SatClause& candidate) {
|
|
|
|
|
for (const Watcher& watcher : list) {
|
2020-10-22 23:36:58 +02:00
|
|
|
if (watcher.clause == &candidate) return true;
|
2014-01-27 15:05:30 +00:00
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// A simple wrapper to simplify the erase(std::remove_if()) pattern.
// `c` is a pointer to any container supporting begin()/end()/erase().
template <typename Container, typename Predicate>
void RemoveIf(Container c, Predicate p) {
  const auto new_end = std::remove_if(c->begin(), c->end(), p);
  c->erase(new_end, c->end());
}
|
|
|
|
|
|
2020-10-22 23:36:58 +02:00
|
|
|
} // namespace
|
2014-01-27 15:05:30 +00:00
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// ----- ClauseManager -----
|
2014-01-27 15:05:30 +00:00
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Fetches (or creates) the shared implication graph and trail from the model,
// then registers itself as a propagator so Propagate() is called by the trail.
ClauseManager::ClauseManager(Model* model)
    : SatPropagator("ClauseManager"),
      implication_graph_(model->GetOrCreate<BinaryImplicationGraph>()),
      trail_(model->GetOrCreate<Trail>()),
      num_inspected_clauses_(0),
      num_inspected_clause_literals_(0),
      num_watched_clauses_(0),
      stats_("ClauseManager") {
  trail_->RegisterPropagator(this);
}
|
2014-01-27 15:05:30 +00:00
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Frees all clauses owned by this manager and, when stats are compiled in,
// logs the accumulated timing statistics.
ClauseManager::~ClauseManager() {
  gtl::STLDeleteElements(&clauses_);
  IF_STATS_ENABLED(LOG(INFO) << stats_.StatString());
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Grows the literal- and variable-indexed structures to accommodate
// num_variables Booleans. Literal-indexed structures get two entries per
// variable (hence the << 1). Must only be called when the watcher lists are
// clean.
void ClauseManager::Resize(int num_variables) {
  DCHECK(is_clean_);
  watchers_on_false_.resize(num_variables << 1);
  reasons_.resize(num_variables);
  needs_cleaning_.Resize(LiteralIndex(num_variables << 1));
}
|
|
|
|
|
|
|
|
|
|
// Makes `clause` watch `literal`: it will be inspected when `literal` becomes
// false, unless `blocking_literal` is already true at that point.
//
// Note that this is the only place where we add Watcher so the DCHECK
// guarantees that there are no duplicates.
void ClauseManager::AttachOnFalse(Literal literal, Literal blocking_literal,
                                  SatClause* clause) {
  SCOPED_TIME_STAT(&stats_);
  DCHECK(is_clean_);
  DCHECK(!WatcherListContains(watchers_on_false_[literal], *clause));
  watchers_on_false_[literal].push_back(Watcher(clause, blocking_literal));
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Core of the 2-watched-literals scheme: inspects every clause watching
// false_literal (which was just assigned false). For each clause this either
// keeps the watcher (blocking literal true), moves the watch to another
// non-false literal, propagates the remaining unassigned literal, or reports
// a conflict. Returns false on conflict, in which case the conflicting clause
// is copied into the trail's conflict vector.
bool ClauseManager::PropagateOnFalse(Literal false_literal, Trail* trail) {
  SCOPED_TIME_STAT(&stats_);
  DCHECK(is_clean_);
  std::vector<Watcher>& watchers = watchers_on_false_[false_literal];
  const auto assignment = AssignmentView(trail->Assignment());

  // Note(user): It sounds better to inspect the list in order, this is because
  // small clauses like binary or ternary clauses will often propagate and thus
  // stay at the beginning of the list.
  //
  // The list is compacted in place: new_it is the write cursor for watchers
  // that must be kept, it is the read cursor. First skip the prefix of
  // watchers whose blocking literal is true — they stay where they are.
  auto new_it = watchers.begin();
  const auto end = watchers.end();
  while (new_it != end && assignment.LiteralIsTrue(new_it->blocking_literal)) {
    ++new_it;
  }
  for (auto it = new_it; it != end; ++it) {
    // Don't even look at the clause memory if the blocking literal is true.
    if (assignment.LiteralIsTrue(it->blocking_literal)) {
      *new_it++ = *it;
      continue;
    }
    ++num_inspected_clauses_;

    // If the other watched literal is true, just change the blocking literal.
    // Note that we use the fact that the first two literals of the clause are
    // the ones currently watched.
    Literal* literals = it->clause->literals();
    // XOR trick: literals[0] ^ literals[1] ^ false_literal recovers whichever
    // of the two watched literals is not false_literal, without a branch.
    const Literal other_watched_literal(
        LiteralIndex(literals[0].Index().value() ^ literals[1].Index().value() ^
                     false_literal.Index().value()));
    if (assignment.LiteralIsTrue(other_watched_literal)) {
      *new_it = *it;
      new_it->blocking_literal = other_watched_literal;
      ++new_it;
      ++num_inspected_clause_literals_;
      continue;
    }

    // Look for another literal to watch. We go through the list in a cyclic
    // fashion from start. The first two literals can be ignored as they are the
    // watched ones.
    {
      const int start = it->start_index;
      const int size = it->clause->size();
      DCHECK_GE(start, 2);

      // Scan [start, size), then wrap around to [2, start).
      int i = start;
      while (i < size && assignment.LiteralIsFalse(literals[i])) ++i;
      num_inspected_clause_literals_ += i - start + 2;
      if (i >= size) {
        i = 2;
        while (i < start && assignment.LiteralIsFalse(literals[i])) ++i;
        num_inspected_clause_literals_ += i - 2;
        if (i >= start) i = size;  // No replacement watch exists.
      }
      if (i < size) {
        // literal[i] is unassigned or true, it's now the new literal to watch.
        // Note that by convention, we always keep the two watched literals at
        // the beginning of the clause.
        literals[0] = other_watched_literal;
        literals[1] = literals[i];
        literals[i] = false_literal;
        // The watcher is intentionally NOT copied to new_it: it moves to the
        // list of the new watched literal instead.
        watchers_on_false_[literals[1]].emplace_back(
            it->clause, other_watched_literal, i + 1);
        continue;
      }
    }

    // At this point other_watched_literal is either false or unassigned, all
    // other literals are false.
    if (assignment.LiteralIsFalse(other_watched_literal)) {
      // Conflict: All literals of it->clause are false.
      //
      // Note(user): we could avoid a copy here, but the conflict analysis
      // complexity will be a lot higher than this anyway.
      trail->MutableConflict()->assign(it->clause->begin(), it->clause->end());
      trail->SetFailingSatClause(it->clause);
      num_inspected_clause_literals_ += it - watchers.begin() + 1;
      // Close the compaction gap so the remaining (unprocessed) watchers stay.
      watchers.erase(new_it, it);
      return false;
    } else {
      // Propagation: other_watched_literal is unassigned, set it to true and
      // put it at position 0. Note that the position 0 is important because
      // we will need later to recover the literal that was propagated from the
      // clause using this convention.
      literals[0] = other_watched_literal;
      literals[1] = false_literal;
      reasons_[trail->Index()] = it->clause;
      trail->Enqueue(other_watched_literal, propagator_id_);
      *new_it++ = *it;
    }
  }
  num_inspected_clause_literals_ += watchers.size();  // The blocking ones.
  watchers.erase(new_it, end);
  return true;
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Processes the trail literals not yet handled by this propagator. Stops as
// soon as a propagation extends the trail (so other propagators get a turn)
// or returns false on conflict.
bool ClauseManager::Propagate(Trail* trail) {
  const int limit = trail->Index();
  while (propagation_trail_index_ < limit) {
    // A previous iteration may have enqueued new literals; yield in that case.
    if (trail->Index() != limit) break;
    const Literal to_process = (*trail)[propagation_trail_index_];
    ++propagation_trail_index_;
    if (!PropagateOnFalse(to_process.Negated(), trail)) return false;
  }
  return true;
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Returns the reason for the propagation at trail_index: all the literals of
// the propagating clause except the propagated one. Only valid when this
// propagator enqueued the literal at that index (reasons_ is set then).
absl::Span<const Literal> ClauseManager::Reason(const Trail& /*trail*/,
                                                int trail_index,
                                                int64_t /*conflict_id*/) const {
  return reasons_[trail_index]->PropagationReason();
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Returns the clause recorded as the reason at the given trail index.
// NOTE(review): only meaningful if this propagator enqueued that literal —
// reasons_ entries for other propagators' indices are stale.
SatClause* ClauseManager::ReasonClause(int trail_index) const {
  return reasons_[trail_index];
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Convenience overload: adds the clause on the internal trail with no LBD
// information (-1).
bool ClauseManager::AddClause(absl::Span<const Literal> literals) {
  return AddClause(literals, trail_, -1);
}
|
|
|
|
|
|
2024-07-12 13:56:11 +02:00
|
|
|
// Creates and registers a new clause, notifies the add-clause callback (if
// set) with the given lbd, then attaches it (which may propagate). Returns
// false when the clause has all its literals false under the current
// assignment (see AttachAndPropagate()).
bool ClauseManager::AddClause(absl::Span<const Literal> literals, Trail* trail,
                              int lbd) {
  SatClause* clause = SatClause::Create(literals);
  clauses_.push_back(clause);
  if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, literals);
  return AttachAndPropagate(clause, trail);
}
|
|
|
|
|
|
2024-07-24 11:18:42 -07:00
|
|
|
// Same as AddClause() but returns the created clause so the caller can manage
// its lifetime (e.g. detach it later). CHECK-fails if the clause is
// conflicting (all literals false), which callers must rule out.
SatClause* ClauseManager::AddRemovableClause(absl::Span<const Literal> literals,
                                             Trail* trail, int lbd) {
  SatClause* clause = SatClause::Create(literals);
  clauses_.push_back(clause);
  if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, literals);
  CHECK(AttachAndPropagate(clause, trail));
  return clause;
}
|
|
|
|
|
|
|
|
|
|
// Sets up the 2-watchers data structure. It selects two non-false literals
// and attaches the clause to the event: one of the watched literals become
// false. It returns false if the clause only contains literals assigned to
// false. If only one literals is not false, it propagates it to true if it
// is not already assigned.
bool ClauseManager::AttachAndPropagate(SatClause* clause, Trail* trail) {
  SCOPED_TIME_STAT(&stats_);

  const int size = clause->size();
  Literal* literals = clause->literals();

  // Select the first two literals that are not assigned to false and put them
  // on position 0 and 1.
  int num_literal_not_false = 0;
  for (int i = 0; i < size; ++i) {
    if (!trail->Assignment().LiteralIsFalse(literals[i])) {
      std::swap(literals[i], literals[num_literal_not_false]);
      ++num_literal_not_false;
      if (num_literal_not_false == 2) {
        break;
      }
    }
  }

  // Returns false if all the literals were false.
  // This should only happen on an UNSAT problem, and there is no need to attach
  // the clause in this case.
  if (num_literal_not_false == 0) return false;

  if (num_literal_not_false == 1) {
    // To maintain the validity of the 2-watcher algorithm, we need to watch
    // the false literal with the highest decision level.
    int max_level = trail->Info(literals[1].Variable()).level;
    for (int i = 2; i < size; ++i) {
      const int level = trail->Info(literals[i].Variable()).level;
      if (level > max_level) {
        max_level = level;
        std::swap(literals[1], literals[i]);
      }
    }

    // Propagates literals[0] if it is unassigned.
    if (!trail->Assignment().LiteralIsTrue(literals[0])) {
      reasons_[trail->Index()] = clause;
      trail->Enqueue(literals[0], propagator_id_);
    }
  }

  // Watch the two selected literals (each blocking on the other).
  ++num_watched_clauses_;
  AttachOnFalse(literals[0], literals[1], clause);
  AttachOnFalse(literals[1], literals[0], clause);
  return true;
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Attaches a clause whose first two literals are required to be unassigned —
// no propagation can result, unlike AttachAndPropagate().
void ClauseManager::Attach(SatClause* clause, Trail* trail) {
  Literal* literals = clause->literals();
  DCHECK(!trail->Assignment().LiteralIsAssigned(literals[0]));
  DCHECK(!trail->Assignment().LiteralIsAssigned(literals[1]));

  ++num_watched_clauses_;
  AttachOnFalse(literals[0], literals[1], clause);
  AttachOnFalse(literals[1], literals[0], clause);
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Common part of LazyDetach() and Detach(): updates the counter, records the
// deletion in the DRAT proof (non-binary clauses only), drops the clause info
// and clears the clause. Cleared clauses test as IsRemoved() and are filtered
// out of the watcher lists by the callers / CleanUpWatchers().
void ClauseManager::InternalDetach(SatClause* clause) {
  --num_watched_clauses_;
  const size_t size = clause->size();
  if (drat_proof_handler_ != nullptr && size > 2) {
    drat_proof_handler_->DeleteClause({clause->begin(), size});
  }
  clauses_info_.erase(clause);
  clause->Clear();
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Detaches the clause lazily: watcher lists are not touched now; the two
// watched literals are just flagged so CleanUpWatchers() purges them later.
void ClauseManager::LazyDetach(SatClause* clause) {
  InternalDetach(clause);
  is_clean_ = false;
  needs_cleaning_.Set(clause->FirstLiteral());
  needs_cleaning_.Set(clause->SecondLiteral());
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Eagerly detaches the clause: the two relevant watcher lists are scrubbed of
// all removed clauses right away (and their cleaning flags cleared).
void ClauseManager::Detach(SatClause* clause) {
  InternalDetach(clause);
  for (const Literal l : {clause->FirstLiteral(), clause->SecondLiteral()}) {
    needs_cleaning_.Clear(l);
    RemoveIf(&(watchers_on_false_[l]), [](const Watcher& watcher) {
      return watcher.clause->IsRemoved();
    });
  }
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Drops all watcher lists at once (used around inprocessing). No-op if the
// clauses are already detached.
void ClauseManager::DetachAllClauses() {
  if (!all_clauses_are_attached_) return;
  all_clauses_are_attached_ = false;

  // This is easy, and this allows to reset memory if some watcher lists where
  // really long at some point.
  is_clean_ = true;
  num_watched_clauses_ = 0;
  watchers_on_false_.clear();
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Re-attaches every live clause after a DetachAllClauses(). Removed clauses
// are garbage-collected first so only live ones get watchers.
void ClauseManager::AttachAllClauses() {
  if (all_clauses_are_attached_) return;
  all_clauses_are_attached_ = true;

  needs_cleaning_.ResetAllToFalse();  // This doesn't resize it.
  // watchers_on_false_ was cleared by DetachAllClauses(); restore its size
  // from the (never shrunk) needs_cleaning_ bitset.
  watchers_on_false_.resize(needs_cleaning_.size().value());

  DeleteRemovedClauses();
  for (SatClause* clause : clauses_) {
    ++num_watched_clauses_;
    DCHECK_GE(clause->size(), 2);
    AttachOnFalse(clause->FirstLiteral(), clause->SecondLiteral(), clause);
    AttachOnFalse(clause->SecondLiteral(), clause->FirstLiteral(), clause);
  }
}
|
|
|
|
|
|
2020-05-13 11:30:15 +02:00
|
|
|
// Fixes a literal to true at decision level zero during inprocessing.
// Returns false if this leads to a conflict via the implication graph.
//
// This one do not need the clause to be detached.
bool ClauseManager::InprocessingFixLiteral(Literal true_literal) {
  DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
  if (drat_proof_handler_ != nullptr) {
    // Record the unit clause in the proof before enqueueing.
    drat_proof_handler_->AddClause({true_literal});
  }
  // TODO(user): remove the test when the DRAT issue with fixed literal is
  // resolved.
  if (!trail_->Assignment().LiteralIsTrue(true_literal)) {
    trail_->EnqueueWithUnitReason(true_literal);

    // Even when all clauses are detached, we can propagate the implication
    // graph and we do that right away.
    return implication_graph_->Propagate(trail_);
  }
  return true;
}
|
|
|
|
|
|
2020-05-13 11:30:15 +02:00
|
|
|
// Removes a clause during inprocessing (clauses must be detached). The
// deletion is recorded in the DRAT proof before the clause is cleared.
//
// TODO(user): We could do something slower if the clauses are attached like
// we do for InprocessingRewriteClause().
void ClauseManager::InprocessingRemoveClause(SatClause* clause) {
  DCHECK(!all_clauses_are_attached_);
  if (drat_proof_handler_ != nullptr) {
    drat_proof_handler_->DeleteClause(clause->AsSpan());
  }
  clauses_info_.erase(clause);
  clause->Clear();
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Replaces the literals of `clause` by `new_clause` in place. Short results
// are redirected: empty => UNSAT (returns false), size 1 => fix the literal,
// size 2 => move to the binary implication graph; in those cases the original
// clause is removed. Works whether clauses are attached or not (the attached
// path detaches, rewrites and reattaches).
bool ClauseManager::InprocessingRewriteClause(
    SatClause* clause, absl::Span<const Literal> new_clause) {
  if (new_clause.empty()) return false;  // UNSAT.

  if (DEBUG_MODE) {
    for (const Literal l : new_clause) {
      DCHECK(!trail_->Assignment().LiteralIsAssigned(l));
    }
  }

  if (new_clause.size() == 1) {
    if (!InprocessingFixLiteral(new_clause[0])) return false;
    InprocessingRemoveClause(clause);
    return true;
  }

  if (new_clause.size() == 2) {
    CHECK(implication_graph_->AddBinaryClause(new_clause[0], new_clause[1]));
    InprocessingRemoveClause(clause);
    return true;
  }

  if (drat_proof_handler_ != nullptr) {
    // We must write the new clause before we delete the old one.
    drat_proof_handler_->AddClause(new_clause);
    drat_proof_handler_->DeleteClause(clause->AsSpan());
  }

  if (all_clauses_are_attached_) {
    // We can still rewrite the clause, but it is inefficient. We first
    // detach it in a non-lazy way.
    --num_watched_clauses_;
    clause->Clear();
    for (const Literal l : {clause->FirstLiteral(), clause->SecondLiteral()}) {
      needs_cleaning_.Clear(l);
      RemoveIf(&(watchers_on_false_[l]), [](const Watcher& watcher) {
        return watcher.clause->IsRemoved();
      });
    }
  }

  clause->Rewrite(new_clause);

  // And we reattach it.
  if (all_clauses_are_attached_) Attach(clause, trail_);
  return true;
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Adds a clause during inprocessing (clauses must be detached). Size-1 and
// size-2 clauses are redirected to the trail / binary implication graph and
// nullptr is returned; longer clauses are created, stored and returned.
// NOTE: a nullptr return therefore does not distinguish "redirected" from the
// UNSAT case of InprocessingFixLiteral() (see TODO below).
SatClause* ClauseManager::InprocessingAddClause(
    absl::Span<const Literal> new_clause) {
  DCHECK(!new_clause.empty());
  DCHECK(!all_clauses_are_attached_);
  if (DEBUG_MODE) {
    for (const Literal l : new_clause) {
      DCHECK(!trail_->Assignment().LiteralIsAssigned(l));
    }
  }

  if (new_clause.size() == 1) {
    // TODO(user): We should return false...
    if (!InprocessingFixLiteral(new_clause[0])) return nullptr;
    return nullptr;
  }

  if (new_clause.size() == 2) {
    implication_graph_->AddBinaryClause(new_clause[0], new_clause[1]);
    return nullptr;
  }

  SatClause* clause = SatClause::Create(new_clause);
  clauses_.push_back(clause);
  return clause;
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Purges the watchers pointing to removed clauses for every literal flagged
// by LazyDetach(), then marks the watcher structure clean again.
void ClauseManager::CleanUpWatchers() {
  SCOPED_TIME_STAT(&stats_);
  for (const LiteralIndex index : needs_cleaning_.PositionsSetAtLeastOnce()) {
    DCHECK(needs_cleaning_[index]);
    RemoveIf(&(watchers_on_false_[index]), [](const Watcher& watcher) {
      return watcher.clause->IsRemoved();
    });
    needs_cleaning_.Clear(index);
  }
  needs_cleaning_.NotifyAllClear();
  is_clean_ = true;
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// Frees all removed clauses and compacts clauses_ in place.
// We also update to_minimize_index_/to_probe_index_ correctly.
//
// TODO(user): If more indices are needed, generalize the code to a vector of
// indices.
void ClauseManager::DeleteRemovedClauses() {
  DCHECK(is_clean_);

  int new_size = 0;
  const int old_size = clauses_.size();
  for (int i = 0; i < old_size; ++i) {
    // Remap each saved iteration index to the new position of the clause it
    // pointed at (or to where that clause would have been).
    if (i == to_minimize_index_) to_minimize_index_ = new_size;
    if (i == to_first_minimize_index_) to_first_minimize_index_ = new_size;
    if (i == to_probe_index_) to_probe_index_ = new_size;
    if (clauses_[i]->IsRemoved()) {
      delete clauses_[i];
    } else {
      clauses_[new_size++] = clauses_[i];
    }
  }
  clauses_.resize(new_size);

  // Indices that were past the old end are clamped to the new end.
  if (to_minimize_index_ > new_size) to_minimize_index_ = new_size;
  if (to_first_minimize_index_ > new_size) to_first_minimize_index_ = new_size;
  if (to_probe_index_ > new_size) to_probe_index_ = new_size;
}
|
|
|
|
|
|
2024-10-08 16:04:53 +02:00
|
|
|
// Returns the next never-yet-seen kept (non-removable) clause for
// minimization, advancing to_first_minimize_index_; nullptr when exhausted.
SatClause* ClauseManager::NextNewClauseToMinimize() {
  for (; to_first_minimize_index_ < clauses_.size();
       ++to_first_minimize_index_) {
    if (clauses_[to_first_minimize_index_]->IsRemoved()) continue;
    if (!IsRemovable(clauses_[to_first_minimize_index_])) {
      // If the round-robin is in-sync with the new clauses, we may as well
      // count this minimization as part of the round-robin and advance both
      // indexes.
      if (to_minimize_index_ == to_first_minimize_index_) {
        ++to_minimize_index_;
      }
      return clauses_[to_first_minimize_index_++];
    }
  }
  return nullptr;
}
|
|
|
|
|
|
|
|
|
|
// Round-robin iterator over the kept (non-removable) clauses, resuming from
// to_minimize_index_. Returns nullptr once the end of clauses_ is reached.
SatClause* ClauseManager::NextClauseToMinimize() {
  while (to_minimize_index_ < clauses_.size()) {
    SatClause* const candidate = clauses_[to_minimize_index_];
    ++to_minimize_index_;
    if (candidate->IsRemoved()) continue;
    if (!IsRemovable(candidate)) return candidate;
  }
  return nullptr;
}
|
|
|
|
|
|
|
|
|
|
// Returns the next live clause for probing, advancing to_probe_index_;
// nullptr once all clauses have been visited.
SatClause* ClauseManager::NextClauseToProbe() {
  while (to_probe_index_ < clauses_.size()) {
    SatClause* const candidate = clauses_[to_probe_index_++];
    if (!candidate->IsRemoved()) return candidate;
  }
  return nullptr;
}
|
|
|
|
|
|
2014-01-27 15:05:30 +00:00
|
|
|
// ----- BinaryImplicationGraph -----
|
|
|
|
|
|
|
|
|
|
// Grows all internal structures for the given number of Boolean variables.
// Literal-indexed structures have two entries per variable (the << 1);
// reasons_ is indexed by trail position, sized per variable.
void BinaryImplicationGraph::Resize(int num_variables) {
  SCOPED_TIME_STAT(&stats_);
  bfs_stack_.resize(num_variables << 1);
  implications_.resize(num_variables << 1);
  implies_something_.resize(num_variables << 1);
  might_have_dups_.resize(num_variables << 1);
  is_redundant_.resize(implications_.size());
  is_removed_.resize(implications_.size(), false);
  estimated_sizes_.resize(implications_.size(), 0);
  in_direct_implications_.resize(implications_.size(), false);
  reasons_.resize(num_variables);
}
|
|
|
|
|
|
2023-12-13 16:14:35 +01:00
|
|
|
// Queues literal `a` for deduplication by RemoveDuplicates(); the
// might_have_dups_ flag guarantees each literal is queued at most once.
void BinaryImplicationGraph::NotifyPossibleDuplicate(Literal a) {
  const LiteralIndex index = a.Index();
  if (!might_have_dups_[index]) {
    might_have_dups_[index] = true;
    to_clean_.push_back(a);
  }
}
|
|
|
|
|
|
|
|
|
|
// Sorts and deduplicates the implication list of every literal queued by
// NotifyPossibleDuplicate(), resetting their flags and clearing the queue.
void BinaryImplicationGraph::RemoveDuplicates() {
  for (const Literal l : to_clean_) {
    might_have_dups_[l.Index()] = false;
    gtl::STLSortAndRemoveDuplicates(&implications_[l.Index()]);
  }
  to_clean_.clear();
}
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// Adds the binary clause (a or b), i.e. the implications not(a) => b and
// not(b) => a. Returns false on conflict (both literals false at a positive
// decision level). May propagate immediately when one side is already false.
//
// TODO(user): Not all of the solver knows about representative literal, do
// use them here to maintains invariant? Explore this when we start cleaning our
// clauses using equivalence during search. We can easily do it for every
// conflict we learn instead of here.
bool BinaryImplicationGraph::AddBinaryClause(Literal a, Literal b) {
  SCOPED_TIME_STAT(&stats_);

  // Tricky: If this is the first clause, the propagator will be added and
  // assumed to be in a "propagated" state. This makes sure this is the case.
  if (IsEmpty()) propagation_trail_index_ = trail_->Index();

  if (drat_proof_handler_ != nullptr) {
    // TODO(user): Like this we will duplicate all binary clause from the
    // problem. However this leads to a simpler API (since we don't need to
    // special case the loading of the original clauses) and we mainly use drat
    // proof for testing anyway.
    drat_proof_handler_->AddClause({a, b});
  }

  // Map redundant literals to their representatives first.
  if (is_redundant_[a.Index()]) a = Literal(representative_of_[a.Index()]);
  if (is_redundant_[b.Index()]) b = Literal(representative_of_[b.Index()]);
  // (x or not(x)) is a tautology; (x or x) just fixes x.
  if (a == b.Negated()) return true;
  if (a == b) return FixLiteral(a);

  DCHECK(!is_removed_[a]);
  DCHECK(!is_removed_[b]);
  estimated_sizes_[a.NegatedIndex()]++;
  estimated_sizes_[b.NegatedIndex()]++;
  // Store both directions of the implication.
  implications_[a.NegatedIndex()].push_back(b);
  implications_[b.NegatedIndex()].push_back(a);
  implies_something_.Set(a.NegatedIndex());
  implies_something_.Set(b.NegatedIndex());
  NotifyPossibleDuplicate(a);
  NotifyPossibleDuplicate(b);
  is_dag_ = false;
  num_implications_ += 2;

  if (enable_sharing_ && add_binary_callback_ != nullptr) {
    add_binary_callback_(a, b);
  }

  const auto& assignment = trail_->Assignment();
  if (trail_->CurrentDecisionLevel() == 0) {
    DCHECK(!assignment.LiteralIsAssigned(a));
    DCHECK(!assignment.LiteralIsAssigned(b));
  } else {
    // At a positive level, the new clause may already be unit or conflicting.
    if (assignment.LiteralIsFalse(a)) {
      if (assignment.LiteralIsAssigned(b)) {
        // Conflict when both sides are false; nothing to do if b is true.
        if (assignment.LiteralIsFalse(b)) return false;
      } else {
        reasons_[trail_->Index()] = a;
        trail_->Enqueue(b, propagator_id_);
      }
    } else if (assignment.LiteralIsFalse(b)) {
      if (!assignment.LiteralIsAssigned(a)) {
        reasons_[trail_->Index()] = b;
        trail_->Enqueue(a, propagator_id_);
      }
    }
  }

  return true;
}
|
|
|
|
|
|
2023-12-04 15:06:08 +01:00
|
|
|
// Stages an at-most-one constraint for incorporation into the graph.
//
// The constraint is appended to at_most_one_buffer_ in the length-prefixed
// encoding (a size header literal followed by the member literals) and then
// canonicalized/registered by CleanUpAndAddAtMostOnes(). Must be called at
// decision level zero. Returns false on conflict.
bool BinaryImplicationGraph::AddAtMostOne(
    absl::Span<const Literal> at_most_one) {
  DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);

  // An at-most-one over zero or one literal is trivially satisfied.
  if (at_most_one.size() <= 1) return true;

  // Stage a temporary copy at the tail of the buffer. The size header comes
  // first so AtMostOne() can later recover the span.
  const int start_of_new_amo = at_most_one_buffer_.size();
  const Literal size_header(LiteralIndex(at_most_one.size()));
  at_most_one_buffer_.push_back(size_header);
  for (const Literal literal : at_most_one) {
    at_most_one_buffer_.push_back(literal);
  }

  // The implication structure changed, so any previously computed DAG
  // property is stale.
  is_dag_ = false;
  return CleanUpAndAddAtMostOnes(start_of_new_amo);
}
|
|
|
|
|
|
2023-12-21 09:37:59 +01:00
|
|
|
// TODO(user): remove dupl with ClauseManager::InprocessingFixLiteral().
|
2023-12-04 15:06:08 +01:00
|
|
|
//
|
|
|
|
|
// Note that we currently do not support calling this at a positive level since
|
|
|
|
|
// we might loose the fixing on backtrack otherwise.
|
2020-06-03 12:07:07 +02:00
|
|
|
bool BinaryImplicationGraph::FixLiteral(Literal true_literal) {
  // Only supported at the root level: a fixing done at a positive level
  // would be lost on backtrack.
  CHECK_EQ(trail_->CurrentDecisionLevel(), 0);

  // If the literal already has a value, just report whether it agrees.
  const auto& assignment = trail_->Assignment();
  if (assignment.LiteralIsTrue(true_literal)) return true;
  if (assignment.LiteralIsFalse(true_literal)) return false;

  // Record the unit clause so the DRAT proof stays complete.
  if (drat_proof_handler_ != nullptr) {
    drat_proof_handler_->AddClause({true_literal});
  }

  // Enqueue the fixing and immediately propagate its consequences.
  trail_->EnqueueWithUnitReason(true_literal);
  return Propagate(trail_);
}
|
|
|
|
|
|
2019-07-09 19:02:01 +02:00
|
|
|
// This works by doing a linear scan on the at_most_one_buffer_ and
|
|
|
|
|
// cleaning/copying the at most ones on the fly to the beginning of the same
|
|
|
|
|
// buffer.
|
2023-12-04 15:06:08 +01:00
|
|
|
// Canonicalizes and registers all at-most-one constraints stored in
// at_most_one_buffer_ starting at base_index. Works by a linear scan that
// cleans/copies the at most ones on the fly to the beginning of the same
// buffer (compacting it in place). Returns false if a root-level conflict is
// detected while fixing literals.
bool BinaryImplicationGraph::CleanUpAndAddAtMostOnes(int base_index) {
  const VariablesAssignment& assignment = trail_->Assignment();
  int local_end = base_index;
  const int buffer_size = at_most_one_buffer_.size();
  for (int i = base_index; i < buffer_size;) {
    // Process a new at most one.
    // It will be copied into buffer[local_start + 1, local_end].
    // With its size at buffer[local_start].
    const int local_start = local_end;

    // Update the iterator now. Even if the current at_most_one is reduced
    // away, local_start will still point to the next one, or to the end of
    // the buffer.
    if (i == at_most_one_iterator_) {
      at_most_one_iterator_ = local_start;
    }

    // We have an at_most_one starting at i, and we increment i to the next
    // one. Note that i skips the size header stored at position i.
    const absl::Span<const Literal> initial_amo = AtMostOne(i);
    i += initial_amo.size() + 1;

    // Reserve space for size.
    local_end++;
    bool set_all_left_to_false = false;
    for (const Literal l : initial_amo) {
      // A false literal contributes nothing to an at-most-one.
      if (assignment.LiteralIsFalse(l)) continue;
      if (is_removed_[l]) continue;
      // A true literal forces every other member to false; remember that and
      // keep collecting the remaining (not-yet-false) literals to fix below.
      if (!set_all_left_to_false && assignment.LiteralIsTrue(l)) {
        set_all_left_to_false = true;
        continue;
      }
      // Map each surviving literal to its equivalence-class representative.
      at_most_one_buffer_[local_end++] = RepresentativeOf(l);
    }
    at_most_one_buffer_[local_start] =
        Literal(LiteralIndex(local_end - local_start - 1));

    // Deal with duplicates.
    // Any duplicate in an "at most one" must be false.
    bool some_duplicates = false;
    if (!set_all_left_to_false) {
      // Sort the copied amo.
      // We only want to expose a const AtMostOne() so we sort directly here.
      Literal* pt = &at_most_one_buffer_[local_start + 1];
      std::sort(pt, pt + AtMostOne(local_start).size());

      LiteralIndex previous = kNoLiteralIndex;
      bool remove_previous = false;
      int new_local_end = local_start + 1;
      for (const Literal l : AtMostOne(local_start)) {
        if (l.Index() == previous) {
          // Duplicate found: l appears at least twice, so l must be false.
          if (assignment.LiteralIsTrue(l)) return false;
          if (!assignment.LiteralIsFalse(l)) {
            if (!FixLiteral(l.Negated())) return false;
          }
          remove_previous = true;
          some_duplicates = true;
          continue;
        }

        // We need to pay attention to triplet or more of equal elements, so
        // it is why we need this boolean and can't just remove it right away.
        if (remove_previous) {
          --new_local_end;
          remove_previous = false;
        }
        previous = l.Index();
        at_most_one_buffer_[new_local_end++] = l;
      }
      if (remove_previous) --new_local_end;

      // Update local end and the amo size.
      local_end = new_local_end;
      at_most_one_buffer_[local_start] =
          Literal(LiteralIndex(local_end - local_start - 1));
    }

    // If there was some duplicates, we need to rescan to see if a literal
    // didn't become true because its negation was appearing twice!
    if (some_duplicates) {
      int new_local_end = local_start + 1;
      for (const Literal l : AtMostOne(local_start)) {
        if (assignment.LiteralIsFalse(l)) continue;
        if (!set_all_left_to_false && assignment.LiteralIsTrue(l)) {
          set_all_left_to_false = true;
          continue;
        }
        at_most_one_buffer_[new_local_end++] = l;
      }

      // Update local end and the amo size.
      local_end = new_local_end;
      at_most_one_buffer_[local_start] =
          Literal(LiteralIndex(local_end - local_start - 1));
    }

    // Deal with all false.
    if (set_all_left_to_false) {
      // One member is true, so every remaining member must be fixed to false.
      for (const Literal l : AtMostOne(local_start)) {
        if (assignment.LiteralIsFalse(l)) continue;
        if (assignment.LiteralIsTrue(l)) return false;
        if (!FixLiteral(l.Negated())) return false;
      }

      // Erase this at_most_one.
      local_end = local_start;
      continue;
    }

    // Create a Span<> to simplify the code below.
    const absl::Span<const Literal> at_most_one = AtMostOne(local_start);

    // We expand small sizes into implications.
    // TODO(user): Investigate what the best threshold is.
    if (at_most_one.size() <= std::max(2, at_most_one_max_expansion_size_)) {
      // For each ordered pair (a, b), add a => not(b).
      for (const Literal a : at_most_one) {
        for (const Literal b : at_most_one) {
          if (a == b) continue;
          implications_[a].push_back(b.Negated());
          implies_something_.Set(a);
          NotifyPossibleDuplicate(a);
        }
      }
      num_implications_ += at_most_one.size() * (at_most_one.size() - 1);

      // This will erase the at_most_one from the buffer.
      local_end = local_start;
      continue;
    }

    // Index the new at most one: each member literal points back to the
    // buffer offset of the constraint's size header.
    for (const Literal l : at_most_one) {
      if (l.Index() >= at_most_ones_.size()) {
        at_most_ones_.resize(l.Index().value() + 1);
      }
      DCHECK(!is_redundant_[l]);
      at_most_ones_[l].push_back(local_start);
      implies_something_.Set(l);
    }
  }

  at_most_one_buffer_.resize(local_end);
  return true;
}
|
|
|
|
|
|
2024-11-28 15:34:01 +01:00
|
|
|
// Propagates all binary implications and indexed at-most-one constraints for
// the literals on the trail that have not been processed yet. Returns false
// and fills the trail conflict on a binary conflict.
bool BinaryImplicationGraph::Propagate(Trail* trail) {
  SCOPED_TIME_STAT(&stats_);

  if (IsEmpty()) {
    propagation_trail_index_ = trail->Index();
    return true;
  }
  trail->SetCurrentPropagatorId(propagator_id_);

  // Cache views/raw pointers outside the hot loop.
  const auto assignment = AssignmentView(trail->Assignment());
  const auto implies_something = implies_something_.view();
  auto* implications = implications_.data();

  while (propagation_trail_index_ < trail->Index()) {
    const Literal true_literal = (*trail)[propagation_trail_index_++];
    DCHECK(assignment.LiteralIsTrue(true_literal));
    // Fast skip for literals with no implications and no at-most-ones.
    if (!implies_something[true_literal]) continue;

    // Note(user): This update is not exactly correct because in case of
    // conflict we don't inspect that much clauses. But doing
    // ++num_inspections_ inside the loop does slow down the code by a few
    // percent.
    const absl::Span<const Literal> implied =
        implications[true_literal.Index().value()];
    num_inspections_ += implied.size();
    for (const Literal literal : implied) {
      if (assignment.LiteralIsTrue(literal)) {
        // Note(user): I tried to update the reason here if the literal was
        // enqueued after the true_literal on the trail. This property is
        // important for ComputeFirstUIPConflict() to work since it needs the
        // trail order to be a topological order for the deduction graph.
        // But the performance was not too good...
        continue;
      }

      ++num_propagations_;
      if (assignment.LiteralIsFalse(literal)) {
        // Conflict.
        *(trail->MutableConflict()) = {true_literal.Negated(), literal};
        return false;
      } else {
        // Propagation. The reason is stored at the future trail position
        // before the enqueue so Reason() can recover it.
        reasons_[trail->Index()] = true_literal.Negated();
        trail->FastEnqueue(literal);
      }
    }

    // Propagate the at_most_one constraints.
    if (true_literal.Index() < at_most_ones_.size()) {
      for (const int start : at_most_ones_[true_literal]) {
        bool seen = false;
        for (const Literal literal : AtMostOne(start)) {
          ++num_inspections_;
          if (literal == true_literal) {
            // Debug-only check that true_literal appears at most once.
            if (DEBUG_MODE) {
              DCHECK(!seen);
              seen = true;
            }
            continue;
          }
          if (assignment.LiteralIsFalse(literal)) continue;

          // true_literal is true, so every other member must become false.
          ++num_propagations_;
          if (assignment.LiteralIsTrue(literal)) {
            // Conflict.
            *(trail->MutableConflict()) = {true_literal.Negated(),
                                           literal.Negated()};
            return false;
          } else {
            // Propagation.
            reasons_[trail->Index()] = true_literal.Negated();
            trail->FastEnqueue(literal.Negated());
          }
        }
      }
    }
  }

  return true;
}
|
|
|
|
|
|
2020-10-22 23:36:58 +02:00
|
|
|
absl::Span<const Literal> BinaryImplicationGraph::Reason(
    const Trail& /*trail*/, int trail_index, int64_t /*conflict_id*/) const {
  // A binary propagation has a single-literal reason, recorded in reasons_
  // at the trail position of the propagated literal.
  const Literal* reason_ptr = &reasons_[trail_index];
  return absl::MakeConstSpan(reason_ptr, 1);
}
|
|
|
|
|
|
2014-03-12 17:30:11 +00:00
|
|
|
// Here, we remove all the literal whose negation are implied by the negation of
|
|
|
|
|
// the 1-UIP literal (which always appear first in the given conflict). Note
|
|
|
|
|
// that this algorithm is "optimal" in the sense that it leads to a minimized
|
|
|
|
|
// conflict with a backjump level as low as possible. However, not all possible
|
|
|
|
|
// literals are removed.
|
2019-07-09 19:02:01 +02:00
|
|
|
//
|
|
|
|
|
// TODO(user): Also consider at most one?
|
2014-03-12 17:30:11 +00:00
|
|
|
void BinaryImplicationGraph::MinimizeConflictWithReachability(
    std::vector<Literal>* conflict) {
  SCOPED_TIME_STAT(&stats_);
  dfs_stack_.clear();

  // Compute the reachability from the literal "not(conflict->front())" using
  // an iterative dfs.
  const LiteralIndex root_literal_index = conflict->front().NegatedIndex();
  is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
  is_marked_.Set(root_literal_index);

  // TODO(user): This sounds like a good idea, but somehow it seems better not
  // to do that even though it is almost for free. Investigate more.
  //
  // The idea here is that since we already compute the reachability from the
  // root literal, we can use this computation to remove any implication
  // root_literal => b if there is already root_literal => a and b is reachable
  // from a.
  //
  // NOTE: this flag is intentionally hard-coded to false; the pruning code
  // below is kept but currently disabled.
  const bool also_prune_direct_implication_list = false;

  // We treat the direct implications differently so we can also remove the
  // redundant implications from this list at the same time.
  auto& direct_implications = implications_[root_literal_index];
  for (const Literal l : direct_implications) {
    if (is_marked_[l]) continue;
    dfs_stack_.push_back(l);
    while (!dfs_stack_.empty()) {
      const LiteralIndex index = dfs_stack_.back().Index();
      dfs_stack_.pop_back();
      if (!is_marked_[index]) {
        is_marked_.Set(index);
        for (const Literal implied : implications_[index]) {
          if (!is_marked_[implied]) dfs_stack_.push_back(implied);
        }
      }
    }

    // The "trick" is to unmark 'l'. This way, if we explore it twice, it means
    // that this l is reachable from some other 'l' from the direct implication
    // list. Remarks:
    //  - We don't loose too much complexity when this happen since a literal
    //    can be unmarked only once, so in the worst case we loop twice over
    //    its children. Moreover, this literal will be pruned for later calls.
    //  - This is correct, i.e. we can't prune too many literals because of a
    //    strongly connected component. Proof by contradiction: If we take the
    //    first (in direct_implications) literal from a removed SCC, it must
    //    have marked all the others. But because they are marked, they will
    //    not be explored again and so can't mark the first literal.
    if (also_prune_direct_implication_list) {
      is_marked_.Clear(l);
    }
  }

  // Now we can prune the direct implications list and make sure are the
  // literals there are marked.
  if (also_prune_direct_implication_list) {
    int new_size = 0;
    for (const Literal l : direct_implications) {
      if (!is_marked_[l]) {
        is_marked_.Set(l);
        direct_implications[new_size] = l;
        ++new_size;
      }
    }
    if (new_size < direct_implications.size()) {
      num_redundant_implications_ += direct_implications.size() - new_size;
      direct_implications.resize(new_size);
    }
  }

  // Drop every conflict literal whose negation was marked as reachable.
  RemoveRedundantLiterals(conflict);
}
|
|
|
|
|
|
|
|
|
|
// Same as MinimizeConflictWithReachability() but also mark (in the given
|
|
|
|
|
// SparseBitset) the reachable literal already assigned to false. These literals
|
|
|
|
|
// will be implied if the 1-UIP literal is assigned to false, and the classic
|
|
|
|
|
// minimization algorithm can take advantage of that.
|
|
|
|
|
void BinaryImplicationGraph::MinimizeConflictFirst(
|
2020-10-28 13:42:36 +01:00
|
|
|
const Trail& trail, std::vector<Literal>* conflict,
|
|
|
|
|
SparseBitset<BooleanVariable>* marked) {
|
2014-03-12 17:30:11 +00:00
|
|
|
SCOPED_TIME_STAT(&stats_);
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK(!conflict->empty());
|
2014-03-12 17:30:11 +00:00
|
|
|
is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
|
2019-07-09 19:02:01 +02:00
|
|
|
MarkDescendants(conflict->front().Negated());
|
|
|
|
|
for (const LiteralIndex i : is_marked_.PositionsSetAtLeastOnce()) {
|
2024-01-05 17:28:07 +01:00
|
|
|
// TODO(user): if this is false, then we actually have a conflict of size 2.
|
|
|
|
|
// This can only happen if the binary clause was not propagated properly
|
2024-09-30 17:28:08 +02:00
|
|
|
// if for instance we do chronological bactracking without re-enqueuing the
|
2024-01-05 17:28:07 +01:00
|
|
|
// consequence of a binary clause.
|
|
|
|
|
if (trail.Assignment().LiteralIsTrue(Literal(i))) {
|
2019-07-09 19:02:01 +02:00
|
|
|
marked->Set(Literal(i).Variable());
|
2014-03-12 17:30:11 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
RemoveRedundantLiterals(conflict);
|
|
|
|
|
}
|
|
|
|
|
|
2015-02-03 10:47:21 +00:00
|
|
|
// Same as MinimizeConflictFirst() but take advantage of this reachability
|
|
|
|
|
// computation to remove redundant implication in the implication list of the
|
|
|
|
|
// first UIP conflict.
|
|
|
|
|
void BinaryImplicationGraph::MinimizeConflictFirstWithTransitiveReduction(
    const Trail& /*trail*/, std::vector<Literal>* conflict,
    absl::BitGenRef random) {
  SCOPED_TIME_STAT(&stats_);
  const LiteralIndex root_literal_index = conflict->front().NegatedIndex();
  is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
  is_marked_.Set(root_literal_index);

  int new_size = 0;
  auto& direct_implications = implications_[root_literal_index];

  // The randomization allow to find more redundant implication since to find
  // a => b and remove b, a must be before b in direct_implications. Note that
  // a std::reverse() could work too. But randomization seems to work better.
  // Probably because it has other impact on the search tree.
  std::shuffle(direct_implications.begin(), direct_implications.end(), random);
  dfs_stack_.clear();
  // Note: direct_implications is compacted in place (via new_size) while being
  // iterated; this is safe because new_size never overtakes the read index.
  for (const Literal l : direct_implications) {
    if (is_marked_[l]) {
      // The literal is already marked! so it must be implied by one of the
      // previous literal in the direct_implications list. We can safely remove
      // it.
      continue;
    }
    direct_implications[new_size++] = l;
    // Iterative DFS marking everything reachable from l.
    dfs_stack_.push_back(l);
    while (!dfs_stack_.empty()) {
      const LiteralIndex index = dfs_stack_.back().Index();
      dfs_stack_.pop_back();
      if (!is_marked_[index]) {
        is_marked_.Set(index);
        for (const Literal implied : implications_[index]) {
          if (!is_marked_[implied]) dfs_stack_.push_back(implied);
        }
      }
    }
  }
  if (new_size < direct_implications.size()) {
    num_redundant_implications_ += direct_implications.size() - new_size;
    direct_implications.resize(new_size);
  }
  RemoveRedundantLiterals(conflict);
}
|
|
|
|
|
|
2014-03-12 17:30:11 +00:00
|
|
|
void BinaryImplicationGraph::RemoveRedundantLiterals(
|
2020-10-28 13:42:36 +01:00
|
|
|
std::vector<Literal>* conflict) {
|
2014-03-12 17:30:11 +00:00
|
|
|
SCOPED_TIME_STAT(&stats_);
|
|
|
|
|
int new_index = 1;
|
|
|
|
|
for (int i = 1; i < conflict->size(); ++i) {
|
|
|
|
|
if (!is_marked_[(*conflict)[i].NegatedIndex()]) {
|
|
|
|
|
(*conflict)[new_index] = (*conflict)[i];
|
|
|
|
|
++new_index;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (new_index < conflict->size()) {
|
|
|
|
|
++num_minimization_;
|
|
|
|
|
num_literals_removed_ += conflict->size() - new_index;
|
|
|
|
|
conflict->resize(new_index);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-07-09 19:02:01 +02:00
|
|
|
// TODO(user): Also consider at most one?
|
2014-03-12 17:30:11 +00:00
|
|
|
void BinaryImplicationGraph::MinimizeConflictExperimental(
    const Trail& trail, std::vector<Literal>* conflict) {
  SCOPED_TIME_STAT(&stats_);
  is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
  is_simplified_.ClearAndResize(LiteralIndex(implications_.size()));
  // Mark every literal currently in the conflict.
  for (const Literal lit : *conflict) {
    is_marked_.Set(lit);
  }

  // Identify and remove the redundant literals from the given conflict.
  // 1/ If a -> b then a can be removed from the conflict clause.
  //    This is because not b -> not a.
  // 2/ a -> b can only happen if level(a) <= level(b).
  // 3/ Because of 2/, cycles can appear only at the same level.
  //    The vector is_simplified_ is used to avoid removing all elements of a
  //    cycle. Note that this is not optimal in the sense that we may not
  //    remove a literal that can be removed.
  //
  // Note that there is no need to explore the unique literal of the highest
  // decision level since it can't be removed. Because this is a conflict, such
  // literal is always at position 0, so we start directly at 1.
  int index = 1;
  for (int i = 1; i < conflict->size(); ++i) {
    const Literal lit = (*conflict)[i];
    const int lit_level = trail.Info(lit.Variable()).level;
    bool keep_literal = true;
    for (const Literal implied : implications_[lit]) {
      if (is_marked_[implied]) {
        DCHECK_LE(lit_level, trail.Info(implied.Variable()).level);
        // Skip same-level implications to literals we already simplified so
        // we never delete a whole cycle (see point 3/ above).
        if (lit_level == trail.Info(implied.Variable()).level &&
            is_simplified_[implied]) {
          continue;
        }
        keep_literal = false;
        break;
      }
    }
    if (keep_literal) {
      (*conflict)[index] = lit;
      ++index;
    } else {
      is_simplified_.Set(lit);
    }
  }
  if (index < conflict->size()) {
    ++num_minimization_;
    num_literals_removed_ += conflict->size() - index;
    conflict->erase(conflict->begin() + index, conflict->end());
  }
}
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// Removes the literals fixed at level zero (since the last call) from the
// implication lists and from the at-most-one index. Assumes everything is
// already propagated; only valid at decision level zero.
void BinaryImplicationGraph::RemoveFixedVariables() {
  SCOPED_TIME_STAT(&stats_);
  DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
  if (IsEmpty()) return;

  // Nothing to do if nothing changed since last call.
  const int new_num_fixed = trail_->Index();
  DCHECK_EQ(propagation_trail_index_, new_num_fixed);
  if (num_processed_fixed_variables_ == new_num_fixed) return;

  const VariablesAssignment& assignment = trail_->Assignment();
  is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
  for (; num_processed_fixed_variables_ < new_num_fixed;
       ++num_processed_fixed_variables_) {
    const Literal true_literal = (*trail_)[num_processed_fixed_variables_];
    if (DEBUG_MODE) {
      // The code assumes that everything is already propagated.
      // Otherwise we will remove implications that didn't propagate yet!
      for (const Literal lit : implications_[true_literal]) {
        DCHECK(trail_->Assignment().LiteralIsTrue(lit));
      }
    }

    // If b is true and a -> b then because not b -> not a, all the
    // implications list that contains b will be marked by this process.
    // And the ones that contains not(b) should correspond to a false literal!
    //
    // TODO(user): This might not be true if we remove implication by
    // transitive reduction and the process was aborted due to the computation
    // limit. I think it will be good to maintain that invariant though,
    // otherwise fixed literals might never be removed from these lists...
    for (const Literal lit : implications_[true_literal.NegatedIndex()]) {
      if (lit.NegatedIndex() < representative_of_.size() &&
          representative_of_[lit.Negated()] != kNoLiteralIndex) {
        // We mark its representative instead.
        is_marked_.Set(representative_of_[lit.Negated()]);
      } else {
        is_marked_.Set(lit.NegatedIndex());
      }
    }
    // Both adjacency lists of a fixed literal become useless; release them.
    gtl::STLClearObject(&(implications_[true_literal]));
    gtl::STLClearObject(&(implications_[true_literal.NegatedIndex()]));

    if (true_literal.Index() < at_most_ones_.size()) {
      gtl::STLClearObject(&(at_most_ones_[true_literal]));
    }
    if (true_literal.NegatedIndex() < at_most_ones_.size()) {
      gtl::STLClearObject(&(at_most_ones_[true_literal.NegatedIndex()]));
    }
  }
  // Strip now-true literals from every implication list that was marked as
  // containing one.
  for (const LiteralIndex i : is_marked_.PositionsSetAtLeastOnce()) {
    RemoveIf(&implications_[i], [&assignment](const Literal& lit) {
      return assignment.LiteralIsTrue(lit);
    });
  }

  // TODO(user): This might be a bit slow. Do not call all the time if needed,
  // this shouldn't change the correctness of the code.
  at_most_ones_.clear();
  CleanUpAndAddAtMostOnes(/*base_index=*/0);
  DCHECK(InvariantsAreOk());
}
|
|
|
|
|
|
2019-10-18 14:39:25 +02:00
|
|
|
class SccGraph {
|
2020-10-22 23:36:58 +02:00
|
|
|
public:
|
2024-03-04 18:07:17 +01:00
|
|
|
using Implications =
|
2024-07-12 13:56:11 +02:00
|
|
|
util_intops::StrongVector<LiteralIndex, absl::InlinedVector<Literal, 6>>;
|
2024-03-04 18:07:17 +01:00
|
|
|
using AtMostOnes =
|
2024-07-12 13:56:11 +02:00
|
|
|
util_intops::StrongVector<LiteralIndex, absl::InlinedVector<int32_t, 6>>;
|
2020-10-22 23:36:58 +02:00
|
|
|
using SccFinder =
|
2021-03-04 18:26:01 +01:00
|
|
|
StronglyConnectedComponentsFinder<int32_t, SccGraph,
|
2024-01-12 16:31:18 +01:00
|
|
|
CompactVectorVector<int32_t, int32_t>>;
|
2020-10-21 00:21:54 +02:00
|
|
|
|
2024-03-04 18:07:17 +01:00
|
|
|
explicit SccGraph(SccFinder* finder, Implications* graph,
|
|
|
|
|
AtMostOnes* at_most_ones,
|
2020-10-28 13:42:36 +01:00
|
|
|
std::vector<Literal>* at_most_one_buffer)
|
2020-10-22 23:36:58 +02:00
|
|
|
: finder_(*finder),
|
|
|
|
|
implications_(*graph),
|
|
|
|
|
at_most_ones_(*at_most_ones),
|
2019-07-09 19:02:01 +02:00
|
|
|
at_most_one_buffer_(*at_most_one_buffer) {}
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2021-03-04 18:26:01 +01:00
|
|
|
const std::vector<int32_t>& operator[](int32_t node) const {
|
2018-11-05 16:24:47 +01:00
|
|
|
tmp_.clear();
|
2019-07-09 19:02:01 +02:00
|
|
|
for (const Literal l : implications_[LiteralIndex(node)]) {
|
2018-11-05 16:24:47 +01:00
|
|
|
tmp_.push_back(l.Index().value());
|
2019-10-18 14:39:25 +02:00
|
|
|
if (finder_.NodeIsInCurrentDfsPath(l.NegatedIndex().value())) {
|
|
|
|
|
to_fix_.push_back(l);
|
|
|
|
|
}
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
2019-07-09 19:02:01 +02:00
|
|
|
if (node < at_most_ones_.size()) {
|
|
|
|
|
for (const int start : at_most_ones_[LiteralIndex(node)]) {
|
2019-10-18 14:39:25 +02:00
|
|
|
if (start >= at_most_one_already_explored_.size()) {
|
|
|
|
|
at_most_one_already_explored_.resize(start + 1, false);
|
|
|
|
|
previous_node_to_explore_at_most_one_.resize(start + 1);
|
|
|
|
|
}
|
|
|
|
|
|
2022-04-06 17:33:00 +02:00
|
|
|
// In the presence of at_most_ones_ constraints, expanding them
|
|
|
|
|
// implicitly to implications in the SCC computation can result in a
|
2019-10-18 14:39:25 +02:00
|
|
|
// quadratic complexity rather than a linear one in term of the input
|
|
|
|
|
// data structure size. So this test here is critical on problem with
|
|
|
|
|
// large at_most ones like the "ivu06-big.mps.gz" where without it, the
|
|
|
|
|
// full FindStronglyConnectedComponents() take more than on hour instead
|
|
|
|
|
// of less than a second!
|
|
|
|
|
if (at_most_one_already_explored_[start]) {
|
|
|
|
|
// We never expand a node twice.
|
|
|
|
|
const int first_node = previous_node_to_explore_at_most_one_[start];
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK_NE(node, first_node);
|
2019-10-18 14:39:25 +02:00
|
|
|
|
|
|
|
|
if (finder_.NodeIsInCurrentDfsPath(first_node)) {
|
|
|
|
|
// If the first node is not settled, then we do explore the
|
|
|
|
|
// at_most_one constraint again. In "Mixed-Integer-Programming:
|
|
|
|
|
// Analyzing 12 years of progress", Tobias Achterberg and Roland
|
|
|
|
|
// Wunderling explains that an at most one need to be looped over at
|
|
|
|
|
// most twice. I am not sure exactly how that works, so for now we
|
|
|
|
|
// are not fully linear, but on actual instances, we only rarely
|
|
|
|
|
// run into this case.
|
|
|
|
|
//
|
|
|
|
|
// Note that we change the previous node to explore at most one
|
|
|
|
|
// since the current node will be settled before the old ones.
|
|
|
|
|
//
|
|
|
|
|
// TODO(user): avoid looping more than twice on the same at most one
|
|
|
|
|
// constraints? Note that the second time we loop we have x => y =>
|
|
|
|
|
// not(x), so we can already detect that x must be false which we
|
|
|
|
|
// detect below.
|
|
|
|
|
previous_node_to_explore_at_most_one_[start] = node;
|
|
|
|
|
} else {
|
|
|
|
|
// The first node is already settled and so are all its child. Only
|
|
|
|
|
// not(first_node) might still need exploring.
|
2020-10-22 23:36:58 +02:00
|
|
|
tmp_.push_back(
|
|
|
|
|
Literal(LiteralIndex(first_node)).NegatedIndex().value());
|
2019-10-18 14:39:25 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
at_most_one_already_explored_[start] = true;
|
|
|
|
|
previous_node_to_explore_at_most_one_[start] = node;
|
|
|
|
|
}
|
|
|
|
|
|
2024-03-04 18:07:17 +01:00
|
|
|
const absl::Span<const Literal> amo =
|
|
|
|
|
absl::MakeSpan(&at_most_one_buffer_[start + 1],
|
|
|
|
|
at_most_one_buffer_[start].Index().value());
|
|
|
|
|
for (const Literal l : amo) {
|
2020-10-22 23:36:58 +02:00
|
|
|
if (l.Index() == node) continue;
|
2019-07-09 19:02:01 +02:00
|
|
|
tmp_.push_back(l.NegatedIndex().value());
|
2019-10-18 14:39:25 +02:00
|
|
|
if (finder_.NodeIsInCurrentDfsPath(l.Index().value())) {
|
|
|
|
|
to_fix_.push_back(l.Negated());
|
|
|
|
|
}
|
2019-07-09 19:02:01 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2020-05-06 18:22:10 +02:00
|
|
|
work_done_ += tmp_.size();
|
2018-11-05 16:24:47 +01:00
|
|
|
return tmp_;
|
|
|
|
|
}
|
|
|
|
|
|
2019-10-18 14:39:25 +02:00
|
|
|
// All these literals where detected to be true during the SCC computation.
|
|
|
|
|
mutable std::vector<Literal> to_fix_;
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// For the deterministic time.
|
2021-03-04 18:26:01 +01:00
|
|
|
mutable int64_t work_done_ = 0;
|
2020-05-06 18:22:10 +02:00
|
|
|
|
2020-10-22 23:36:58 +02:00
|
|
|
private:
|
2020-10-28 13:42:36 +01:00
|
|
|
const SccFinder& finder_;
|
2024-03-04 18:07:17 +01:00
|
|
|
const Implications& implications_;
|
|
|
|
|
const AtMostOnes& at_most_ones_;
|
2020-10-28 13:42:36 +01:00
|
|
|
const std::vector<Literal>& at_most_one_buffer_;
|
2019-10-18 14:39:25 +02:00
|
|
|
|
2021-03-04 18:26:01 +01:00
|
|
|
mutable std::vector<int32_t> tmp_;
|
2019-10-18 14:39:25 +02:00
|
|
|
|
|
|
|
|
// Used to get a non-quadratic complexity in the presence of at most ones.
|
|
|
|
|
mutable std::vector<bool> at_most_one_already_explored_;
|
|
|
|
|
mutable std::vector<int> previous_node_to_explore_at_most_one_;
|
2018-11-05 16:24:47 +01:00
|
|
|
};
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
bool BinaryImplicationGraph::DetectEquivalences(bool log_info) {
|
|
|
|
|
// This was already called, and no new constraint where added. Note that new
|
2023-09-13 18:16:28 +02:00
|
|
|
// fixed variable cannot create new equivalence, only new binary clauses do.
|
2020-10-22 23:36:58 +02:00
|
|
|
if (is_dag_) return true;
|
2020-05-06 18:22:10 +02:00
|
|
|
WallTimer wall_timer;
|
|
|
|
|
wall_timer.Start();
|
|
|
|
|
log_info |= VLOG_IS_ON(1);
|
|
|
|
|
|
|
|
|
|
// Lets remove all fixed variables first.
|
2020-10-22 23:36:58 +02:00
|
|
|
if (!Propagate(trail_)) return false;
|
2020-05-06 18:22:10 +02:00
|
|
|
RemoveFixedVariables();
|
2020-10-28 13:42:36 +01:00
|
|
|
const VariablesAssignment& assignment = trail_->Assignment();
|
2023-10-15 18:08:33 +02:00
|
|
|
DCHECK(InvariantsAreOk());
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// TODO(user): We could just do it directly though.
|
2021-03-04 18:26:01 +01:00
|
|
|
const int32_t size(implications_.size());
|
2024-01-12 16:31:18 +01:00
|
|
|
CompactVectorVector<int32_t, int32_t> scc;
|
|
|
|
|
scc.reserve(size);
|
2020-05-06 18:22:10 +02:00
|
|
|
double dtime = 0.0;
|
2019-10-18 14:39:25 +02:00
|
|
|
{
|
|
|
|
|
SccGraph::SccFinder finder;
|
|
|
|
|
SccGraph graph(&finder, &implications_, &at_most_ones_,
|
|
|
|
|
&at_most_one_buffer_);
|
|
|
|
|
finder.FindStronglyConnectedComponents(size, graph, &scc);
|
2020-05-06 18:22:10 +02:00
|
|
|
dtime += 4e-8 * graph.work_done_;
|
2019-10-18 14:39:25 +02:00
|
|
|
|
|
|
|
|
for (const Literal l : graph.to_fix_) {
|
2020-10-22 23:36:58 +02:00
|
|
|
if (assignment.LiteralIsFalse(l)) return false;
|
|
|
|
|
if (assignment.LiteralIsTrue(l)) continue;
|
|
|
|
|
if (!FixLiteral(l)) return false;
|
2019-10-18 14:39:25 +02:00
|
|
|
}
|
|
|
|
|
}
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// The old values will still be valid.
|
|
|
|
|
representative_of_.resize(size, kNoLiteralIndex);
|
2023-02-06 16:27:12 +01:00
|
|
|
is_redundant_.resize(size);
|
2020-05-06 18:22:10 +02:00
|
|
|
|
2018-11-05 16:24:47 +01:00
|
|
|
int num_equivalences = 0;
|
|
|
|
|
reverse_topological_order_.clear();
|
2024-01-12 16:31:18 +01:00
|
|
|
for (int index = 0; index < scc.size(); ++index) {
|
|
|
|
|
const absl::Span<int32_t> component = scc[index];
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// If one is fixed then all must be fixed. Note that the reason why the
|
|
|
|
|
// propagation didn't already do that and we don't always get fixed
|
|
|
|
|
// component of size 1 is because of the potential newly fixed literals
|
|
|
|
|
// above.
|
|
|
|
|
//
|
|
|
|
|
// In any case, all fixed literals are marked as redundant.
|
|
|
|
|
{
|
|
|
|
|
bool all_fixed = false;
|
|
|
|
|
bool all_true = false;
|
2021-03-04 18:26:01 +01:00
|
|
|
for (const int32_t i : component) {
|
2020-05-06 18:22:10 +02:00
|
|
|
const Literal l = Literal(LiteralIndex(i));
|
|
|
|
|
if (trail_->Assignment().LiteralIsAssigned(l)) {
|
|
|
|
|
all_fixed = true;
|
|
|
|
|
all_true = trail_->Assignment().LiteralIsTrue(l);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (all_fixed) {
|
2021-03-04 18:26:01 +01:00
|
|
|
for (const int32_t i : component) {
|
2020-05-06 18:22:10 +02:00
|
|
|
const Literal l = Literal(LiteralIndex(i));
|
2023-10-12 10:06:27 +02:00
|
|
|
if (!is_redundant_[l]) {
|
2020-05-06 18:22:10 +02:00
|
|
|
++num_redundant_literals_;
|
2023-10-12 10:06:27 +02:00
|
|
|
is_redundant_.Set(l);
|
2024-01-02 13:38:08 +01:00
|
|
|
representative_of_[l] = l.Index();
|
2020-05-06 18:22:10 +02:00
|
|
|
}
|
|
|
|
|
const Literal to_fix = all_true ? l : l.Negated();
|
2020-10-22 23:36:58 +02:00
|
|
|
if (assignment.LiteralIsFalse(to_fix)) return false;
|
|
|
|
|
if (assignment.LiteralIsTrue(to_fix)) continue;
|
|
|
|
|
if (!FixLiteral(l)) return false;
|
2020-05-06 18:22:10 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Next component.
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-06-03 12:07:07 +02:00
|
|
|
// We ignore variable that appear in no constraints.
|
|
|
|
|
if (component.size() == 1 && is_removed_[LiteralIndex(component[0])]) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
2018-11-05 16:24:47 +01:00
|
|
|
// We always take the smallest literal index (which also corresponds to the
|
|
|
|
|
// smallest BooleanVariable index) as a representative. This make sure that
|
|
|
|
|
// the representative of a literal l and the one of not(l) will be the
|
|
|
|
|
// negation of each other. There is also reason to think that it is
|
|
|
|
|
// heuristically better to use a BooleanVariable that was created first.
|
|
|
|
|
std::sort(component.begin(), component.end());
|
|
|
|
|
const LiteralIndex representative(component[0]);
|
|
|
|
|
reverse_topological_order_.push_back(representative);
|
2020-05-06 18:22:10 +02:00
|
|
|
|
2018-11-05 16:24:47 +01:00
|
|
|
if (component.size() == 1) {
|
2020-05-06 18:22:10 +02:00
|
|
|
// Note that because we process list in reverse topological order, this
|
|
|
|
|
// is only needed if there is any equivalence before this point.
|
|
|
|
|
if (num_equivalences > 0) {
|
2020-10-28 13:42:36 +01:00
|
|
|
auto& representative_list = implications_[representative];
|
|
|
|
|
for (Literal& ref : representative_list) {
|
2023-10-12 10:06:27 +02:00
|
|
|
const LiteralIndex rep = representative_of_[ref];
|
2020-10-22 23:36:58 +02:00
|
|
|
if (rep == representative) continue;
|
|
|
|
|
if (rep == kNoLiteralIndex) continue;
|
2020-05-06 18:22:10 +02:00
|
|
|
ref = Literal(rep);
|
|
|
|
|
}
|
|
|
|
|
gtl::STLSortAndRemoveDuplicates(&representative_list);
|
|
|
|
|
}
|
2018-11-05 16:24:47 +01:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// Sets the representative.
|
2018-11-05 16:24:47 +01:00
|
|
|
for (int i = 1; i < component.size(); ++i) {
|
|
|
|
|
const Literal literal = Literal(LiteralIndex(component[i]));
|
2023-10-12 10:06:27 +02:00
|
|
|
if (!is_redundant_[literal]) {
|
2020-05-06 18:22:10 +02:00
|
|
|
++num_redundant_literals_;
|
2023-10-12 10:06:27 +02:00
|
|
|
is_redundant_.Set(literal);
|
2020-05-06 18:22:10 +02:00
|
|
|
}
|
2023-10-12 10:06:27 +02:00
|
|
|
representative_of_[literal] = representative;
|
2018-11-05 16:24:47 +01:00
|
|
|
|
|
|
|
|
// Detect if x <=> not(x) which means unsat. Note that we relly on the
|
|
|
|
|
// fact that when sorted, they will both be consecutive in the list.
|
|
|
|
|
if (Literal(LiteralIndex(component[i - 1])).Negated() == literal) {
|
2020-05-06 18:22:10 +02:00
|
|
|
LOG_IF(INFO, log_info) << "Trivially UNSAT in DetectEquivalences()";
|
2018-11-05 16:24:47 +01:00
|
|
|
return false;
|
|
|
|
|
}
|
2020-05-06 18:22:10 +02:00
|
|
|
}
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// Merge all the lists in implications_[representative].
|
|
|
|
|
// Note that we do not want representative in its own list.
|
2020-10-28 13:42:36 +01:00
|
|
|
auto& representative_list = implications_[representative];
|
2020-05-13 11:30:15 +02:00
|
|
|
int new_size = 0;
|
|
|
|
|
for (const Literal l : representative_list) {
|
|
|
|
|
const Literal rep = RepresentativeOf(l);
|
2020-10-22 23:36:58 +02:00
|
|
|
if (rep.Index() == representative) continue;
|
2020-05-13 11:30:15 +02:00
|
|
|
representative_list[new_size++] = rep;
|
2020-05-06 18:22:10 +02:00
|
|
|
}
|
2020-05-13 11:30:15 +02:00
|
|
|
representative_list.resize(new_size);
|
2020-05-06 18:22:10 +02:00
|
|
|
for (int i = 1; i < component.size(); ++i) {
|
|
|
|
|
const Literal literal = Literal(LiteralIndex(component[i]));
|
2023-10-12 10:06:27 +02:00
|
|
|
auto& ref = implications_[literal];
|
2018-11-05 16:24:47 +01:00
|
|
|
for (const Literal l : ref) {
|
2020-05-06 18:22:10 +02:00
|
|
|
const Literal rep = RepresentativeOf(l);
|
2020-10-22 23:36:58 +02:00
|
|
|
if (rep.Index() != representative) representative_list.push_back(rep);
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add representative <=> literal.
|
2020-05-06 18:22:10 +02:00
|
|
|
//
|
|
|
|
|
// Remark: this relation do not need to be added to a DRAT proof since
|
|
|
|
|
// the redundant variables should never be used again for a pure SAT
|
|
|
|
|
// problem.
|
2018-11-05 16:24:47 +01:00
|
|
|
representative_list.push_back(literal);
|
|
|
|
|
ref.clear();
|
|
|
|
|
ref.push_back(Literal(representative));
|
|
|
|
|
}
|
|
|
|
|
gtl::STLSortAndRemoveDuplicates(&representative_list);
|
2020-05-06 18:22:10 +02:00
|
|
|
num_equivalences += component.size() - 1;
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
2019-07-22 11:03:53 -07:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
is_dag_ = true;
|
|
|
|
|
if (num_equivalences != 0) {
|
|
|
|
|
// Remap all at most ones. Remove fixed variables, process duplicates. Note
|
|
|
|
|
// that this might result in more implications when we expand small at most
|
|
|
|
|
// one.
|
|
|
|
|
at_most_ones_.clear();
|
2024-12-16 14:20:09 +01:00
|
|
|
int saved_trail_index = propagation_trail_index_;
|
2020-10-22 23:36:58 +02:00
|
|
|
CleanUpAndAddAtMostOnes(/*base_index=*/0);
|
2024-12-16 14:20:09 +01:00
|
|
|
// This might have run the propagation on a few variables without taking
|
|
|
|
|
// into account the AMOs. Propagate again.
|
|
|
|
|
//
|
|
|
|
|
// TODO(user): Maybe a better alternative is to not propagate when we fix
|
|
|
|
|
// variables inside CleanUpAndAddAtMostOnes().
|
|
|
|
|
if (propagation_trail_index_ != saved_trail_index) {
|
|
|
|
|
propagation_trail_index_ = saved_trail_index;
|
|
|
|
|
Propagate(trail_);
|
|
|
|
|
}
|
2020-05-06 18:22:10 +02:00
|
|
|
|
|
|
|
|
num_implications_ = 0;
|
|
|
|
|
for (LiteralIndex i(0); i < size; ++i) {
|
|
|
|
|
num_implications_ += implications_[i].size();
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
2020-05-06 18:22:10 +02:00
|
|
|
dtime += 2e-8 * num_implications_;
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
time_limit_->AdvanceDeterministicTime(dtime);
|
2023-11-16 19:46:56 +01:00
|
|
|
const int num_fixed_during_scc =
|
|
|
|
|
trail_->Index() - num_processed_fixed_variables_;
|
|
|
|
|
RemoveFixedVariables();
|
2023-10-15 18:08:33 +02:00
|
|
|
DCHECK(InvariantsAreOk());
|
2020-10-22 23:36:58 +02:00
|
|
|
LOG_IF(INFO, log_info) << "SCC. " << num_equivalences
|
|
|
|
|
<< " redundant equivalent literals. "
|
|
|
|
|
<< num_fixed_during_scc << " fixed. "
|
|
|
|
|
<< num_implications_ << " implications left. "
|
|
|
|
|
<< implications_.size() << " literals."
|
|
|
|
|
<< " size of at_most_one buffer = "
|
|
|
|
|
<< at_most_one_buffer_.size() << "."
|
|
|
|
|
<< " dtime: " << dtime
|
|
|
|
|
<< " wtime: " << wall_timer.Get();
|
2018-11-05 16:24:47 +01:00
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2020-06-03 12:07:07 +02:00
|
|
|
// Note that as a side effect this also do a full "failed literal probing"
|
|
|
|
|
// using the binary implication graph only.
|
|
|
|
|
//
|
|
|
|
|
// TODO(user): Track which literal have new implications, and only process
|
2024-10-16 14:12:38 +02:00
|
|
|
// the antecedents of these.
|
2020-05-06 18:22:10 +02:00
|
|
|
bool BinaryImplicationGraph::ComputeTransitiveReduction(bool log_info) {
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
|
2024-10-16 14:12:38 +02:00
|
|
|
if (time_limit_->LimitReached()) return true;
|
2020-10-22 23:36:58 +02:00
|
|
|
if (!DetectEquivalences()) return false;
|
2020-06-03 12:07:07 +02:00
|
|
|
|
|
|
|
|
// TODO(user): the situation with fixed variable is not really "clean".
|
|
|
|
|
// Simplify the code so we are sure we don't run into issue or have to deal
|
|
|
|
|
// with any of that here.
|
2024-10-16 14:12:38 +02:00
|
|
|
if (time_limit_->LimitReached()) return true;
|
2020-10-22 23:36:58 +02:00
|
|
|
if (!Propagate(trail_)) return false;
|
2020-06-03 12:07:07 +02:00
|
|
|
RemoveFixedVariables();
|
2023-10-15 18:08:33 +02:00
|
|
|
DCHECK(InvariantsAreOk());
|
2024-10-16 14:12:38 +02:00
|
|
|
if (time_limit_->LimitReached()) return true;
|
2020-06-03 12:07:07 +02:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
log_info |= VLOG_IS_ON(1);
|
|
|
|
|
WallTimer wall_timer;
|
|
|
|
|
wall_timer.Start();
|
2019-07-22 11:03:53 -07:00
|
|
|
|
2021-03-04 18:26:01 +01:00
|
|
|
int64_t num_fixed = 0;
|
|
|
|
|
int64_t num_new_redundant_implications = 0;
|
2020-05-06 18:22:10 +02:00
|
|
|
bool aborted = false;
|
2023-10-15 18:08:33 +02:00
|
|
|
tmp_removed_.clear();
|
2018-11-05 16:24:47 +01:00
|
|
|
work_done_in_mark_descendants_ = 0;
|
2020-06-03 12:07:07 +02:00
|
|
|
int marked_index = 0;
|
2018-11-05 16:24:47 +01:00
|
|
|
|
|
|
|
|
// For each node we do a graph traversal and only keep the literals
|
|
|
|
|
// at maximum distance 1. This only works because we have a DAG when ignoring
|
2020-05-06 18:22:10 +02:00
|
|
|
// the "redundant" literal marked by DetectEquivalences(). Note that we also
|
|
|
|
|
// need no duplicates in the implications list for correctness which is also
|
|
|
|
|
// guaranteed by DetectEquivalences().
|
|
|
|
|
//
|
|
|
|
|
// TODO(user): We should be able to reuse some propagation like it is done for
|
|
|
|
|
// tree-look. Once a node is processed, we just need to process a node that
|
|
|
|
|
// implies it. Test if we can make this faster. Alternatively, only clear
|
|
|
|
|
// a part of is_marked_ (after the first child of root in reverse topo order).
|
|
|
|
|
//
|
|
|
|
|
// TODO(user): Can we exploit the fact that the implication graph is a
|
|
|
|
|
// skew-symmetric graph (isomorphic to its transposed) so that we do less
|
2023-10-15 18:08:33 +02:00
|
|
|
// work?
|
2018-11-05 16:24:47 +01:00
|
|
|
const LiteralIndex size(implications_.size());
|
2020-05-06 18:22:10 +02:00
|
|
|
LiteralIndex previous = kNoLiteralIndex;
|
|
|
|
|
for (const LiteralIndex root : reverse_topological_order_) {
|
2020-06-03 12:07:07 +02:00
|
|
|
// In most situation reverse_topological_order_ contains no redundant, fixed
|
|
|
|
|
// or removed variables. But the reverse_topological_order_ is only
|
|
|
|
|
// recomputed when new binary are added to the graph, not when new variable
|
|
|
|
|
// are fixed.
|
2020-10-22 23:36:58 +02:00
|
|
|
if (is_redundant_[root]) continue;
|
|
|
|
|
if (trail_->Assignment().LiteralIsAssigned(Literal(root))) continue;
|
2020-05-06 18:22:10 +02:00
|
|
|
|
2020-10-28 13:42:36 +01:00
|
|
|
auto& direct_implications = implications_[root];
|
2020-10-22 23:36:58 +02:00
|
|
|
if (direct_implications.empty()) continue;
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// This is a "poor" version of the tree look stuff, but it does show good
|
|
|
|
|
// improvement. If we just processed one of the child of root, we don't
|
|
|
|
|
// need to re-explore it.
|
|
|
|
|
//
|
|
|
|
|
// TODO(user): Another optim we can do is that we never need to expand
|
|
|
|
|
// any node with a reverse topo order smaller or equal to the min of the
|
|
|
|
|
// ones in this list.
|
|
|
|
|
bool clear_previous_reachability = true;
|
|
|
|
|
for (const Literal direct_child : direct_implications) {
|
|
|
|
|
if (direct_child.Index() == previous) {
|
|
|
|
|
clear_previous_reachability = false;
|
|
|
|
|
is_marked_.Clear(previous);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
2020-06-03 12:07:07 +02:00
|
|
|
if (clear_previous_reachability) {
|
|
|
|
|
is_marked_.ClearAndResize(size);
|
|
|
|
|
marked_index = 0;
|
|
|
|
|
}
|
2020-05-06 18:22:10 +02:00
|
|
|
previous = root;
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
for (const Literal direct_child : direct_implications) {
|
2023-10-12 10:06:27 +02:00
|
|
|
if (is_redundant_[direct_child]) continue;
|
|
|
|
|
if (is_marked_[direct_child]) continue;
|
2019-07-22 11:03:53 -07:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// This is a corner case where because of equivalent literal, root
|
|
|
|
|
// appear in implications_[root], we will remove it below.
|
2020-10-22 23:36:58 +02:00
|
|
|
if (direct_child.Index() == root) continue;
|
2019-07-22 11:03:53 -07:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
// When this happens, then root must be false, we handle this just after
|
|
|
|
|
// the loop.
|
|
|
|
|
if (direct_child.NegatedIndex() == root) {
|
2023-10-12 10:06:27 +02:00
|
|
|
is_marked_.Set(direct_child);
|
2020-05-06 18:22:10 +02:00
|
|
|
break;
|
|
|
|
|
}
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
MarkDescendants(direct_child);
|
|
|
|
|
|
|
|
|
|
// We have a DAG, so direct_child could only be marked first.
|
2023-10-12 10:06:27 +02:00
|
|
|
is_marked_.Clear(direct_child);
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK(!is_marked_[root])
|
2020-05-06 18:22:10 +02:00
|
|
|
<< "DetectEquivalences() should have removed cycles!";
|
2020-06-03 12:07:07 +02:00
|
|
|
is_marked_.Set(root);
|
2019-07-22 11:03:53 -07:00
|
|
|
|
2023-10-15 18:08:33 +02:00
|
|
|
// Also mark all the ones reachable through the root AMOs.
|
|
|
|
|
if (root < at_most_ones_.size()) {
|
2024-09-30 17:28:08 +02:00
|
|
|
auto is_marked = is_marked_.BitsetView();
|
2023-10-15 18:08:33 +02:00
|
|
|
for (const int start : at_most_ones_[root]) {
|
2024-03-04 18:07:17 +01:00
|
|
|
for (const Literal l : AtMostOne(start)) {
|
2023-10-15 18:08:33 +02:00
|
|
|
if (l.Index() == root) continue;
|
2024-09-30 17:28:08 +02:00
|
|
|
if (!is_marked[l.Negated()] && !is_redundant_[l.Negated()]) {
|
|
|
|
|
is_marked_.SetUnsafe(is_marked, l.Negated());
|
2023-10-15 18:08:33 +02:00
|
|
|
MarkDescendants(l.Negated());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-06-03 12:07:07 +02:00
|
|
|
// Failed literal probing. If both x and not(x) are marked then root must be
|
|
|
|
|
// false. Note that because we process "roots" in reverse topological order,
|
|
|
|
|
// we will fix the LCA of x and not(x) first.
|
2020-10-28 13:42:36 +01:00
|
|
|
const auto& marked_positions = is_marked_.PositionsSetAtLeastOnce();
|
2020-06-03 12:07:07 +02:00
|
|
|
for (; marked_index < marked_positions.size(); ++marked_index) {
|
|
|
|
|
const LiteralIndex i = marked_positions[marked_index];
|
|
|
|
|
if (is_marked_[Literal(i).NegatedIndex()]) {
|
|
|
|
|
// We tested that at the beginning.
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK(!trail_->Assignment().LiteralIsAssigned(Literal(root)));
|
2020-06-03 12:07:07 +02:00
|
|
|
|
|
|
|
|
// We propagate right away the binary implications so that we do not
|
2023-12-04 15:06:08 +01:00
|
|
|
// need to consider all antecedents of root in the transitive
|
2020-06-03 12:07:07 +02:00
|
|
|
// reduction.
|
|
|
|
|
++num_fixed;
|
2020-10-22 23:36:58 +02:00
|
|
|
if (!FixLiteral(Literal(root).Negated())) return false;
|
2020-06-03 12:07:07 +02:00
|
|
|
break;
|
2019-07-22 11:03:53 -07:00
|
|
|
}
|
|
|
|
|
}
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2020-06-03 12:07:07 +02:00
|
|
|
// Note that direct_implications will be cleared by
|
|
|
|
|
// RemoveFixedVariables() that will need to inspect it to completely
|
|
|
|
|
// remove Literal(root) from all lists.
|
2020-10-22 23:36:58 +02:00
|
|
|
if (trail_->Assignment().LiteralIsAssigned(Literal(root))) continue;
|
2020-06-03 12:07:07 +02:00
|
|
|
|
2018-11-05 16:24:47 +01:00
|
|
|
// Only keep the non-marked literal (and the redundant one which are never
|
2020-05-06 18:22:10 +02:00
|
|
|
// marked). We mark root to remove it in the corner case where it was
|
|
|
|
|
// there.
|
2018-11-05 16:24:47 +01:00
|
|
|
int new_size = 0;
|
|
|
|
|
for (const Literal l : direct_implications) {
|
2023-10-12 10:06:27 +02:00
|
|
|
if (!is_marked_[l]) {
|
2018-11-05 16:24:47 +01:00
|
|
|
direct_implications[new_size++] = l;
|
2019-07-22 11:03:53 -07:00
|
|
|
} else {
|
2023-10-15 18:08:33 +02:00
|
|
|
tmp_removed_.push_back({Literal(root), l});
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK(!is_redundant_[l]);
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
const int diff = direct_implications.size() - new_size;
|
|
|
|
|
direct_implications.resize(new_size);
|
2018-10-31 16:18:18 +01:00
|
|
|
direct_implications.shrink_to_fit();
|
2020-05-06 18:22:10 +02:00
|
|
|
num_new_redundant_implications += diff;
|
2018-11-05 16:24:47 +01:00
|
|
|
num_implications_ -= diff;
|
|
|
|
|
|
|
|
|
|
// Abort if the computation involved is too big.
|
2020-05-06 18:22:10 +02:00
|
|
|
if (work_done_in_mark_descendants_ > 1e8) {
|
|
|
|
|
aborted = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
|
|
|
|
|
2020-05-06 18:22:10 +02:00
|
|
|
is_marked_.ClearAndResize(size);
|
|
|
|
|
|
2023-10-15 18:08:33 +02:00
|
|
|
// If we aborted early, we might no longer have both a=>b and not(b)=>not(a).
|
2023-12-04 15:06:08 +01:00
|
|
|
// This is not desirable has some algo relies on this invariant. We fix this
|
|
|
|
|
// here.
|
2023-10-15 18:08:33 +02:00
|
|
|
if (aborted) {
|
|
|
|
|
absl::flat_hash_set<std::pair<LiteralIndex, LiteralIndex>> removed;
|
|
|
|
|
for (const auto [a, b] : tmp_removed_) {
|
|
|
|
|
removed.insert({a.Index(), b.Index()});
|
|
|
|
|
}
|
|
|
|
|
for (LiteralIndex i(0); i < implications_.size(); ++i) {
|
|
|
|
|
int new_size = 0;
|
|
|
|
|
const LiteralIndex negated_i = Literal(i).NegatedIndex();
|
|
|
|
|
auto& implication = implications_[i];
|
|
|
|
|
for (const Literal l : implication) {
|
|
|
|
|
if (removed.contains({l.NegatedIndex(), negated_i})) continue;
|
|
|
|
|
implication[new_size++] = l;
|
|
|
|
|
}
|
|
|
|
|
implication.resize(new_size);
|
|
|
|
|
}
|
|
|
|
|
}
|
2023-12-04 15:06:08 +01:00
|
|
|
if (num_fixed > 0) {
|
|
|
|
|
RemoveFixedVariables();
|
|
|
|
|
}
|
2023-10-15 18:08:33 +02:00
|
|
|
DCHECK(InvariantsAreOk());
|
|
|
|
|
|
|
|
|
|
gtl::STLClearObject(&tmp_removed_);
|
2020-05-06 18:22:10 +02:00
|
|
|
const double dtime = 1e-8 * work_done_in_mark_descendants_;
|
|
|
|
|
time_limit_->AdvanceDeterministicTime(dtime);
|
|
|
|
|
num_redundant_implications_ += num_new_redundant_implications;
|
2020-10-22 23:36:58 +02:00
|
|
|
LOG_IF(INFO, log_info) << "Transitive reduction removed "
|
|
|
|
|
<< num_new_redundant_implications << " literals. "
|
|
|
|
|
<< num_fixed << " fixed. " << num_implications_
|
|
|
|
|
<< " implications left. " << implications_.size()
|
2024-01-22 16:46:54 +01:00
|
|
|
<< " literals." << " dtime: " << dtime
|
2020-10-22 23:36:58 +02:00
|
|
|
<< " wtime: " << wall_timer.Get()
|
|
|
|
|
<< (aborted ? " Aborted." : "");
|
2018-11-05 16:24:47 +01:00
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
namespace {
|
|
|
|
|
|
2024-02-16 23:13:09 +01:00
|
|
|
int ElementInIntersectionOrMinusOne(absl::Span<const int> a,
|
|
|
|
|
absl::Span<const int> b) {
|
2018-11-05 16:24:47 +01:00
|
|
|
DCHECK(std::is_sorted(a.begin(), a.end()));
|
|
|
|
|
DCHECK(std::is_sorted(b.begin(), b.end()));
|
2023-09-13 18:16:28 +02:00
|
|
|
if (a.empty() || b.empty()) return -1;
|
2018-11-05 16:24:47 +01:00
|
|
|
int i = 0;
|
|
|
|
|
int j = 0;
|
2023-09-13 18:16:28 +02:00
|
|
|
while (true) {
|
2022-12-05 12:11:02 +01:00
|
|
|
if (a[i] == b[j]) return a[i];
|
2018-11-05 16:24:47 +01:00
|
|
|
if (a[i] < b[j]) {
|
2023-09-13 18:16:28 +02:00
|
|
|
if (++i == a.size()) return -1;
|
2018-11-05 16:24:47 +01:00
|
|
|
} else {
|
2023-09-13 18:16:28 +02:00
|
|
|
if (++j == b.size()) return -1;
|
2018-11-05 16:24:47 +01:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-10-22 23:36:58 +02:00
|
|
|
} // namespace
|
2018-11-05 16:24:47 +01:00
|
|
|
|
2024-11-15 07:27:30 +01:00
|
|
|
std::vector<std::pair<int, int>>
|
|
|
|
|
BinaryImplicationGraph::FilterAndSortAtMostOnes(
|
|
|
|
|
absl::Span<std::vector<Literal>> at_most_ones) {
|
2020-10-22 23:36:58 +02:00
|
|
|
// We starts by processing larger constraints first.
|
2022-01-08 14:44:59 +01:00
|
|
|
// But we want the output order to be stable.
|
|
|
|
|
std::vector<std::pair<int, int>> index_size_vector;
|
2024-11-15 07:27:30 +01:00
|
|
|
const int num_amos = at_most_ones.size();
|
|
|
|
|
index_size_vector.reserve(num_amos);
|
|
|
|
|
for (int index = 0; index < num_amos; ++index) {
|
|
|
|
|
std::vector<Literal>& clique = at_most_ones[index];
|
2022-12-05 12:11:02 +01:00
|
|
|
if (clique.size() <= 1) continue;
|
2018-11-05 16:24:47 +01:00
|
|
|
|
|
|
|
|
// Note(user): Because we always use literal with the smallest variable
|
|
|
|
|
// indices as representative, this make sure that if possible, we express
|
|
|
|
|
// the clique in term of user provided variable (that are always created
|
|
|
|
|
// first).
|
2022-12-05 12:11:02 +01:00
|
|
|
//
|
|
|
|
|
// Remap the clique to only use representative.
|
2020-10-28 13:42:36 +01:00
|
|
|
for (Literal& ref : clique) {
|
2020-02-03 16:21:57 +01:00
|
|
|
DCHECK_LT(ref.Index(), representative_of_.size());
|
2023-10-12 10:06:27 +02:00
|
|
|
const LiteralIndex rep = representative_of_[ref];
|
2020-10-22 23:36:58 +02:00
|
|
|
if (rep == kNoLiteralIndex) continue;
|
2018-11-05 16:24:47 +01:00
|
|
|
ref = Literal(rep);
|
|
|
|
|
}
|
|
|
|
|
|
2022-12-05 12:11:02 +01:00
|
|
|
// We skip anything that can be presolved further as the code below do
|
|
|
|
|
// not handle duplicate well.
|
|
|
|
|
//
|
|
|
|
|
// TODO(user): Shall we presolve it here?
|
|
|
|
|
bool skip = false;
|
|
|
|
|
std::sort(clique.begin(), clique.end());
|
|
|
|
|
for (int i = 1; i < clique.size(); ++i) {
|
|
|
|
|
if (clique[i] == clique[i - 1] || clique[i] == clique[i - i].Negated()) {
|
|
|
|
|
skip = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (skip) continue;
|
|
|
|
|
|
|
|
|
|
index_size_vector.push_back({index, clique.size()});
|
|
|
|
|
}
|
|
|
|
|
std::stable_sort(
|
|
|
|
|
index_size_vector.begin(), index_size_vector.end(),
|
|
|
|
|
[](const std::pair<int, int> a, const std::pair<int, int>& b) {
|
|
|
|
|
return a.second > b.second;
|
|
|
|
|
});
|
2024-11-15 07:27:30 +01:00
|
|
|
return index_size_vector;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Rewrites the given at-most-one constraints into (fewer) maximal cliques of
// the implication graph:
//   - size-2 cliques already contained in a previously indexed clique are
//     dropped immediately (cheap intersection lookup);
//   - other cliques are expanded into maximal ones via ExpandAtMostOne(),
//     within the max_num_explored_nodes work budget;
//   - finally, non-extended cliques included in an extended one are removed
//     using the inclusion detector.
// Removed constraints are signaled by clearing their vector in *at_most_ones.
// Returns false iff DetectEquivalences() returns false; otherwise true, even
// when the work limit aborted the expansion.
bool BinaryImplicationGraph::TransformIntoMaxCliques(
    std::vector<std::vector<Literal>>* at_most_ones,
    int64_t max_num_explored_nodes) {
  // The code below assumes a DAG.
  if (!DetectEquivalences()) return false;
  work_done_in_mark_descendants_ = 0;

  // Statistics, only used for the VLOG summary at the end.
  int num_extended = 0;
  int num_removed = 0;
  int num_added = 0;

  // Data to detect inclusion of base amo into extended amo.
  // detector_clique_index maps the detector's storage ids back to the index
  // of the clique in *at_most_ones (each clique is stored twice: once as
  // potential subset before extension, once as potential superset after).
  std::vector<int> detector_clique_index;
  CompactVectorVector<int> storage;
  InclusionDetector detector(storage, time_limit_);
  detector.SetWorkLimit(1e9);

  // Index used for the size-2 quick lookup: each literal maps to the dense
  // ids of the added cliques containing it.
  std::vector<int> dense_index_to_index;
  util_intops::StrongVector<LiteralIndex, std::vector<int>>
      max_cliques_containing(implications_.size());

  const std::vector<std::pair<int, int>> index_size_vector =
      FilterAndSortAtMostOnes(absl::MakeSpan(*at_most_ones));

  // An extended clique that was used to delete a subset must not itself be
  // deleted via its (smaller) non-extended version; see comment below.
  absl::flat_hash_set<int> cannot_be_removed;
  std::vector<bool> was_extended(at_most_ones->size(), false);
  for (const auto& [index, old_size] : index_size_vector) {
    std::vector<Literal>& clique = (*at_most_ones)[index];
    if (time_limit_->LimitReached()) break;

    // Special case for clique of size 2, we don't expand them if they
    // are included in an already added clique.
    if (clique.size() == 2) {
      DCHECK_NE(clique[0], clique[1]);
      const int dense_index = ElementInIntersectionOrMinusOne(
          max_cliques_containing[clique[0]], max_cliques_containing[clique[1]]);
      if (dense_index >= 0) {
        const int superset_index = dense_index_to_index[dense_index];
        if (was_extended[superset_index]) {
          cannot_be_removed.insert(superset_index);
        }
        ++num_removed;
        clique.clear();
        continue;
      }
    }

    // Save the non-extended version as possible subset.
    // TODO(user): Detect on the fly if a superset already exists.
    detector_clique_index.push_back(index);
    detector.AddPotentialSubset(storage.AddLiterals(clique));

    // We only expand the clique as long as we didn't spend too much time.
    if (work_done_in_mark_descendants_ < max_num_explored_nodes) {
      clique = ExpandAtMostOne(clique, max_num_explored_nodes);
    }

    // Save the extended version as possible superset.
    detector_clique_index.push_back(index);
    detector.AddPotentialSuperset(storage.AddLiterals(clique));

    // Also index clique for size 2 quick lookup.
    const int dense_index = dense_index_to_index.size();
    dense_index_to_index.push_back(index);
    for (const Literal l : clique) {
      max_cliques_containing[l].push_back(dense_index);
    }

    if (clique.size() > old_size) {
      was_extended[index] = true;
      ++num_extended;
    }
    ++num_added;
  }

  // Remove clique (before extension) that are included in an extended one.
  detector.DetectInclusions([&](int subset, int superset) {
    const int subset_index = detector_clique_index[subset];
    const int superset_index = detector_clique_index[superset];
    // A clique's extended version trivially contains its own base version.
    if (subset_index == superset_index) return;

    // Abort if one was already deleted.
    if ((*at_most_ones)[subset_index].empty()) return;
    if ((*at_most_ones)[superset_index].empty()) return;

    // If an extended clique already cover a deleted one, we cannot try to
    // remove it by looking at its non-extended version.
    if (cannot_be_removed.contains(subset_index)) return;

    ++num_removed;
    (*at_most_ones)[subset_index].clear();
    if (was_extended[superset_index]) cannot_be_removed.insert(superset_index);
  });

  if (num_extended > 0 || num_removed > 0 || num_added > 0) {
    VLOG(1) << "Clique Extended: " << num_extended
            << " Removed: " << num_removed << " Added: " << num_added
            << (work_done_in_mark_descendants_ > max_num_explored_nodes
                    ? " (Aborted)"
                    : "");
  }
  return true;
}
|
|
|
|
|
|
2024-11-15 07:27:30 +01:00
|
|
|
// Faster variant of TransformIntoMaxCliques(): indexes all at-most-ones as
// subsets once, then for each clique repeatedly computes the intersection of
// the literals implied-at-false by its members and uses SubsetsDetector to
// both (a) extend the clique with other amos fully contained in that
// intersection and (b) delete amos that are fully covered. Deleted amos are
// signaled by clearing their vector in at_most_ones.
// Returns false iff DetectEquivalences() returns false. If dtime is non-null,
// an estimate of the deterministic time spent is added to it.
bool BinaryImplicationGraph::MergeAtMostOnes(
    absl::Span<std::vector<Literal>> at_most_ones,
    int64_t max_num_explored_nodes, double* dtime) {
  // The code below assumes a DAG.
  if (!DetectEquivalences()) return false;
  work_done_in_mark_descendants_ = 0;

  const std::vector<std::pair<int, int>> index_size_vector =
      FilterAndSortAtMostOnes(at_most_ones);

  // Data to detect inclusion of base amo into extended amo.
  // detector_clique_index maps storage ids back to positions in at_most_ones.
  std::vector<int> detector_clique_index;
  CompactVectorVector<int> storage;
  for (const auto& [index, old_size] : index_size_vector) {
    if (time_limit_->LimitReached()) break;
    detector_clique_index.push_back(index);
    storage.AddLiterals(at_most_ones[index]);
  }

  // We use a higher limit here as the code is faster.
  SubsetsDetector detector(storage, time_limit_);
  detector.SetWorkLimit(10 * max_num_explored_nodes);
  detector.IndexAllStorageAsSubsets();

  // Now try to expand one by one.
  //
  // TODO(user): We should process clique with elements in common together so
  // that we can reuse MarkDescendants() which is slow. We should be able to
  // "cache" a few of the last calls.
  std::vector<int> intersection;
  const int num_to_consider = index_size_vector.size();
  for (int subset_index = 0; subset_index < num_to_consider; ++subset_index) {
    const int index = index_size_vector[subset_index].first;
    std::vector<Literal>& clique = at_most_ones[index];
    if (clique.empty()) continue;  // Was deleted.

    if (work_done_in_mark_descendants_ > max_num_explored_nodes) break;
    if (detector.Stopped()) break;

    // We start with the clique in the "intersection".
    // This prefix will never change.
    // tmp_bitset_ marks the literals currently in the clique.
    int clique_i = 0;
    int next_index_to_try = 0;
    intersection.clear();
    tmp_bitset_.ClearAndResize(LiteralIndex(implications_.size()));
    for (const Literal l : clique) {
      intersection.push_back(l.Index().value());
      tmp_bitset_.Set(l);
    }

    // Each round: refine the intersection using the clique members added by
    // the previous round, then look for amos included in it.
    while (true) {
      if (work_done_in_mark_descendants_ > max_num_explored_nodes) break;
      if (detector.Stopped()) break;

      // Compute the intersection of all the element (or the new ones) of this
      // clique.
      //
      // Optimization: if clique_i > 0 && intersection.size() == clique.size()
      // we already know that we performed the max possible extension.
      if (clique_i > 0 && intersection.size() == clique.size()) {
        clique_i = clique.size();
      }
      for (; clique_i < clique.size(); ++clique_i) {
        const Literal l = clique[clique_i];

        is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
        MarkDescendants(l);

        if (clique_i == 0) {
          // Initially we have the clique + the negation of everything
          // propagated by l.
          for (const LiteralIndex index :
               is_marked_.PositionsSetAtLeastOnce()) {
            const Literal lit = Literal(index).Negated();
            if (!tmp_bitset_[lit]) {
              intersection.push_back(lit.Index().value());
            }
          }
        } else {
          // We intersect with the negation of everything propagated by not(l).
          // Note that we always keep the clique in case some implication were
          // not added to the graph.
          int new_size = 0;
          const int old_size = intersection.size();
          for (int i = 0; i < old_size; ++i) {
            // Keep next_index_to_try pointing at the same element after the
            // in-place compaction below.
            if (i == next_index_to_try) {
              next_index_to_try = new_size;
            }
            const int index = intersection[i];
            const Literal lit = Literal(LiteralIndex(index));
            if (tmp_bitset_[lit] || is_marked_[lit.Negated()]) {
              intersection[new_size++] = index;
            }
          }
          intersection.resize(new_size);
        }

        // We can abort early as soon as there is no extra literal than the
        // initial clique.
        if (intersection.size() <= clique.size()) break;
      }

      // Should contain the original clique. If there are no more entries, then
      // we will not extend this clique. However, we still call FindSubsets() in
      // order to remove fully included ones.
      CHECK_GE(intersection.size(), clique.size());

      // Look for elements included in the intersection.
      // Note that we clear elements fully included at the same time.
      //
      // TODO(user): next_index_to_try help, but we might still rescan most of
      // the one-watcher list of intersection[next_index_to_try], we could be
      // a bit faster here.
      int num_extra = 0;
      detector.FindSubsets(intersection, &next_index_to_try, [&](int subset) {
        // Skip ourselves: a clique is trivially a subset of its own
        // intersection.
        if (subset == subset_index) {
          detector.StopProcessingCurrentSubset();
          return;
        }

        num_extra = 0;
        for (const int index : storage[subset]) {
          const LiteralIndex lit_index = LiteralIndex(index);
          if (tmp_bitset_[lit_index]) continue;  // In clique.
          tmp_bitset_.Set(lit_index);
          clique.push_back(Literal(lit_index));  // extend.
          ++num_extra;
        }
        if (num_extra == 0) {
          // Fully included -- remove.
          at_most_ones[detector_clique_index[subset]].clear();
          detector.StopProcessingCurrentSubset();
          return;
        }

        detector.StopProcessingCurrentSuperset();  // Finish.
      });

      // No extension: end loop.
      if (num_extra == 0) break;
    }
  }
  if (dtime != nullptr) {
    // NOTE(review): the 1e-8 / 1e-9 factors look like empirical deterministic
    // time calibration constants — confirm against other dtime accounting.
    *dtime +=
        1e-8 * work_done_in_mark_descendants_ + 1e-9 * detector.work_done();
  }
  return true;
}
|
|
|
|
|
|
2022-02-24 14:17:43 +01:00
|
|
|
// Expands the given at-most-one into a maximal one, restricted to literals
// allowed by can_be_included. At each step, the candidate extensions are the
// negations of the literals implied by every current clique member (tracked
// incrementally in "intersection" via MarkDescendants()). When use_weight is
// true, the candidate with the largest (slightly randomized) lp value of its
// negation is chosen; otherwise the candidate with the smallest literal index
// is preferred. Work is bounded by a fixed budget on
// work_done_in_mark_descendants_.
template <bool use_weight>
std::vector<Literal> BinaryImplicationGraph::ExpandAtMostOneWithWeight(
    const absl::Span<const Literal> at_most_one,
    const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
    const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values) {
  std::vector<Literal> clique(at_most_one.begin(), at_most_one.end());
  std::vector<LiteralIndex> intersection;
  const int64_t old_work = work_done_in_mark_descendants_;
  for (int i = 0; i < clique.size(); ++i) {
    // Do not spend too much time here.
    if (work_done_in_mark_descendants_ - old_work > 1e8) break;

    is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
    MarkDescendants(clique[i]);
    if (i == 0) {
      // Seed the intersection with everything implied by the first member
      // whose negation is allowed to enter the clique.
      for (const LiteralIndex index : is_marked_.PositionsSetAtLeastOnce()) {
        if (can_be_included[Literal(index).NegatedIndex()]) {
          intersection.push_back(index);
        }
      }
      // A clique member's negation must not be proposed as an extension.
      for (const Literal l : clique) is_marked_.Clear(l.NegatedIndex());
    }

    // Keep only the candidates also implied by clique[i].
    int new_size = 0;
    is_marked_.Clear(clique[i]);
    is_marked_.Clear(clique[i].NegatedIndex());
    for (const LiteralIndex index : intersection) {
      if (!is_marked_[index]) continue;
      intersection[new_size++] = index;
    }
    intersection.resize(new_size);
    if (intersection.empty()) break;

    // Expand? The negation of any literal in the intersection is a valid way
    // to extend the clique.
    if (i + 1 == clique.size()) {
      // Heuristic: use literal with largest lp value. We randomize slightly.
      int index = -1;
      double max_lp = 0.0;
      for (int j = 0; j < intersection.size(); ++j) {
        // If we don't use weight, we prefer variable that comes first.
        const double lp =
            use_weight
                ? expanded_lp_values[Literal(intersection[j]).NegatedIndex()] +
                      absl::Uniform<double>(*random_, 0.0, 1e-4)
                : can_be_included.size() - intersection[j].value();
        if (index == -1 || lp > max_lp) {
          index = j;
          max_lp = lp;
        }
      }
      if (index != -1) {
        clique.push_back(Literal(intersection[index]).Negated());
        // Swap-and-pop: remove the chosen candidate in O(1).
        std::swap(intersection.back(), intersection[index]);
        intersection.pop_back();
      }
    }
  }
  return clique;
}
|
|
|
|
|
|
2022-02-24 14:17:43 +01:00
|
|
|
// Explicit instantiations: make sure both versions are compiled, since the
// template definition lives in this .cc file.
template std::vector<Literal>
BinaryImplicationGraph::ExpandAtMostOneWithWeight<true>(
    const absl::Span<const Literal> at_most_one,
    const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
    const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values);
template std::vector<Literal>
BinaryImplicationGraph::ExpandAtMostOneWithWeight<false>(
    const absl::Span<const Literal> at_most_one,
    const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
    const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values);
|
2022-02-24 14:17:43 +01:00
|
|
|
|
2024-09-30 17:28:08 +02:00
|
|
|
// This function and the generated cuts serve two purposes:
// 1/ If a new clause of size 2 was discovered and not included in the LP, we
// will add it.
// 2/ The more classical clique cut separation algorithm.
//
// Note that once 1/ is performed, any literal close to 1.0 in the lp shouldn't
// be in the same clique as a literal with positive weight. So for step 2, we
// only really need to consider fractional variables.
//
// Returns a reference to the member tmp_cuts_, which is overwritten on each
// call; each entry is an at-most-one over the given literals.
const std::vector<std::vector<Literal>>&
BinaryImplicationGraph::GenerateAtMostOnesWithLargeWeight(
    absl::Span<const Literal> literals, absl::Span<const double> lp_values,
    absl::Span<const double> reduced_costs) {
  // We only want to generate a cut with literals from the LP, not extra ones.
  const int num_literals = implications_.size();
  util_intops::StrongVector<LiteralIndex, bool> can_be_included(num_literals,
                                                                false);
  util_intops::StrongVector<LiteralIndex, double> expanded_lp_values(
      num_literals, 0.0);
  util_intops::StrongVector<LiteralIndex, double> heuristic_weights(
      num_literals, 0.0);
  const int size = literals.size();
  for (int i = 0; i < size; ++i) {
    const Literal l = literals[i];
    can_be_included[l] = true;
    can_be_included[l.NegatedIndex()] = true;

    // lp value of not(l) is 1 - lp value of l.
    const double value = lp_values[i];
    expanded_lp_values[l] = value;
    expanded_lp_values[l.NegatedIndex()] = 1.0 - value;

    // This is used for extending clique-cuts.
    // In most situations, we will only extend the cuts with literals at zero,
    // and we prefer "low" reduced cost first, so we negate it. Variables with
    // high reduced costs will likely stay that way and are of less interest in
    // a clique cut. At least that is my interpretation.
    //
    // TODO(user): For large problems or when we base the clique from a newly
    // added and violated 2-clique, we might consider only a subset of
    // fractional variables, so we might need to include fractional variable
    // first, but then their rc should be zero, so it should be already kind of
    // doing that.
    //
    // Remark: This seems to have a huge impact to the cut performance!
    const double rc = reduced_costs[i];
    heuristic_weights[l] = -rc;
    heuristic_weights[l.NegatedIndex()] = rc;
  }

  // We want highest sum first.
  struct Candidate {
    Literal a;
    Literal b;
    double sum;
    // Inverted comparison so std::sort puts the largest sum first.
    bool operator<(const Candidate& other) const { return sum > other.sum; }
  };
  std::vector<Candidate> candidates;

  // First heuristic. Currently we only consider violated at most one of size 2,
  // and extend them. Right now, the code is a bit slow to try too many at every
  // LP node so it is why we are defensive like this. Note also that because we
  // currently still statically add the initial implications, this will only add
  // cut based on newly learned binary clause. Or the one that were not added
  // to the relaxation in the first place.
  std::vector<Literal> fractional_literals;
  for (int i = 0; i < size; ++i) {
    Literal current_literal = literals[i];
    double current_value = lp_values[i];
    if (trail_->Assignment().LiteralIsAssigned(current_literal)) continue;
    if (is_redundant_[current_literal]) continue;

    // Normalize so that current_value >= 0.5.
    if (current_value < 0.5) {
      current_literal = current_literal.Negated();
      current_value = 1.0 - current_value;
    }

    if (current_value < 0.99) {
      fractional_literals.push_back(current_literal);
    }

    // We consider only one candidate for each current_literal.
    LiteralIndex best = kNoLiteralIndex;
    double best_value = 0.0;
    for (const Literal l : implications_[current_literal]) {
      if (!can_be_included[l]) continue;
      // current_literal => l, so (current_literal + not(l) <= 1) is valid;
      // it is violated when the lp activity exceeds 1.
      const double activity =
          current_value + expanded_lp_values[l.NegatedIndex()];
      if (activity <= 1.01) continue;
      // Small random noise to break ties between equally violated candidates.
      const double v = activity + absl::Uniform<double>(*random_, 0.0, 1e-4);
      if (best == kNoLiteralIndex || v > best_value) {
        best_value = v;
        best = l.NegatedIndex();
      }
    }
    if (best != kNoLiteralIndex) {
      const double activity = current_value + expanded_lp_values[best];
      candidates.push_back({current_literal, Literal(best), activity});
    }
  }

  // Do not generate too many cuts at once.
  const int kMaxNumberOfCutPerCall = 10;
  std::sort(candidates.begin(), candidates.end());
  if (candidates.size() > kMaxNumberOfCutPerCall) {
    candidates.resize(kMaxNumberOfCutPerCall);
  }

  // Expand to a maximal at most one each candidates before returning them.
  // Note that we only expand using literal from the LP.
  tmp_cuts_.clear();
  for (const Candidate& candidate : candidates) {
    tmp_cuts_.push_back(ExpandAtMostOneWithWeight(
        {candidate.a, candidate.b}, can_be_included, heuristic_weights));
  }

  // Once we processed new implications, also add "proper" clique cuts.
  // We can generate a small graph and separate cut efficiently there.
  if (fractional_literals.size() > 1) {
    // Lets permute this randomly and truncate if we have too many variables.
    // Since we use bitset it is good to have a multiple of 64 there.
    //
    // TODO(user): Prefer more fractional variables.
    const int max_graph_size = 1024;
    if (fractional_literals.size() > max_graph_size) {
      std::shuffle(fractional_literals.begin(), fractional_literals.end(),
                   *random_);
      fractional_literals.resize(max_graph_size);
    }

    // Two dense nodes per fractional literal: 2*k for l, 2*k+1 for not(l).
    bron_kerbosch_.Initialize(fractional_literals.size() * 2);

    // Prepare a dense mapping.
    int i = 0;
    tmp_mapping_.resize(implications_.size(), -1);
    for (const Literal l : fractional_literals) {
      bron_kerbosch_.SetWeight(i, expanded_lp_values[l]);
      tmp_mapping_[l] = i++;
      bron_kerbosch_.SetWeight(i, expanded_lp_values[l.Negated()]);
      tmp_mapping_[l.Negated()] = i++;
    }

    // Copy the implication subgraph and remap it to a dense indexing.
    //
    // TODO(user): Treat at_most_one more efficiently. We can collect them
    // and scan each of them just once.
    for (const Literal base : fractional_literals) {
      for (const Literal l : {base, base.Negated()}) {
        const int from = tmp_mapping_[l];
        for (const Literal next : DirectImplications(l)) {
          // l => next so (l + not(next) <= 1).
          const int to = tmp_mapping_[next.Negated()];
          if (to != -1) {
            bron_kerbosch_.AddEdge(from, to);
          }
        }
      }
    }

    // Before running the algo, compute the transitive closure.
    // The graph shouldn't be too large, so this should be fast enough.
    bron_kerbosch_.TakeTransitiveClosureOfImplicationGraph();

    // Only keep cliques whose lp weight exceeds 1 (i.e. violated cuts).
    bron_kerbosch_.SetWorkLimit(1e8);
    bron_kerbosch_.SetMinimumWeight(1.001);
    std::vector<std::vector<int>> cliques = bron_kerbosch_.Run();

    // If we have many candidates, we will only expand the first few with
    // maximum weights.
    const int max_num_per_batch = 5;
    std::vector<std::pair<int, double>> with_weight =
        bron_kerbosch_.GetMutableIndexAndWeight();
    if (with_weight.size() > max_num_per_batch) {
      std::sort(
          with_weight.begin(), with_weight.end(),
          [](const std::pair<int, double>& a, const std::pair<int, double>& b) {
            return a.second > b.second;
          });
      with_weight.resize(max_num_per_batch);
    }

    std::vector<Literal> at_most_one;
    for (const auto [index, weight] : with_weight) {
      // Convert dense node ids back to literals (even = positive, odd =
      // negated; see the mapping above).
      at_most_one.clear();
      for (const int i : cliques[index]) {
        const Literal l = fractional_literals[i / 2];
        at_most_one.push_back(i % 2 == 1 ? l.Negated() : l);
      }

      // Expand and add clique.
      //
      // TODO(user): Expansion is pretty slow. Given that the base clique can
      // share literals being part of the same amo, we should be able to speed
      // that up, we don't want to scan an amo twice basically.
      tmp_cuts_.push_back(ExpandAtMostOneWithWeight(
          at_most_one, can_be_included, heuristic_weights));
    }

    // Clear the dense mapping so tmp_mapping_ can be reused next call.
    for (const Literal l : fractional_literals) {
      tmp_mapping_[l] = -1;
      tmp_mapping_[l.Negated()] = -1;
    }
  }

  return tmp_cuts_;
}
|
|
|
|
|
|
2023-07-09 13:45:48 +02:00
|
|
|
// TODO(user): Use deterministic limit.
// TODO(user): Explore the graph instead of just looking at full amo, especially
// since we expand small ones.
//
// Greedily partitions *literals into groups, each covered by one stored
// at-most-one: repeatedly picks the amo with the largest intersection with the
// still-unassigned literals (lazy priority queue: stale sizes are recomputed
// and re-pushed on pop), then moves that intersection to the front of
// *literals and records it as a span. Returned spans point into *literals, so
// they are invalidated if the vector is modified.
std::vector<absl::Span<const Literal>>
BinaryImplicationGraph::HeuristicAmoPartition(std::vector<Literal>* literals) {
  std::vector<absl::Span<const Literal>> result;

  // to_consider[l] is true while l is not yet assigned to a group.
  util_intops::StrongVector<LiteralIndex, bool> to_consider(
      implications_.size(), false);
  for (const Literal l : *literals) to_consider[l] = true;

  // Priority queue of (intersection_size, start_of_amo).
  std::priority_queue<std::pair<int, int>> pq;

  // Compute for each at most one that might overlap, its overlapping size and
  // stores its start in the at_most_one_buffer_.
  //
  // This is in O(num_literal in amo).
  absl::flat_hash_set<int> explored_amo;
  for (const Literal l : *literals) {
    if (l.Index() >= at_most_ones_.size()) continue;
    for (const int start : at_most_ones_[l]) {
      // Each amo (identified by its start offset) is scored only once.
      const auto [_, inserted] = explored_amo.insert(start);
      if (!inserted) continue;

      int intersection_size = 0;
      for (const Literal l : AtMostOne(start)) {
        if (to_consider[l]) ++intersection_size;
      }
      if (intersection_size > 1) {
        pq.push({intersection_size, start});
      }

      // Abort early if we are done.
      if (intersection_size == literals->size()) break;
    }
  }

  // Consume AMOs, update size.
  int num_processed = 0;
  while (!pq.empty()) {
    const auto [old_size, start] = pq.top();
    pq.pop();

    // Recompute size: previous pops may have consumed some of our literals.
    int intersection_size = 0;
    for (const Literal l : AtMostOne(start)) {
      if (to_consider[l]) ++intersection_size;
    }
    if (intersection_size != old_size) {
      // re-add with new size.
      if (intersection_size > 1) {
        pq.push({intersection_size, start});
      }
      continue;
    }

    // Mark the literals of that at most one as consumed.
    for (const Literal l : AtMostOne(start)) {
      to_consider[l] = false;
    }

    // Extract the intersection by moving it in
    // [num_processed, num_processed + intersection_size).
    const int span_start = num_processed;
    for (int i = num_processed; i < literals->size(); ++i) {
      // Literals still marked were not in this amo; leave them for later.
      if (to_consider[(*literals)[i]]) continue;
      std::swap((*literals)[num_processed], (*literals)[i]);
      ++num_processed;
    }
    result.push_back(absl::MakeSpan(literals->data() + span_start,
                                    num_processed - span_start));
  }
  return result;
}
|
|
|
|
|
|
2018-11-05 16:24:47 +01:00
|
|
|
// Marks in is_marked_ all non-redundant literals reachable from root through
// binary implications and stored at-most-ones (for an amo containing the
// current literal, every other member's negation is implied). Performs a BFS
// using bfs_stack_ as a flat work array; the amount of work is accumulated in
// work_done_in_mark_descendants_ for external budgeting. A redundant root is
// a no-op. Callers are expected to have sized/cleared is_marked_ beforehand
// (see e.g. ExpandAtMostOne()).
void BinaryImplicationGraph::MarkDescendants(Literal root) {
  // Raw views to avoid per-access indirection in the hot loop.
  auto* const stack = bfs_stack_.data();
  auto is_marked = is_marked_.BitsetView();
  auto is_redundant = is_redundant_.const_view();
  if (is_redundant[root]) return;

  int stack_size = 1;
  stack[0] = root;
  is_marked_.Set(root);
  const int amo_size = static_cast<int>(at_most_ones_.size());
  auto implies_something = implies_something_.const_view();
  // Classic array-based BFS: j chases stack_size.
  for (int j = 0; j < stack_size; ++j) {
    const Literal current = stack[j];
    if (!implies_something[current]) continue;

    work_done_in_mark_descendants_ += implications_[current].size();
    for (const Literal l : implications_[current]) {
      if (!is_marked[l] && !is_redundant[l]) {
        // SetUnsafe: l is known in range since is_marked was sized above.
        is_marked_.SetUnsafe(is_marked, l);
        stack[stack_size++] = l;
      }
    }

    if (current.Index() >= amo_size) continue;
    for (const int start : at_most_ones_[current]) {
      work_done_in_mark_descendants_ += AtMostOne(start).size();
      for (const Literal l : AtMostOne(start)) {
        if (l == current) continue;
        // current in the amo implies not(l) for every other member l.
        if (!is_marked[l.NegatedIndex()] && !is_redundant[l.NegatedIndex()]) {
          is_marked_.SetUnsafe(is_marked, l.NegatedIndex());
          stack[stack_size++] = l.Negated();
        }
      }
    }
  }
  work_done_in_mark_descendants_ += stack_size;
}
|
|
|
|
|
|
|
|
|
|
// Expands the given at-most-one into a larger (possibly maximal) one.
// Maintains in "intersection" the literals implied by every clique member so
// far (each such literal's negation is a valid extension); once all current
// members have been processed, the last intersection element's negation is
// appended and the process continues. Bounded by max_num_explored_nodes via
// work_done_in_mark_descendants_, so the result may not be maximal.
std::vector<Literal> BinaryImplicationGraph::ExpandAtMostOne(
    const absl::Span<const Literal> at_most_one,
    int64_t max_num_explored_nodes) {
  std::vector<Literal> clique(at_most_one.begin(), at_most_one.end());

  // Optim: if any member implies nothing, the intersection below would be
  // empty, so no expansion is possible.
  for (const Literal l : clique) {
    if (!implies_something_[l]) {
      return clique;
    }
  }

  // TODO(user): Improve algorithm. If we do a dfs, we can know if a literal
  // has no descendant in the current intersection. We can keep such literal
  // marked.
  std::vector<LiteralIndex> intersection;
  for (int i = 0; i < clique.size(); ++i) {
    if (work_done_in_mark_descendants_ > max_num_explored_nodes) break;
    is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
    MarkDescendants(clique[i]);

    if (i == 0) {
      // Seed with everything implied by the first member; drop the negations
      // of the clique itself so we never propose an existing member's twin.
      intersection = is_marked_.PositionsSetAtLeastOnce();
      for (const Literal l : clique) is_marked_.Clear(l.NegatedIndex());
    }

    // Keep only candidates also implied by clique[i].
    int new_size = 0;
    is_marked_.Clear(clique[i].NegatedIndex());  // TODO(user): explain.
    for (const LiteralIndex index : intersection) {
      if (is_marked_[index]) intersection[new_size++] = index;
    }
    intersection.resize(new_size);
    if (intersection.empty()) break;

    // TODO(user): If the intersection is small compared to the members of the
    // clique left to explore, we could look at the descendants of the negated
    // intersection instead.

    // Expand? Once every member has been intersected, any remaining
    // candidate's negation is mutually exclusive with the whole clique.
    if (i + 1 == clique.size()) {
      clique.push_back(Literal(intersection.back()).Negated());
      intersection.pop_back();
    }
  }
  return clique;
}
|
|
|
|
|
|
2020-06-03 12:07:07 +02:00
|
|
|
// TODO(user): lazy cleanup the lists on is_removed_?
// TODO(user): Mark fixed variable as is_removed_ for faster iteration?
//
// Returns (and caches in direct_implications_) the list of non-removed,
// unassigned literals directly implied by `literal`, both via the binary
// implication lists and via the at-most-one constraints containing `literal`.
// Side effect: in_direct_implications_ is left as the membership bitmap of the
// returned list, and estimated_sizes_[literal] is refreshed. Note that the
// returned reference aliases a member buffer invalidated by the next call.
const std::vector<Literal>& BinaryImplicationGraph::DirectImplications(
    Literal literal) {
  DCHECK(!is_removed_[literal]);

  // Clear old state.
  for (const Literal l : direct_implications_) {
    in_direct_implications_[l] = false;
  }
  direct_implications_.clear();

  // Fill new state.
  const VariablesAssignment& assignment = trail_->Assignment();
  DCHECK(!assignment.LiteralIsAssigned(literal));
  for (const Literal l : implications_[literal]) {
    if (l == literal) continue;
    if (assignment.LiteralIsAssigned(l)) continue;
    // in_direct_implications_ de-duplicates: a literal can appear both in the
    // implication list and below via an at-most-one.
    if (!is_removed_[l] && !in_direct_implications_[l]) {
      in_direct_implications_[l] = true;
      direct_implications_.push_back(l);
    }
  }
  if (literal.Index() < at_most_ones_.size()) {
    if (is_redundant_[literal]) {
      // A redundant literal should no longer be indexed in any at-most-one.
      DCHECK(at_most_ones_[literal].empty());
    }
    for (const int start : at_most_ones_[literal]) {
      for (const Literal l : AtMostOne(start)) {
        if (l == literal) continue;
        if (assignment.LiteralIsAssigned(l)) continue;
        // In an at-most-one, literal => not(l) for every other member l.
        if (!is_removed_[l] && !in_direct_implications_[l.NegatedIndex()]) {
          in_direct_implications_[l.NegatedIndex()] = true;
          direct_implications_.push_back(l.Negated());
        }
      }
    }
  }
  estimated_sizes_[literal] = direct_implications_.size();
  return direct_implications_;
}
|
|
|
|
|
|
2024-03-04 18:07:17 +01:00
|
|
|
// Decodes the at-most-one constraint stored at offset `start` in
// at_most_one_buffer_: the entry at `start` encodes the constraint size
// (stashed in a Literal's index), followed by that many literals.
absl::Span<const Literal> BinaryImplicationGraph::AtMostOne(int start) const {
  const int num_literals = at_most_one_buffer_[start].Index().value();
  const Literal* first_literal = &at_most_one_buffer_[start + 1];
  return absl::MakeSpan(first_literal, num_literals);
}
|
|
|
|
|
|
|
|
|
|
// Returns a uniformly random literal implied by `lhs`, drawn from both its
// binary implication list and the at-most-one constraints containing it, or
// kNoLiteralIndex if there are none.
LiteralIndex BinaryImplicationGraph::RandomImpliedLiteral(Literal lhs) {
  const int size1 = implications_[lhs].size();
  const int size2 =
      lhs.Index() < at_most_ones_.size() ? at_most_ones_[lhs].size() : 0;
  if (size1 + size2 == 0) return kNoLiteralIndex;

  // First pick uniformly between the two sources of implications.
  const int choice = absl::Uniform<int>(*random_, 0, size1 + size2);
  if (choice < size1) {
    return implications_[lhs][choice].Index();
  }

  // Otherwise pick a member of the chosen at-most-one: lhs implies the
  // negation of every other member.
  const absl::Span<const Literal> amo =
      AtMostOne(at_most_ones_[lhs][choice - size1]);
  CHECK_GE(amo.size(), 2);
  const int first_choice = absl::Uniform<int>(*random_, 0, amo.size());
  const Literal lit = amo[first_choice];
  if (lit != lhs) return lit.NegatedIndex();

  // We are unlucky and just picked the wrong literal: take a different one.
  // Sampling in [0, size - 1) and shifting past first_choice keeps the
  // distribution uniform over the remaining members.
  int next_choice = absl::Uniform<int>(*random_, 0, amo.size() - 1);
  if (next_choice >= first_choice) {
    next_choice += 1;
  }
  CHECK_NE(amo[next_choice], lhs);
  return amo[next_choice].NegatedIndex();
}
|
|
|
|
|
|
2020-06-03 12:07:07 +02:00
|
|
|
// Detects literals l with not(l) => literal => l around `var`: such l must be
// true, so they are fixed. Returns true if any propagation happened. Sets
// *is_unsat (and returns false) if fixing a literal leads to a conflict.
bool BinaryImplicationGraph::FindFailedLiteralAroundVar(BooleanVariable var,
                                                        bool* is_unsat) {
  const int saved_index = propagation_trail_index_;
  DCHECK_EQ(propagation_trail_index_, trail_->Index());  // Propagation done.

  const VariablesAssignment& assignment = trail_->Assignment();
  if (assignment.VariableIsAssigned(var)) return false;

  const Literal literal(var, true);
  // Note the call order: the second call overwrites direct_implications_ and
  // in_direct_implications_, which is what the loop below tests against.
  direct_implications_of_negated_literal_ =
      DirectImplications(literal.Negated());
  DirectImplications(literal);  // Fill in_direct_implications_.
  for (const Literal l : direct_implications_of_negated_literal_) {
    if (in_direct_implications_[l]) {
      // not(l) => literal => l.
      if (!FixLiteral(l)) {
        *is_unsat = true;
        return false;
      }
    }
  }

  // True iff FixLiteral() above pushed something onto the trail.
  return propagation_trail_index_ > saved_index;
}
|
|
|
|
|
|
2021-03-04 18:26:01 +01:00
|
|
|
int64_t BinaryImplicationGraph::NumImplicationOnVariableRemoval(
|
2020-10-22 23:36:58 +02:00
|
|
|
BooleanVariable var) {
|
2020-06-03 12:07:07 +02:00
|
|
|
const Literal literal(var, true);
|
2021-03-04 18:26:01 +01:00
|
|
|
int64_t result = 0;
|
2020-06-03 12:07:07 +02:00
|
|
|
direct_implications_of_negated_literal_ =
|
|
|
|
|
DirectImplications(literal.Negated());
|
2021-03-04 18:26:01 +01:00
|
|
|
const int64_t s1 = DirectImplications(literal).size();
|
2020-06-03 12:07:07 +02:00
|
|
|
for (const Literal l : direct_implications_of_negated_literal_) {
|
|
|
|
|
result += s1;
|
|
|
|
|
|
|
|
|
|
// We should have dealt with that in FindFailedLiteralAroundVar().
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK(!in_direct_implications_[l]);
|
2020-06-03 12:07:07 +02:00
|
|
|
|
|
|
|
|
// l => literal => l: equivalent variable!
|
2020-10-22 23:36:58 +02:00
|
|
|
if (in_direct_implications_[l.NegatedIndex()]) result--;
|
2020-06-03 12:07:07 +02:00
|
|
|
}
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// For all possible a => var => b, add a => b.
//
// The removed binary clauses involving var are reported to the DRAT proof
// handler and appended to *postsolve_clauses so var's value can be
// reconstructed later. Afterwards both polarities of var are flagged removed
// and redundant; stale occurrences of var in other implication lists are
// purged lazily by a later cleanup pass (see comment below).
void BinaryImplicationGraph::RemoveBooleanVariable(
    BooleanVariable var, std::deque<std::vector<Literal>>* postsolve_clauses) {
  const Literal literal(var, true);
  DCHECK(!is_removed_[literal.Index()]);
  DCHECK(!is_redundant_[literal.Index()]);

  // Note: the second DirectImplications() call overwrites direct_implications_
  // (read again further down) but not the copy made here.
  direct_implications_of_negated_literal_ =
      DirectImplications(literal.Negated());
  for (const Literal b : DirectImplications(literal)) {
    if (is_removed_[b]) continue;
    DCHECK(!is_redundant_[b]);
    estimated_sizes_[b.NegatedIndex()]--;
    for (const Literal a_negated : direct_implications_of_negated_literal_) {
      // Skip the degenerate pair that would create b => b.
      if (a_negated.Negated() == b) continue;
      if (is_removed_[a_negated]) continue;
      AddImplication(a_negated.Negated(), b);
    }
  }
  for (const Literal a_negated : direct_implications_of_negated_literal_) {
    if (is_removed_[a_negated]) continue;
    DCHECK(!is_redundant_[a_negated]);
    estimated_sizes_[a_negated.NegatedIndex()]--;
  }

  // Notify the deletion to the proof checker and the postsolve.
  // Note that we want var first in these clauses for the postsolve.
  for (const Literal b : direct_implications_) {
    if (drat_proof_handler_ != nullptr) {
      drat_proof_handler_->DeleteClause({Literal(var, false), b});
    }
    postsolve_clauses->push_back({Literal(var, false), b});
  }
  for (const Literal a_negated : direct_implications_of_negated_literal_) {
    if (drat_proof_handler_ != nullptr) {
      drat_proof_handler_->DeleteClause({Literal(var, true), a_negated});
    }
    postsolve_clauses->push_back({Literal(var, true), a_negated});
  }

  // We need to remove any occurrence of var in our implication lists, this will
  // be delayed to the CleanupAllRemovedVariables() call.
  for (const LiteralIndex index : {literal.Index(), literal.NegatedIndex()}) {
    is_removed_[index] = true;
    implications_[index].clear();
    if (!is_redundant_[index]) {
      ++num_redundant_literals_;
      is_redundant_.Set(index);
    }
  }
}
|
|
|
|
|
|
2023-12-04 15:06:08 +01:00
|
|
|
// Marks every redundant (but not yet removed) literal as removed, recording
// the equivalence l => representative(l) in *postsolve_clauses, and strips
// redundant literals from all remaining implication lists in place.
void BinaryImplicationGraph::RemoveAllRedundantVariables(
    std::deque<std::vector<Literal>>* postsolve_clauses) {
  for (LiteralIndex a(0); a < implications_.size(); ++a) {
    if (is_redundant_[a] && !is_removed_[a]) {
      // Keep the link to the representative so postsolve can recover a's
      // value from it.
      postsolve_clauses->push_back(
          {Literal(a), Literal(RepresentativeOf(Literal(a))).Negated()});
      is_removed_[a] = true;
      gtl::STLClearObject(&(implications_[a]));
      continue;
    }

    // In-place compaction: keep only the non-redundant targets.
    int new_size = 0;
    auto& implication = implications_[a];
    for (const Literal l : implication) {
      if (!is_redundant_[l]) {
        implication[new_size++] = l;
      }
    }
    implication.resize(new_size);
  }
}
|
|
|
|
|
|
|
|
|
|
// Clears the implication lists of removed or assigned literals, strips removed
// and true targets from the remaining lists, and rebuilds the at-most-one
// index from scratch. Assumes propagation is up to date (checked below).
void BinaryImplicationGraph::CleanupAllRemovedAndFixedVariables() {
  const VariablesAssignment& assignment = trail_->Assignment();
  for (LiteralIndex a(0); a < implications_.size(); ++a) {
    if (is_removed_[a] || assignment.LiteralIsAssigned(Literal(a))) {
      if (DEBUG_MODE && assignment.LiteralIsTrue(Literal(a))) {
        // The code assumes that everything is already propagated.
        // Otherwise we will remove implications that didn't propagate yet!
        for (const Literal lit : implications_[a]) {
          DCHECK(trail_->Assignment().LiteralIsTrue(lit));
        }
      }

      // Release the list's memory, not just its contents.
      gtl::STLClearObject(&(implications_[a]));
      continue;
    }

    // In-place compaction: drop removed literals and already-true targets.
    int new_size = 0;
    auto& implication = implications_[a];
    for (const Literal l : implication) {
      if (!is_removed_[l] && !assignment.LiteralIsTrue(l)) {
        implication[new_size++] = l;
      }
    }
    implication.resize(new_size);
  }

  // Clean-up at most ones.
  at_most_ones_.clear();
  CleanUpAndAddAtMostOnes(/*base_index=*/0);

  // Note that to please the invariant() we also removed fixed literal above.
  DCHECK(InvariantsAreOk());
}
|
|
|
|
|
|
|
|
|
|
// Debug/consistency check. Verifies that fixed/removed literals have empty
// implication lists, redundant literals point only to their representative,
// every implication a => b has its contrapositive not(b) => not(a), the
// reverse topological order is consistent when the graph is a DAG, and the
// at-most-one buffer agrees with the at_most_ones_ index. Returns false (with
// a LOG(ERROR) diagnostic) on the first violation found.
bool BinaryImplicationGraph::InvariantsAreOk() {
  // This can be expensive; skip once out of time.
  if (time_limit_->LimitReached()) return true;
  // We check that if a => b then not(b) => not(a).
  absl::flat_hash_set<std::pair<LiteralIndex, LiteralIndex>> seen;
  int num_redundant = 0;
  int num_fixed = 0;
  for (LiteralIndex a_index(0); a_index < implications_.size(); ++a_index) {
    if (trail_->Assignment().LiteralIsAssigned(Literal(a_index))) {
      ++num_fixed;
      if (!implications_[a_index].empty()) {
        LOG(ERROR) << "Fixed literal has non-cleared implications";
        return false;
      }
      continue;
    }
    if (is_removed_[a_index]) {
      if (!implications_[a_index].empty()) {
        LOG(ERROR) << "Removed literal has non-cleared implications";
        return false;
      }
      continue;
    }
    if (is_redundant_[a_index]) {
      ++num_redundant;
      if (implications_[a_index].size() != 1) {
        LOG(ERROR)
            << "Redundant literal should only point to its representative "
            << Literal(a_index) << " => " << implications_[a_index];
        return false;
      }
    }
    // Record every edge so the contrapositive check below is O(1) per edge.
    for (const Literal b : implications_[a_index]) {
      seen.insert({a_index, b.Index()});
    }
  }

  // Check that reverse topo order is correct.
  util_intops::StrongVector<LiteralIndex, int> lit_to_order;
  if (is_dag_) {
    lit_to_order.assign(implications_.size(), -1);
    for (int i = 0; i < reverse_topological_order_.size(); ++i) {
      lit_to_order[reverse_topological_order_[i]] = i;
    }
  }

  VLOG(2) << "num_redundant " << num_redundant;
  VLOG(2) << "num_fixed " << num_fixed;
  for (LiteralIndex a_index(0); a_index < implications_.size(); ++a_index) {
    const LiteralIndex not_a_index = Literal(a_index).NegatedIndex();
    for (const Literal b : implications_[a_index]) {
      if (is_removed_[b]) {
        LOG(ERROR) << "A removed literal still appear! " << Literal(a_index)
                   << " => " << b;
        return false;
      }

      if (!seen.contains({b.NegatedIndex(), not_a_index})) {
        LOG(ERROR) << "We have " << Literal(a_index) << " => " << b
                   << " but not the reverse implication!";
        LOG(ERROR) << "redundant[a]: " << is_redundant_[a_index]
                   << " assigned[a]: "
                   << trail_->Assignment().LiteralIsAssigned(Literal(a_index))
                   << " removed[a]: " << is_removed_[a_index]
                   << " redundant[b]: " << is_redundant_[b] << " assigned[b]: "
                   << trail_->Assignment().LiteralIsAssigned(b)
                   << " removed[b]: " << is_removed_[b];

        return false;
      }

      // Test that if we have a dag, our topo order is correct.
      if (is_dag_ && !is_redundant_[b] && !is_redundant_[a_index]) {
        DCHECK_NE(lit_to_order[b], -1);
        DCHECK_LE(lit_to_order[b], lit_to_order[a_index]);
      }
    }
  }

  // Check the at-most ones.
  absl::flat_hash_set<std::pair<LiteralIndex, int>> lit_to_start;
  for (LiteralIndex i(0); i < at_most_ones_.size(); ++i) {
    for (const int start : at_most_ones_[i]) {
      lit_to_start.insert({i, start});
    }
  }

  // Walk the buffer constraint by constraint (size header + literals).
  for (int start = 0; start < at_most_one_buffer_.size();) {
    const absl::Span<const Literal> amo = AtMostOne(start);
    for (const Literal l : amo) {
      if (is_removed_[l]) {
        LOG(ERROR) << "A removed literal still appear in an amo " << l;
        return false;
      }
      if (!lit_to_start.contains({l, start})) {
        return false;
      }
    }
    start += amo.size() + 1;
  }

  return true;
}
|
|
|
|
|
|
2023-12-04 15:06:08 +01:00
|
|
|
// Iterates over the at-most-one constraints stored in at_most_one_buffer_,
// advancing at_most_one_iterator_. Returns an empty span once exhausted.
absl::Span<const Literal> BinaryImplicationGraph::NextAtMostOne() {
  // Past the end of the buffer: nothing left to return.
  if (at_most_one_iterator_ >= at_most_one_buffer_.size()) return {};

  const absl::Span<const Literal> amo = AtMostOne(at_most_one_iterator_);
  // Skip the size header plus the literals to reach the next constraint.
  at_most_one_iterator_ += amo.size() + 1;
  return amo;
}
|
|
|
|
|
|
2014-01-27 15:05:30 +00:00
|
|
|
// ----- SatClause -----
|
|
|
|
|
|
|
|
|
|
// static
|
2020-10-28 13:42:36 +01:00
|
|
|
SatClause* SatClause::Create(absl::Span<const Literal> literals) {
|
2023-12-04 15:06:08 +01:00
|
|
|
DCHECK_GE(literals.size(), 2);
|
2020-10-28 13:42:36 +01:00
|
|
|
SatClause* clause = reinterpret_cast<SatClause*>(
|
2014-01-27 15:05:30 +00:00
|
|
|
::operator new(sizeof(SatClause) + literals.size() * sizeof(Literal)));
|
|
|
|
|
clause->size_ = literals.size();
|
|
|
|
|
for (int i = 0; i < literals.size(); ++i) {
|
|
|
|
|
clause->literals_[i] = literals[i];
|
|
|
|
|
}
|
|
|
|
|
return clause;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Note that for an attached clause, removing fixed literal is okay because if
// any of the watched literal is assigned, then the clause is necessarily true.
//
// Compacts the clause by dropping assigned literals past the two watched
// positions. Returns true as soon as the clause is known satisfied (a watched
// literal is assigned, or a true literal is found); otherwise shrinks size_
// to the unassigned prefix and returns false.
bool SatClause::RemoveFixedLiteralsAndTestIfTrue(
    const VariablesAssignment& assignment) {
  DCHECK(!IsRemoved());
  // literals_[0] and literals_[1] are the watched literals: if either is
  // assigned, the clause must already be satisfied (checked in debug mode).
  if (assignment.VariableIsAssigned(literals_[0].Variable()) ||
      assignment.VariableIsAssigned(literals_[1].Variable())) {
    DCHECK(IsSatisfied(assignment));
    return true;
  }
  // j is the write position: advance it over the already-unassigned prefix.
  int j = 2;
  while (j < size_ && !assignment.VariableIsAssigned(literals_[j].Variable())) {
    ++j;
  }
  // Compact the rest: keep unassigned literals, bail out on a true one
  // (a false literal is simply left behind and truncated below).
  for (int i = j; i < size_; ++i) {
    if (assignment.VariableIsAssigned(literals_[i].Variable())) {
      if (assignment.LiteralIsTrue(literals_[i])) return true;
    } else {
      std::swap(literals_[j], literals_[i]);
      ++j;
    }
  }
  size_ = j;
  return false;
}
|
|
|
|
|
|
2020-10-28 13:42:36 +01:00
|
|
|
bool SatClause::IsSatisfied(const VariablesAssignment& assignment) const {
|
2014-01-27 15:05:30 +00:00
|
|
|
for (const Literal literal : *this) {
|
2020-10-22 23:36:58 +02:00
|
|
|
if (assignment.LiteralIsTrue(literal)) return true;
|
2014-01-27 15:05:30 +00:00
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::string SatClause::DebugString() const {
|
|
|
|
|
std::string result;
|
|
|
|
|
for (const Literal literal : *this) {
|
2020-10-22 23:36:58 +02:00
|
|
|
if (!result.empty()) result.append(" ");
|
2014-01-27 15:05:30 +00:00
|
|
|
result.append(literal.DebugString());
|
|
|
|
|
}
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
2020-10-22 23:36:58 +02:00
|
|
|
} // namespace sat
|
|
|
|
|
} // namespace operations_research
|