algorithms: Export from google3

This commit is contained in:
Corentin Le Molgat
2023-07-05 13:10:00 +02:00
parent 8241194148
commit 2950cfc30e
15 changed files with 1516 additions and 1070 deletions

View File

@@ -274,3 +274,8 @@ git_repository(
remote = "https://github.com/google/googletest.git",
)
git_repository(
name = "com_google_benchmark",
tag = "v1.8.1",
remote = "https://github.com/google/benchmark.git",
)

View File

@@ -98,10 +98,12 @@ cc_library(
],
)
# Weighted set covering
cc_library(
name = "weighted_set_covering_model",
srcs = ["weighted_set_covering_model.cc"],
hdrs = ["weighted_set_covering_model.h"],
name = "set_cover_model",
srcs = ["set_cover_model.cc"],
hdrs = ["set_cover_model.h"],
deps = [
"//ortools/lp_data:base",
"@com_google_absl//absl/log:check",
@@ -109,20 +111,57 @@ cc_library(
)
cc_library(
name = "weighted_set_covering",
srcs = ["weighted_set_covering.cc"],
hdrs = ["weighted_set_covering.h"],
name = "set_cover_ledger",
srcs = ["set_cover_ledger.cc"],
hdrs = ["set_cover_ledger.h"],
deps = [
":weighted_set_covering_model",
":set_cover_model",
"//ortools/base",
"//ortools/base:adjustable_priority_queue",
"@com_google_absl//absl/container:flat_hash_set",
"@com_google_absl//absl/log",
"@com_google_absl//absl/log:check",
],
)
cc_library(
name = "set_cover_utils",
srcs = ["set_cover_utils.cc"],
hdrs = ["set_cover_utils.h"],
deps = [
":set_cover_ledger",
":set_cover_model",
"//ortools/base:adjustable_priority_queue",
],
)
cc_library(
name = "set_cover",
srcs = ["set_cover.cc"],
hdrs = ["set_cover.h"],
deps = [
":set_cover_utils",
"//ortools/base",
"@com_google_absl//absl/log",
"@com_google_absl//absl/log:check",
"@com_google_absl//absl/random",
],
)
cc_test(
name = "set_cover_test",
size = "medium",
timeout = "long",
srcs = ["set_cover_test.cc"],
deps = [
":set_cover",
"@com_google_absl//absl/log",
"@com_google_benchmark//:benchmark",
"@com_google_googletest//:gtest_main",
],
)
# Graph automorphism libraries.
cc_library(
name = "dense_doubly_linked_list",
hdrs = ["dense_doubly_linked_list.h"],

View File

@@ -12,20 +12,7 @@
# limitations under the License.
file(GLOB _SRCS "*.h" "*.cc")
list(REMOVE_ITEM _SRCS
${CMAKE_CURRENT_SOURCE_DIR}/binary_indexed_tree_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/binary_search_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/dense_doubly_linked_list_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/duplicate_remover_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/dynamic_partition_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/dynamic_permutation_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/find_graph_symmetries_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/hungarian_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/knapsack_solver_for_cuts_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/knapsack_solver_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/sparse_permutation_test.cc
${CMAKE_CURRENT_SOURCE_DIR}/weighted_set_covering_test.cc
)
list(FILTER _SRCS EXCLUDE REGEX "/[^/]*_test\\.cc$")
set(NAME ${PROJECT_NAME}_algorithms)

View File

@@ -0,0 +1,269 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/algorithms/set_cover.h"
#include <algorithm>
#include <limits>
#include <numeric>
#include <vector>
#include "absl/log/check.h"
#include "absl/random/random.h"
#include "ortools/algorithms/set_cover_utils.h"
#include "ortools/base/logging.h"
namespace operations_research {
constexpr SubsetIndex kNotFound(-1);
// TrivialSolutionGenerator.
bool TrivialSolutionGenerator::NextSolution() {
const SubsetIndex num_subsets(ledger_->model()->num_subsets());
SubsetBoolVector choices(num_subsets, true);
ledger_->LoadSolution(choices);
return true;
}
// RandomSolutionGenerator.
// Builds a solution by considering the subsets in a uniformly random order,
// selecting each one that still covers at least one uncovered element.
// Always returns true.
bool RandomSolutionGenerator::NextSolution() {
  const SubsetIndex num_subsets(ledger_->model()->num_subsets());
  // Identity permutation of the subsets, then shuffled.
  std::vector<SubsetIndex> random_subset_order(num_subsets.value());
  std::iota(random_subset_order.begin(), random_subset_order.end(),
            SubsetIndex(0));
  // NOTE(review): a fresh absl::BitGen is seeded on each call, so runs are
  // not reproducible.
  std::shuffle(random_subset_order.begin(), random_subset_order.end(),
               absl::BitGen());
  const ElementIndex num_elements(ledger_->model()->num_elements());
  for (const SubsetIndex subset : random_subset_order) {
    // Stop as soon as every element is covered.
    if (ledger_->num_elements_covered() == num_elements) {
      break;
    }
    // Only select subsets that still cover at least one uncovered element.
    if (ledger_->marginal_impacts(subset) != 0) {
      ledger_->Toggle(subset, true);
    }
  }
  DCHECK(ledger_->CheckConsistency());
  return true;
}
// GreedySolutionGenerator.
// Refreshes the queue priority of each impacted subset: subsets that still
// cover uncovered elements get a new (negated) cost-per-newly-covered-element
// priority; subsets whose marginal impact dropped to zero are removed from
// the queue since selecting them can no longer help.
void GreedySolutionGenerator::UpdatePriorities(
    const std::vector<SubsetIndex>& impacted_subsets) {
  const SubsetCostVector& subset_costs = ledger_->model()->subset_costs();
  for (const SubsetIndex subset : impacted_subsets) {
    const ElementIndex marginal_impact(ledger_->marginal_impacts(subset));
    if (marginal_impact != 0) {
      const Cost marginal_cost_increase =
          subset_costs[subset] / marginal_impact.value();
      pq_.ChangePriority(subset, -marginal_cost_increase);
    } else {
      // All of this subset's elements are already covered.
      pq_.Remove(subset);
    }
  }
}
// Chvatal's greedy heuristic: repeatedly select the subset with the smallest
// cost per newly-covered element until all elements are covered (or the
// queue empties). Always returns true.
bool GreedySolutionGenerator::NextSolution() {
  const SubsetCostVector& subset_costs = ledger_->model()->subset_costs();
  const SubsetIndex num_subsets(ledger_->model()->num_subsets());
  ledger_->MakeDataConsistent();
  // The priority is the minimum marginal cost increase. Since the
  // priority queue returns the smallest value, we use the opposite.
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    // Seed the queue with every unselected subset that still covers
    // something.
    if (!ledger_->is_selected(subset) &&
        ledger_->marginal_impacts(subset) != 0) {
      const Cost marginal_cost_increase =
          subset_costs[subset] / ledger_->marginal_impacts(subset).value();
      pq_.Add(subset, -marginal_cost_increase);
    }
  }
  const ElementIndex num_elements(ledger_->model()->num_elements());
  ElementIndex num_elements_covered(ledger_->num_elements_covered());
  while (num_elements_covered < num_elements && !pq_.IsEmpty()) {
    const SubsetIndex best_subset = pq_.TopSubset();
    DVLOG(1) << "Best subset: " << best_subset.value()
             << " Priority = " << pq_.Priority(best_subset)
             << " queue size = " << pq_.Size();
    // Selecting best_subset changes the marginal impacts of all subsets
    // sharing an element with it; refresh their priorities.
    const std::vector<SubsetIndex> impacted_subsets =
        ledger_->Toggle(best_subset, true);
    UpdatePriorities(impacted_subsets);
    num_elements_covered = ledger_->num_elements_covered();
    DVLOG(1) << "Cost = " << ledger_->cost() << " num_uncovered_elements = "
             << num_elements - num_elements_covered;
  }
  DCHECK(pq_.IsEmpty());
  DCHECK(ledger_->CheckConsistency());
  DCHECK(ledger_->CheckSolution());
  return true;
}
// SteepestSearch.
// Evicts every impacted subset from the priority queue. Since best_subset is
// always among the impacted subsets, it is removed as a side effect.
void SteepestSearch::UpdatePriorities(
    const std::vector<SubsetIndex>& impacted_subsets) {
  for (const SubsetIndex impacted : impacted_subsets) {
    pq_.Remove(impacted);
  }
}
// Steepest-descent improvement: repeatedly removes the removable selected
// subset with the largest cost, for up to num_iterations steps. Returns
// false if the ledger does not hold a valid solution to start from.
bool SteepestSearch::NextSolution(int num_iterations) {
  // Return false if ledger_ contains no solution.
  if (!ledger_->CheckSolution()) return false;
  const SparseColumnView& columns = ledger_->model()->columns();
  const SubsetCostVector& subset_costs = ledger_->model()->subset_costs();
  // Create priority queue with cost of using a subset, by decreasing order.
  // Do it only for removable subsets.
  for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
    // The priority is the gain from removing the subset from the solution.
    if (ledger_->is_selected(subset) && ledger_->is_removable(subset)) {
      pq_.Add(subset, subset_costs[subset]);
    }
  }
  for (int iteration = 0; iteration < num_iterations && !pq_.IsEmpty();
       ++iteration) {
    const SubsetIndex best_subset = pq_.TopSubset();
    const Cost cost_decrease = subset_costs[best_subset];
    DCHECK_GT(cost_decrease, 0.0);
    DCHECK(ledger_->is_removable(best_subset));
    DCHECK(ledger_->is_selected(best_subset));
    // Deselect best_subset; all subsets sharing one of its elements are
    // impacted and get evicted from the queue by UpdatePriorities.
    const std::vector<SubsetIndex> impacted_subsets =
        ledger_->Toggle(best_subset, false);
    UpdatePriorities(impacted_subsets);
    DVLOG(1) << "Cost = " << ledger_->cost();
  }
  DCHECK(ledger_->CheckConsistency());
  DCHECK(ledger_->CheckSolution());
  return true;
}
// Guided Tabu Search
// Resets the penalty counters and seeds both the penalized costs and the GTS
// priorities from the subset costs; each priority is normalized by the total
// number of elements the subset contains.
void GuidedTabuSearch::Initialize() {
  const SparseColumnView& columns = ledger_->model()->columns();
  const SubsetCostVector& subset_costs = ledger_->model()->subset_costs();
  times_penalized_.AssignToZero(columns.size());
  penalized_costs_ = subset_costs;
  gts_priorities_ = subset_costs;
  for (SubsetIndex subset(0); subset < gts_priorities_.size(); ++subset) {
    gts_priorities_[subset] /= columns[subset].size().value();
  }
}
namespace {
bool FlipCoin() {
// TODO(user): use STL for repeatable testing.
return absl::Bernoulli(absl::BitGen(), 0.5);
}
} // namespace
// Guided-local-search penalty step: finds the largest normalized priority
// among selected subsets, then, for each selected subset whose priority is
// within `radius` of that maximum, penalizes it with probability 1/2 —
// its priority shrinks and its penalized cost grows with the penalty count.
// NOTE(review): the impacted_subsets parameter is currently unused; the
// penalties are recomputed over all subsets.
void GuidedTabuSearch::UpdatePenalties(
    const std::vector<SubsetIndex>& impacted_subsets) {
  const SparseColumnView& columns = ledger_->model()->columns();
  const SubsetCostVector& subset_costs = ledger_->model()->subset_costs();
  const ElementIndex num_elements(ledger_->model()->num_elements());
  Cost largest_priority = -1.0;
  for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
    if (ledger_->is_selected(subset)) {
      const ElementIndex num_elements_already_covered =
          num_elements - ledger_->marginal_impacts(subset);
      // Bug fix: normalize the candidate priority, not the running maximum.
      // Previously the accumulated max was re-divided on every iteration,
      // compounding the division across selected subsets.
      largest_priority = std::max(
          largest_priority,
          gts_priorities_[subset] / num_elements_already_covered.value());
    }
  }
  const double radius = radius_factor_ * largest_priority;
  for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
    if (ledger_->is_selected(subset)) {
      const double subset_priority = gts_priorities_[subset];
      if ((largest_priority - subset_priority <= radius) && FlipCoin()) {
        ++times_penalized_[subset];
        const int times_penalized = times_penalized_[subset];
        const Cost cost = subset_costs[subset] / columns[subset].size().value();
        gts_priorities_[subset] = cost / (1 + times_penalized);
        penalized_costs_[subset] =
            cost * (1 + penalty_factor_ * times_penalized);
      }
    }
  }
}
// Runs up to num_iterations of guided tabu search. Each iteration flips the
// single subset with the smallest penalized cost increase (aspiration: a
// tabu move is allowed if it beats the best known cost), then updates the
// GLS penalties and the tabu list. The best solution encountered is restored
// into the ledger before returning. Always returns true.
bool GuidedTabuSearch::NextSolution(int num_iterations) {
  const SparseColumnView& columns = ledger_->model()->columns();
  const SubsetCostVector& subset_costs = ledger_->model()->subset_costs();
  constexpr Cost kMaxPossibleCost = std::numeric_limits<Cost>::max();
  Cost best_cost = ledger_->cost();
  Cost total_pen_cost = 0;
  for (const Cost pen_cost : penalized_costs_) {
    total_pen_cost += pen_cost;
  }
  SubsetBoolVector best_choices = ledger_->GetSolution();
  for (int iteration = 0; iteration < num_iterations; ++iteration) {
    // Scan all subsets for the flip with the smallest penalized cost
    // increase.
    Cost smallest_penalized_cost_increase = kMaxPossibleCost;
    SubsetIndex best_subset = kNotFound;
    for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
      const Cost penalized_delta = penalized_costs_[subset];
      DVLOG(1) << "Subset: " << subset.value() << " at "
               << ledger_->is_selected(subset)
               << " is removable = " << ledger_->is_removable(subset)
               << " penalized_delta = " << penalized_delta
               << " smallest_penalized_cost_increase = "
               << smallest_penalized_cost_increase;
      if (!ledger_->is_selected(subset)) {
        // Try to use subset in solution, if its penalized delta is good.
        if (penalized_delta < smallest_penalized_cost_increase) {
          smallest_penalized_cost_increase = penalized_delta;
          best_subset = subset;
        }
      } else {
        // Try to remove subset from solution, if the gain from removing, is
        // OK:
        if (-penalized_delta < smallest_penalized_cost_increase &&
            // and it can be removed, and
            ledger_->is_removable(subset) &&
            // it is not Tabu OR decreases the actual cost (aspiration):
            (!tabu_list_.Contains(subset) ||
             ledger_->cost() - subset_costs[subset] < best_cost)) {
          smallest_penalized_cost_increase = -penalized_delta;
          best_subset = subset;
        }
      }
    }
    if (best_subset == kNotFound) {  // Local minimum reached.
      ledger_->LoadSolution(best_choices);
      return true;
    }
    total_pen_cost += smallest_penalized_cost_increase;
    const std::vector<SubsetIndex> impacted_subsets =
        ledger_->Toggle(best_subset, !ledger_->is_selected(best_subset));
    UpdatePenalties(impacted_subsets);
    tabu_list_.Add(best_subset);
    // Record any new incumbent.
    if (ledger_->cost() < best_cost) {
      LOG(INFO) << "Iteration:" << iteration
                << ", current cost = " << ledger_->cost()
                << ", best cost = " << best_cost
                << ", penalized cost = " << total_pen_cost;
      best_cost = ledger_->cost();
      best_choices = ledger_->GetSolution();
    }
  }
  // Restore the best solution found before returning.
  ledger_->LoadSolution(best_choices);
  DCHECK(ledger_->CheckConsistency());
  DCHECK(ledger_->CheckSolution());
  return true;
}
} // namespace operations_research

View File

@@ -0,0 +1,249 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_H_
#define OR_TOOLS_ALGORITHMS_SET_COVER_H_
#include <vector>
#include "ortools/algorithms/set_cover_utils.h"
namespace operations_research {
// Solver classes for the weighted set covering problem.
//
// The solution procedure is based on the general scheme known as local search.
// Once a solution exists, it is improved by modifying it slightly, for example
// by flipping a binary variable, so as to minimize the cost.
// But first, we have to generate a first solution that is as good as possible.
//
// The first solution is then improved by using local search descent, which
// eliminates the T_j's that have no interest in the solution.
//
// A mix of the guided local search (GLS) and Tabu Search (TS) metaheuristic
// is also provided.
//
// TODO(user): add Large Neighborhood Search that removes a collection of T_j
// (with a parameterized way to choose them), and that runs the algorithm here
// on the corresponding subproblem.
//
// TODO(user): make Large Neighborhood Search concurrent, solving independent
// subproblems in different threads.
// An obvious idea is to take all the T_j's (or equivalently to set all the
// x_j's to 1). It's a bit silly but fast, and we can improve on it later using
// local search.
class TrivialSolutionGenerator {
 public:
  explicit TrivialSolutionGenerator(SetCoverLedger* ledger) : ledger_(ledger) {}

  // Returns true if a solution was found.
  // TODO(user): Add time-outs and exit with a partial solution. This seems
  // unlikely, though.
  bool NextSolution();

 private:
  // The ledger on which the algorithm will run. Not owned (never deleted
  // here); must outlive this object.
  SetCoverLedger* ledger_;
};
// A slightly more complicated but better way to compute a first solution is to
// select columns randomly. Less silly than the previous one, and provides
// much better results.
// TODO(user): make it possible to use other random generators. Idea: bias the
// generator towards the columns with the least marginal costs.
class RandomSolutionGenerator {
 public:
  explicit RandomSolutionGenerator(SetCoverLedger* ledger) : ledger_(ledger) {}

  // Returns true if a solution was found.
  bool NextSolution();

 private:
  // The ledger on which the algorithm will run. Not owned (never deleted
  // here); must outlive this object.
  SetCoverLedger* ledger_;
};
// The first solution is obtained using the Chvatal heuristic, that guarantees
// that the solution is at most 1 + log(n) times the optimal value.
// Vasek Chvatal, 1979. A greedy heuristic for the set-covering problem.
// Mathematics of Operations Research, 4(3):233-235, 1979.
// http://www.jstor.org/stable/3689577
//
// Chvatal's heuristic works as follows: Choose the subset that covers as many
// remaining uncovered elements as possible for the least possible cost per
// element and iterate.
//
// The following paper dives into the details of this class of algorithms.
// Neal E. Young, Greedy Set-Cover Algorithms (1974-1979, Chvatal,
// Johnson, Lovasz, Stein), in Kao, ed., Encyclopedia of Algorithms. Draft at:
// http://www.cs.ucr.edu/~neal/non_arxiv/Young08SetCover.pdf
//
// G. Cormode, H. Karloff, A. Wirth (2010) "Set Cover Algorithms for Very Large
// Datasets", CIKM'10, ACM, 2010.
class GreedySolutionGenerator {
 public:
  explicit GreedySolutionGenerator(SetCoverLedger* ledger)
      : ledger_(ledger), pq_(ledger_) {}

  // Returns true if a solution was found.
  // TODO(user): Add time-outs and exit with a partial solution.
  bool NextSolution();

 private:
  // Updates the priorities on the impacted_subsets.
  void UpdatePriorities(const std::vector<SubsetIndex>& impacted_subsets);

  // The ledger on which the algorithm will run. Not owned; must outlive this
  // object.
  SetCoverLedger* ledger_;

  // The priority queue used for maintaining the subset with the lower marginal
  // cost.
  SubsetPriorityQueue pq_;
};
// Once we have a first solution to the problem, there may be (most often,
// there are) elements in S that are covered several times. To decrease the
// total cost, SteepestSearch tries to eliminate some redundant T_j's from
// the solution or equivalently, to flip some x_j's from 1 to 0. the algorithm
// gets its name because it goes in the steepest immediate direction, taking
// the T_j with the largest total cost.
class SteepestSearch {
 public:
  explicit SteepestSearch(SetCoverLedger* ledger)
      : ledger_(ledger), pq_(ledger_) {}

  // Returns true if a solution was found within num_iterations.
  // TODO(user): Add time-outs and exit with a partial solution.
  bool NextSolution(int num_iterations);

 private:
  // Updates the priorities on the impacted_subsets.
  void UpdatePriorities(const std::vector<SubsetIndex>& impacted_subsets);

  // The ledger on which the algorithm will run. Not owned; must outlive this
  // object.
  SetCoverLedger* ledger_;

  // The priority queue used for maintaining the subset with the largest total
  // cost.
  SubsetPriorityQueue pq_;
};
// As usual and well-known with local search, SteepestSearch reaches a local
// minimum. We therefore implement Guided Tabu Search, which is a crossover of
// Guided Local Search and Tabu Search.
//
// Guided Local Search penalizes the parts of the solution that have been often
// used. It behaves as a long-term memory which "learns" the most used
// features and introduces some diversification in the search.
//
// C. Voudouris (1997) "Guided local search for combinatorial optimisation
// problems", PhD Thesis, University of Essex, Colchester, UK, July, 1997.
//
// Tabu Search makes it possible to degrade the solution temporarily
// by disallowing to go back for a certain time (changes are put in a "Tabu"
// list).
//
// Tabu behaves like a short-term memory and is the intensification part of the
// local search metaheuristic.
//
// F. Glover (1989) "Tabu Search Part 1". ORSA Journal on Computing.
// 1 (3): 190–206. doi:10.1287/ijoc.1.3.190.
// F. Glover (1990) "Tabu Search Part 2". ORSA Journal on Computing.
// 2 (1): 4–32. doi:10.1287/ijoc.2.1.4.
class GuidedTabuSearch {
 public:
  explicit GuidedTabuSearch(SetCoverLedger* ledger)
      : ledger_(ledger),
        pq_(ledger_),
        lagrangian_factor_(kDefaultLagrangianFactor),
        penalty_factor_(kDefaultPenaltyFactor),
        radius_factor_(kDefaultRadiusFactor),
        penalized_costs_(),
        times_penalized_(),
        tabu_list_(SubsetIndex(kDefaultTabuListSize)) {
    Initialize();
  }

  // Initializes the Guided Tabu Search algorithm.
  void Initialize();

  // Returns the next solution by running the Tabu Search algorithm for maximum
  // num_iterations iterations.
  bool NextSolution(int num_iterations);

  // TODO(user): re-introduce this in the code. It was used to favor
  // subsets with the same marginal costs but that would cover more elements.
  // But first, see if it makes sense to compute it.
  void SetLagrangianFactor(double factor) { lagrangian_factor_ = factor; }
  double GetLagrangianFactor() const { return lagrangian_factor_; }

  // NOTE(review): asymmetric naming — the setter is SetRadiusFactor but the
  // getter is GetRadius; kept as-is for interface compatibility.
  void SetRadiusFactor(double r) { radius_factor_ = r; }
  double GetRadius() const { return radius_factor_; }

  // Setters and getters for the Guided Tabu Search algorithm parameters.
  void SetPenaltyFactor(double factor) { penalty_factor_ = factor; }
  double GetPenaltyFactor() const { return penalty_factor_; }
  void SetTabuListSize(int size) { tabu_list_.Init(size); }
  int GetTabuListSize() const { return tabu_list_.size(); }

 private:
  // Updates the priorities on the impacted_subsets.
  void UpdatePriorities(const std::vector<SubsetIndex>& impacted_subsets);

  // Updates the penalties on the impacted_subsets.
  void UpdatePenalties(const std::vector<SubsetIndex>& impacted_subsets);

  // The ledger on which the algorithm will run. Not owned; must outlive this
  // object.
  SetCoverLedger* ledger_;

  // Priority queue of candidate subsets. NOTE(review): not referenced by the
  // visible implementation of NextSolution — confirm it is still needed.
  SubsetPriorityQueue pq_;

  // Search handling variables and default parameters.
  static constexpr double kDefaultLagrangianFactor = 100.0;
  double lagrangian_factor_;

  // Guided local search-related data.
  static constexpr double kPenaltyUpdateEpsilon = 1e-1;

  // Guided Tabu Search parameters.
  static constexpr double kDefaultPenaltyFactor = 0.2;
  double penalty_factor_;

  // Tabu Search parameters.
  static constexpr double kDefaultRadiusFactor = 1e-8;
  double radius_factor_;

  // Penalized costs for each subset as used in Guided Tabu Search.
  SubsetCostVector penalized_costs_;

  // The number of times each subset was penalized during Guided Tabu Search.
  SubsetCountVector times_penalized_;

  // TODO(user): remove and use priority_queue.
  // Priorities for the different subsets.
  // They are similar to the marginal cost of using a particular subset,
  // defined as the cost of the subset divided by the number of elements that
  // are still not covered.
  SubsetCostVector gts_priorities_;

  // Tabu search-related data.
  static constexpr int kDefaultTabuListSize = 17;  // Nice prime number.
  TabuList<SubsetIndex> tabu_list_;
};
} // namespace operations_research
#endif // OR_TOOLS_ALGORITHMS_SET_COVER_H_

View File

@@ -0,0 +1,344 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/algorithms/set_cover_ledger.h"
#include <algorithm>
#include <vector>
#include "absl/container/flat_hash_set.h"
#include "absl/log/check.h"
#include "ortools/algorithms/set_cover_model.h"
#include "ortools/base/logging.h"
namespace operations_research {
// Note: in many of the member functions, variables have "crypterse" names
// to avoid confusing them with member data. For example mrgnl_impcts is used
// to avoid confusion with marginal_impacts_.
// Sets up all incremental data from scratch: nothing selected, no element
// covered, zero cost, and each subset's marginal impact equal to its full
// column size (the number of elements it would newly cover).
void SetCoverLedger::Initialize() {
  DCHECK(model_->ComputeFeasibility());
  model_->CreateSparseRowView();
  const SubsetIndex num_subsets(model_->num_subsets());
  is_selected_.assign(num_subsets, false);
  is_removable_.assign(num_subsets, false);
  marginal_impacts_.assign(num_subsets, ElementIndex(0));
  const SparseColumnView& columns = model_->columns();
  // With an empty selection, a subset's marginal impact is simply the number
  // of elements it contains.
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    marginal_impacts_[subset] = columns[subset].size().value();
  }
  const ElementIndex num_elements(model_->num_elements());
  coverage_.assign(num_elements, SubsetIndex(0));
  cost_ = 0.0;
  num_elements_covered_ = ElementIndex(0);
}
// Always returns true; CHECK-fails if the incremental data disagrees with
// values recomputed from scratch. Intended to be wrapped in DCHECK().
bool SetCoverLedger::CheckConsistency() const {
  CHECK(CheckCoverageAndMarginalImpacts(is_selected_));
  CHECK(CheckIsRemovable());
  return true;
}
// Overwrites the current selection with `c` and rebuilds all incremental
// data (cost, coverage, marginal impacts) from it.
void SetCoverLedger::LoadSolution(const SubsetBoolVector& c) {
  is_selected_ = c;
  MakeDataConsistent();
}
// Returns true iff the current selection covers every element AND the stored
// cost matches the recomputed cost. Logs errors instead of crashing.
bool SetCoverLedger::CheckSolution() const {
  bool is_ok = true;
  const ElementToSubsetVector cvrg = ComputeCoverage(is_selected_);
  const ElementIndex num_elements(model_->num_elements());
  for (ElementIndex element(0); element < num_elements; ++element) {
    if (cvrg[element] == 0) {
      LOG(ERROR) << "Recomputed coverage_ for element " << element << " = 0";
      is_ok = false;
    }
  }
  const Cost recomputed_cost = ComputeCost(is_selected_);
  // NOTE(review): exact floating-point equality; relies on the incremental
  // updates following the same accumulation order as the recomputation.
  if (cost_ != recomputed_cost) {
    LOG(ERROR) << "Cost = " << cost_
               << ", while recomputed cost_ = " << recomputed_cost;
    is_ok = false;
  }
  return is_ok;
}
// Returns true iff the stored coverage_ equals the coverage recomputed from
// scratch for the selection `choices`. Logs mismatches instead of crashing.
bool SetCoverLedger::CheckCoverageAgainstSolution(
    const SubsetBoolVector& choices) const {
  const SubsetIndex num_subsets(model_->num_subsets());
  DCHECK_EQ(num_subsets, choices.size());
  const ElementToSubsetVector cvrg = ComputeCoverage(choices);
  bool is_ok = true;
  const ElementIndex num_elements(model_->num_elements());
  for (ElementIndex element(0); element < num_elements; ++element) {
    if (coverage_[element] != cvrg[element]) {
      LOG(ERROR) << "Recomputed coverage_ for element " << element << " = "
                 << cvrg[element]
                 << ", while updated coverage_ = " << coverage_[element];
      is_ok = false;
    }
  }
  return is_ok;
}
// Returns true iff both coverage_ and marginal_impacts_ agree with values
// recomputed from the selection `choices`. Logs mismatches, does not crash.
bool SetCoverLedger::CheckCoverageAndMarginalImpacts(
    const SubsetBoolVector& choices) const {
  const SubsetIndex num_subsets(model_->num_subsets());
  DCHECK_EQ(num_subsets, choices.size());
  // NOTE(review): ComputeCoverage also runs inside
  // CheckCoverageAgainstSolution below, so the coverage is computed twice;
  // acceptable for a debug-only check.
  const ElementToSubsetVector cvrg = ComputeCoverage(choices);
  bool is_ok = CheckCoverageAgainstSolution(choices);
  const SubsetToElementVector mrgnl_impcts = ComputeMarginalImpacts(cvrg);
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    if (marginal_impacts_[subset] != mrgnl_impcts[subset]) {
      LOG(ERROR) << "Recomputed marginal impact for subset " << subset << " = "
                 << mrgnl_impcts[subset] << ", while updated marginal impact = "
                 << marginal_impacts_[subset];
      is_ok = false;
    }
  }
  return is_ok;
}
// Used only once, for testing. TODO(user): Merge with
// CheckCoverageAndMarginalImpacts.
// Returns, for each subset, the number of its elements reported as uncovered
// by the coverage vector `cvrg`.
SubsetToElementVector SetCoverLedger::ComputeMarginalImpacts(
    const ElementToSubsetVector& cvrg) const {
  const ElementIndex num_elements(model_->num_elements());
  DCHECK_EQ(num_elements, cvrg.size());
  const SparseColumnView& columns = model_->columns();
  const SubsetIndex num_subsets(model_->num_subsets());
  SubsetToElementVector mrgnl_impcts(num_subsets, ElementIndex(0));
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    for (ElementIndex element : columns[subset]) {
      if (cvrg[element] == 0) {
        ++mrgnl_impcts[subset];
      }
    }
    // The impact is bounded by the subset's column size.
    DCHECK_LE(mrgnl_impcts[subset], columns[subset].size().value());
    DCHECK_GE(mrgnl_impcts[subset], 0);
  }
  return mrgnl_impcts;
}
// Recomputes from scratch the total cost of the selection `c` by summing the
// costs of the chosen subsets.
Cost SetCoverLedger::ComputeCost(const SubsetBoolVector& c) const {
  const SubsetCostVector& subset_costs = model_->subset_costs();
  const SubsetIndex num_subsets(model_->num_subsets());
  Cost total = 0;
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    if (!c[subset]) continue;
    total += subset_costs[subset];
  }
  return total;
}
// Returns the number of elements that `cvrg` reports as covered at least
// once.
ElementIndex SetCoverLedger::ComputeNumElementsCovered(
    const ElementToSubsetVector& cvrg) const {
  // Use "crypterse" naming to avoid confusing with num_elements_.
  // Hoist the loop-invariant bound out of the loop instead of calling
  // model_->num_elements() on every iteration.
  const ElementIndex num_elmnts(model_->num_elements());
  int num_elmnts_cvrd = 0;
  for (ElementIndex element(0); element < num_elmnts; ++element) {
    if (cvrg[element] >= 1) {
      ++num_elmnts_cvrd;
    }
  }
  return ElementIndex(num_elmnts_cvrd);
}
// Recomputes from scratch, for every element, how many subsets selected in
// `choices` cover it.
ElementToSubsetVector SetCoverLedger::ComputeCoverage(
    const SubsetBoolVector& choices) const {
  const ElementIndex num_elements(model_->num_elements());
  const SparseRowView& rows = model_->rows();
  // Use "crypterse" naming to avoid confusing with coverage_.
  ElementToSubsetVector cvrg(num_elements, SubsetIndex(0));
  for (ElementIndex element(0); element < num_elements; ++element) {
    for (SubsetIndex subset : rows[element]) {
      if (choices[subset]) {
        ++cvrg[element];
      }
    }
    // Coverage is bounded by the number of subsets containing the element.
    DCHECK_LE(cvrg[element], rows[element].size().value());
    DCHECK_GE(cvrg[element], 0);
  }
  return cvrg;
}
// Always returns true; DCHECK-fails if the stored coverage_ disagrees with a
// from-scratch recomputation restricted to the elements of `subset`.
bool SetCoverLedger::CheckSingleSubsetCoverage(SubsetIndex subset) const {
  ElementToSubsetVector cvrg = ComputeSingleSubsetCoverage(subset);
  const SparseColumnView& columns = model_->columns();
  for (const ElementIndex element : columns[subset]) {
    DCHECK_EQ(coverage_[element], cvrg[element]) << " Element = " << element;
  }
  return true;
}
// Used only once, for testing. TODO(user): Merge with
// CheckSingleSubsetCoverage.
// Recomputes, for each element of `subset`, how many currently-selected
// subsets cover it; entries for elements outside `subset` stay 0.
ElementToSubsetVector SetCoverLedger::ComputeSingleSubsetCoverage(
    SubsetIndex subset) const {
  const SparseColumnView& columns = model_->columns();
  const SparseRowView& rows = model_->rows();
  const ElementIndex num_elements(model_->num_elements());
  // Use "crypterse" naming to avoid confusing with cvrg.
  ElementToSubsetVector cvrg(num_elements, SubsetIndex(0));
  for (const ElementIndex element : columns[subset]) {
    // `covering_subset` renamed from `subset`, which shadowed the parameter.
    for (const SubsetIndex covering_subset : rows[element]) {
      if (is_selected_[covering_subset]) {
        ++cvrg[element];
      }
    }
    DCHECK_GE(cvrg[element], 0);
    DCHECK_LE(cvrg[element], rows[element].size().value());
  }
  return cvrg;
}
// Selects (value = true) or deselects (value = false) `subset`, after
// checking the preconditions that keep the solution feasible and the cost
// non-increasing in the intended direction. Returns the subsets whose
// incremental data was updated.
std::vector<SubsetIndex> SetCoverLedger::Toggle(SubsetIndex subset,
                                                bool value) {
  // Note: "if p then q" is also "not(p) or q", or p <= q (p LE q).
  // If selected, then is_removable, to make sure we still have a solution.
  DCHECK(is_selected_[subset] <= is_removable_[subset]);
  // If value, then marginal_impact > 0, to not increase the cost.
  DCHECK((value <= (marginal_impacts_[subset] > 0)));
  return UnsafeToggle(subset, value);
}
// Same as Toggle() but without the feasibility/cost-usefulness preconditions.
// Incrementally updates cost, coverage, removability and marginal impacts,
// and returns the list of subsets whose data changed.
std::vector<SubsetIndex> SetCoverLedger::UnsafeToggle(SubsetIndex subset,
                                                      bool value) {
  // We allow to deselect a non-removable subset, but at least the
  // Toggle should be a real change.
  DCHECK_NE(is_selected_[subset], value);
  // If selected, then marginal_impact == 0.
  DCHECK((is_selected_[subset] <= (marginal_impacts_[subset] == 0)));
  DVLOG(1) << (value ? "S" : "Des") << "electing subset " << subset;
  const SubsetCostVector& subset_costs = model_->subset_costs();
  // The cost moves by exactly the subset's cost, in the flip's direction.
  cost_ += value ? subset_costs[subset] : -subset_costs[subset];
  is_selected_[subset] = value;
  UpdateCoverage(subset, value);
  const std::vector<SubsetIndex> impacted_subsets =
      ComputeImpactedSubsets(subset);
  UpdateIsRemovable(impacted_subsets);
  UpdateMarginalImpacts(impacted_subsets);
  DCHECK((is_selected_[subset] <= (marginal_impacts_[subset] == 0)));
  return impacted_subsets;
}
// Increments (value = true) or decrements (value = false) the coverage count
// of every element in `subset`, maintaining num_elements_covered_ as counts
// cross the 0/1 boundary.
void SetCoverLedger::UpdateCoverage(SubsetIndex subset, bool value) {
  const SparseColumnView& columns = model_->columns();
  const SparseRowView& rows = model_->rows();
  const int delta = value ? 1 : -1;
  for (const ElementIndex element : columns[subset]) {
    DVLOG(2) << "Coverage of element " << element << " changed from "
             << coverage_[element] << " to " << coverage_[element] + delta;
    coverage_[element] += delta;
    DCHECK_GE(coverage_[element], 0);
    DCHECK_LE(coverage_[element], rows[element].size().value());
    // An element becomes covered when its count reaches 1 and uncovered when
    // it drops back to 0.
    if (coverage_[element] == 1) {
      ++num_elements_covered_;
    } else if (coverage_[element] == 0) {
      --num_elements_covered_;
    }
  }
  DCHECK(CheckSingleSubsetCoverage(subset));
}
// Compute the impact of the change in the assignment for each subset
// containing element. Store this in a flat_hash_set so as to buffer the
// change.
// Returns, sorted and deduplicated, every subset that shares at least one
// element with `subset` (including `subset` itself).
std::vector<SubsetIndex> SetCoverLedger::ComputeImpactedSubsets(
    SubsetIndex subset) const {
  const SparseColumnView& columns = model_->columns();
  const SparseRowView& rows = model_->rows();
  absl::flat_hash_set<SubsetIndex> impacted_subsets_collection;
  for (const ElementIndex element : columns[subset]) {
    // NOTE(review): the loop variable shadows the `subset` parameter;
    // harmless here but worth renaming.
    for (const SubsetIndex subset : rows[element]) {
      impacted_subsets_collection.insert(subset);
    }
  }
  // subset shares its own elements with itself, so it must be in the set.
  DCHECK(impacted_subsets_collection.find(subset) !=
         impacted_subsets_collection.end());
  std::vector<SubsetIndex> impacted_subsets(impacted_subsets_collection.begin(),
                                            impacted_subsets_collection.end());
  DCHECK_LE(impacted_subsets.size(), model_->num_subsets());
  // Sort for deterministic downstream iteration order.
  std::sort(impacted_subsets.begin(), impacted_subsets.end());
  return impacted_subsets;
}
// A subset is removable from the solution iff every one of its elements is
// covered at least twice: dropping it then keeps full coverage.
bool SetCoverLedger::ComputeIsRemovable(SubsetIndex subset) const {
  DCHECK(CheckSingleSubsetCoverage(subset));
  for (const ElementIndex element : model_->columns()[subset]) {
    if (coverage_[element] <= 1) {
      return false;
    }
  }
  return true;
}
// Refreshes the is_removable_ bit of every impacted subset.
void SetCoverLedger::UpdateIsRemovable(
    const std::vector<SubsetIndex>& impacted_subsets) {
  for (const SubsetIndex impacted : impacted_subsets) {
    is_removable_[impacted] = ComputeIsRemovable(impacted);
  }
}
// Recomputes removability for all subsets from the coverage vector `cvrg`:
// any subset containing an element covered at most once is not removable.
SubsetBoolVector SetCoverLedger::ComputeIsRemovable(
    const ElementToSubsetVector& cvrg) const {
  DCHECK(CheckCoverageAgainstSolution(is_selected_));
  const SubsetIndex num_subsets(model_->num_subsets());
  SubsetBoolVector is_rmvble(num_subsets, true);
  const SparseRowView& rows = model_->rows();
  for (ElementIndex element(0); element < rows.size(); ++element) {
    if (cvrg[element] <= 1) {
      for (const SubsetIndex subset : rows[element]) {
        is_rmvble[subset] = false;
      }
    }
  }
  // NOTE(review): this verification loop duplicates the one in
  // CheckIsRemovable(); consider keeping only one of the two.
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    DCHECK_EQ(is_rmvble[subset], ComputeIsRemovable(subset));
  }
  return is_rmvble;
}
bool SetCoverLedger::CheckIsRemovable() const {
const SubsetBoolVector is_rmvble = ComputeIsRemovable(coverage_);
const SubsetIndex num_subsets(model_->num_subsets());
for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
DCHECK_EQ(is_rmvble[subset], ComputeIsRemovable(subset));
}
return true;
}
// Recomputes the marginal impact (number of still-uncovered elements) of
// every impacted subset.
void SetCoverLedger::UpdateMarginalImpacts(
    const std::vector<SubsetIndex>& impacted_subsets) {
  const SparseColumnView& columns = model_->columns();
  for (const SubsetIndex impacted : impacted_subsets) {
    ElementIndex num_uncovered(0);
    for (const ElementIndex element : columns[impacted]) {
      if (coverage_[element] == 0) {
        ++num_uncovered;
      }
    }
    DVLOG(2) << "Changing impact of subset " << impacted << " from "
             << marginal_impacts_[impacted] << " to " << num_uncovered;
    marginal_impacts_[impacted] = num_uncovered;
    DCHECK_GE(marginal_impacts_[impacted], 0);
    DCHECK_LE(marginal_impacts_[impacted], columns[impacted].size().value());
  }
  DCHECK(CheckCoverageAndMarginalImpacts(is_selected_));
}
} // namespace operations_research

View File

@@ -0,0 +1,193 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_LEDGER_H_
#define OR_TOOLS_ALGORITHMS_SET_COVER_LEDGER_H_
#include <sys/types.h>
#include <vector>
#include "ortools/algorithms/set_cover_model.h"
namespace operations_research {
using SubsetCountVector = glop::StrictITIVector<SubsetIndex, int>;
using SubsetBoolVector = glop::StrictITIVector<SubsetIndex, bool>;
// SetCoverLedger does the bookkeeping for a solution to the
// SetCoverModel passed as argument.
// The state of a SetCoverLedger instance is uniquely defined by a
// SubsetBoolVector representing whether a subset is selected in the solution
// or not.
// A SetCoverLedger is (relatively) small:
//   is_selected_, a partial solution, vector of Booleans of size #subsets.
// From this, the following can be computed:
//   coverage_, the number of times an element is covered;
//   marginal_impacts_, the number of elements of a subset still uncovered;
//   is_removable_, whether a subset can be removed from the solution.
// Note that is_removable_[subset] implies is_selected_[subset], and thus
// (is_removable_[subset] <= is_selected_[subset]) == true.
class SetCoverLedger {
 public:
  // Constructs an empty weighted set covering solver state.
  // The model may not change after the ledger was built.
  explicit SetCoverLedger(SetCoverModel* m) : model_(m) { Initialize(); }
  // Initializes the solver once the data is set. The model cannot be changed
  // afterwards.
  void Initialize();
  // Recomputes all the invariants (cost, coverage, removability, marginal
  // impacts, number of covered elements) from scratch for the current
  // solution is_selected_.
  void MakeDataConsistent() {
    cost_ = ComputeCost(is_selected_);
    coverage_ = ComputeCoverage(is_selected_);
    is_removable_ = ComputeIsRemovable(coverage_);
    marginal_impacts_ = ComputeMarginalImpacts(coverage_);
    num_elements_covered_ = ComputeNumElementsCovered(coverage_);
  }
  // Returns the weighted set covering model to which the state applies.
  SetCoverModel* model() const { return model_; }
  // Returns the cost of the current solution.
  Cost cost() const { return cost_; }
  // Returns whether subset is selected in the solution.
  bool is_selected(SubsetIndex subset) const { return is_selected_[subset]; }
  // Returns the number of elements in each subset that are not covered in the
  // current solution.
  ElementIndex marginal_impacts(SubsetIndex subset) const {
    return marginal_impacts_[subset];
  }
  // Returns the number of subsets covering the given element.
  // NOTE(review): the parameter is an element index despite being named
  // `subset`.
  SubsetIndex coverage(ElementIndex subset) const { return coverage_[subset]; }
  // Returns whether subset can be removed from the solution while keeping it
  // feasible.
  bool is_removable(SubsetIndex subset) const { return is_removable_[subset]; }
  // Returns the number of elements covered by the current solution.
  ElementIndex num_elements_covered() const { return num_elements_covered_; }
  // Stores the solution and recomputes the data in the ledger.
  void LoadSolution(const SubsetBoolVector& c);
  // Returns a copy of the current solution.
  SubsetBoolVector GetSolution() const { return is_selected_; }
  // Returns true if the data stored in the ledger is consistent.
  bool CheckConsistency() const;
  // Computes is_removable_ from scratch for every subset.
  // TODO(user): reconsider exposing this.
  void RecomputeIsRemovable() { is_removable_ = ComputeIsRemovable(coverage_); }
  // Returns the subsets that share at least one element with subset, sorted
  // by increasing index (includes subset itself).
  // TODO(user): is it worth to precompute this?
  std::vector<SubsetIndex> ComputeImpactedSubsets(SubsetIndex subset) const;
  // Updates is_removable_ for each subset in impacted_subsets.
  void UpdateIsRemovable(const std::vector<SubsetIndex>& impacted_subsets);
  // Updates marginal_impacts_ for each subset in impacted_subsets.
  void UpdateMarginalImpacts(const std::vector<SubsetIndex>& impacted_subsets);
  // Toggles is_selected_[subset] to value, and incrementally updates the
  // ledger.
  // Returns a vector of subsets impacted by the change, in case they need
  // to be reconsidered in a solution generator or a local search algorithm.
  // Calls UnsafeToggle with additional DCHECKs guarding feasibility:
  // presumably removal (value == false) requires the subset to be removable,
  // and addition (value == true) requires a nonzero marginal impact —
  // TODO(review): confirm against the implementation (the original comment
  // here was garbled, repeating "If value is true" twice).
  std::vector<SubsetIndex> Toggle(SubsetIndex subset, bool value);
  // Same as Toggle, with fewer DCHECKs.
  // Useful for some meta-heuristics that allow to go through infeasible
  // solutions.
  // Only checks that value is different from is_selected_[subset].
  std::vector<SubsetIndex> UnsafeToggle(SubsetIndex subset, bool value);
  // Updates coverage_ for subset when setting is_selected_[subset] to value.
  void UpdateCoverage(SubsetIndex subset, bool value);
  // Returns true if the elements selected in the current solution cover all
  // the elements of the set.
  bool CheckSolution() const;
  // Checks that coverage_ and marginal_impacts_ are consistent with choices.
  bool CheckCoverageAndMarginalImpacts(const SubsetBoolVector& choices) const;

 private:
  // Recomputes the cost from scratch from c.
  Cost ComputeCost(const SubsetBoolVector& c) const;
  // Computes is_removable based on a coverage cvrg.
  SubsetBoolVector ComputeIsRemovable(const ElementToSubsetVector& cvrg) const;
  // Computes marginal impacts based on a coverage cvrg.
  SubsetToElementVector ComputeMarginalImpacts(
      const ElementToSubsetVector& cvrg) const;
  // Computes the number of elements covered based on coverage vector 'cvrg'.
  ElementIndex ComputeNumElementsCovered(
      const ElementToSubsetVector& cvrg) const;
  // Returns true if subset can be removed from the solution, i.e. it is
  // redundant to cover all the elements.
  // This function is used to check that is_removable[subset] is consistent.
  bool ComputeIsRemovable(SubsetIndex subset) const;
  // Returns a coverage vector computed over the elements of subset only —
  // presumably entries for other elements are left at zero; confirm in .cc.
  ElementToSubsetVector ComputeSingleSubsetCoverage(SubsetIndex subset) const;
  // Returns a vector containing the number of subsets covering each element.
  ElementToSubsetVector ComputeCoverage(const SubsetBoolVector& choices) const;
  // Checks that the value of coverage_ is correct by recomputing and comparing.
  bool CheckSingleSubsetCoverage(SubsetIndex subset) const;
  // Checks that coverage_ is consistent with choices.
  bool CheckCoverageAgainstSolution(const SubsetBoolVector& choices) const;
  // Returns true if is_removable_ is consistent.
  bool CheckIsRemovable() const;
  // The weighted set covering model on which the solver is run.
  SetCoverModel* model_;
  // Current cost.
  Cost cost_;
  // The number of elements covered in the current solution.
  ElementIndex num_elements_covered_;
  // Current assignment: is_selected_[subset] is true iff subset is part of
  // the solution.
  SubsetBoolVector is_selected_;
  // The marginal impact of a subset is the number of elements in that subset
  // that are not covered in the current solution.
  SubsetToElementVector marginal_impacts_;
  // The coverage of an element is the number of selected subsets which contain
  // the said element.
  ElementToSubsetVector coverage_;
  // True if the subset can be removed from the solution without making it
  // infeasible.
  SubsetBoolVector is_removable_;
};
} // namespace operations_research
#endif // OR_TOOLS_ALGORITHMS_SET_COVER_LEDGER_H_

View File

@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/algorithms/weighted_set_covering_model.h"
#include "ortools/algorithms/set_cover_model.h"
#include <algorithm>
@@ -19,20 +19,20 @@
namespace operations_research {
void WeightedSetCoveringModel::AddEmptySubset(Cost cost) {
void SetCoverModel::AddEmptySubset(Cost cost) {
subset_costs_.push_back(cost);
columns_.push_back(SparseColumn());
row_view_is_valid_ = false;
}
void WeightedSetCoveringModel::AddElementToLastSubset(int element) {
void SetCoverModel::AddElementToLastSubset(int element) {
ElementIndex new_element(element);
columns_.back().push_back(new_element);
num_elements_ = std::max(num_elements_, new_element + 1);
row_view_is_valid_ = false;
}
void WeightedSetCoveringModel::SetSubsetCost(int subset, Cost cost) {
void SetCoverModel::SetSubsetCost(int subset, Cost cost) {
const SubsetIndex subset_index(subset);
const SubsetIndex size = std::max(columns_.size(), subset_index + 1);
columns_.resize(size, SparseColumn());
@@ -41,7 +41,7 @@ void WeightedSetCoveringModel::SetSubsetCost(int subset, Cost cost) {
row_view_is_valid_ = false; // Probably overkill, but better safe than sorry.
}
void WeightedSetCoveringModel::AddElementToSubset(int element, int subset) {
void SetCoverModel::AddElementToSubset(int element, int subset) {
const SubsetIndex subset_index(subset);
const SubsetIndex size = std::max(columns_.size(), subset_index + 1);
subset_costs_.resize(size, 0.0);
@@ -52,13 +52,22 @@ void WeightedSetCoveringModel::AddElementToSubset(int element, int subset) {
row_view_is_valid_ = false;
}
void WeightedSetCoveringModel::CreateSparseRowView() {
void SetCoverModel::CreateSparseRowView() {
if (row_view_is_valid_) {
return;
}
rows_.resize(num_elements_, SparseRow());
glop::StrictITIVector<ElementIndex, int> row_sizes(num_elements_, 0);
for (SubsetIndex subset(0); subset < columns_.size(); ++subset) {
std::sort(columns_[subset].begin(), columns_[subset].end());
for (const ElementIndex element : columns_[subset]) {
++row_sizes[element];
}
}
for (ElementIndex element(0); element < num_elements_; ++element) {
rows_[element].reserve(EntryIndex(row_sizes[element]));
}
for (SubsetIndex subset(0); subset < columns_.size(); ++subset) {
for (const ElementIndex element : columns_[subset]) {
rows_[element].push_back(subset);
}
@@ -66,7 +75,7 @@ void WeightedSetCoveringModel::CreateSparseRowView() {
row_view_is_valid_ = true;
}
bool WeightedSetCoveringModel::ComputeFeasibility() const {
bool SetCoverModel::ComputeFeasibility() const {
CHECK_GT(num_elements_, 0);
CHECK_GT(columns_.size(), 0);
CHECK_EQ(columns_.size(), subset_costs_.size());
@@ -91,4 +100,5 @@ bool WeightedSetCoveringModel::ComputeFeasibility() const {
<< *std::max_element(coverage.begin(), coverage.end());
return true;
}
} // namespace operations_research

View File

@@ -11,8 +11,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OR_TOOLS_ALGORITHMS_WEIGHTED_SET_COVERING_MODEL_H_
#define OR_TOOLS_ALGORITHMS_WEIGHTED_SET_COVERING_MODEL_H_
#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_MODEL_H_
#define OR_TOOLS_ALGORITHMS_SET_COVER_MODEL_H_
#include "ortools/lp_data/lp_types.h" // For StrictITIVector.
@@ -64,10 +64,10 @@ using ElementToSubsetVector = glop::StrictITIVector<ElementIndex, SubsetIndex>;
using SubsetToElementVector = glop::StrictITIVector<SubsetIndex, ElementIndex>;
// Main class for describing a weighted set-covering problem.
class WeightedSetCoveringModel {
class SetCoverModel {
public:
// Constructs an empty weighted set-covering problem.
WeightedSetCoveringModel()
SetCoverModel()
: num_elements_(0),
row_view_is_valid_(false),
subset_costs_(),
@@ -82,11 +82,11 @@ class WeightedSetCoveringModel {
// number of columns.
SubsetIndex num_subsets() const { return columns_.size(); }
SubsetCostVector subset_costs() const { return subset_costs_; }
const SubsetCostVector& subset_costs() { return subset_costs_; }
SparseColumnView columns() const { return columns_; }
const SparseColumnView& columns() { return columns_; }
SparseRowView rows() const {
const SparseRowView& rows() {
DCHECK(row_view_is_valid_);
return rows_;
}
@@ -136,4 +136,4 @@ class WeightedSetCoveringModel {
} // namespace operations_research
#endif // OR_TOOLS_ALGORITHMS_WEIGHTED_SET_COVERING_MODEL_H_
#endif // OR_TOOLS_ALGORITHMS_SET_COVER_MODEL_H_

View File

@@ -0,0 +1,174 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/algorithms/set_cover.h"
#include "benchmark/benchmark.h"
#include "gtest/gtest.h"
#include "ortools/base/logging.h"
namespace operations_research {
namespace {
// End-to-end smoke test on a tiny hand-built instance: four unit-cost
// subsets over elements {0, 1, 2}.
TEST(SetCoverTest, InitialValues) {
  SetCoverModel model;
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(0);
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(1);
  model.AddElementToLastSubset(2);
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(1);
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(2);
  EXPECT_TRUE(model.ComputeFeasibility());
  SetCoverLedger ledger(&model);
  TrivialSolutionGenerator trivial(&ledger);
  CHECK(trivial.NextSolution());
  LOG(INFO) << "TrivialSolutionGenerator cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
  // Note: unlike KnightsCoverTrivalAndGreedy below, the ledger is not
  // re-initialized before running Greedy.
  GreedySolutionGenerator greedy(&ledger);
  CHECK(greedy.NextSolution());
  LOG(INFO) << "GreedySolutionGenerator cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
  // 500 is presumably an iteration budget — see SteepestSearch::NextSolution.
  SteepestSearch steepest(&ledger);
  CHECK(steepest.NextSolution(500));
  LOG(INFO) << "SteepestSearch cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
}
// A model where some elements belong to no subset (here, elements 1 and 2,
// whose existence is implied by the max element index 3) must be reported
// as infeasible.
TEST(SetCoverTest, Infeasible) {
  SetCoverModel model;
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(0);
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(3);
  EXPECT_FALSE(model.ComputeFeasibility());
}
// Builds the "knight's cover" instance: one unit-cost subset per board
// square, containing the square itself plus every square a knight can reach
// from it.
SetCoverModel CreateKnightsCoverModel(int num_rows, int num_cols) {
  SetCoverModel model;
  // The eight knight moves, as (row, col) offsets.
  constexpr int kKnightMoves[8][2] = {{2, 1},   {1, 2},   {-1, 2}, {-2, 1},
                                      {-2, -1}, {-1, -2}, {1, -2}, {2, -1}};
  for (int row = 0; row < num_rows; ++row) {
    for (int col = 0; col < num_cols; ++col) {
      model.AddEmptySubset(1);
      model.AddElementToLastSubset(row * num_cols + col);
      for (const auto& move : kKnightMoves) {
        const int new_row = row + move[0];
        const int new_col = col + move[1];
        const bool on_board = new_row >= 0 && new_row < num_rows &&
                              new_col >= 0 && new_col < num_cols;
        if (on_board) {
          model.AddElementToLastSubset(new_row * num_cols + new_col);
        }
      }
    }
  }
  return model;
}
#ifdef NDEBUG
static constexpr int SIZE = 512;
#else
static constexpr int SIZE = 32;
#endif
// Building the knight's cover model should yield a feasible instance.
TEST(SetCoverTest, KnightsCoverCreation) {
  SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE);
  EXPECT_TRUE(model.ComputeFeasibility());
}
// Trivial then greedy generation, each followed by steepest local search.
// NOTE(review): "Trival" is a typo for "Trivial"; renaming would change the
// test's identifier (e.g. for --gtest_filter), so it is kept as-is.
TEST(SetCoverTest, KnightsCoverTrivalAndGreedy) {
  SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE);
  EXPECT_TRUE(model.ComputeFeasibility());
  SetCoverLedger ledger(&model);
  TrivialSolutionGenerator trivial(&ledger);
  CHECK(trivial.NextSolution());
  LOG(INFO) << "TrivialSolutionGenerator cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
  // Reinitialize before using Greedy, to start from scratch.
  ledger.Initialize();
  GreedySolutionGenerator greedy(&ledger);
  CHECK(greedy.NextSolution());
  LOG(INFO) << "GreedySolutionGenerator cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
  SteepestSearch steepest(&ledger);
  CHECK(steepest.NextSolution(100000));
  LOG(INFO) << "SteepestSearch cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
}
// Greedy generation followed by steepest local search.
// FIX(review): unlike every sibling test, this one never validated the model
// nor the produced solutions; the feasibility and CheckSolution() assertions
// are added for consistency with the other tests.
TEST(SetCoverTest, KnightsCoverGreedy) {
  SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE);
  EXPECT_TRUE(model.ComputeFeasibility());
  SetCoverLedger ledger(&model);
  GreedySolutionGenerator greedy(&ledger);
  CHECK(greedy.NextSolution());
  LOG(INFO) << "GreedySolutionGenerator cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
  SteepestSearch steepest(&ledger);
  CHECK(steepest.NextSolution(100000));
  LOG(INFO) << "SteepestSearch cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
}
// Random initial solution, then steepest-descent local search.
TEST(SetCoverTest, KnightsCoverRandom) {
  SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE);
  EXPECT_TRUE(model.ComputeFeasibility());
  SetCoverLedger ledger(&model);
  RandomSolutionGenerator random(&ledger);
  CHECK(random.NextSolution());
  LOG(INFO) << "RandomSolutionGenerator cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
  SteepestSearch steepest(&ledger);
  CHECK(steepest.NextSolution(100000));
  LOG(INFO) << "SteepestSearch cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
}
// Trivial (select-everything) initial solution, then steepest-descent local
// search.
TEST(SetCoverTest, KnightsCoverTrivial) {
  SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE);
  EXPECT_TRUE(model.ComputeFeasibility());
  SetCoverLedger ledger(&model);
  TrivialSolutionGenerator trivial(&ledger);
  CHECK(trivial.NextSolution());
  LOG(INFO) << "TrivialSolutionGenerator cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
  SteepestSearch steepest(&ledger);
  CHECK(steepest.NextSolution(100000));
  LOG(INFO) << "SteepestSearch cost: " << ledger.cost();
  EXPECT_TRUE(ledger.CheckSolution());
}
// Benchmarks greedy generation + steepest local search on the knight's cover
// instance.
// FIX(review): the previous version only constructed the solvers inside the
// timed loop and never called NextSolution(), so the benchmark measured
// constructors instead of solving. Model construction is also hoisted out of
// the timed loop so that only the solve is measured.
// NOTE(review): the Arg(1 << 5) below is not read (SIZE is used instead) —
// consider wiring state.range(0) into the model size.
void BM_Steepest(benchmark::State& state) {
  SetCoverModel model = CreateKnightsCoverModel(SIZE, SIZE);
  for (auto s : state) {
    SetCoverLedger ledger(&model);
    GreedySolutionGenerator greedy(&ledger);
    CHECK(greedy.NextSolution());
    SteepestSearch steepest(&ledger);
    CHECK(steepest.NextSolution(100000));
  }
}
BENCHMARK(BM_Steepest)->Arg(1 << 5);
} // namespace
} // namespace operations_research

View File

@@ -0,0 +1,51 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/algorithms/set_cover_utils.h"
#include "ortools/base/adjustable_priority_queue-inl.h" // IWYU pragma: keep
namespace operations_research {
// Empties the queue and resizes the element storage to one slot per subset,
// each with the default (infinite) priority and out-of-heap index.
void SubsetPriorityQueue::Initialize() {
  max_pq_.Clear();
  pq_elements_.assign(ledger_->model()->num_subsets(), SubsetPriority());
}
// (Re)creates the priority entry for subset and pushes it onto the queue.
// Presumably assumes subset is not already in the queue — TODO confirm
// against AdjustablePriorityQueue::Add's contract.
void SubsetPriorityQueue::Add(SubsetIndex subset, Cost priority) {
  pq_elements_[subset] = SubsetPriority(subset, priority);
  max_pq_.Add(&pq_elements_[subset]);
}
// Updates the priority of subset and re-heapifies. The change is skipped
// when the subset's marginal impact is zero — presumably because such
// subsets are no longer candidates and were removed from the queue; confirm
// against the solvers using this class.
void SubsetPriorityQueue::ChangePriority(SubsetIndex subset, Cost priority) {
  // TODO(user): see if the reference to ledger_ can be removed.
  if (ledger_->marginal_impacts(subset) != 0) {
    pq_elements_[subset].SetPriority(priority);
    max_pq_.NoteChangedPriority(&pq_elements_[subset]);
    DVLOG(1) << "Priority of subset " << subset << " is now "
             << pq_elements_[subset].GetPriority();
  }
}
// Removes subset from the queue. A no-op when the subset is not in the
// queue (it may already have been removed, or never added).
void SubsetPriorityQueue::Remove(SubsetIndex subset) {
  if (max_pq_.Contains(&pq_elements_[subset])) {
    DVLOG(1) << "Removing subset " << subset << " from priority queue";
    max_pq_.Remove(&pq_elements_[subset]);
  }
}
// Returns the highest-priority subset. Requires a non-empty queue.
SubsetIndex SubsetPriorityQueue::TopSubset() const {
  return max_pq_.Top()->GetSubset();
}
} // namespace operations_research

View File

@@ -0,0 +1,158 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OR_TOOLS_ALGORITHMS_SET_COVER_UTILS_H_
#define OR_TOOLS_ALGORITHMS_SET_COVER_UTILS_H_
#include <limits>
#include <vector>
#include "ortools/algorithms/set_cover_ledger.h"
#include "ortools/algorithms/set_cover_model.h"
#include "ortools/base/adjustable_priority_queue.h"
namespace operations_research {
// Element used for AdjustablePriorityQueue. It's an implementation detail.
// Bundles a subset index with its priority and the heap position that
// AdjustablePriorityQueue maintains through Set/GetHeapIndex.
class SubsetPriority {
 public:
  // Default entry: out-of-heap index, subset 0, infinite priority.
  SubsetPriority()
      : heap_index_(-1),
        subset_(0),
        priority_(std::numeric_limits<Cost>::infinity()) {}
  SubsetPriority(SubsetIndex s, Cost cost)
      : heap_index_(s.value()), subset_(s), priority_(cost) {}
  // Heap-position accessors required by AdjustablePriorityQueue.
  void SetHeapIndex(int h) { heap_index_ = h; }
  int GetHeapIndex() const { return heap_index_; }
  // The priority queue maintains the max element. This comparator breaks
  // ties between equal priorities using the subset indices (the original
  // comment said "cardinalities", which the code does not do).
  bool operator<(const SubsetPriority& other) const {
    return priority_ < other.priority_ ||
           (priority_ == other.priority_ && subset_ < other.subset_);
  }
  SubsetIndex GetSubset() const { return subset_; }
  void SetPriority(Cost priority) { priority_ = priority; }
  Cost GetPriority() const { return priority_; }

 private:
  int heap_index_;
  SubsetIndex subset_;
  Cost priority_;
};
using SubsetPriorityVector = glop::StrictITIVector<SubsetIndex, SubsetPriority>;
// Also an implementation detail.
// A max-priority queue over the subsets of a ledger's model, backed by an
// AdjustablePriorityQueue with one pre-allocated SubsetPriority slot per
// subset.
class SubsetPriorityQueue {
 public:
  // Builds a queue sized for ledger's model; the queue starts empty.
  explicit SubsetPriorityQueue(SetCoverLedger* ledger) : ledger_(ledger) {
    Initialize();
  }
  // Adds subset to the priority queue.
  void Add(SubsetIndex subset, Cost priority);
  // Changes the priority of subset in the queue.
  void ChangePriority(SubsetIndex subset, Cost priority);
  // Removes subset from the queue, if it is in the queue.
  void Remove(SubsetIndex subset);
  // Returns true if the subset is in the queue.
  bool Contains(SubsetIndex subset) {
    return max_pq_.Contains(&pq_elements_[subset]);
  }
  // Returns true if the queue is empty.
  bool IsEmpty() const { return max_pq_.IsEmpty(); }
  // Returns the top (highest-priority) subset. Requires a non-empty queue.
  SubsetIndex TopSubset() const;
  // Returns the priority of the subset in the queue.
  Cost Priority(SubsetIndex subset) {
    return pq_elements_[subset].GetPriority();
  }
  // Returns the number of elements currently in the queue.
  ssize_t Size() const { return max_pq_.Size(); }

 private:
  // Initializes the priority queue; called by the constructor.
  void Initialize();
  // The ledger on which the priority queue applies.
  SetCoverLedger* ledger_;
  // The adjustable priority queue per se.
  AdjustablePriorityQueue<SubsetPriority> max_pq_;
  // The elements of the priority queue.
  SubsetPriorityVector pq_elements_;
};
// A Tabu list is a fixed-sized circular array of small size, usually a few
// dozens of elements.
template <typename T>
class TabuList {
public:
explicit TabuList(T size) : array_(0), fill_(0), index_(0) {
array_.resize(size.value(), T(-1));
}
// Returns the size of the array.
int size() const { return array_.size(); }
// Initializes the array of the Tabu list.
void Init(int size) {
array_.resize(size, T(-1));
fill_ = 0;
index_ = 0;
}
// Adds t to the array. When the end of the array is reached, re-start at 0.
void Add(T t) {
const int size = array_.size();
array_[index_] = t;
++index_;
if (index_ >= size) {
index_ = 0;
}
if (fill_ < size) {
++fill_;
}
}
// Returns true if t is in the array. This is O(size), but small.
bool Contains(T t) const {
for (int i = 0; i < fill_; ++i) {
if (t == array_[i]) {
return true;
}
}
return false;
}
private:
std::vector<T> array_;
int fill_;
int index_;
};
} // namespace operations_research
#endif // OR_TOOLS_ALGORITHMS_SET_COVER_UTILS_H_

View File

@@ -1,549 +0,0 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/algorithms/weighted_set_covering.h"
#include <algorithm>
#include <limits>
#include <vector>
#include "absl/container/flat_hash_set.h"
#include "absl/log/check.h"
#include "absl/random/random.h"
#include "ortools/algorithms/weighted_set_covering_model.h"
#include "ortools/base/adjustable_priority_queue-inl.h" // IWYU pragma: keep
#include "ortools/base/logging.h"
namespace operations_research {
constexpr SubsetIndex kNotFound(-1);
void WeightedSetCoveringSolver::Initialize() {
DCHECK(model_.ComputeFeasibility());
model_.CreateSparseRowView();
const SubsetIndex num_subsets(model_.num_subsets());
choices_.assign(num_subsets, false);
is_removable_.assign(num_subsets, false);
times_penalized_.assign(num_subsets, 0);
marginal_impacts_.assign(num_subsets, ElementIndex(0));
const ElementIndex num_elements(model_.num_elements());
coverage_.assign(num_elements, SubsetIndex(0));
cost_ = 0.0;
const SparseColumnView& columns = model_.columns();
pq_elements_.assign(num_subsets, SubsetPriority());
pq_.Clear();
const SubsetCostVector& subset_costs = model_.subset_costs();
for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
marginal_impacts_[subset] = columns[subset].size().value();
}
penalized_costs_ = model_.subset_costs();
gts_priorities_ = model_.subset_costs();
}
void WeightedSetCoveringSolver::StoreSolution() {
best_solution_.StoreCostAndSolution(cost_, choices_);
}
void WeightedSetCoveringSolver::RestoreSolution() {
choices_ = best_solution_.choices();
cost_ = best_solution_.cost();
coverage_ = ComputeCoverage(choices_);
DCHECK(CheckSolution());
}
bool WeightedSetCoveringSolver::CheckSolution() const {
Cost total_cost = 0;
const SubsetCostVector& subset_costs = model_.subset_costs();
const SubsetIndex num_subsets(model_.num_subsets());
for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
if (choices_[subset]) {
total_cost += subset_costs[subset];
}
}
DCHECK_EQ(cost_, total_cost);
return CheckCoverageAndMarginalImpacts(choices_);
}
ElementToSubsetVector WeightedSetCoveringSolver::ComputeCoverage(
const ChoiceVector& choices) const {
const ElementIndex num_elements(model_.num_elements());
const SparseRowView& rows = model_.rows();
// Use "crypterse" naming to avoid confusing with coverage_.
ElementToSubsetVector cvrg(num_elements, SubsetIndex(0));
for (ElementIndex element(0); element < num_elements; ++element) {
for (SubsetIndex subset : rows[element]) {
if (choices[subset]) {
++cvrg[element];
}
}
DCHECK_LE(cvrg[element], rows[element].size().value());
DCHECK_GE(cvrg[element], 0);
}
return cvrg;
}
SubsetToElementVector WeightedSetCoveringSolver::ComputeMarginalImpacts(
const ElementToSubsetVector& cvrg) const {
const ElementIndex num_elements(model_.num_elements());
DCHECK_EQ(num_elements, cvrg.size());
const SparseColumnView& columns = model_.columns();
const SubsetIndex num_subsets(model_.num_subsets());
// Use "crypterse" naming to avoid confusing with marginal_impacts_.
SubsetToElementVector mrgnl_impcts(num_subsets, ElementIndex(0));
for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
for (ElementIndex element : columns[subset]) {
if (cvrg[element] == 0) {
++mrgnl_impcts[subset];
}
}
DCHECK_LE(mrgnl_impcts[subset], columns[subset].size().value());
DCHECK_GE(mrgnl_impcts[subset], 0);
}
return mrgnl_impcts;
}
bool WeightedSetCoveringSolver::CheckCoverage(
const ChoiceVector& choices) const {
const SubsetIndex num_subsets(model_.num_subsets());
DCHECK_EQ(num_subsets, choices.size());
const ElementIndex num_elements(model_.num_elements());
const ElementToSubsetVector cvrg = ComputeCoverage(choices);
for (ElementIndex element(0); element < num_elements; ++element) {
// TODO(user): do a LOG(ERROR) instead
DCHECK_EQ(coverage_[element], cvrg[element]) << " Element = " << element;
}
return true;
}
bool WeightedSetCoveringSolver::CheckCoverageAndMarginalImpacts(
const ChoiceVector& choices) const {
const SubsetIndex num_subsets(model_.num_subsets());
DCHECK_EQ(num_subsets, choices.size());
const ElementToSubsetVector cvrg = ComputeCoverage(choices);
const ElementIndex num_elements(model_.num_elements());
for (ElementIndex element(0); element < num_elements; ++element) {
DCHECK_EQ(coverage_[element], cvrg[element]) << " Element = " << element;
}
const SubsetToElementVector mrgnl_impcts = ComputeMarginalImpacts(cvrg);
for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
DCHECK_EQ(marginal_impacts_[subset], mrgnl_impcts[subset])
<< " Subset = " << subset;
}
return true;
}
ElementToSubsetVector WeightedSetCoveringSolver::ComputeSingleSubsetCoverage(
SubsetIndex subset) const {
const SparseColumnView& columns = model_.columns();
const ElementIndex num_elements(model_.num_elements());
ElementToSubsetVector cvrg(num_elements, SubsetIndex(0));
const SparseRowView& rows = model_.rows();
for (const ElementIndex element : columns[subset]) {
for (SubsetIndex subset : rows[element]) {
if (choices_[subset]) {
++cvrg[element];
}
}
DCHECK_LE(cvrg[element], rows[element].size().value());
DCHECK_GE(cvrg[element], 0);
}
return cvrg;
}
bool WeightedSetCoveringSolver::CheckSingleSubsetCoverage(
SubsetIndex subset) const {
ElementToSubsetVector cvrg = ComputeSingleSubsetCoverage(subset);
const SparseColumnView& columns = model_.columns();
for (const ElementIndex element : columns[subset]) {
DCHECK_EQ(coverage_[element], cvrg[element]) << " Element = " << element;
}
return true;
}
void WeightedSetCoveringSolver::UpdateCoverage(SubsetIndex subset, bool value) {
const SparseColumnView& columns = model_.columns();
const int delta = value ? 1 : -1;
for (const ElementIndex element : columns[subset]) {
#ifndef NDEBUG
const SubsetIndex previous_coverage = coverage_[element];
#endif // NDEBUG
coverage_[element] += delta;
#ifndef NDEBUG
VLOG(1) << "Coverage of element " << element << " changed from "
<< previous_coverage << " to " << coverage_[element];
#endif // NDEBUG
DCHECK_GE(coverage_[element], 0);
DCHECK_LE(coverage_[element], columns[subset].size().value());
}
#ifndef NDEBUG
ElementToSubsetVector cvrg = ComputeSingleSubsetCoverage(subset);
for (const ElementIndex element : columns[subset]) {
DCHECK_EQ(coverage_[element], cvrg[element]) << " Element = " << element;
}
#endif
}
// Returns, sorted by index, every subset sharing at least one element with
// `subset` (including `subset` itself). A flat_hash_set buffers the
// insertions so each impacted subset appears exactly once.
std::vector<SubsetIndex> WeightedSetCoveringSolver::ComputeImpactedSubsets(
    SubsetIndex subset) const {
  const SparseColumnView& columns = model_.columns();
  const SparseRowView& rows = model_.rows();
  absl::flat_hash_set<SubsetIndex> seen;
  for (const ElementIndex element : columns[subset]) {
    for (const SubsetIndex other_subset : rows[element]) {
      seen.insert(other_subset);
    }
  }
  // `subset` intersects itself, so it must have been collected.
  DCHECK(seen.find(subset) != seen.end());
  std::vector<SubsetIndex> impacted_subsets(seen.begin(), seen.end());
  DCHECK_LE(impacted_subsets.size(), model_.num_subsets());
  std::sort(impacted_subsets.begin(), impacted_subsets.end());
  return impacted_subsets;
}
// Recomputes is_removable_ for every subset in `impacted_subsets`. A subset
// is removable iff none of its elements is covered exactly once, i.e.
// deselecting it would leave no element uncovered.
void WeightedSetCoveringSolver::UpdateIsRemovable(
    const std::vector<SubsetIndex>& impacted_subsets) {
  const SparseColumnView& columns = model_.columns();
  for (const SubsetIndex subset : impacted_subsets) {
    bool removable = true;
    for (const ElementIndex element : columns[subset]) {
      if (coverage_[element] == 1) {  // Some subset is its sole cover.
        removable = false;
        break;
      }
    }
    is_removable_[subset] = removable;
    DCHECK_EQ(is_removable_[subset], CanBeRemoved(subset));
  }
}
// Recomputes marginal_impacts_ for every subset in `impacted_subsets`. The
// marginal impact of a subset is the number of its elements that are still
// uncovered, i.e. how many new elements selecting it would cover.
void WeightedSetCoveringSolver::UpdateMarginalImpacts(
    const std::vector<SubsetIndex>& impacted_subsets) {
  const SparseColumnView& columns = model_.columns();
  for (const SubsetIndex subset : impacted_subsets) {
    ElementIndex num_uncovered(0);
    for (const ElementIndex element : columns[subset]) {
      if (coverage_[element] == 0) {
        ++num_uncovered;
      }
    }
#ifndef NDEBUG
    VLOG(1) << "Changing impact of subset " << subset << " from "
            << marginal_impacts_[subset] << " to " << num_uncovered;
#endif  // NDEBUG
    marginal_impacts_[subset] = num_uncovered;
    DCHECK_GE(marginal_impacts_[subset], 0);
    DCHECK_LE(marginal_impacts_[subset], columns[subset].size().value());
  }
}
// Sets choices_[subset] to `value` and keeps cost_ in sync. The assignment
// must actually change, otherwise cost_ would drift.
void WeightedSetCoveringSolver::ToggleChoice(SubsetIndex subset, bool value) {
#ifndef NDEBUG
  VLOG(1) << "Changing assignment of subset " << subset << " to " << value;
#endif  // NDEBUG
  DCHECK(choices_[subset] != value);
  const Cost subset_cost = model_.subset_costs()[subset];
  cost_ += value ? subset_cost : -subset_cost;
  choices_[subset] = value;
}
// Flips choices_[subset] to `value`, maintaining cost_, coverage_ and
// is_removable_, and returns the subsets whose state may have changed.
std::vector<SubsetIndex> WeightedSetCoveringSolver::Toggle(SubsetIndex subset,
                                                           bool value) {
  ToggleChoice(subset, value);
  UpdateCoverage(subset, value);
  std::vector<SubsetIndex> impacted_subsets = ComputeImpactedSubsets(subset);
  UpdateIsRemovable(impacted_subsets);
  return impacted_subsets;
}
// Builds the trivial solution that selects every subset, then stores it as
// the current best. With everything selected, no element is uncoverable and
// every marginal impact is zero.
void WeightedSetCoveringSolver::GenerateTrivialSolution() {
  const SubsetCostVector& subset_costs = model_.subset_costs();
  const SubsetIndex num_subsets(model_.num_subsets());
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    choices_[subset] = true;
    cost_ += subset_costs[subset];
    marginal_impacts_[subset] = 0;
  }
  coverage_ = ComputeCoverage(choices_);
  // A feasible model must cover every element when all subsets are used.
  const ElementIndex num_elements = model_.num_elements();
  for (ElementIndex element(0); element < num_elements; ++element) {
    DCHECK_GT(coverage_[element], SubsetIndex(0));
  }
  DCHECK(CheckSolution());
  DCHECK(CheckCoverageAndMarginalImpacts(choices_));
  StoreSolution();
}
// Refreshes the greedy priority queue for `impacted_subsets`. The priority of
// a subset is minus its marginal cost increase (cost divided by the number of
// still-uncovered elements it contains); subsets that would cover nothing new
// are dropped from the queue.
void WeightedSetCoveringSolver::UpdateGreedyPriorities(
    const std::vector<SubsetIndex>& impacted_subsets) {
  const SubsetCostVector& subset_costs = model_.subset_costs();
  for (const SubsetIndex subset : impacted_subsets) {
    const ElementIndex marginal_impact(marginal_impacts_[subset]);
    if (marginal_impact == 0) {
      // Nothing left to cover: the subset is useless for the greedy phase.
      if (pq_.Contains(&pq_elements_[subset])) {
#ifndef NDEBUG
        VLOG(1) << "Removing subset " << subset << " from priority queue";
#endif  // NDEBUG
        pq_.Remove(&pq_elements_[subset]);
      }
    } else {
      const Cost marginal_cost_increase =
          subset_costs[subset] / marginal_impact.value();
      pq_elements_[subset].SetPriority(-marginal_cost_increase);
      pq_.NoteChangedPriority(&pq_elements_[subset]);
#ifndef NDEBUG
      VLOG(1) << "Priority of subset " << subset << " is now "
              << pq_elements_[subset].GetPriority();
#endif  // NDEBUG
    }
  }
}
// Chvatal's greedy heuristic: repeatedly select the subset with the smallest
// marginal cost per newly covered element until every element is covered.
void WeightedSetCoveringSolver::GenerateGreedySolution() {
  const SparseColumnView& columns = model_.columns();
  const SubsetCostVector& subset_costs = model_.subset_costs();
  const SubsetIndex num_subsets(model_.num_subsets());
  pq_elements_.assign(num_subsets, SubsetPriority());
  pq_.Clear();
  // The queue is a max-heap (see SubsetPriority::operator<), so we store the
  // opposite of the marginal cost increase: Top() then yields the subset with
  // the minimum marginal cost increase.
  for (SubsetIndex subset(0); subset < num_subsets; ++subset) {
    // Initially nothing is covered, so the marginal impact of a subset is
    // simply its cardinality.
    marginal_impacts_[subset] = columns[subset].size().value();
    const Cost marginal_cost_increase =
        subset_costs[subset] / marginal_impacts_[subset].value();
    pq_elements_[subset] =
        SubsetPriority(subset.value(), subset, -marginal_cost_increase);
    pq_.Add(&pq_elements_[subset]);
  }
  const ElementIndex num_elements(model_.num_elements());
  ElementIndex num_uncovered_elements(num_elements);
  while (num_uncovered_elements > 0) {
    const auto top = pq_.Top();
    const SubsetIndex best_subset = top->GetSubset();
#ifndef NDEBUG
    VLOG(1) << "Best subset: " << best_subset.value()
            << " Priority = " << top->GetPriority()
            << " queue size = " << pq_.Size();
#endif
    // Select best_subset: this maintains cost_, coverage_ and is_removable_,
    // and returns every subset sharing an element with it.
    const std::vector<SubsetIndex> impacted_subsets = Toggle(best_subset, true);
    UpdateMarginalImpacts(impacted_subsets);
    DCHECK(CheckCoverageAndMarginalImpacts(choices_));
    DCHECK_EQ(marginal_impacts_[best_subset], 0);
    UpdateGreedyPriorities(impacted_subsets);
    // Update num_uncovered_elements_.
    // By definition the elements of best_subset are all covered. Only the ones
    // that are covered exactly once are the ones that were newly covered by
    // this selection.
    for (const ElementIndex element : columns[best_subset]) {
      if (coverage_[element] == 1) {
        --num_uncovered_elements;
      }
    }
  }
  DCHECK_EQ(num_uncovered_elements, 0);
  DCHECK(CheckSolution());
  StoreSolution();
}
// Returns true iff `subset` can be deselected without leaving any of its
// elements uncovered, i.e. each of its elements is also covered by another
// selected subset.
bool WeightedSetCoveringSolver::CanBeRemoved(SubsetIndex subset) const {
  DCHECK(CheckSingleSubsetCoverage(subset));
  for (const ElementIndex element : model_.columns()[subset]) {
    if (coverage_[element] == 1) {
      return false;  // `subset` is the only cover for `element`.
    }
  }
  return true;
}
// Refreshes the steepest-descent priority queue for `impacted_subsets`.
// Subsets that are not (selected and removable) get the negative priority
// -cost, which sinks them in the max-heap.
void WeightedSetCoveringSolver::UpdateSteepestPriorities(
    const std::vector<SubsetIndex>& impacted_subsets) {
  // Update priority queue.
  const SubsetCostVector& subset_costs = model_.subset_costs();
  for (const SubsetIndex subset : impacted_subsets) {
    if (choices_[subset] && is_removable_[subset]) {
      // NOTE(review): despite the log message below, this branch neither
      // removes the entry nor updates its priority — the entry keeps whatever
      // priority it had. Confirm whether a pq_.Remove() or a priority update
      // to +subset_costs[subset] was intended here.
#ifndef NDEBUG
      VLOG(1) << "Removing subset " << subset << " from priority queue";
#endif
    } else {
      pq_elements_[subset].SetPriority(-subset_costs[subset]);
      pq_.NoteChangedPriority(&pq_elements_[subset]);
    }
  }
}
// Steepest descent: repeatedly deselects the redundant subset whose removal
// yields the largest cost decrease, for at most `num_iterations` iterations
// or until no selected subset is removable.
void WeightedSetCoveringSolver::Steepest(int num_iterations) {
  const SparseColumnView& columns = model_.columns();
  const SubsetCostVector& subset_costs = model_.subset_costs();
  // Create priority queue with cost of using a subset, by decreasing order.
  // Do it only for removable subsets: non-removable ones get the negative
  // priority -cost so they sink to the bottom of the max-heap.
  pq_elements_.clear();
  pq_elements_.assign(columns.size(), SubsetPriority());
  pq_.Clear();
  for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
    // The priority is the gain from removing the subset from the solution.
    const Cost priority = (choices_[subset] && CanBeRemoved(subset))
                              ? subset_costs[subset]
                              : -subset_costs[subset];
    pq_elements_[subset] = SubsetPriority(subset.value(), subset, priority);
    pq_.Add(&pq_elements_[subset]);
  }
  // TODO(user): Pop() the priority queue.
  for (int iteration = 0; iteration < num_iterations; ++iteration) {
    // A negative top priority means no selected subset is removable anymore.
    const Cost priority = pq_.Top()->GetPriority();
    if (priority < 0) {
      break;
    }
    const SubsetIndex best_subset = pq_.Top()->GetSubset();
    const Cost cost_decrease = subset_costs[best_subset];
#ifndef NDEBUG
    VLOG(1) << "Iteration " << iteration << " Subset: " << best_subset.value()
            << " at " << choices_[best_subset]
            << " can be removed = " << CanBeRemoved(best_subset)
            << " is removable = " << is_removable_[best_subset]
            << " cost_decrease = " << cost_decrease
            << " priority = " << pq_.Top()->GetPriority();
#endif
    DCHECK_EQ(cost_decrease, pq_.Top()->GetPriority());
    DCHECK(choices_[best_subset]);
    DCHECK(CanBeRemoved(best_subset));
    DCHECK_EQ(is_removable_[best_subset], CanBeRemoved(best_subset));
    // Deselect best_subset; this maintains cost_, coverage_ and
    // is_removable_ for every impacted subset.
    const std::vector<SubsetIndex> impacted_subsets =
        Toggle(best_subset, false);
    UpdateSteepestPriorities(impacted_subsets);
    DCHECK_EQ(pq_elements_.size(), columns.size());
  }
  StoreSolution();
}
void WeightedSetCoveringSolver::ResetGuidedTabuSearch() {
const SparseColumnView& columns = model_.columns();
const SubsetCostVector& subset_costs = model_.subset_costs();
penalized_costs_ = subset_costs;
gts_priorities_ = subset_costs;
for (SubsetIndex subset(0); subset < gts_priorities_.size(); ++subset) {
gts_priorities_[subset] /= columns[subset].size().value();
}
}
void WeightedSetCoveringSolver::GuidedTabuSearch(int num_iterations) {
const SparseColumnView& columns = model_.columns();
const SubsetCostVector& subset_costs = model_.subset_costs();
constexpr Cost kMaxPossibleCost = std::numeric_limits<Cost>::max();
Cost best_cost = best_solution_.cost();
Cost total_pen_cost = 0;
for (const Cost pen_cost : penalized_costs_) {
total_pen_cost += pen_cost;
}
for (int iteration = 0; iteration < num_iterations; ++iteration) {
Cost smallest_penalized_cost_increase = kMaxPossibleCost;
SubsetIndex best_subset = kNotFound;
for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
const Cost penalized_delta = penalized_costs_[subset];
// TODO(user): re-check that CanBeRemoved is computed/checked properly.
VLOG(1) << "Subset: " << subset.value() << " at " << choices_[subset]
<< " can be removed = " << CanBeRemoved(subset)
<< " is removable = " << is_removable_[subset]
<< " penalized_delta = " << penalized_delta
<< " smallest_penalized_cost_increase = "
<< smallest_penalized_cost_increase;
if (!choices_[subset]) {
// Try to use subset in solution, if its penalized delta is good.
if (penalized_delta < smallest_penalized_cost_increase) {
smallest_penalized_cost_increase = penalized_delta;
best_subset = subset;
}
} else {
// Try to remove subset from solution, if the gain from removing, is
// OK:
if (-penalized_delta < smallest_penalized_cost_increase &&
// and it can be removed, and
is_removable_[subset] &&
// it is not Tabu OR decreases the actual cost:
(!tabu_list_.Contains(subset) ||
cost_ - subset_costs[subset] < best_cost)) {
smallest_penalized_cost_increase = -penalized_delta;
best_subset = subset;
}
}
}
if (best_subset == kNotFound) { // Local minimum reached.
RestoreSolution();
return;
}
total_pen_cost += smallest_penalized_cost_increase;
const std::vector<SubsetIndex> impacted_subsets =
Toggle(best_subset, !choices_[best_subset]);
UpdateMarginalImpacts(impacted_subsets);
DCHECK(CheckCoverageAndMarginalImpacts(choices_));
DCHECK_EQ(marginal_impacts_[best_subset], 0);
UpdatePenalties();
tabu_list_.Add(best_subset);
if (cost_ < best_cost) {
LOG(INFO) << "It;eration:" << iteration << ", current cost = " << cost_
<< ", best cost = " << best_cost
<< ", penalized cost = " << total_pen_cost;
best_cost = cost_;
}
}
RestoreSolution();
}
// Returns true with probability 0.5. Note that a fresh absl::BitGen is
// constructed (and seeded) on every call, which is comparatively expensive
// and makes runs non-repeatable; the TODO below covers both concerns.
bool WeightedSetCoveringSolver::FlipCoin() const {
  // TODO(user): use STL for repeatable testing.
  return absl::Bernoulli(absl::BitGen(), 0.5);
}
// Counts the elements of `subset` that are covered by at least one selected
// subset in the current solution.
ElementIndex WeightedSetCoveringSolver::ComputeNumElementsAlreadyCovered(
    SubsetIndex subset) const {
  ElementIndex covered_count(0);
  for (const ElementIndex element : model_.columns()[subset]) {
    if (coverage_[element] >= 1) {
      ++covered_count;
    }
  }
  return covered_count;
}
void WeightedSetCoveringSolver::UpdatePenalties() {
const SparseColumnView& columns = model_.columns();
const SubsetCostVector& subset_costs = model_.subset_costs();
Cost largest_priority = -1.0;
for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
if (choices_[subset]) {
largest_priority = std::max(largest_priority, gts_priorities_[subset]) /
ComputeNumElementsAlreadyCovered(subset).value();
}
}
const double radius = radius_factor_ * largest_priority;
for (SubsetIndex subset(0); subset < columns.size(); ++subset) {
if (choices_[subset]) {
const double subset_priority = gts_priorities_[subset];
if ((largest_priority - subset_priority <= radius) && FlipCoin()) {
++times_penalized_[subset];
const int times_penalized = times_penalized_[subset];
const Cost cost = subset_costs[subset] / columns[subset].size().value();
gts_priorities_[subset] = cost / (1 + times_penalized);
penalized_costs_[subset] =
cost * (1 + penalty_factor_ * times_penalized);
}
}
}
}
} // namespace operations_research

View File

@@ -1,391 +0,0 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OR_TOOLS_ALGORITHMS_WEIGHTED_SET_COVERING_H_
#define OR_TOOLS_ALGORITHMS_WEIGHTED_SET_COVERING_H_
#include <limits>
#include <utility>
#include <vector>

#include "ortools/algorithms/weighted_set_covering_model.h"
#include "ortools/base/adjustable_priority_queue.h"
// Solver class for the weighted set covering problem.
//
// The first solution is obtained using the Chvatal heuristic, that guarantees
// that the solution is at most 1 + log(n) times the optimal value.
// Vasek Chvatal, 1979. A greedy heuristic for the set-covering problem.
// Mathematics of Operations Research, 4(3):233-235, 1979.
// www.jstor.org/stable/3689577
//
// The idea is basically to compute the cost per element for a T_j to cover
// them, and to start by the ones with the best such amortized costs.
//
// The following paper dives into the details this class of algorithms.
// Neal E. Young, Greedy Set-Cover Algorithms (1974-1979, Chvatal, Johnson,
// Lovasz, Stein), in Kao, ed., Encyclopedia of Algorithms.
// Draft at: http://www.cs.ucr.edu/~neal/non_arxiv/Young08SetCover.pdf
//
// The first solution is then improved by using local search descent, which
// eliminates the T_j's that have no interest in the solution.
//
// A guided local search (GLS) metaheuristic is also provided.
//
// TODO(user): add Large Neighborhood Search that removes a collection of T_j
// (with a parameterized way to choose them), and that runs the algorithm here
// on the corresponding subproblem.
//
// TODO(user): make Large Neighborhood Search concurrent, solving independent
// subproblems in different threads.
//
// The solution procedure is based on the general scheme known as local search.
// Once a solution exists, it is improved by modifying it slightly, for example
// by flipping a binary variable, so as to minimize the cost.
// But first, we have to generate a first solution that is as good as possible.
//
// An obvious idea is to take all the T_j's (or equivalently to set all the
// x_j's to 1). It's a bit silly but fast, and we can improve on it later.
// set_covering.UseEverything();
//
// A better idea is to use Chvatal's heuristic as described in the
// abovementioned paper: Choose the subset that covers as many remaining
// uncovered elements as possible for the least possible cost and iterate.
// set_covering.GenerateGreedySolution();
//
// Now that we have a first solution to the problem, there may be (most often,
// there are) elements in S that are covered several times. To decrease the
// total cost, Steepest tries to eliminate some redundant T_j's from the
// solution or equivalently, to flip some x_j's from 1 to 0.
// set_covering.Steepest(num_steepest_iterations);
//
// As usual and well-known with local search, Steepest reaches a local minimum.
// We therefore implement Guided Tabu Search, which is a crossover of
// Guided Local Search and Tabu Search.
// Guided Local Search penalizes the parts of the solution that have been often
// used, while Tabu Search makes it possible to degrade the solution temporarily
// by disallowing to go back for a certain time (changes are put in a "Tabu"
// list).
//
// C. Voudouris (1997) "Guided local search for combinatorial optimisation
// problems", PhD Thesis, University of Essex, Colchester, UK, July, 1997.
// F. Glover (1989) "Tabu Search Part 1". ORSA Journal on Computing.
// 1 (3): 190-206. doi:10.1287/ijoc.1.3.190.
// F. Glover (1990) "Tabu Search Part 2". ORSA Journal on Computing.
// 2 (1): 4-32. doi:10.1287/ijoc.2.1.4.
// To use Guided Tabu Search, use:
// set_covering.GuidedTabuSearch(num_guided_tabu_search_iterations);
//
// G. Cormode, H. Karloff, A.Wirth (2010) "Set Cover Algorithms for Very Large
// Datasets", CIKM'10, ACM, 2010.
// TODO(user): Add documentation for the parameters that can be set.
namespace operations_research {
// Representation of a solution, with the values of the variables, along its
// the total cost.
using ChoiceVector = glop::StrictITIVector<SubsetIndex, bool>;
// Representation of a solution: the boolean assignment of each subset along
// with the resulting total cost.
class WeightedSetCoveringSolution {
 public:
  WeightedSetCoveringSolution() : cost_(0), choices_() {}
  // Takes `assignment` by value and moves it into place (the original copied
  // it a second time).
  WeightedSetCoveringSolution(Cost cost, ChoiceVector assignment)
      : cost_(cost), choices_(std::move(assignment)) {}

  // Overwrites both the cost and the assignment in one call.
  void StoreCostAndSolution(Cost cost, ChoiceVector assignment) {
    cost_ = cost;
    choices_ = std::move(assignment);
  }

  // Returns the cost of the solution.
  Cost cost() const { return cost_; }
  void SetCost(Cost cost) { cost_ = cost; }
  void AddToCost(Cost value) { cost_ += value; }
  void SubtractFromCost(Cost value) { cost_ -= value; }

  // Per-subset accessors for the boolean assignment.
  bool IsSet(SubsetIndex subset) const { return choices_[subset]; }
  void Set(SubsetIndex subset, bool value) { choices_[subset] = value; }

  // Returns the assignment of the solution (by copy).
  ChoiceVector choices() const { return choices_; }

  // Returns assignment as a "usable", less strongly typed vector of bool.
  // It's easier to use by the calling code.
  std::vector<bool> ChoicesAsVectorOfBooleans() const {
    std::vector<bool> assignment;
    assignment.reserve(choices_.size().value());  // Single allocation.
    for (int i = 0; i < choices_.size(); ++i) {
      assignment.push_back(choices_[SubsetIndex(i)]);
    }
    return assignment;
  }

 private:
  // Total cost of the solution.
  Cost cost_;
  // choices_[subset] is true iff `subset` is selected.
  ChoiceVector choices_;
};
template <typename T>
class TabuList {
public:
explicit TabuList(T size) : array_(0), fill_(0), index_(0) {
array_.resize(size.value(), T(-1));
}
int size() const { return array_.size(); }
void Init(int size) {
array_.resize(size, T(-1));
fill_ = 0;
index_ = 0;
}
void Add(T t) {
const int size = array_.size();
array_[index_] = t;
++index_;
if (index_ >= size) {
index_ = 0;
}
if (fill_ < size) {
++fill_;
}
}
bool Contains(T t) const {
for (int i = 0; i < fill_; ++i) {
if (t == array_[i]) {
return true;
}
}
return false;
}
private:
std::vector<T> array_;
int fill_;
int index_;
};
// Element used for AdjustablePriorityQueue. Wraps a subset index with its
// current priority and the heap-position bookkeeping the queue requires.
class SubsetPriority {
 public:
  // Default-constructed entries are "not in the queue" (heap_index_ == -1)
  // with +infinity priority.
  SubsetPriority()
      : heap_index_(-1),
        subset_(0),
        priority_(std::numeric_limits<Cost>::infinity()) {}
  SubsetPriority(int h, SubsetIndex s, Cost cost)
      : heap_index_(h), subset_(s), priority_(cost) {}
  // Heap bookkeeping, called by AdjustablePriorityQueue.
  void SetHeapIndex(int h) { heap_index_ = h; }
  int GetHeapIndex() const { return heap_index_; }
  // The priority queue maintains the max element. This comparator breaks ties
  // between subsets using their cardinalities.
  bool operator<(const SubsetPriority& other) const {
    return priority_ < other.priority_ ||
           (priority_ == other.priority_ && subset_ < other.subset_);
  }
  SubsetIndex GetSubset() const { return subset_; }
  void SetPriority(Cost priority) { priority_ = priority; }
  Cost GetPriority() const { return priority_; }

 private:
  int heap_index_;      // Position in the heap, -1 when not enqueued.
  SubsetIndex subset_;  // The subset this entry refers to.
  Cost priority_;       // Current priority of the subset.
};
using SubsetCountVector = glop::StrictITIVector<SubsetIndex, int>;
using SubsetBoolVector = glop::StrictITIVector<SubsetIndex, bool>;
using SubsetPriorityVector = glop::StrictITIVector<SubsetIndex, SubsetPriority>;
// Solver for the weighted set covering problem; see the file comment above
// for the overall solution scheme (greedy first solution, steepest descent,
// then Guided Tabu Search).
class WeightedSetCoveringSolver {
 public:
  // Constructs an empty weighted set covering problem.
  explicit WeightedSetCoveringSolver(const WeightedSetCoveringModel& model)
      : model_(model),
        lagrangian_factor_(kDefaultLagrangianFactor),
        penalty_factor_(kDefaultPenaltyFactor),
        radius_factor_(kDefaultRadiusFactor),
        tabu_list_(SubsetIndex(kDefaultTabuListSize)) {}
  // Setters and getters for the Guided Tabu Search algorithm parameters.
  void SetPenaltyFactor(double factor) { penalty_factor_ = factor; }
  double GetPenaltyFactor() const { return penalty_factor_; }
  // TODO(user): re-introduce this in the code. It was used to favor
  // subsets with the same marginal costs but that would cover more elements.
  // But first, see if it makes sense to compute it.
  void SetLagrangianFactor(double factor) { lagrangian_factor_ = factor; }
  double GetLagrangianFactor() const { return lagrangian_factor_; }
  void SetRadiusFactor(double r) { radius_factor_ = r; }
  double GetRadius() const { return radius_factor_; }
  // Note that setting the size re-initializes (empties) the tabu list.
  void SetTabuListSize(int size) { tabu_list_.Init(size); }
  int GetTabuListSize() const { return tabu_list_.size(); }
  // Initializes the solver once the data is set. The model cannot be changed
  // afterwards. Only lagrangian_factor_, radius_factor_ and tabu_list_size_
  // can be modified.
  void Initialize();
  // Generates a trivial solution using all the subsets.
  void GenerateTrivialSolution();
  // Generates a solution using a greedy algorithm.
  void GenerateGreedySolution();
  // Returns true if the elements selected in the current solution cover all
  // the elements of the set.
  bool CheckSolution() const;
  // Runs a steepest local search.
  void Steepest(int num_iterations);
  // Runs num_iterations of guided tabu search.
  // TODO(user): Describe what a Guided Tabu Search is.
  void GuidedTabuSearch(int num_iterations);
  // Resets the guided tabu search by clearing the penalties and the tabu list.
  // TODO(user): review whether this is public or not.
  void ResetGuidedTabuSearch();
  // Stores the solution. TODO(user): review whether this is public or not.
  void StoreSolution();
  // Restores the solution. TODO(user): review whether this is public or not.
  void RestoreSolution();
  // Returns the current solution.
  WeightedSetCoveringSolution GetSolution() const { return solution_; }
  // Returns the best solution.
  WeightedSetCoveringSolution GetBestSolution() const { return best_solution_; }

 private:
  // Computes the number of elements in subset that are already covered in the
  // current solution.
  ElementIndex ComputeNumElementsAlreadyCovered(SubsetIndex subset) const;
  // Returns a Boolean with a .5 probability. TODO(user): remove from the class.
  bool FlipCoin() const;
  // Sets choices_[subset] to value and maintains cost_.
  // TODO(user): remove this.
  void ToggleChoice(SubsetIndex subset, bool value);
  // Like ToggleChoice, but also maintains coverage_ and is_removable_, and
  // returns the subsets sharing an element with subset.
  std::vector<SubsetIndex> Toggle(SubsetIndex subset, bool value);
  // Returns the number of elements currently covered by subset.
  ElementToSubsetVector ComputeSingleSubsetCoverage(SubsetIndex subset) const;
  // TODO(user): add a specific section in the class.
  // Checks that the value of coverage_ is correct by recomputing and comparing.
  bool CheckSingleSubsetCoverage(SubsetIndex subset) const;
  // Returns a vector containing the number of subsets covering each element.
  ElementToSubsetVector ComputeCoverage(const ChoiceVector& choices) const;
  // Update coverage_ for subset when setting choices_[subset] to value.
  void UpdateCoverage(SubsetIndex subset, bool value);
  // Returns the subsets that share at least one element with subset.
  // TODO(user): use union-find to make it faster?
  std::vector<SubsetIndex> ComputeImpactedSubsets(SubsetIndex subset) const;
  // Updates is_removable_ for each subset in impacted_subsets.
  void UpdateIsRemovable(const std::vector<SubsetIndex>& impacted_subsets);
  // Updates marginal_impacts_ for each subset in impacted_subsets.
  void UpdateMarginalImpacts(const std::vector<SubsetIndex>& impacted_subsets);
  // Checks that coverage_ is consistent with choices.
  bool CheckCoverage(const ChoiceVector& choices) const;
  // Checks that coverage_ and marginal_impacts_ are consistent with choices.
  bool CheckCoverageAndMarginalImpacts(const ChoiceVector& choices) const;
  // Computes marginal impacts based on a coverage cvrg.
  SubsetToElementVector ComputeMarginalImpacts(
      const ElementToSubsetVector& cvrg) const;
  // Updates the priorities for the greedy first solution algorithm.
  void UpdateGreedyPriorities(const std::vector<SubsetIndex>& impacted_subsets);
  // Updates the priorities for the steepest local search algorithm.
  void UpdateSteepestPriorities(
      const std::vector<SubsetIndex>& impacted_subsets);
  // Returns true if subset can be removed from the solution, i.e. it is
  // redundant to cover all the elements.
  // This function is used to check that _is_removable_[subset] is consistent.
  bool CanBeRemoved(SubsetIndex subset) const;
  // Updates the penalties for Guided Local Search.
  void UpdatePenalties();

  // The weighted set covering model on which the solver is run.
  WeightedSetCoveringModel model_;
  // The best solution found so far for the problem.
  WeightedSetCoveringSolution best_solution_;
  // The current solution.
  WeightedSetCoveringSolution solution_;
  // Current cost_.
  Cost cost_;
  // Current assignment.
  ChoiceVector choices_;
  // Priorities for the different subsets.
  // They are similar to the marginal cost of using a particular subset,
  // defined as the cost of the subset divided by the number of elements that
  // are still not covered.
  SubsetCostVector gts_priorities_;
  // Priority queue.
  AdjustablePriorityQueue<SubsetPriority> pq_;
  // Elements in the priority queue.
  SubsetPriorityVector pq_elements_;
  // marginal_impacts_[subset] is the number of elements in subset that are
  // still uncovered in the current solution.
  SubsetToElementVector marginal_impacts_;
  // The coverage of an element is the number of used subsets which contains
  // the said element.
  ElementToSubsetVector coverage_;
  // True if the subset can be removed from the solution without making it
  // infeasible.
  SubsetBoolVector is_removable_;
  // Search handling variables and default parameters.
  static constexpr double kDefaultLagrangianFactor = 100.0;
  double lagrangian_factor_;
  // Guided local search-related data.
  static constexpr double kPenaltyUpdateEpsilon = 1e-1;
  static constexpr double kDefaultPenaltyFactor = 0.2;
  double penalty_factor_;
  static constexpr double kDefaultRadiusFactor = 1e-8;
  double radius_factor_;
  // Penalized costs for each subset as used in Guided Tabu Search.
  SubsetCostVector penalized_costs_;
  // The number of times each subset was penalized during Guided Tabu Search.
  SubsetCountVector times_penalized_;
  // Tabu search-related data.
  static constexpr int kDefaultTabuListSize = 17;  // Nice prime number.
  TabuList<SubsetIndex> tabu_list_;
};
} // namespace operations_research
#endif // OR_TOOLS_ALGORITHMS_WEIGHTED_SET_COVERING_H_

View File

@@ -1,93 +0,0 @@
// Copyright 2010-2022 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/algorithms/weighted_set_covering.h"
#include "gtest/gtest.h"
#include "ortools/algorithms/weighted_set_covering_model.h"
#include "ortools/base/logging.h"
namespace operations_research {
namespace {
// Builds a tiny feasible model (4 unit-cost subsets over elements {0, 1, 2})
// and checks that the greedy + steepest pipeline runs end to end.
TEST(SetCoveringTest, InitialValues) {
  WeightedSetCoveringModel model;
  // Subset 0 = {0}, cost 1.
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(0);
  // Subset 1 = {1, 2}, cost 1.
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(1);
  model.AddElementToLastSubset(2);
  // Subset 2 = {1}, cost 1.
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(1);
  // Subset 3 = {2}, cost 1.
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(2);
  EXPECT_TRUE(model.ComputeFeasibility());
  WeightedSetCoveringSolver set_covering(model);
  set_covering.Initialize();
  set_covering.GenerateGreedySolution();
  set_covering.Steepest(500);
  // set_covering.GuidedTabuSearch(500);
  set_covering.RestoreSolution();
}
// Elements 1 and 2 belong to no subset (only 0 and 3 are mentioned), so the
// model cannot be feasible.
TEST(SetCoveringTest, Infeasible) {
  WeightedSetCoveringModel model;
  // Subset 0 = {0}, cost 1.
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(0);
  // Subset 1 = {3}, cost 1.
  model.AddEmptySubset(1);
  model.AddElementToLastSubset(3);
  EXPECT_FALSE(model.ComputeFeasibility());
}
// Knight's-cover problem on a 30x30 board: one subset per square, containing
// the square itself plus every square a knight's move away. Covering all
// elements means every square is occupied or attacked by a knight. Exercises
// the trivial, greedy, steepest and guided-tabu-search phases, logging the
// cost after each one.
TEST(SetCoveringTest, KnightsCover) {
  // The eight (row, col) offsets of a knight's move, index-aligned.
  const int knight_row_move[] = {2, 1, -1, -2, -2, -1, 1, 2};
  const int knight_col_move[] = {1, 2, 2, 1, -1, -2, -2, -1};
  WeightedSetCoveringModel model;
  const int num_rows = 30;
  const int num_cols = 30;
  for (int row = 0; row < num_rows; ++row) {
    for (int col = 0; col < num_cols; ++col) {
      // One unit-cost subset per square; elements are row-major square ids.
      model.AddEmptySubset(1);
      model.AddElementToLastSubset(row * num_cols + col);
      for (int i = 0; i < 8; ++i) {
        const int new_row = row + knight_row_move[i];
        const int new_col = col + knight_col_move[i];
        if (new_row >= 0 && new_row < num_rows && new_col >= 0 &&
            new_col < num_cols) {
          model.AddElementToLastSubset(new_row * num_cols + new_col);
        }
      }
    }
  }
  EXPECT_TRUE(model.ComputeFeasibility());
  WeightedSetCoveringSolver set_covering(model);
  set_covering.Initialize();
  set_covering.GenerateTrivialSolution();
  set_covering.RestoreSolution();
  LOG(INFO) << set_covering.GetBestSolution().cost();
  EXPECT_TRUE(set_covering.CheckSolution());
  set_covering.Initialize();
  set_covering.GenerateGreedySolution();
  set_covering.RestoreSolution();
  LOG(INFO) << set_covering.GetBestSolution().cost();
  set_covering.Steepest(1000);
  set_covering.RestoreSolution();
  LOG(INFO) << set_covering.GetBestSolution().cost();
  set_covering.GuidedTabuSearch(10);
  set_covering.RestoreSolution();
  LOG(INFO) << set_covering.GetBestSolution().cost();
  EXPECT_TRUE(set_covering.CheckSolution());
}
} // namespace
} // namespace operations_research