more cleaning on graphs
This commit is contained in:
@@ -724,6 +724,7 @@ cc_library(
|
||||
|
||||
cc_library(
|
||||
name = "parse_dimacs_assignment",
|
||||
srcs = ["parse_dimacs_assignment.cc"],
|
||||
hdrs = ["parse_dimacs_assignment.h"],
|
||||
deps = [
|
||||
"//ortools/base",
|
||||
@@ -877,6 +878,7 @@ cc_test(
|
||||
# Frequency Assignment Problem
|
||||
cc_library(
|
||||
name = "fap_parser",
|
||||
srcs = ["fap_parser.cc"],
|
||||
hdrs = ["fap_parser.h"],
|
||||
deps = [
|
||||
"//ortools/base",
|
||||
@@ -890,6 +892,7 @@ cc_library(
|
||||
|
||||
cc_library(
|
||||
name = "fap_model_printer",
|
||||
srcs = ["fap_model_printer.cc"],
|
||||
hdrs = ["fap_model_printer.h"],
|
||||
deps = [
|
||||
":fap_parser",
|
||||
@@ -902,6 +905,7 @@ cc_library(
|
||||
|
||||
cc_library(
|
||||
name = "fap_utilities",
|
||||
srcs = ["fap_utilities.cc"],
|
||||
hdrs = ["fap_utilities.h"],
|
||||
deps = [
|
||||
":fap_parser",
|
||||
|
||||
@@ -44,16 +44,17 @@ list(FILTER CXX_SRCS EXCLUDE REGEX ".*/course_scheduling_run.cc") # missing prot
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/course_scheduling.cc") # missing proto
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/dimacs_assignment.cc") # crash
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/dobble_ls.cc") # Too long
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/fap_*.cc") # crash
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/frequency_assignment_problem.cc") # crash
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/jobshop_sat.cc") # crash
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/knapsack_2d_sat.cc")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/mps_driver.cc") # crash
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/multi_knapsack_sat.cc") # crash
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/network_routing_sat.cc")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/parse_dimacs_assignment.*")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/pdlp_solve.cc")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/pdptw.cc")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/shift_minimization_sat.cc")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/pdlp_solve.cc")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/strawberry_fields_with_column_generation.cc") # Too long
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/vector_bin_packing_solver.cc")
|
||||
list(FILTER CXX_SRCS EXCLUDE REGEX ".*/weighted_tardiness_sat.cc")
|
||||
|
||||
93
examples/cpp/fap_model_printer.cc
Normal file
93
examples/cpp/fap_model_printer.cc
Normal file
@@ -0,0 +1,93 @@
|
||||
// Copyright 2010-2024 Google LLC
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//
|
||||
|
||||
#include "examples/cpp/fap_model_printer.h"
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/strings/str_format.h"
|
||||
#include "absl/strings/string_view.h"
|
||||
#include "ortools/base/logging.h"
|
||||
|
||||
namespace operations_research {
|
||||
|
||||
FapModelPrinter::FapModelPrinter(
|
||||
const absl::btree_map<int, FapVariable>& variables,
|
||||
const std::vector<FapConstraint>& constraints, absl::string_view objective,
|
||||
const std::vector<int>& values)
|
||||
: variables_(variables),
|
||||
constraints_(constraints),
|
||||
objective_(objective),
|
||||
values_(values) {}
|
||||
|
||||
FapModelPrinter::~FapModelPrinter() = default;
|
||||
|
||||
void FapModelPrinter::PrintFapVariables() {
|
||||
LOG(INFO) << "Variable File:";
|
||||
for (const auto& it : variables_) {
|
||||
std::string domain = "{";
|
||||
for (const int value : it.second.domain) {
|
||||
absl::StrAppendFormat(&domain, "%d ", value);
|
||||
}
|
||||
domain.append("}");
|
||||
|
||||
std::string hard = " ";
|
||||
if (it.second.hard) {
|
||||
hard = " hard";
|
||||
}
|
||||
|
||||
LOG(INFO) << "Variable " << absl::StrFormat("%3d: ", it.first)
|
||||
<< absl::StrFormat("(degree: %2d) ", it.second.degree)
|
||||
<< absl::StrFormat("%3d", it.second.domain_index)
|
||||
<< absl::StrFormat("%3d", it.second.initial_position)
|
||||
<< absl::StrFormat("%3d", it.second.mobility_index)
|
||||
<< absl::StrFormat("%8d", it.second.mobility_cost)
|
||||
<< absl::StrFormat(" (%2d) ", it.second.domain_size) << domain
|
||||
<< hard;
|
||||
}
|
||||
}
|
||||
|
||||
void FapModelPrinter::PrintFapConstraints() {
|
||||
LOG(INFO) << "Constraint File:";
|
||||
for (const FapConstraint& ct : constraints_) {
|
||||
std::string hard = " ";
|
||||
if (ct.hard) {
|
||||
hard = " hard";
|
||||
}
|
||||
|
||||
LOG(INFO) << absl::StrFormat("%3d ", ct.variable1)
|
||||
<< absl::StrFormat("%3d ", ct.variable2) << ct.type << " "
|
||||
<< ct.operation << " " << absl::StrFormat("%3d", ct.value)
|
||||
<< absl::StrFormat("%3d", ct.weight_index)
|
||||
<< absl::StrFormat("%8d", ct.weight_cost) << hard;
|
||||
}
|
||||
}
|
||||
|
||||
void FapModelPrinter::PrintFapObjective() {
|
||||
LOG(INFO) << "Objective: " << objective_;
|
||||
}
|
||||
|
||||
void FapModelPrinter::PrintFapValues() {
|
||||
LOG(INFO) << absl::StrFormat("Values(%d): ",
|
||||
static_cast<int>(values_.size()));
|
||||
std::string domain = " ";
|
||||
for (const int value : values_) {
|
||||
absl::StrAppendFormat(&domain, "%d ", value);
|
||||
}
|
||||
LOG(INFO) << domain;
|
||||
}
|
||||
|
||||
} // namespace operations_research
|
||||
@@ -19,12 +19,10 @@
|
||||
#ifndef OR_TOOLS_EXAMPLES_FAP_MODEL_PRINTER_H_
|
||||
#define OR_TOOLS_EXAMPLES_FAP_MODEL_PRINTER_H_
|
||||
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/container/btree_map.h"
|
||||
#include "absl/strings/str_format.h"
|
||||
#include "examples/cpp/fap_parser.h"
|
||||
|
||||
namespace operations_research {
|
||||
@@ -35,6 +33,11 @@ class FapModelPrinter {
|
||||
FapModelPrinter(const absl::btree_map<int, FapVariable>& variables,
|
||||
const std::vector<FapConstraint>& constraints,
|
||||
absl::string_view objective, const std::vector<int>& values);
|
||||
|
||||
// This type is neither copyable nor movable.
|
||||
FapModelPrinter(const FapModelPrinter&) = delete;
|
||||
FapModelPrinter& operator=(const FapModelPrinter&) = delete;
|
||||
|
||||
~FapModelPrinter();
|
||||
|
||||
void PrintFapObjective();
|
||||
@@ -47,73 +50,7 @@ class FapModelPrinter {
|
||||
const std::vector<FapConstraint> constraints_;
|
||||
const std::string objective_;
|
||||
const std::vector<int> values_;
|
||||
DISALLOW_COPY_AND_ASSIGN(FapModelPrinter);
|
||||
};
|
||||
|
||||
FapModelPrinter::FapModelPrinter(const absl::btree_map<int, FapVariable>& variables,
|
||||
const std::vector<FapConstraint>& constraints,
|
||||
absl::string_view objective,
|
||||
const std::vector<int>& values)
|
||||
: variables_(variables),
|
||||
constraints_(constraints),
|
||||
objective_(objective),
|
||||
values_(values) {}
|
||||
|
||||
FapModelPrinter::~FapModelPrinter() {}
|
||||
|
||||
void FapModelPrinter::PrintFapVariables() {
|
||||
LOG(INFO) << "Variable File:";
|
||||
for (const auto& it : variables_) {
|
||||
std::string domain = "{";
|
||||
for (const int value : it.second.domain) {
|
||||
absl::StrAppendFormat(&domain, "%d ", value);
|
||||
}
|
||||
domain.append("}");
|
||||
|
||||
std::string hard = " ";
|
||||
if (it.second.hard) {
|
||||
hard = " hard";
|
||||
}
|
||||
|
||||
LOG(INFO) << "Variable " << absl::StrFormat("%3d: ", it.first)
|
||||
<< absl::StrFormat("(degree: %2d) ", it.second.degree)
|
||||
<< absl::StrFormat("%3d", it.second.domain_index)
|
||||
<< absl::StrFormat("%3d", it.second.initial_position)
|
||||
<< absl::StrFormat("%3d", it.second.mobility_index)
|
||||
<< absl::StrFormat("%8d", it.second.mobility_cost)
|
||||
<< absl::StrFormat(" (%2d) ", it.second.domain_size) << domain
|
||||
<< hard;
|
||||
}
|
||||
}
|
||||
|
||||
void FapModelPrinter::PrintFapConstraints() {
|
||||
LOG(INFO) << "Constraint File:";
|
||||
for (const FapConstraint& ct : constraints_) {
|
||||
std::string hard = " ";
|
||||
if (ct.hard) {
|
||||
hard = " hard";
|
||||
}
|
||||
|
||||
LOG(INFO) << absl::StrFormat("%3d ", ct.variable1)
|
||||
<< absl::StrFormat("%3d ", ct.variable2) << ct.type << " "
|
||||
<< ct.operation << " " << absl::StrFormat("%3d", ct.value)
|
||||
<< absl::StrFormat("%3d", ct.weight_index)
|
||||
<< absl::StrFormat("%8d", ct.weight_cost) << hard;
|
||||
}
|
||||
}
|
||||
|
||||
void FapModelPrinter::PrintFapObjective() {
|
||||
LOG(INFO) << "Objective: " << objective_;
|
||||
}
|
||||
|
||||
void FapModelPrinter::PrintFapValues() {
|
||||
LOG(INFO) << absl::StrFormat("Values(%d): ", values_.size());
|
||||
std::string domain = " ";
|
||||
for (const int value : values_) {
|
||||
absl::StrAppendFormat(&domain, "%d ", value);
|
||||
}
|
||||
LOG(INFO) << domain;
|
||||
}
|
||||
|
||||
} // namespace operations_research
|
||||
#endif // OR_TOOLS_EXAMPLES_FAP_MODEL_PRINTER_H_
|
||||
|
||||
386
examples/cpp/fap_parser.cc
Normal file
386
examples/cpp/fap_parser.cc
Normal file
@@ -0,0 +1,386 @@
|
||||
// Copyright 2010-2024 Google LLC
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "examples/cpp/fap_parser.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/strings/match.h"
|
||||
#include "absl/strings/numbers.h"
|
||||
#include "absl/strings/str_split.h"
|
||||
#include "ortools/base/helpers.h"
|
||||
#include "ortools/base/map_util.h"
|
||||
|
||||
namespace operations_research {
|
||||
namespace {
|
||||
int strtoint32(const std::string& word) {
|
||||
int result;
|
||||
CHECK(absl::SimpleAtoi(word, &result));
|
||||
return result;
|
||||
}
|
||||
} // namespace
|
||||
|
||||
void ParseFileByLines(const std::string& filename,
|
||||
std::vector<std::string>* lines) {
|
||||
CHECK(lines != nullptr);
|
||||
std::string result;
|
||||
CHECK_OK(file::GetContents(filename, &result, file::Defaults()));
|
||||
*lines = absl::StrSplit(result, '\n', absl::SkipEmpty());
|
||||
}
|
||||
|
||||
// VariableParser Implementation
|
||||
VariableParser::VariableParser(const std::string& data_directory)
|
||||
: filename_(data_directory + "/var.txt") {}
|
||||
|
||||
VariableParser::~VariableParser() = default;
|
||||
|
||||
void VariableParser::Parse() {
|
||||
std::vector<std::string> lines;
|
||||
ParseFileByLines(filename_, &lines);
|
||||
for (const std::string& line : lines) {
|
||||
std::vector<std::string> tokens =
|
||||
absl::StrSplit(line, ' ', absl::SkipEmpty());
|
||||
if (tokens.empty()) {
|
||||
continue;
|
||||
}
|
||||
CHECK_GE(tokens.size(), 2);
|
||||
|
||||
FapVariable variable;
|
||||
variable.domain_index = strtoint32(tokens[1]);
|
||||
if (tokens.size() > 3) {
|
||||
variable.initial_position = strtoint32(tokens[2]);
|
||||
variable.mobility_index = strtoint32(tokens[3]);
|
||||
}
|
||||
gtl::InsertOrUpdate(&variables_, strtoint32(tokens[0]), variable);
|
||||
}
|
||||
}
|
||||
|
||||
// DomainParser Implementation
|
||||
DomainParser::DomainParser(const std::string& data_directory)
|
||||
: filename_(data_directory + "/dom.txt") {}
|
||||
|
||||
DomainParser::~DomainParser() = default;
|
||||
|
||||
void DomainParser::Parse() {
|
||||
std::vector<std::string> lines;
|
||||
ParseFileByLines(filename_, &lines);
|
||||
for (const std::string& line : lines) {
|
||||
std::vector<std::string> tokens =
|
||||
absl::StrSplit(line, ' ', absl::SkipEmpty());
|
||||
if (tokens.empty()) {
|
||||
continue;
|
||||
}
|
||||
CHECK_GE(tokens.size(), 2);
|
||||
|
||||
const int key = strtoint32(tokens[0]);
|
||||
|
||||
std::vector<int> domain;
|
||||
domain.clear();
|
||||
for (int i = 2; i < tokens.size(); ++i) {
|
||||
domain.push_back(strtoint32(tokens[i]));
|
||||
}
|
||||
|
||||
if (!domain.empty()) {
|
||||
gtl::InsertOrUpdate(&domains_, key, domain);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ConstraintParser Implementation
|
||||
ConstraintParser::ConstraintParser(const std::string& data_directory)
|
||||
: filename_(data_directory + "/ctr.txt") {}
|
||||
|
||||
ConstraintParser::~ConstraintParser() = default;
|
||||
|
||||
void ConstraintParser::Parse() {
|
||||
std::vector<std::string> lines;
|
||||
ParseFileByLines(filename_, &lines);
|
||||
for (const std::string& line : lines) {
|
||||
std::vector<std::string> tokens =
|
||||
absl::StrSplit(line, ' ', absl::SkipEmpty());
|
||||
if (tokens.empty()) {
|
||||
continue;
|
||||
}
|
||||
CHECK_GE(tokens.size(), 5);
|
||||
|
||||
FapConstraint constraint;
|
||||
constraint.variable1 = strtoint32(tokens[0]);
|
||||
constraint.variable2 = strtoint32(tokens[1]);
|
||||
constraint.type = tokens[2];
|
||||
constraint.operation = tokens[3];
|
||||
constraint.value = strtoint32(tokens[4]);
|
||||
|
||||
if (tokens.size() > 5) {
|
||||
constraint.weight_index = strtoint32(tokens[5]);
|
||||
}
|
||||
constraints_.push_back(constraint);
|
||||
}
|
||||
}
|
||||
|
||||
// ParametersParser Implementation
|
||||
const int ParametersParser::kConstraintCoefficientNo;
|
||||
const int ParametersParser::kVariableCoefficientNo;
|
||||
const int ParametersParser::kCoefficientNo;
|
||||
|
||||
ParametersParser::ParametersParser(const std::string& data_directory)
|
||||
: filename_(data_directory + "/cst.txt"),
|
||||
objective_(""),
|
||||
constraint_weights_(kConstraintCoefficientNo, 0),
|
||||
variable_weights_(kVariableCoefficientNo, 0) {}
|
||||
|
||||
ParametersParser::~ParametersParser() = default;
|
||||
|
||||
void ParametersParser::Parse() {
|
||||
bool objective = true;
|
||||
bool largest_token = false;
|
||||
bool value_token = false;
|
||||
bool number_token = false;
|
||||
bool values_token = false;
|
||||
bool coefficient = false;
|
||||
std::vector<int> coefficients;
|
||||
std::vector<std::string> lines;
|
||||
|
||||
ParseFileByLines(filename_, &lines);
|
||||
for (const std::string& line : lines) {
|
||||
if (objective) {
|
||||
largest_token = largest_token || absl::StrContains(line, "largest");
|
||||
value_token = value_token || absl::StrContains(line, "value");
|
||||
number_token = number_token || absl::StrContains(line, "number");
|
||||
values_token = values_token || absl::StrContains(line, "values");
|
||||
coefficient = coefficient || absl::StrContains(line, "coefficient");
|
||||
}
|
||||
|
||||
if (coefficient) {
|
||||
CHECK_EQ(kCoefficientNo,
|
||||
kConstraintCoefficientNo + kVariableCoefficientNo);
|
||||
objective = false;
|
||||
if (absl::StrContains(line, "=")) {
|
||||
std::vector<std::string> tokens =
|
||||
absl::StrSplit(line, ' ', absl::SkipEmpty());
|
||||
CHECK_GE(tokens.size(), 3);
|
||||
coefficients.push_back(strtoint32(tokens[2]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (coefficient) {
|
||||
CHECK_EQ(kCoefficientNo, coefficients.size());
|
||||
for (int i = 0; i < kCoefficientNo; i++) {
|
||||
if (i < kConstraintCoefficientNo) {
|
||||
constraint_weights_[i] = coefficients[i];
|
||||
} else {
|
||||
variable_weights_[i - kConstraintCoefficientNo] = coefficients[i];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (largest_token && value_token) {
|
||||
objective_ = "Minimize the largest assigned value.";
|
||||
} else if (number_token && values_token) {
|
||||
objective_ = "Minimize the number of assigned values.";
|
||||
} else {
|
||||
// Should not reach this point.
|
||||
LOG(WARNING) << "Cannot read the objective of the instance.";
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(user): Make FindComponents linear instead of quadratic.
|
||||
void FindComponents(const std::vector<FapConstraint>& constraints,
|
||||
const absl::btree_map<int, FapVariable>& variables,
|
||||
const int maximum_variable_id,
|
||||
absl::flat_hash_map<int, FapComponent>* components) {
|
||||
std::vector<int> in_component(maximum_variable_id + 1, -1);
|
||||
int constraint_index = 0;
|
||||
for (const FapConstraint& constraint : constraints) {
|
||||
const int variable_id1 = constraint.variable1;
|
||||
const int variable_id2 = constraint.variable2;
|
||||
const FapVariable& variable1 = gtl::FindOrDie(variables, variable_id1);
|
||||
const FapVariable& variable2 = gtl::FindOrDie(variables, variable_id2);
|
||||
CHECK_LT(variable_id1, in_component.size());
|
||||
CHECK_LT(variable_id2, in_component.size());
|
||||
if (in_component[variable_id1] < 0 && in_component[variable_id2] < 0) {
|
||||
// None of the variables belong to an existing component.
|
||||
// Create a new one.
|
||||
FapComponent component;
|
||||
const int component_index = constraint_index;
|
||||
gtl::InsertOrUpdate(&(component.variables), variable_id1, variable1);
|
||||
gtl::InsertOrUpdate(&(component.variables), variable_id2, variable2);
|
||||
in_component[variable_id1] = component_index;
|
||||
in_component[variable_id2] = component_index;
|
||||
component.constraints.push_back(constraint);
|
||||
gtl::InsertOrUpdate(components, component_index, component);
|
||||
} else if (in_component[variable_id1] >= 0 &&
|
||||
in_component[variable_id2] < 0) {
|
||||
// If variable1 belongs to an existing component, variable2 should
|
||||
// also be included in the same component.
|
||||
const int component_index = in_component[variable_id1];
|
||||
CHECK(components->contains(component_index));
|
||||
gtl::InsertOrUpdate(&((*components)[component_index].variables),
|
||||
variable_id2, variable2);
|
||||
in_component[variable_id2] = component_index;
|
||||
(*components)[component_index].constraints.push_back(constraint);
|
||||
} else if (in_component[variable_id1] < 0 &&
|
||||
in_component[variable_id2] >= 0) {
|
||||
// If variable2 belongs to an existing component, variable1 should
|
||||
// also be included in the same component.
|
||||
const int component_index = in_component[variable_id2];
|
||||
CHECK(components->contains(component_index));
|
||||
gtl::InsertOrUpdate(&((*components)[component_index].variables),
|
||||
variable_id1, variable1);
|
||||
in_component[variable_id1] = component_index;
|
||||
(*components)[component_index].constraints.push_back(constraint);
|
||||
} else {
|
||||
// The current constraint connects two different components.
|
||||
const int component_index1 = in_component[variable_id1];
|
||||
const int component_index2 = in_component[variable_id2];
|
||||
const int min_component_index =
|
||||
std::min(component_index1, component_index2);
|
||||
const int max_component_index =
|
||||
std::max(component_index1, component_index2);
|
||||
CHECK(components->contains(min_component_index));
|
||||
CHECK(components->contains(max_component_index));
|
||||
if (min_component_index != max_component_index) {
|
||||
// Update the component_index of maximum indexed component's variables.
|
||||
for (const auto& variable :
|
||||
(*components)[max_component_index].variables) {
|
||||
int variable_id = variable.first;
|
||||
in_component[variable_id] = min_component_index;
|
||||
}
|
||||
// Insert all the variables of the maximum indexed component to the
|
||||
// variables of the minimum indexed component.
|
||||
((*components)[min_component_index])
|
||||
.variables.insert(
|
||||
((*components)[max_component_index]).variables.begin(),
|
||||
((*components)[max_component_index]).variables.end());
|
||||
// Insert all the constraints of the maximum indexed component to the
|
||||
// constraints of the minimum indexed component.
|
||||
((*components)[min_component_index])
|
||||
.constraints.insert(
|
||||
((*components)[min_component_index]).constraints.end(),
|
||||
((*components)[max_component_index]).constraints.begin(),
|
||||
((*components)[max_component_index]).constraints.end());
|
||||
(*components)[min_component_index].constraints.push_back(constraint);
|
||||
// Delete the maximum indexed component from the components set.
|
||||
components->erase(max_component_index);
|
||||
} else {
|
||||
// Both variables belong to the same component, just add the constraint.
|
||||
(*components)[min_component_index].constraints.push_back(constraint);
|
||||
}
|
||||
}
|
||||
constraint_index++;
|
||||
}
|
||||
}
|
||||
|
||||
int EvaluateConstraintImpact(const absl::btree_map<int, FapVariable>& variables,
|
||||
const int max_weight_cost,
|
||||
const FapConstraint constraint) {
|
||||
const FapVariable& variable1 =
|
||||
gtl::FindOrDie(variables, constraint.variable1);
|
||||
const FapVariable& variable2 =
|
||||
gtl::FindOrDie(variables, constraint.variable2);
|
||||
const int degree1 = variable1.degree;
|
||||
const int degree2 = variable2.degree;
|
||||
const int max_degree = std::max(degree1, degree2);
|
||||
const int min_degree = std::min(degree1, degree2);
|
||||
const int operator_impact =
|
||||
constraint.operation == "=" ? max_degree : min_degree;
|
||||
const int kHardnessBias = 10;
|
||||
int hardness_impact = 0;
|
||||
if (constraint.hard) {
|
||||
hardness_impact = max_weight_cost > 0 ? kHardnessBias * max_weight_cost : 0;
|
||||
} else {
|
||||
hardness_impact = constraint.weight_cost;
|
||||
}
|
||||
return max_degree + min_degree + operator_impact + hardness_impact;
|
||||
}
|
||||
|
||||
void ParseInstance(const std::string& data_directory, bool find_components,
|
||||
absl::btree_map<int, FapVariable>* variables,
|
||||
std::vector<FapConstraint>* constraints,
|
||||
std::string* objective, std::vector<int>* frequencies,
|
||||
absl::flat_hash_map<int, FapComponent>* components) {
|
||||
CHECK(variables != nullptr);
|
||||
CHECK(constraints != nullptr);
|
||||
CHECK(objective != nullptr);
|
||||
CHECK(frequencies != nullptr);
|
||||
|
||||
// Parse the data files.
|
||||
VariableParser var(data_directory);
|
||||
var.Parse();
|
||||
*variables = var.variables();
|
||||
const int maximum_variable_id = variables->rbegin()->first;
|
||||
|
||||
ConstraintParser ctr(data_directory);
|
||||
ctr.Parse();
|
||||
*constraints = ctr.constraints();
|
||||
|
||||
DomainParser dom(data_directory);
|
||||
dom.Parse();
|
||||
|
||||
ParametersParser cst(data_directory);
|
||||
cst.Parse();
|
||||
const int maximum_weight_cost = *std::max_element(
|
||||
(cst.constraint_weights()).begin(), (cst.constraint_weights()).end());
|
||||
|
||||
// Make the variables of the instance.
|
||||
for (auto& it : *variables) {
|
||||
it.second.domain = gtl::FindOrDie(dom.domains(), it.second.domain_index);
|
||||
it.second.domain_size = it.second.domain.size();
|
||||
|
||||
if ((it.second.mobility_index == -1) || (it.second.mobility_index == 0)) {
|
||||
it.second.mobility_cost = -1;
|
||||
if (it.second.initial_position != -1) {
|
||||
it.second.hard = true;
|
||||
}
|
||||
} else {
|
||||
it.second.mobility_cost =
|
||||
(cst.variable_weights())[it.second.mobility_index - 1];
|
||||
}
|
||||
}
|
||||
// Make the constraints of the instance.
|
||||
for (FapConstraint& ct : *constraints) {
|
||||
if ((ct.weight_index == -1) || (ct.weight_index == 0)) {
|
||||
ct.weight_cost = -1;
|
||||
ct.hard = true;
|
||||
} else {
|
||||
ct.weight_cost = (cst.constraint_weights())[ct.weight_index - 1];
|
||||
ct.hard = false;
|
||||
}
|
||||
++((*variables)[ct.variable1]).degree;
|
||||
++((*variables)[ct.variable2]).degree;
|
||||
}
|
||||
// Make the available frequencies of the instance.
|
||||
*frequencies = gtl::FindOrDie(dom.domains(), 0);
|
||||
// Make the objective of the instance.
|
||||
*objective = cst.objective();
|
||||
|
||||
if (find_components) {
|
||||
CHECK(components != nullptr);
|
||||
FindComponents(*constraints, *variables, maximum_variable_id, components);
|
||||
// Evaluate each components's constraints impacts.
|
||||
for (auto& component : *components) {
|
||||
for (auto& constraint : component.second.constraints) {
|
||||
constraint.impact = EvaluateConstraintImpact(
|
||||
*variables, maximum_weight_cost, constraint);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (FapConstraint& constraint : *constraints) {
|
||||
constraint.impact =
|
||||
EvaluateConstraintImpact(*variables, maximum_weight_cost, constraint);
|
||||
}
|
||||
}
|
||||
}
|
||||
} // namespace operations_research
|
||||
@@ -11,26 +11,19 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//
|
||||
// Reading and parsing the data of Frequency Assignment Problem
|
||||
// Format: http://www.inra.fr/mia/T/schiex/Doc/CELAR.shtml#synt
|
||||
//
|
||||
|
||||
#ifndef OR_TOOLS_EXAMPLES_FAP_PARSER_H_
|
||||
#define OR_TOOLS_EXAMPLES_FAP_PARSER_H_
|
||||
|
||||
#include <algorithm>
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/container/btree_map.h"
|
||||
#include "absl/container/flat_hash_map.h"
|
||||
#include "absl/strings/match.h"
|
||||
#include "absl/strings/numbers.h"
|
||||
#include "absl/strings/str_split.h"
|
||||
#include "ortools/base/file.h"
|
||||
#include "ortools/base/logging.h"
|
||||
#include "ortools/base/macros.h"
|
||||
#include "ortools/base/map_util.h"
|
||||
|
||||
namespace operations_research {
|
||||
|
||||
@@ -124,6 +117,11 @@ struct FapComponent {
|
||||
class VariableParser {
|
||||
public:
|
||||
explicit VariableParser(const std::string& data_directory);
|
||||
|
||||
// This type is neither copyable nor movable.
|
||||
VariableParser(const VariableParser&) = delete;
|
||||
VariableParser& operator=(const VariableParser&) = delete;
|
||||
|
||||
~VariableParser();
|
||||
|
||||
const absl::btree_map<int, FapVariable>& variables() const {
|
||||
@@ -138,8 +136,6 @@ class VariableParser {
|
||||
// be consecutive, may be very sparse and don't have a specific upper-bound.
|
||||
// The key of the map, is the link's id.
|
||||
absl::btree_map<int, FapVariable> variables_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(VariableParser);
|
||||
};
|
||||
|
||||
// Parser of the dom.txt file.
|
||||
@@ -148,6 +144,11 @@ class VariableParser {
|
||||
class DomainParser {
|
||||
public:
|
||||
explicit DomainParser(const std::string& data_directory);
|
||||
|
||||
// This type is neither copyable nor movable.
|
||||
DomainParser(const DomainParser&) = delete;
|
||||
DomainParser& operator=(const DomainParser&) = delete;
|
||||
|
||||
~DomainParser();
|
||||
|
||||
const absl::btree_map<int, std::vector<int> >& domains() const {
|
||||
@@ -162,8 +163,6 @@ class DomainParser {
|
||||
// domains may be random values, since they are used as names. The key of the
|
||||
// map is the subset's id.
|
||||
absl::btree_map<int, std::vector<int> > domains_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(DomainParser);
|
||||
};
|
||||
|
||||
// Parse ctr.txt file.
|
||||
@@ -172,6 +171,11 @@ class DomainParser {
|
||||
class ConstraintParser {
|
||||
public:
|
||||
explicit ConstraintParser(const std::string& data_directory);
|
||||
|
||||
// This type is neither copyable nor movable.
|
||||
ConstraintParser(const ConstraintParser&) = delete;
|
||||
ConstraintParser& operator=(const ConstraintParser&) = delete;
|
||||
|
||||
~ConstraintParser();
|
||||
|
||||
const std::vector<FapConstraint>& constraints() const { return constraints_; }
|
||||
@@ -181,8 +185,6 @@ class ConstraintParser {
|
||||
private:
|
||||
const std::string filename_;
|
||||
std::vector<FapConstraint> constraints_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(ConstraintParser);
|
||||
};
|
||||
|
||||
// Parse cst.txt file.
|
||||
@@ -212,14 +214,6 @@ class ParametersParser {
|
||||
std::vector<int> variable_weights_;
|
||||
};
|
||||
|
||||
namespace {
|
||||
int strtoint32(const std::string& word) {
|
||||
int result;
|
||||
CHECK(absl::SimpleAtoi(word, &result));
|
||||
return result;
|
||||
}
|
||||
} // namespace
|
||||
|
||||
// Function that finds the disjoint sub-graphs of the graph of the instance.
|
||||
void FindComponents(const std::vector<FapConstraint>& constraints,
|
||||
const absl::btree_map<int, FapVariable>& variables,
|
||||
@@ -236,357 +230,5 @@ void ParseInstance(const std::string& data_directory, bool find_components,
|
||||
std::vector<FapConstraint>* constraints,
|
||||
std::string* objective, std::vector<int>* frequencies,
|
||||
absl::flat_hash_map<int, FapComponent>* components);
|
||||
|
||||
void ParseFileByLines(const std::string& filename,
|
||||
std::vector<std::string>* lines) {
|
||||
CHECK(lines != nullptr);
|
||||
std::string result;
|
||||
CHECK_OK(file::GetContents(filename, &result, file::Defaults()));
|
||||
*lines = absl::StrSplit(result, '\n', absl::SkipEmpty());
|
||||
}
|
||||
|
||||
// VariableParser Implementation
|
||||
VariableParser::VariableParser(const std::string& data_directory)
|
||||
: filename_(data_directory + "/var.txt") {}
|
||||
|
||||
VariableParser::~VariableParser() = default;
|
||||
|
||||
void VariableParser::Parse() {
|
||||
std::vector<std::string> lines;
|
||||
ParseFileByLines(filename_, &lines);
|
||||
for (const std::string& line : lines) {
|
||||
std::vector<std::string> tokens =
|
||||
absl::StrSplit(line, ' ', absl::SkipEmpty());
|
||||
if (tokens.empty()) {
|
||||
continue;
|
||||
}
|
||||
CHECK_GE(tokens.size(), 2);
|
||||
|
||||
FapVariable variable;
|
||||
variable.domain_index = strtoint32(tokens[1]);
|
||||
if (tokens.size() > 3) {
|
||||
variable.initial_position = strtoint32(tokens[2]);
|
||||
variable.mobility_index = strtoint32(tokens[3]);
|
||||
}
|
||||
gtl::InsertOrUpdate(&variables_, strtoint32(tokens[0]), variable);
|
||||
}
|
||||
}
|
||||
|
||||
// DomainParser Implementation
|
||||
DomainParser::DomainParser(const std::string& data_directory)
|
||||
: filename_(data_directory + "/dom.txt") {}
|
||||
|
||||
DomainParser::~DomainParser() = default;
|
||||
|
||||
void DomainParser::Parse() {
|
||||
std::vector<std::string> lines;
|
||||
ParseFileByLines(filename_, &lines);
|
||||
for (const std::string& line : lines) {
|
||||
std::vector<std::string> tokens =
|
||||
absl::StrSplit(line, ' ', absl::SkipEmpty());
|
||||
if (tokens.empty()) {
|
||||
continue;
|
||||
}
|
||||
CHECK_GE(tokens.size(), 2);
|
||||
|
||||
const int key = strtoint32(tokens[0]);
|
||||
|
||||
std::vector<int> domain;
|
||||
domain.clear();
|
||||
for (int i = 2; i < tokens.size(); ++i) {
|
||||
domain.push_back(strtoint32(tokens[i]));
|
||||
}
|
||||
|
||||
if (!domain.empty()) {
|
||||
gtl::InsertOrUpdate(&domains_, key, domain);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ConstraintParser Implementation
|
||||
ConstraintParser::ConstraintParser(const std::string& data_directory)
|
||||
: filename_(data_directory + "/ctr.txt") {}
|
||||
|
||||
ConstraintParser::~ConstraintParser() = default;
|
||||
|
||||
void ConstraintParser::Parse() {
|
||||
std::vector<std::string> lines;
|
||||
ParseFileByLines(filename_, &lines);
|
||||
for (const std::string& line : lines) {
|
||||
std::vector<std::string> tokens =
|
||||
absl::StrSplit(line, ' ', absl::SkipEmpty());
|
||||
if (tokens.empty()) {
|
||||
continue;
|
||||
}
|
||||
CHECK_GE(tokens.size(), 5);
|
||||
|
||||
FapConstraint constraint;
|
||||
constraint.variable1 = strtoint32(tokens[0]);
|
||||
constraint.variable2 = strtoint32(tokens[1]);
|
||||
constraint.type = tokens[2];
|
||||
constraint.operation = tokens[3];
|
||||
constraint.value = strtoint32(tokens[4]);
|
||||
|
||||
if (tokens.size() > 5) {
|
||||
constraint.weight_index = strtoint32(tokens[5]);
|
||||
}
|
||||
constraints_.push_back(constraint);
|
||||
}
|
||||
}
|
||||
|
||||
// ParametersParser Implementation
// Out-of-class definitions for the static const member constants (needed when
// the constants are odr-used; their values live in the header).
const int ParametersParser::kConstraintCoefficientNo;
const int ParametersParser::kVariableCoefficientNo;
const int ParametersParser::kCoefficientNo;

// Reads the objective and the weight coefficients from "cst.txt". All weights
// default to 0 until Parse() overwrites them.
ParametersParser::ParametersParser(const std::string& data_directory)
    : filename_(data_directory + "/cst.txt"),
      objective_(""),
      constraint_weights_(kConstraintCoefficientNo, 0),
      variable_weights_(kVariableCoefficientNo, 0) {}

ParametersParser::~ParametersParser() = default;
|
||||
|
||||
// Parses "cst.txt". The file first states the objective in free text; once the
// word "coefficient" appears, every subsequent "... = <n>" line contributes
// one weight coefficient, in file order.
void ParametersParser::Parse() {
  // Token flags harvested from the objective section. The pairs
  // (largest, value) and (number, values) select the objective below.
  bool objective = true;
  bool largest_token = false;
  bool value_token = false;
  bool number_token = false;
  bool values_token = false;
  bool coefficient = false;
  std::vector<int> coefficients;
  std::vector<std::string> lines;

  ParseFileByLines(filename_, &lines);
  for (const std::string& line : lines) {
    if (objective) {
      largest_token = largest_token || absl::StrContains(line, "largest");
      // NOTE(review): "value" is a substring of "values", so value_token is
      // implied by values_token; the resolution below is still unambiguous
      // because (largest && value) is tested first.
      value_token = value_token || absl::StrContains(line, "value");
      number_token = number_token || absl::StrContains(line, "number");
      values_token = values_token || absl::StrContains(line, "values");
      coefficient = coefficient || absl::StrContains(line, "coefficient");
    }

    // Seeing "coefficient" permanently ends the objective section.
    if (coefficient) {
      CHECK_EQ(kCoefficientNo,
               kConstraintCoefficientNo + kVariableCoefficientNo);
      objective = false;
      if (absl::StrContains(line, "=")) {
        // The coefficient value is the third token: "<name> = <n>".
        std::vector<std::string> tokens =
            absl::StrSplit(line, ' ', absl::SkipEmpty());
        CHECK_GE(tokens.size(), 3);
        coefficients.push_back(strtoint32(tokens[2]));
      }
    }
  }

  // Split the parsed coefficients: the first kConstraintCoefficientNo entries
  // are constraint weights, the remaining ones are variable weights.
  if (coefficient) {
    CHECK_EQ(kCoefficientNo, coefficients.size());
    for (int i = 0; i < kCoefficientNo; i++) {
      if (i < kConstraintCoefficientNo) {
        constraint_weights_[i] = coefficients[i];
      } else {
        variable_weights_[i - kConstraintCoefficientNo] = coefficients[i];
      }
    }
  }

  if (largest_token && value_token) {
    objective_ = "Minimize the largest assigned value.";
  } else if (number_token && values_token) {
    objective_ = "Minimize the number of assigned values.";
  } else {
    // Should not reach this point.
    LOG(WARNING) << "Cannot read the objective of the instance.";
  }
}
|
||||
|
||||
// Splits the constraint graph into its connected components.
// `in_component[id]` is the index of the component a variable currently
// belongs to, or -1 if the variable has not been seen yet. A component's
// index is the index of the constraint that created it, which makes it
// unique. When a constraint bridges two components, the higher-indexed one
// is merged into the lower-indexed one and erased.
// TODO(user): Make FindComponents linear instead of quadratic.
void FindComponents(const std::vector<FapConstraint>& constraints,
                    const absl::btree_map<int, FapVariable>& variables,
                    const int maximum_variable_id,
                    absl::flat_hash_map<int, FapComponent>* components) {
  std::vector<int> in_component(maximum_variable_id + 1, -1);
  int constraint_index = 0;
  for (const FapConstraint& constraint : constraints) {
    const int variable_id1 = constraint.variable1;
    const int variable_id2 = constraint.variable2;
    const FapVariable& variable1 = gtl::FindOrDie(variables, variable_id1);
    const FapVariable& variable2 = gtl::FindOrDie(variables, variable_id2);
    CHECK_LT(variable_id1, in_component.size());
    CHECK_LT(variable_id2, in_component.size());
    if (in_component[variable_id1] < 0 && in_component[variable_id2] < 0) {
      // None of the variables belong to an existing component.
      // Create a new one.
      FapComponent component;
      const int component_index = constraint_index;
      gtl::InsertOrUpdate(&(component.variables), variable_id1, variable1);
      gtl::InsertOrUpdate(&(component.variables), variable_id2, variable2);
      in_component[variable_id1] = component_index;
      in_component[variable_id2] = component_index;
      component.constraints.push_back(constraint);
      gtl::InsertOrUpdate(components, component_index, component);
    } else if (in_component[variable_id1] >= 0 &&
               in_component[variable_id2] < 0) {
      // If variable1 belongs to an existing component, variable2 should
      // also be included in the same component.
      const int component_index = in_component[variable_id1];
      CHECK(components->contains(component_index));
      gtl::InsertOrUpdate(&((*components)[component_index].variables),
                          variable_id2, variable2);
      in_component[variable_id2] = component_index;
      (*components)[component_index].constraints.push_back(constraint);
    } else if (in_component[variable_id1] < 0 &&
               in_component[variable_id2] >= 0) {
      // If variable2 belongs to an existing component, variable1 should
      // also be included in the same component.
      const int component_index = in_component[variable_id2];
      CHECK(components->contains(component_index));
      gtl::InsertOrUpdate(&((*components)[component_index].variables),
                          variable_id1, variable1);
      in_component[variable_id1] = component_index;
      (*components)[component_index].constraints.push_back(constraint);
    } else {
      // The current constraint connects two different components.
      const int component_index1 = in_component[variable_id1];
      const int component_index2 = in_component[variable_id2];
      const int min_component_index =
          std::min(component_index1, component_index2);
      const int max_component_index =
          std::max(component_index1, component_index2);
      CHECK(components->contains(min_component_index));
      CHECK(components->contains(max_component_index));
      if (min_component_index != max_component_index) {
        // Merge: the minimum indexed component absorbs the maximum indexed
        // one, which is then erased.
        // Update the component_index of maximum indexed component's variables.
        for (const auto& variable :
             (*components)[max_component_index].variables) {
          int variable_id = variable.first;
          in_component[variable_id] = min_component_index;
        }
        // Insert all the variables of the maximum indexed component to the
        // variables of the minimum indexed component.
        ((*components)[min_component_index])
            .variables.insert(
                ((*components)[max_component_index]).variables.begin(),
                ((*components)[max_component_index]).variables.end());
        // Insert all the constraints of the maximum indexed component to the
        // constraints of the minimum indexed component.
        ((*components)[min_component_index])
            .constraints.insert(
                ((*components)[min_component_index]).constraints.end(),
                ((*components)[max_component_index]).constraints.begin(),
                ((*components)[max_component_index]).constraints.end());
        (*components)[min_component_index].constraints.push_back(constraint);
        // Delete the maximum indexed component from the components set.
        components->erase(max_component_index);
      } else {
        // Both variables belong to the same component, just add the constraint.
        (*components)[min_component_index].constraints.push_back(constraint);
      }
    }
    constraint_index++;
  }
}
|
||||
|
||||
// Scores a constraint for search ordering: constraints touching high-degree
// variables, equality operators and expensive violation costs score higher.
int EvaluateConstraintImpact(const absl::btree_map<int, FapVariable>& variables,
                             const int max_weight_cost,
                             const FapConstraint constraint) {
  const FapVariable& endpoint1 =
      gtl::FindOrDie(variables, constraint.variable1);
  const FapVariable& endpoint2 =
      gtl::FindOrDie(variables, constraint.variable2);
  const int max_degree = std::max(endpoint1.degree, endpoint2.degree);
  const int min_degree = std::min(endpoint1.degree, endpoint2.degree);
  // Equality is the more restrictive operator, so it weighs in with the
  // larger of the two endpoint degrees.
  const int operator_impact =
      constraint.operation == "=" ? max_degree : min_degree;
  const int kHardnessBias = 10;
  // Hard constraints are biased well above any soft violation cost.
  const int hardness_impact =
      constraint.hard
          ? (max_weight_cost > 0 ? kHardnessBias * max_weight_cost : 0)
          : constraint.weight_cost;
  return max_degree + min_degree + operator_impact + hardness_impact;
}
|
||||
|
||||
// Reads a full FAP instance from `data_directory` (var.txt, dom.txt, ctr.txt,
// cst.txt) and fills in the output parameters. When `find_components` is
// true, `components` must be non-null and receives the connected components
// of the constraint graph with per-constraint impacts evaluated; otherwise
// the impacts are stored directly on `constraints`.
void ParseInstance(const std::string& data_directory, bool find_components,
                   absl::btree_map<int, FapVariable>* variables,
                   std::vector<FapConstraint>* constraints,
                   std::string* objective, std::vector<int>* frequencies,
                   absl::flat_hash_map<int, FapComponent>* components) {
  CHECK(variables != nullptr);
  CHECK(constraints != nullptr);
  CHECK(objective != nullptr);
  CHECK(frequencies != nullptr);

  // Parse the data files.
  VariableParser var(data_directory);
  var.Parse();
  *variables = var.variables();
  // NOTE(review): assumes at least one variable was parsed; rbegin() on an
  // empty map would be undefined behavior — confirm var.txt is never empty.
  const int maximum_variable_id = variables->rbegin()->first;

  ConstraintParser ctr(data_directory);
  ctr.Parse();
  *constraints = ctr.constraints();

  DomainParser dom(data_directory);
  dom.Parse();

  ParametersParser cst(data_directory);
  cst.Parse();
  const int maximum_weight_cost = *std::max_element(
      (cst.constraint_weights()).begin(), (cst.constraint_weights()).end());

  // Make the variables of the instance.
  for (auto& it : *variables) {
    it.second.domain = gtl::FindOrDie(dom.domains(), it.second.domain_index);
    it.second.domain_size = it.second.domain.size();

    // Mobility index -1 or 0 marks an immovable variable: no mobility cost,
    // and if it has an initial position it becomes a hard variable.
    if ((it.second.mobility_index == -1) || (it.second.mobility_index == 0)) {
      it.second.mobility_cost = -1;
      if (it.second.initial_position != -1) {
        it.second.hard = true;
      }
    } else {
      // Mobility indices are 1-based into the variable weight table.
      it.second.mobility_cost =
          (cst.variable_weights())[it.second.mobility_index - 1];
    }
  }
  // Make the constraints of the instance.
  for (FapConstraint& ct : *constraints) {
    // Weight index -1 or 0 marks a hard constraint (no violation cost).
    if ((ct.weight_index == -1) || (ct.weight_index == 0)) {
      ct.weight_cost = -1;
      ct.hard = true;
    } else {
      // Weight indices are 1-based into the constraint weight table.
      ct.weight_cost = (cst.constraint_weights())[ct.weight_index - 1];
      ct.hard = false;
    }
    // Each constraint raises the degree of both of its endpoints.
    ++((*variables)[ct.variable1]).degree;
    ++((*variables)[ct.variable2]).degree;
  }
  // Make the available frequencies of the instance.
  // Domain 0 holds the set of all available frequencies.
  *frequencies = gtl::FindOrDie(dom.domains(), 0);
  // Make the objective of the instance.
  *objective = cst.objective();

  if (find_components) {
    CHECK(components != nullptr);
    FindComponents(*constraints, *variables, maximum_variable_id, components);
    // Evaluate each components's constraints impacts.
    for (auto& component : *components) {
      for (auto& constraint : component.second.constraints) {
        constraint.impact = EvaluateConstraintImpact(
            *variables, maximum_weight_cost, constraint);
      }
    }
  } else {
    for (FapConstraint& constraint : *constraints) {
      constraint.impact =
          EvaluateConstraintImpact(*variables, maximum_weight_cost, constraint);
    }
  }
}
|
||||
} // namespace operations_research
|
||||
#endif // OR_TOOLS_EXAMPLES_FAP_PARSER_H_
|
||||
|
||||
185
examples/cpp/fap_utilities.cc
Normal file
185
examples/cpp/fap_utilities.cc
Normal file
@@ -0,0 +1,185 @@
|
||||
// Copyright 2010-2024 Google LLC
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//
|
||||
|
||||
#include "examples/cpp/fap_utilities.h"

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <vector>

#include "absl/container/btree_map.h"
#include "absl/container/btree_set.h"
#include "absl/types/span.h"
#include "ortools/base/logging.h"
#include "ortools/base/map_util.h"
||||
|
||||
namespace operations_research {
|
||||
|
||||
// Verifies that the assignment in `variables` satisfies every binary
// constraint in `data_constraints`. `index_from_key` maps a variable's
// instance key to its index in `variables`. Logs each violation and returns
// false if any constraint is violated.
bool CheckConstraintSatisfaction(
    absl::Span<const FapConstraint> data_constraints,
    absl::Span<const int> variables,
    const absl::btree_map<int, int>& index_from_key) {
  bool status = true;
  for (const FapConstraint& ct : data_constraints) {
    const int index1 = gtl::FindOrDie(index_from_key, ct.variable1);
    const int index2 = gtl::FindOrDie(index_from_key, ct.variable2);
    CHECK_LT(index1, variables.size());
    CHECK_LT(index2, variables.size());
    const int var1 = variables[index1];
    const int var2 = variables[index2];
    // std::abs (from <cstdlib>) instead of unqualified C abs, which relied
    // on a transitive include.
    const int absolute_difference = std::abs(var1 - var2);

    if ((ct.operation == ">") && (absolute_difference <= ct.value)) {
      LOG(INFO) << " Violation of constraint between variable " << ct.variable1
                << " and variable " << ct.variable2 << ".";
      LOG(INFO) << " Expected |" << var1 << " - " << var2
                << "| (= " << absolute_difference << ") > " << ct.value << ".";
      status = false;
    } else if ((ct.operation == "=") && (absolute_difference != ct.value)) {
      LOG(INFO) << " Violation of constraint between variable " << ct.variable1
                << " and variable " << ct.variable2 << ".";
      LOG(INFO) << " Expected |" << var1 << " - " << var2
                << "| (= " << absolute_difference << ") = " << ct.value << ".";
      status = false;
    }
  }
  return status;
}
|
||||
|
||||
bool CheckVariablePosition(
|
||||
const absl::btree_map<int, FapVariable>& data_variables,
|
||||
absl::Span<const int> variables,
|
||||
const absl::btree_map<int, int>& index_from_key) {
|
||||
bool status = true;
|
||||
for (const auto& it : data_variables) {
|
||||
const int index = gtl::FindOrDie(index_from_key, it.first);
|
||||
CHECK_LT(index, variables.size());
|
||||
const int var = variables[index];
|
||||
|
||||
if (it.second.hard && (it.second.initial_position != -1) &&
|
||||
(var != it.second.initial_position)) {
|
||||
LOG(INFO) << " Change of position of hard variable " << it.first << ".";
|
||||
LOG(INFO) << " Expected " << it.second.initial_position
|
||||
<< " instead of given " << var << ".";
|
||||
status = false;
|
||||
}
|
||||
}
|
||||
return status;
|
||||
}
|
||||
|
||||
// Counts how many distinct values appear in the assignment.
int NumberOfAssignedValues(absl::Span<const int> variables) {
  absl::btree_set<int> distinct_values;
  for (const int value : variables) {
    distinct_values.insert(value);
  }
  return static_cast<int>(distinct_values.size());
}
|
||||
|
||||
// Logs the duration of the solving process.
// Assumes time1/time2 are in milliseconds (divided by 1000.0 to report
// seconds) — TODO confirm the unit with callers.
void PrintElapsedTime(const int64_t time1, const int64_t time2) {
  LOG(INFO) << "End of solving process.";
  LOG(INFO) << "The Solve method took " << (time2 - time1) / 1000.0
            << " seconds.";
}
|
||||
|
||||
// Prints every solution collected for the hard (feasibility) model and
// cross-checks each one: hard constraint satisfaction, hard variable
// positions, number of distinct values used, and the maximum value used.
void PrintResultsHard(SolutionCollector* const collector,
                      const std::vector<IntVar*>& variables,
                      IntVar* const objective_var,
                      const absl::btree_map<int, FapVariable>& data_variables,
                      absl::Span<const FapConstraint> data_constraints,
                      const absl::btree_map<int, int>& index_from_key,
                      absl::Span<const int> key_from_index) {
  LOG(INFO) << "Printing...";
  LOG(INFO) << "Number of Solutions: " << collector->solution_count();
  for (int solution_index = 0; solution_index < collector->solution_count();
       ++solution_index) {
    Assignment* const solution = collector->solution(solution_index);
    std::vector<int> results(variables.size());
    LOG(INFO) << "------------------------------------------------------------";
    LOG(INFO) << "Solution " << solution_index + 1;
    LOG(INFO) << "Cost: " << solution->Value(objective_var);
    // Extract the solver values into plain ints, indexed like the model.
    for (int i = 0; i < variables.size(); ++i) {
      results[i] = solution->Value(variables[i]);
      LOG(INFO) << " Variable " << key_from_index[i] << ": " << results[i];
    }
    if (CheckConstraintSatisfaction(data_constraints, results,
                                    index_from_key)) {
      LOG(INFO) << "All hard constraints satisfied.";
    } else {
      LOG(INFO) << "Warning! Hard constraint violation detected.";
    }
    if (CheckVariablePosition(data_variables, results, index_from_key)) {
      LOG(INFO) << "All hard variables stayed unharmed.";
    } else {
      LOG(INFO) << "Warning! Hard variable modification detected.";
    }

    LOG(INFO) << "Values used: " << NumberOfAssignedValues(results);
    LOG(INFO) << "Maximum value used: "
              << *std::max_element(results.begin(), results.end());
    LOG(INFO) << " Failures: " << collector->failures(solution_index);
  }
  LOG(INFO) << " ============================================================";
}
|
||||
|
||||
// Prints every solution collected for the soft (optimization) model. Hard
// constraints and hard variable positions are rechecked, then soft
// constraints/variables decide feasibility; in either case the optimized
// weighted sum of violation costs is reported.
void PrintResultsSoft(SolutionCollector* const collector,
                      const std::vector<IntVar*>& variables,
                      IntVar* const total_cost,
                      const absl::btree_map<int, FapVariable>& hard_variables,
                      absl::Span<const FapConstraint> hard_constraints,
                      const absl::btree_map<int, FapVariable>& soft_variables,
                      absl::Span<const FapConstraint> soft_constraints,
                      const absl::btree_map<int, int>& index_from_key,
                      absl::Span<const int> key_from_index) {
  LOG(INFO) << "Printing...";
  LOG(INFO) << "Number of Solutions: " << collector->solution_count();
  for (int solution_index = 0; solution_index < collector->solution_count();
       ++solution_index) {
    Assignment* const solution = collector->solution(solution_index);
    std::vector<int> results(variables.size());
    LOG(INFO) << "------------------------------------------------------------";
    LOG(INFO) << "Solution";
    for (int i = 0; i < variables.size(); ++i) {
      results[i] = solution->Value(variables[i]);
      LOG(INFO) << " Variable " << key_from_index[i] << ": " << results[i];
    }
    if (CheckConstraintSatisfaction(hard_constraints, results,
                                    index_from_key)) {
      LOG(INFO) << "All hard constraints satisfied.";
    } else {
      LOG(INFO) << "Warning! Hard constraint violation detected.";
    }
    if (CheckVariablePosition(hard_variables, results, index_from_key)) {
      LOG(INFO) << "All hard variables stayed unharmed.";
    } else {
      // Bug fix: this branch reports a *variable* modification, not a
      // constraint violation (now consistent with PrintResultsHard).
      LOG(INFO) << "Warning! Hard variable modification detected.";
    }

    // Feasibility means the soft constraints and soft variable positions are
    // also satisfied; otherwise the weighted sum of violations was minimized.
    if (CheckConstraintSatisfaction(soft_constraints, results,
                                    index_from_key) &&
        CheckVariablePosition(soft_variables, results, index_from_key)) {
      LOG(INFO) << "Problem feasible: "
                   "Soft constraints and soft variables satisfied.";
      LOG(INFO) << " Weighted Sum: " << solution->Value(total_cost);
    } else {
      LOG(INFO) << "Problem unfeasible. Optimized weighted sum of violations.";
      LOG(INFO) << " Weighted Sum: " << solution->Value(total_cost);
    }

    LOG(INFO) << "Values used: " << NumberOfAssignedValues(results);
    LOG(INFO) << "Maximum value used: "
              << *std::max_element(results.begin(), results.end());
    LOG(INFO) << " Failures: " << collector->failures(solution_index);
  }
  LOG(INFO) << " ============================================================";
}
|
||||
|
||||
} // namespace operations_research
|
||||
@@ -19,14 +19,11 @@
|
||||
#define OR_TOOLS_EXAMPLES_FAP_UTILITIES_H_
|
||||
|
||||
#include <cstdint>
|
||||
#include <set>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/container/btree_map.h"
|
||||
#include "absl/strings/str_format.h"
|
||||
#include "absl/types/span.h"
|
||||
#include "examples/cpp/fap_parser.h"
|
||||
#include "ortools/base/logging.h"
|
||||
#include "ortools/base/map_util.h"
|
||||
#include "ortools/constraint_solver/constraint_solver.h"
|
||||
|
||||
namespace operations_research {
|
||||
@@ -34,19 +31,19 @@ namespace operations_research {
|
||||
// Checks if the solution given from the Solver satisfies all
|
||||
// the hard binary constraints specified in the ctr.txt.
|
||||
bool CheckConstraintSatisfaction(
|
||||
const std::vector<FapConstraint>& data_constraints,
|
||||
const std::vector<int>& variables,
|
||||
absl::Span<const FapConstraint> data_constraints,
|
||||
absl::Span<const int> variables,
|
||||
const absl::btree_map<int, int>& index_from_key);
|
||||
|
||||
// Checks if the solution given from the Solver has not modified the values of
|
||||
// the variables that were initially assigned and denoted as hard in var.txt.
|
||||
bool CheckVariablePosition(
|
||||
const absl::btree_map<int, FapVariable>& data_variables,
|
||||
const std::vector<int>& variables,
|
||||
absl::Span<const int> variables,
|
||||
const absl::btree_map<int, int>& index_from_key);
|
||||
|
||||
// Counts the number of different values in the variable vector.
|
||||
int NumberOfAssignedValues(const std::vector<int>& variables);
|
||||
int NumberOfAssignedValues(absl::Span<const int> variables);
|
||||
|
||||
// Prints the duration of the solving process.
|
||||
void PrintElapsedTime(int64_t time1, int64_t time2);
|
||||
@@ -56,173 +53,19 @@ void PrintResultsHard(SolutionCollector* collector,
|
||||
const std::vector<IntVar*>& variables,
|
||||
IntVar* objective_var,
|
||||
const absl::btree_map<int, FapVariable>& data_variables,
|
||||
const std::vector<FapConstraint>& data_constraints,
|
||||
absl::Span<const FapConstraint> data_constraints,
|
||||
const absl::btree_map<int, int>& index_from_key,
|
||||
const std::vector<int>& key_from_index);
|
||||
absl::Span<const int> key_from_index);
|
||||
|
||||
// Prints the solution found by the Soft Solver for unfeasible instances.
|
||||
void PrintResultsSoft(SolutionCollector* collector,
|
||||
const std::vector<IntVar*>& variables, IntVar* total_cost,
|
||||
const absl::btree_map<int, FapVariable>& hard_variables,
|
||||
const std::vector<FapConstraint>& hard_constraints,
|
||||
absl::Span<const FapConstraint> hard_constraints,
|
||||
const absl::btree_map<int, FapVariable>& soft_variables,
|
||||
const std::vector<FapConstraint>& soft_constraints,
|
||||
absl::Span<const FapConstraint> soft_constraints,
|
||||
const absl::btree_map<int, int>& index_from_key,
|
||||
const std::vector<int>& key_from_index);
|
||||
|
||||
bool CheckConstraintSatisfaction(
|
||||
const std::vector<FapConstraint>& data_constraints,
|
||||
const std::vector<int>& variables,
|
||||
const absl::btree_map<int, int>& index_from_key) {
|
||||
bool status = true;
|
||||
for (const FapConstraint& ct : data_constraints) {
|
||||
const int index1 = gtl::FindOrDie(index_from_key, ct.variable1);
|
||||
const int index2 = gtl::FindOrDie(index_from_key, ct.variable2);
|
||||
CHECK_LT(index1, variables.size());
|
||||
CHECK_LT(index2, variables.size());
|
||||
const int var1 = variables[index1];
|
||||
const int var2 = variables[index2];
|
||||
const int absolute_difference = abs(var1 - var2);
|
||||
|
||||
if ((ct.operation == ">") && (absolute_difference <= ct.value)) {
|
||||
LOG(INFO) << " Violation of contraint between variable " << ct.variable1
|
||||
<< " and variable " << ct.variable2 << ".";
|
||||
LOG(INFO) << " Expected |" << var1 << " - " << var2
|
||||
<< "| (= " << absolute_difference << ") > " << ct.value << ".";
|
||||
status = false;
|
||||
} else if ((ct.operation == "=") && (absolute_difference != ct.value)) {
|
||||
LOG(INFO) << " Violation of contraint between variable " << ct.variable1
|
||||
<< " and variable " << ct.variable2 << ".";
|
||||
LOG(INFO) << " Expected |" << var1 << " - " << var2
|
||||
<< "| (= " << absolute_difference << ") = " << ct.value << ".";
|
||||
status = false;
|
||||
}
|
||||
}
|
||||
return status;
|
||||
}
|
||||
|
||||
bool CheckVariablePosition(const absl::btree_map<int, FapVariable>& data_variables,
|
||||
const std::vector<int>& variables,
|
||||
const absl::btree_map<int, int>& index_from_key) {
|
||||
bool status = true;
|
||||
for (const auto& it : data_variables) {
|
||||
const int index = gtl::FindOrDie(index_from_key, it.first);
|
||||
CHECK_LT(index, variables.size());
|
||||
const int var = variables[index];
|
||||
|
||||
if (it.second.hard && (it.second.initial_position != -1) &&
|
||||
(var != it.second.initial_position)) {
|
||||
LOG(INFO) << " Change of position of hard variable " << it.first << ".";
|
||||
LOG(INFO) << " Expected " << it.second.initial_position
|
||||
<< " instead of given " << var << ".";
|
||||
status = false;
|
||||
}
|
||||
}
|
||||
return status;
|
||||
}
|
||||
|
||||
// Counts how many distinct values appear in the variable vector.
int NumberOfAssignedValues(const std::vector<int>& variables) {
  std::set<int> distinct_values;
  for (const int value : variables) {
    distinct_values.insert(value);
  }
  return static_cast<int>(distinct_values.size());
}
|
||||
|
||||
void PrintElapsedTime(const int64_t time1, const int64_t time2) {
|
||||
LOG(INFO) << "End of solving process.";
|
||||
LOG(INFO) << "The Solve method took " << (time2 - time1) / 1000.0
|
||||
<< " seconds.";
|
||||
}
|
||||
|
||||
void PrintResultsHard(SolutionCollector* const collector,
|
||||
const std::vector<IntVar*>& variables,
|
||||
IntVar* const objective_var,
|
||||
const absl::btree_map<int, FapVariable>& data_variables,
|
||||
const std::vector<FapConstraint>& data_constraints,
|
||||
const absl::btree_map<int, int>& index_from_key,
|
||||
const std::vector<int>& key_from_index) {
|
||||
LOG(INFO) << "Printing...";
|
||||
LOG(INFO) << "Number of Solutions: " << collector->solution_count();
|
||||
for (int solution_index = 0; solution_index < collector->solution_count();
|
||||
++solution_index) {
|
||||
Assignment* const solution = collector->solution(solution_index);
|
||||
std::vector<int> results(variables.size());
|
||||
LOG(INFO) << "------------------------------------------------------------";
|
||||
LOG(INFO) << "Solution " << solution_index + 1;
|
||||
LOG(INFO) << "Cost: " << solution->Value(objective_var);
|
||||
for (int i = 0; i < variables.size(); ++i) {
|
||||
results[i] = solution->Value(variables[i]);
|
||||
LOG(INFO) << " Variable " << key_from_index[i] << ": " << results[i];
|
||||
}
|
||||
if (CheckConstraintSatisfaction(data_constraints, results,
|
||||
index_from_key)) {
|
||||
LOG(INFO) << "All hard constraints satisfied.";
|
||||
} else {
|
||||
LOG(INFO) << "Warning! Hard constraint violation detected.";
|
||||
}
|
||||
if (CheckVariablePosition(data_variables, results, index_from_key)) {
|
||||
LOG(INFO) << "All hard variables stayed unharmed.";
|
||||
} else {
|
||||
LOG(INFO) << "Warning! Hard variable modification detected.";
|
||||
}
|
||||
|
||||
LOG(INFO) << "Values used: " << NumberOfAssignedValues(results);
|
||||
LOG(INFO) << "Maximum value used: "
|
||||
<< *std::max_element(results.begin(), results.end());
|
||||
LOG(INFO) << " Failures: " << collector->failures(solution_index);
|
||||
}
|
||||
LOG(INFO) << " ============================================================";
|
||||
}
|
||||
|
||||
void PrintResultsSoft(SolutionCollector* const collector,
|
||||
const std::vector<IntVar*>& variables,
|
||||
IntVar* const total_cost,
|
||||
const absl::btree_map<int, FapVariable>& hard_variables,
|
||||
const std::vector<FapConstraint>& hard_constraints,
|
||||
const absl::btree_map<int, FapVariable>& soft_variables,
|
||||
const std::vector<FapConstraint>& soft_constraints,
|
||||
const absl::btree_map<int, int>& index_from_key,
|
||||
const std::vector<int>& key_from_index) {
|
||||
LOG(INFO) << "Printing...";
|
||||
LOG(INFO) << "Number of Solutions: " << collector->solution_count();
|
||||
for (int solution_index = 0; solution_index < collector->solution_count();
|
||||
++solution_index) {
|
||||
Assignment* const solution = collector->solution(solution_index);
|
||||
std::vector<int> results(variables.size());
|
||||
LOG(INFO) << "------------------------------------------------------------";
|
||||
LOG(INFO) << "Solution";
|
||||
for (int i = 0; i < variables.size(); ++i) {
|
||||
results[i] = solution->Value(variables[i]);
|
||||
LOG(INFO) << " Variable " << key_from_index[i] << ": " << results[i];
|
||||
}
|
||||
if (CheckConstraintSatisfaction(hard_constraints, results,
|
||||
index_from_key)) {
|
||||
LOG(INFO) << "All hard constraints satisfied.";
|
||||
} else {
|
||||
LOG(INFO) << "Warning! Hard constraint violation detected.";
|
||||
}
|
||||
if (CheckVariablePosition(hard_variables, results, index_from_key)) {
|
||||
LOG(INFO) << "All hard variables stayed unharmed.";
|
||||
} else {
|
||||
LOG(INFO) << "Warning! Hard constraint violation detected.";
|
||||
}
|
||||
|
||||
if (CheckConstraintSatisfaction(soft_constraints, results,
|
||||
index_from_key) &&
|
||||
CheckVariablePosition(soft_variables, results, index_from_key)) {
|
||||
LOG(INFO) << "Problem feasible: "
|
||||
"Soft constraints and soft variables satisfied.";
|
||||
LOG(INFO) << " Weighted Sum: " << solution->Value(total_cost);
|
||||
} else {
|
||||
LOG(INFO) << "Problem unfeasible. Optimized weighted sum of violations.";
|
||||
LOG(INFO) << " Weighted Sum: " << solution->Value(total_cost);
|
||||
}
|
||||
|
||||
LOG(INFO) << "Values used: " << NumberOfAssignedValues(results);
|
||||
LOG(INFO) << "Maximum value used: "
|
||||
<< *std::max_element(results.begin(), results.end());
|
||||
LOG(INFO) << " Failures: " << collector->failures(solution_index);
|
||||
}
|
||||
LOG(INFO) << " ============================================================";
|
||||
}
|
||||
absl::Span<const int> key_from_index);
|
||||
|
||||
} // namespace operations_research
|
||||
#endif // OR_TOOLS_EXAMPLES_FAP_UTILITIES_H_
|
||||
|
||||
@@ -26,7 +26,7 @@ using Graph = ::util::ReverseArcListGraph<>;
|
||||
using NodeIndex = Graph::NodeIndex;
|
||||
using ArcIndex = Graph::ArcIndex;
|
||||
using MaxFlowT = GenericMaxFlow<Graph>;
|
||||
using FlowQuantity = MaxFlow::FlowQuantityT;
|
||||
using FlowQuantity = MaxFlowT::FlowQuantityT;
|
||||
|
||||
void SolveMaxFlow() {
|
||||
const int num_nodes = 5;
|
||||
|
||||
@@ -112,7 +112,11 @@ int main(int argc, char* argv[]) {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
ReadFileToProto(file_name, &model_proto);
|
||||
const absl::Status status = ReadFileToProto(file_name, &model_proto);
|
||||
if (!status.ok()) {
|
||||
LOG(INFO) << status;
|
||||
continue;
|
||||
}
|
||||
MPModelProtoToLinearProgram(model_proto, &linear_program);
|
||||
}
|
||||
if (absl::GetFlag(FLAGS_mps_dump_problem)) {
|
||||
|
||||
@@ -499,13 +499,13 @@ class NetworkRoutingSolver {
|
||||
const std::vector<PathDistance> distances(2 * count_arcs(), 1);
|
||||
|
||||
for (const Demand& demand : demands_array_) {
|
||||
PathContainer paths;
|
||||
PathContainer::BuildInMemoryCompactPathContainer(&paths);
|
||||
auto paths =
|
||||
GenericPathContainer<Graph>::BuildInMemoryCompactPathContainer();
|
||||
|
||||
ComputeOneToManyShortestPaths(graph_, distances, demand.source,
|
||||
{demand.destination}, &paths);
|
||||
|
||||
std::vector<int> path;
|
||||
std::vector<Graph::NodeIndex> path;
|
||||
paths.GetPath(demand.source, demand.destination, &path);
|
||||
CHECK_GE(path.size(), 1);
|
||||
all_min_path_lengths_.push_back(path.size() - 1);
|
||||
@@ -656,13 +656,14 @@ class NetworkRoutingSolver {
|
||||
}
|
||||
|
||||
private:
|
||||
using Graph = ::util::ListGraph<int, int>;
|
||||
int num_nodes() const { return graph_.num_nodes(); }
|
||||
int count_arcs() const { return arcs_data_.size() / 2; }
|
||||
|
||||
std::vector<std::vector<int64_t>> arcs_data_;
|
||||
std::vector<int> arc_capacity_;
|
||||
std::vector<Demand> demands_array_;
|
||||
util::ListGraph<int, int> graph_;
|
||||
Graph graph_;
|
||||
std::vector<int64_t> all_min_path_lengths_;
|
||||
std::vector<std::vector<int>> capacity_;
|
||||
std::vector<std::vector<OnePath>> all_paths_;
|
||||
|
||||
19
examples/cpp/parse_dimacs_assignment.cc
Normal file
19
examples/cpp/parse_dimacs_assignment.cc
Normal file
@@ -0,0 +1,19 @@
|
||||
// Copyright 2010-2024 Google LLC
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "examples/cpp/parse_dimacs_assignment.h"
|
||||
|
||||
#include "absl/flags/flag.h"
|
||||
|
||||
ABSL_FLAG(bool, assignment_maximize_cost, false,
|
||||
"Negate costs so a max-cost assignment is found.");
|
||||
@@ -33,17 +33,16 @@
|
||||
#include "ortools/graph/linear_assignment.h"
|
||||
#include "ortools/util/filelineiter.h"
|
||||
|
||||
ABSL_FLAG(bool, assignment_maximize_cost, false,
|
||||
"Negate costs so a max-cost assignment is found.");
|
||||
ABSL_DECLARE_FLAG(bool, assignment_maximize_cost);
|
||||
|
||||
namespace operations_research {
|
||||
|
||||
template <typename GraphType>
|
||||
class DimacsAssignmentParser {
|
||||
public:
|
||||
using NodeIndex = GraphType::NodeIndex;
|
||||
using ArcIndex = GraphType::ArcIndex;
|
||||
using CostValue = LinearSumAssignment<GraphType>::CostValueT;
|
||||
using NodeIndex = typename GraphType::NodeIndex;
|
||||
using ArcIndex = typename GraphType::ArcIndex;
|
||||
using CostValue = typename LinearSumAssignment<GraphType>::CostValueT;
|
||||
|
||||
explicit DimacsAssignmentParser(absl::string_view filename)
|
||||
: filename_(filename), graph_(nullptr), assignment_(nullptr) {}
|
||||
|
||||
@@ -78,7 +78,7 @@
|
||||
#include "ortools/constraint_solver/routing_utils.h"
|
||||
#include "ortools/constraint_solver/solver_parameters.pb.h"
|
||||
#include "ortools/graph/connected_components.h"
|
||||
#include "ortools/graph/ebert_graph.h"
|
||||
#include "ortools/graph/graph.h"
|
||||
#include "ortools/graph/linear_assignment.h"
|
||||
#include "ortools/util/bitset.h"
|
||||
#include "ortools/util/optional_boolean.pb.h"
|
||||
@@ -88,6 +88,13 @@
|
||||
#include "ortools/util/sorted_interval_list.h"
|
||||
#include "ortools/util/stats.h"
|
||||
|
||||
namespace {
|
||||
using GraphNodeIndex = int32_t;
|
||||
using GraphArcIndex = int32_t;
|
||||
using Graph = ::util::ListGraph<GraphNodeIndex, GraphArcIndex>;
|
||||
using CostValue = int64_t;
|
||||
} // namespace
|
||||
|
||||
namespace operations_research {
|
||||
class Cross;
|
||||
class Exchange;
|
||||
@@ -3572,8 +3579,8 @@ int64_t RoutingModel::ComputeLowerBound() {
|
||||
return 0;
|
||||
}
|
||||
const int num_nodes = Size() + vehicles_;
|
||||
ForwardStarGraph graph(2 * num_nodes, num_nodes * num_nodes);
|
||||
LinearSumAssignment<ForwardStarGraph> linear_sum_assignment(graph, num_nodes);
|
||||
Graph graph(2 * num_nodes, num_nodes * num_nodes);
|
||||
LinearSumAssignment<Graph> linear_sum_assignment(graph, num_nodes);
|
||||
// Adding arcs for non-end nodes, based on possible values of next variables.
|
||||
// Left nodes in the bipartite are indexed from 0 to num_nodes - 1; right
|
||||
// nodes are indexed from num_nodes to 2 * num_nodes - 1.
|
||||
@@ -3589,8 +3596,8 @@ int64_t RoutingModel::ComputeLowerBound() {
|
||||
}
|
||||
// The index of a right node in the bipartite graph is the index
|
||||
// of the successor offset by the number of nodes.
|
||||
const ArcIndex arc = graph.AddArc(tail, num_nodes + head);
|
||||
const CostValue cost = GetHomogeneousCost(tail, head);
|
||||
const GraphArcIndex arc = graph.AddArc(tail, num_nodes + head);
|
||||
const ::CostValue cost = GetHomogeneousCost(tail, head);
|
||||
linear_sum_assignment.SetArcCost(arc, cost);
|
||||
}
|
||||
}
|
||||
@@ -3598,7 +3605,8 @@ int64_t RoutingModel::ComputeLowerBound() {
|
||||
// Therefore we are creating fake assignments for end nodes, forced to point
|
||||
// to the equivalent start node with a cost of 0.
|
||||
for (int tail = Size(); tail < num_nodes; ++tail) {
|
||||
const ArcIndex arc = graph.AddArc(tail, num_nodes + Start(tail - Size()));
|
||||
const GraphArcIndex arc =
|
||||
graph.AddArc(tail, num_nodes + Start(tail - Size()));
|
||||
linear_sum_assignment.SetArcCost(arc, 0);
|
||||
}
|
||||
if (linear_sum_assignment.ComputeAssignment()) {
|
||||
|
||||
@@ -378,20 +378,17 @@ cc_test(
|
||||
|
||||
cc_library(
|
||||
name = "shortest_paths",
|
||||
srcs = ["shortest_paths.cc"],
|
||||
hdrs = ["shortest_paths.h"],
|
||||
deps = [
|
||||
":ebert_graph",
|
||||
":graph",
|
||||
"//ortools/base",
|
||||
"//ortools/base:adjustable_priority_queue",
|
||||
"//ortools/base:map_util",
|
||||
"//ortools/base:stl_util",
|
||||
"//ortools/base:threadpool",
|
||||
"//ortools/base:timer",
|
||||
"@com_google_absl//absl/base:core_headers",
|
||||
"@com_google_absl//absl/container:flat_hash_map",
|
||||
"@com_google_absl//absl/functional:bind_front",
|
||||
"@com_google_absl//absl/log",
|
||||
"@com_google_absl//absl/log:check",
|
||||
"@com_google_absl//absl/types:span",
|
||||
],
|
||||
@@ -404,10 +401,10 @@ cc_test(
|
||||
tags = ["noasan"], # Times out occasionally in ASAN mode.
|
||||
deps = [
|
||||
":ebert_graph",
|
||||
":graph",
|
||||
":shortest_paths",
|
||||
":strongly_connected_components",
|
||||
"//ortools/base:gmock_main",
|
||||
"//ortools/util:zvector",
|
||||
"@com_google_absl//absl/base:core_headers",
|
||||
"@com_google_absl//absl/log:check",
|
||||
"@com_google_absl//absl/random",
|
||||
@@ -537,11 +534,11 @@ cc_library(
|
||||
":generic_max_flow",
|
||||
":graph",
|
||||
":graphs",
|
||||
":max_flow",
|
||||
"//ortools/base:mathutil",
|
||||
"//ortools/util:saturated_arithmetic",
|
||||
"//ortools/util:stats",
|
||||
"//ortools/util:zvector",
|
||||
"@com_google_absl//absl/base:core_headers",
|
||||
"@com_google_absl//absl/flags:flag",
|
||||
"@com_google_absl//absl/log",
|
||||
"@com_google_absl//absl/log:check",
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
#include "ortools/graph/assignment.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <cstdint>
|
||||
#include <limits>
|
||||
|
||||
#include "ortools/graph/graph.h"
|
||||
@@ -22,15 +21,10 @@
|
||||
|
||||
namespace operations_research {
|
||||
|
||||
using ArcIndex = int32_t;
|
||||
using NodeIndex = int32_t;
|
||||
using CostValue = int64_t;
|
||||
|
||||
SimpleLinearSumAssignment::SimpleLinearSumAssignment() : num_nodes_(0) {}
|
||||
|
||||
ArcIndex SimpleLinearSumAssignment::AddArcWithCost(NodeIndex left_node,
|
||||
NodeIndex right_node,
|
||||
CostValue cost) {
|
||||
SimpleLinearSumAssignment::ArcIndex SimpleLinearSumAssignment::AddArcWithCost(
|
||||
NodeIndex left_node, NodeIndex right_node, CostValue cost) {
|
||||
const ArcIndex num_arcs = arc_cost_.size();
|
||||
num_nodes_ = std::max(num_nodes_, left_node + 1);
|
||||
num_nodes_ = std::max(num_nodes_, right_node + 1);
|
||||
@@ -40,19 +34,27 @@ ArcIndex SimpleLinearSumAssignment::AddArcWithCost(NodeIndex left_node,
|
||||
return num_arcs;
|
||||
}
|
||||
|
||||
NodeIndex SimpleLinearSumAssignment::NumNodes() const { return num_nodes_; }
|
||||
SimpleLinearSumAssignment::NodeIndex SimpleLinearSumAssignment::NumNodes()
|
||||
const {
|
||||
return num_nodes_;
|
||||
}
|
||||
|
||||
ArcIndex SimpleLinearSumAssignment::NumArcs() const { return arc_cost_.size(); }
|
||||
SimpleLinearSumAssignment::ArcIndex SimpleLinearSumAssignment::NumArcs() const {
|
||||
return arc_cost_.size();
|
||||
}
|
||||
|
||||
NodeIndex SimpleLinearSumAssignment::LeftNode(ArcIndex arc) const {
|
||||
SimpleLinearSumAssignment::NodeIndex SimpleLinearSumAssignment::LeftNode(
|
||||
ArcIndex arc) const {
|
||||
return arc_tail_[arc];
|
||||
}
|
||||
|
||||
NodeIndex SimpleLinearSumAssignment::RightNode(ArcIndex arc) const {
|
||||
SimpleLinearSumAssignment::NodeIndex SimpleLinearSumAssignment::RightNode(
|
||||
ArcIndex arc) const {
|
||||
return arc_head_[arc];
|
||||
}
|
||||
|
||||
CostValue SimpleLinearSumAssignment::Cost(ArcIndex arc) const {
|
||||
SimpleLinearSumAssignment::CostValue SimpleLinearSumAssignment::Cost(
|
||||
ArcIndex arc) const {
|
||||
return arc_cost_[arc];
|
||||
}
|
||||
|
||||
|
||||
@@ -32,10 +32,7 @@
|
||||
// those commonalities are mostly factored out into base classes as
|
||||
// described below. Despite the commonalities, however, each of the
|
||||
// three representations presents a somewhat different interface
|
||||
// because of their different underlying semantics. A quintessential
|
||||
// example is that the AddArc() method, very natural for the
|
||||
// EbertGraph representation, cannot exist for an inherently static
|
||||
// representation like ForwardStaticGraph.
|
||||
// because of their different underlying semantics.
|
||||
//
|
||||
// Many clients are expected to use the interfaces to the graph
|
||||
// objects directly, but some clients are parameterized by graph type
|
||||
@@ -48,8 +45,7 @@
|
||||
// TailArrayManager<> template, which provides a uniform interface for
|
||||
// applications that need to map from arc indices to arc tail nodes,
|
||||
// accounting for the fact that such a mapping has to be requested
|
||||
// explicitly from the ForwardStaticGraph and ForwardStarGraph
|
||||
// representations.
|
||||
// explicitly from the ForwardStarGraph representation.
|
||||
//
|
||||
// There are two base class templates, StarGraphBase, and
|
||||
// EbertGraphBase; their purpose is to hold methods and data
|
||||
@@ -60,11 +56,11 @@
|
||||
// not normally be instantiated by clients:
|
||||
//
|
||||
// (StarGraphBase) |
|
||||
// / \ |
|
||||
// / \ |
|
||||
// / \ |
|
||||
// / \ |
|
||||
// (EbertGraphBase) ForwardStaticGraph |
|
||||
// / |
|
||||
// / |
|
||||
// / |
|
||||
// / |
|
||||
// (EbertGraphBase) |
|
||||
// / \ |
|
||||
// / \ |
|
||||
// EbertGraph ForwardEbertGraph |
|
||||
@@ -151,21 +147,6 @@
|
||||
// + n * sizeof(ArcIndexType)
|
||||
// plus a small constant when the array of arc tails is absent. Allocating
|
||||
// the arc tail array adds another m * sizeof(NodeIndexType).
|
||||
//
|
||||
// The ForwardStaticGraph representation is restricted yet farther
|
||||
// than ForwardEbertGraph, with the benefit that it provides higher
|
||||
// performance to those applications that can use it.
|
||||
// * As with ForwardEbertGraph, the presence of the array of arc
|
||||
// tails is optional.
|
||||
// * The outgoing adjacency list for each node is stored in a
|
||||
// contiguous segment of the head_[] array, obviating the
|
||||
// next_adjacent_arc_ structure entirely and ensuring good locality
|
||||
// of reference for applications that iterate over outgoing
|
||||
// adjacency lists.
|
||||
// * The memory consumption is: m * sizeof(NodeIndexType)
|
||||
// + n * sizeof(ArcIndexType)
|
||||
// plus a small constant when the array of arc tails is absent. Allocating
|
||||
// the arc tail array adds another m * sizeof(NodeIndexType).
|
||||
|
||||
#include <algorithm>
|
||||
#include <cstddef>
|
||||
@@ -191,8 +172,6 @@ template <typename NodeIndexType, typename ArcIndexType>
|
||||
class EbertGraph;
|
||||
template <typename NodeIndexType, typename ArcIndexType>
|
||||
class ForwardEbertGraph;
|
||||
template <typename NodeIndexType, typename ArcIndexType>
|
||||
class ForwardStaticGraph;
|
||||
|
||||
// Standard instantiation of ForwardEbertGraph (named 'ForwardStarGraph') of
|
||||
// EbertGraph (named 'StarGraph'); and relevant type shortcuts. Unless their use
|
||||
@@ -206,10 +185,6 @@ typedef int64_t FlowQuantity;
|
||||
typedef int64_t CostValue;
|
||||
typedef EbertGraph<NodeIndex, ArcIndex> StarGraph;
|
||||
typedef ForwardEbertGraph<NodeIndex, ArcIndex> ForwardStarGraph;
|
||||
typedef ZVector<NodeIndex> NodeIndexArray;
|
||||
typedef ZVector<ArcIndex> ArcIndexArray;
|
||||
typedef ZVector<FlowQuantity> QuantityArray;
|
||||
typedef ZVector<CostValue> CostArray;
|
||||
|
||||
// Adapt our old iteration style to support range-based for loops. Add typedefs
|
||||
// required by std::iterator_traits.
|
||||
@@ -536,387 +511,6 @@ class StarGraphBase {
|
||||
}
|
||||
};
|
||||
|
||||
template <typename NodeIndexType, typename ArcIndexType>
|
||||
class PermutationIndexComparisonByArcHead {
|
||||
public:
|
||||
explicit PermutationIndexComparisonByArcHead(
|
||||
const ZVector<NodeIndexType>& head)
|
||||
: head_(head) {}
|
||||
|
||||
bool operator()(ArcIndexType a, ArcIndexType b) const {
|
||||
return head_[a] < head_[b];
|
||||
}
|
||||
|
||||
private:
|
||||
const ZVector<NodeIndexType>& head_;
|
||||
};
|
||||
|
||||
template <typename NodeIndexType, typename ArcIndexType>
|
||||
class ABSL_DEPRECATED("Use `::util::StaticGraph<>` instead.") ForwardStaticGraph
|
||||
: public StarGraphBase<NodeIndexType, ArcIndexType,
|
||||
ForwardStaticGraph<NodeIndexType, ArcIndexType> > {
|
||||
typedef StarGraphBase<NodeIndexType, ArcIndexType,
|
||||
ForwardStaticGraph<NodeIndexType, ArcIndexType> >
|
||||
Base;
|
||||
friend class StarGraphBase<NodeIndexType, ArcIndexType,
|
||||
ForwardStaticGraph<NodeIndexType, ArcIndexType> >;
|
||||
|
||||
using Base::ArcDebugString;
|
||||
using Base::NodeDebugString;
|
||||
|
||||
using Base::first_incident_arc_;
|
||||
using Base::head_;
|
||||
using Base::max_num_arcs_;
|
||||
using Base::max_num_nodes_;
|
||||
using Base::num_arcs_;
|
||||
using Base::num_nodes_;
|
||||
|
||||
public:
|
||||
#if !defined(SWIG)
|
||||
using Base::end_arc_index;
|
||||
using Base::Head;
|
||||
using Base::IsNodeValid;
|
||||
|
||||
using Base::kFirstArc;
|
||||
using Base::kFirstNode;
|
||||
using Base::kNilArc;
|
||||
#endif // SWIG
|
||||
|
||||
typedef NodeIndexType NodeIndex;
|
||||
typedef ArcIndexType ArcIndex;
|
||||
|
||||
// TODO(user): Configure SWIG to handle the
|
||||
// CycleHandlerForAnnotatedArcs class.
|
||||
#if !defined(SWIG)
|
||||
class CycleHandlerForAnnotatedArcs
|
||||
: public ArrayIndexCycleHandler<NodeIndexType, ArcIndexType> {
|
||||
typedef ArrayIndexCycleHandler<NodeIndexType, ArcIndexType> Base;
|
||||
|
||||
public:
|
||||
CycleHandlerForAnnotatedArcs(
|
||||
PermutationCycleHandler<ArcIndexType>* annotation_handler,
|
||||
NodeIndexType* data)
|
||||
: ArrayIndexCycleHandler<NodeIndexType, ArcIndexType>(&data[kFirstArc]),
|
||||
annotation_handler_(annotation_handler) {}
|
||||
|
||||
// This type is neither copyable nor movable.
|
||||
CycleHandlerForAnnotatedArcs(const CycleHandlerForAnnotatedArcs&) = delete;
|
||||
CycleHandlerForAnnotatedArcs& operator=(
|
||||
const CycleHandlerForAnnotatedArcs&) = delete;
|
||||
|
||||
void SetTempFromIndex(ArcIndexType source) override {
|
||||
Base::SetTempFromIndex(source);
|
||||
annotation_handler_->SetTempFromIndex(source);
|
||||
}
|
||||
|
||||
void SetIndexFromIndex(ArcIndexType source,
|
||||
ArcIndexType destination) const override {
|
||||
Base::SetIndexFromIndex(source, destination);
|
||||
annotation_handler_->SetIndexFromIndex(source, destination);
|
||||
}
|
||||
|
||||
void SetIndexFromTemp(ArcIndexType destination) const override {
|
||||
Base::SetIndexFromTemp(destination);
|
||||
annotation_handler_->SetIndexFromTemp(destination);
|
||||
}
|
||||
|
||||
private:
|
||||
PermutationCycleHandler<ArcIndexType>* annotation_handler_;
|
||||
};
|
||||
#endif // SWIG
|
||||
|
||||
// Constructor for use by GraphBuilderFromArcs instances and direct
|
||||
// clients that want to materialize a graph in one step.
|
||||
// Materializing all at once is the only choice available with a
|
||||
// static graph.
|
||||
//
|
||||
// Args:
|
||||
// sort_arcs_by_head: determines whether arcs incident to each tail
|
||||
// node are sorted by head node.
|
||||
// client_cycle_handler: if non-NULL, mediates the permutation of
|
||||
// arbitrary annotation data belonging to the client according
|
||||
// to the permutation applied to the arcs in forming the
|
||||
// graph. Two permutations may be composed to form the final one
|
||||
// that affects the arcs. First, the arcs are always permuted to
|
||||
// group them by tail node because ForwardStaticGraph requires
|
||||
// this. Second, if each node's outgoing arcs are sorted by head
|
||||
// node (according to sort_arcs_by_head), that sorting implies
|
||||
// an additional permutation on the arcs.
|
||||
ForwardStaticGraph(
|
||||
const NodeIndexType num_nodes, const ArcIndexType num_arcs,
|
||||
const bool sort_arcs_by_head,
|
||||
std::vector<std::pair<NodeIndexType, NodeIndexType> >* client_input_arcs,
|
||||
// TODO(user): For some reason, SWIG breaks if the
|
||||
// operations_research namespace is not explicit in the
|
||||
// following argument declaration.
|
||||
operations_research::PermutationCycleHandler<ArcIndexType>* const
|
||||
client_cycle_handler) {
|
||||
max_num_arcs_ = num_arcs;
|
||||
num_arcs_ = num_arcs;
|
||||
max_num_nodes_ = num_nodes;
|
||||
// A more convenient name for a parameter required by style to be
|
||||
// a pointer, because we modify its referent.
|
||||
std::vector<std::pair<NodeIndexType, NodeIndexType> >& input_arcs =
|
||||
*client_input_arcs;
|
||||
|
||||
// We coopt the first_incident_arc_ array as a node-indexed vector
|
||||
// used for two purposes related to degree before setting up its
|
||||
// final values. First, it counts the out-degree of each
|
||||
// node. Second, it is reused to count the number of arcs outgoing
|
||||
// from each node that have already been put in place from the
|
||||
// given input_arcs. We reserve an extra entry as a sentinel at
|
||||
// the end.
|
||||
first_incident_arc_.Reserve(kFirstNode, kFirstNode + num_nodes);
|
||||
first_incident_arc_.SetAll(0);
|
||||
for (ArcIndexType arc = kFirstArc; arc < kFirstArc + num_arcs; ++arc) {
|
||||
first_incident_arc_[kFirstNode + input_arcs[arc].first] += 1;
|
||||
// Take this opportunity to see how many nodes are really
|
||||
// mentioned in the arc list.
|
||||
num_nodes_ = std::max(
|
||||
num_nodes_, static_cast<NodeIndexType>(input_arcs[arc].first + 1));
|
||||
num_nodes_ = std::max(
|
||||
num_nodes_, static_cast<NodeIndexType>(input_arcs[arc].second + 1));
|
||||
}
|
||||
ArcIndexType next_arc = kFirstArc;
|
||||
for (NodeIndexType node = 0; node < num_nodes; ++node) {
|
||||
ArcIndexType degree = first_incident_arc_[kFirstNode + node];
|
||||
first_incident_arc_[kFirstNode + node] = next_arc;
|
||||
next_arc += degree;
|
||||
}
|
||||
DCHECK_EQ(num_arcs, next_arc);
|
||||
head_.Reserve(kFirstArc, kFirstArc + num_arcs - 1);
|
||||
std::unique_ptr<ArcIndexType[]> arc_permutation;
|
||||
if (client_cycle_handler != nullptr) {
|
||||
arc_permutation.reset(new ArcIndexType[end_arc_index()]);
|
||||
for (ArcIndexType input_arc = 0; input_arc < num_arcs; ++input_arc) {
|
||||
NodeIndexType tail = input_arcs[input_arc].first;
|
||||
NodeIndexType head = input_arcs[input_arc].second;
|
||||
ArcIndexType arc = first_incident_arc_[kFirstNode + tail];
|
||||
// The head_ entry will get permuted into the right place
|
||||
// later.
|
||||
head_[kFirstArc + input_arc] = kFirstNode + head;
|
||||
arc_permutation[kFirstArc + arc] = input_arc;
|
||||
first_incident_arc_[kFirstNode + tail] += 1;
|
||||
}
|
||||
} else {
|
||||
if (sizeof(input_arcs[0].first) >= sizeof(first_incident_arc_[0])) {
|
||||
// We reuse the input_arcs[].first entries to hold our
|
||||
// mapping to the head_ array. This allows us to spread out
|
||||
// cache badness.
|
||||
for (ArcIndexType input_arc = 0; input_arc < num_arcs; ++input_arc) {
|
||||
NodeIndexType tail = input_arcs[input_arc].first;
|
||||
ArcIndexType arc = first_incident_arc_[kFirstNode + tail];
|
||||
first_incident_arc_[kFirstNode + tail] = arc + 1;
|
||||
input_arcs[input_arc].first = static_cast<NodeIndexType>(arc);
|
||||
}
|
||||
for (ArcIndexType input_arc = 0; input_arc < num_arcs; ++input_arc) {
|
||||
ArcIndexType arc =
|
||||
static_cast<ArcIndexType>(input_arcs[input_arc].first);
|
||||
NodeIndexType head = input_arcs[input_arc].second;
|
||||
head_[kFirstArc + arc] = kFirstNode + head;
|
||||
}
|
||||
} else {
|
||||
// We cannot reuse the input_arcs[].first entries so we map to
|
||||
// the head_ array in a single loop.
|
||||
for (ArcIndexType input_arc = 0; input_arc < num_arcs; ++input_arc) {
|
||||
NodeIndexType tail = input_arcs[input_arc].first;
|
||||
NodeIndexType head = input_arcs[input_arc].second;
|
||||
ArcIndexType arc = first_incident_arc_[kFirstNode + tail];
|
||||
first_incident_arc_[kFirstNode + tail] = arc + 1;
|
||||
head_[kFirstArc + arc] = kFirstNode + head;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Shift the entries in first_incident_arc_ to compensate for the
|
||||
// counting each one has done through its incident arcs. Note that
|
||||
// there is a special sentry element at the end of
|
||||
// first_incident_arc_.
|
||||
for (NodeIndexType node = kFirstNode + num_nodes; node > /* kFirstNode */ 0;
|
||||
--node) {
|
||||
first_incident_arc_[node] = first_incident_arc_[node - 1];
|
||||
}
|
||||
first_incident_arc_[kFirstNode] = kFirstArc;
|
||||
if (sort_arcs_by_head) {
|
||||
ArcIndexType begin = first_incident_arc_[kFirstNode];
|
||||
if (client_cycle_handler != nullptr) {
|
||||
for (NodeIndexType node = 0; node < num_nodes; ++node) {
|
||||
ArcIndexType end = first_incident_arc_[node + 1];
|
||||
std::sort(
|
||||
&arc_permutation[begin], &arc_permutation[end],
|
||||
PermutationIndexComparisonByArcHead<NodeIndexType, ArcIndexType>(
|
||||
head_));
|
||||
begin = end;
|
||||
}
|
||||
} else {
|
||||
for (NodeIndexType node = 0; node < num_nodes; ++node) {
|
||||
ArcIndexType end = first_incident_arc_[node + 1];
|
||||
// The second argument in the following has a strange index
|
||||
// expression because ZVector claims that no index is valid
|
||||
// unless it refers to an element in the vector. In particular
|
||||
// an index one past the end is invalid.
|
||||
ArcIndexType begin_index = (begin < num_arcs ? begin : begin - 1);
|
||||
ArcIndexType begin_offset = (begin < num_arcs ? 0 : 1);
|
||||
ArcIndexType end_index = (end > 0 ? end - 1 : end);
|
||||
ArcIndexType end_offset = (end > 0 ? 1 : 0);
|
||||
std::sort(&head_[begin_index] + begin_offset,
|
||||
&head_[end_index] + end_offset);
|
||||
begin = end;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (client_cycle_handler != nullptr && num_arcs > 0) {
|
||||
// Apply the computed permutation if we haven't already.
|
||||
CycleHandlerForAnnotatedArcs handler_for_constructor(
|
||||
client_cycle_handler, &head_[kFirstArc] - kFirstArc);
|
||||
// We use a permutation cycle handler to place the head array
|
||||
// indices and permute the client's arc annotation data along
|
||||
// with them.
|
||||
PermutationApplier<ArcIndexType> permutation(&handler_for_constructor);
|
||||
permutation.Apply(&arc_permutation[0], kFirstArc, end_arc_index());
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the tail or start-node of arc.
|
||||
NodeIndexType Tail(const ArcIndexType arc) const {
|
||||
DCHECK(CheckArcValidity(arc));
|
||||
DCHECK(CheckTailIndexValidity(arc));
|
||||
return (*tail_)[arc];
|
||||
}
|
||||
|
||||
// Returns true if arc is incoming to node.
|
||||
bool IsIncoming(ArcIndexType arc, NodeIndexType node) const {
|
||||
return Head(arc) == node;
|
||||
}
|
||||
|
||||
// Utility function to check that an arc index is within the bounds.
|
||||
// It is exported so that users of the ForwardStaticGraph class can use it.
|
||||
// To be used in a DCHECK.
|
||||
bool CheckArcBounds(const ArcIndexType arc) const {
|
||||
return ((arc == kNilArc) || (arc >= kFirstArc && arc < max_num_arcs_));
|
||||
}
|
||||
|
||||
// Utility function to check that an arc index is within the bounds AND
|
||||
// different from kNilArc.
|
||||
// It is exported so that users of the ForwardStaticGraph class can use it.
|
||||
// To be used in a DCHECK.
|
||||
bool CheckArcValidity(const ArcIndexType arc) const {
|
||||
return ((arc != kNilArc) && (arc >= kFirstArc && arc < max_num_arcs_));
|
||||
}
|
||||
|
||||
// Returns true if arc is a valid index into the (*tail_) array.
|
||||
bool CheckTailIndexValidity(const ArcIndexType arc) const {
|
||||
return ((tail_ != nullptr) && (arc >= kFirstArc) &&
|
||||
(arc <= tail_->max_index()));
|
||||
}
|
||||
|
||||
ArcIndexType NextOutgoingArc(const NodeIndexType node,
|
||||
ArcIndexType arc) const {
|
||||
DCHECK(IsNodeValid(node));
|
||||
DCHECK(CheckArcValidity(arc));
|
||||
++arc;
|
||||
if (arc < first_incident_arc_[node + 1]) {
|
||||
return arc;
|
||||
} else {
|
||||
return kNilArc;
|
||||
}
|
||||
}
|
||||
|
||||
// Returns a debug string containing all the information contained in the
|
||||
// data structure in raw form.
|
||||
std::string DebugString() const {
|
||||
std::string result = "Arcs:(node) :\n";
|
||||
for (ArcIndexType arc = kFirstArc; arc < num_arcs_; ++arc) {
|
||||
result += " " + ArcDebugString(arc) + ":(" + NodeDebugString(head_[arc]) +
|
||||
")\n";
|
||||
}
|
||||
result += "Node:First arc :\n";
|
||||
for (NodeIndexType node = kFirstNode; node <= num_nodes_; ++node) {
|
||||
result += " " + NodeDebugString(node) + ":" +
|
||||
ArcDebugString(first_incident_arc_[node]) + "\n";
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
bool BuildTailArray() {
|
||||
// If (*tail_) is already allocated, we have the invariant that
|
||||
// its contents are canonical, so we do not need to do anything
|
||||
// here in that case except return true.
|
||||
if (tail_ == nullptr) {
|
||||
if (!RepresentationClean()) {
|
||||
// We have been asked to build the (*tail_) array, but we have
|
||||
// no valid information from which to build it. The graph is
|
||||
// in an unrecoverable, inconsistent state.
|
||||
return false;
|
||||
}
|
||||
// Reallocate (*tail_) and rebuild its contents from the
|
||||
// adjacency lists.
|
||||
tail_.reset(new ZVector<NodeIndexType>);
|
||||
tail_->Reserve(kFirstArc, max_num_arcs_ - 1);
|
||||
typename Base::NodeIterator node_it(*this);
|
||||
for (; node_it.Ok(); node_it.Next()) {
|
||||
NodeIndexType node = node_it.Index();
|
||||
typename Base::OutgoingArcIterator arc_it(*this, node);
|
||||
for (; arc_it.Ok(); arc_it.Next()) {
|
||||
(*tail_)[arc_it.Index()] = node;
|
||||
}
|
||||
}
|
||||
}
|
||||
DCHECK(TailArrayComplete());
|
||||
return true;
|
||||
}
|
||||
|
||||
void ReleaseTailArray() { tail_.reset(nullptr); }
|
||||
|
||||
// To be used in a DCHECK().
|
||||
bool TailArrayComplete() const {
|
||||
CHECK(tail_);
|
||||
for (ArcIndexType arc = kFirstArc; arc < num_arcs_; ++arc) {
|
||||
CHECK(CheckTailIndexValidity(arc));
|
||||
CHECK(IsNodeValid((*tail_)[arc]));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private:
|
||||
bool IsDirect() const { return true; }
|
||||
bool RepresentationClean() const { return true; }
|
||||
bool IsOutgoing(const NodeIndexType node,
|
||||
const ArcIndexType unused_arc) const {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Returns the first arc in node's incidence list.
|
||||
ArcIndexType FirstOutgoingOrOppositeIncomingArc(NodeIndexType node) const {
|
||||
DCHECK(RepresentationClean());
|
||||
DCHECK(IsNodeValid(node));
|
||||
ArcIndexType result = first_incident_arc_[node];
|
||||
return ((result != first_incident_arc_[node + 1]) ? result : kNilArc);
|
||||
}
|
||||
|
||||
// Utility method that finds the next outgoing arc.
|
||||
ArcIndexType FindNextOutgoingArc(ArcIndexType arc) const {
|
||||
DCHECK(CheckArcBounds(arc));
|
||||
return arc;
|
||||
}
|
||||
|
||||
// Array of node indices, not always present. (*tail_)[i] contains
|
||||
// the tail node of arc i. This array is not needed for normal graph
|
||||
// traversal operations, but is used in optimizing the graph's
|
||||
// layout so arcs are grouped by tail node, and can be used in one
|
||||
// approach to serializing the graph.
|
||||
//
|
||||
// Invariants: At any time when we are not executing a method of
|
||||
// this class, either tail_ == NULL or the tail_ array's contents
|
||||
// are kept canonical. If tail_ != NULL, any method that modifies
|
||||
// adjacency lists must also ensure (*tail_) is modified
|
||||
// correspondingly. The converse does not hold: Modifications to
|
||||
// (*tail_) are allowed without updating the adjacency lists. If
|
||||
// such modifications take place, representation_clean_ must be set
|
||||
// to false, of course, to indicate that the adjacency lists are no
|
||||
// longer current.
|
||||
std::unique_ptr<ZVector<NodeIndexType> > tail_;
|
||||
};
|
||||
|
||||
// The index of the 'nil' node in the graph.
|
||||
template <typename NodeIndexType, typename ArcIndexType, typename DerivedGraph>
|
||||
const NodeIndexType
|
||||
@@ -1913,12 +1507,6 @@ struct graph_traits<ForwardEbertGraph<NodeIndexType, ArcIndexType> > {
|
||||
static constexpr bool is_dynamic = true;
|
||||
};
|
||||
|
||||
template <typename NodeIndexType, typename ArcIndexType>
|
||||
struct graph_traits<ForwardStaticGraph<NodeIndexType, ArcIndexType> > {
|
||||
static constexpr bool has_reverse_arcs = false;
|
||||
static constexpr bool is_dynamic = false;
|
||||
};
|
||||
|
||||
namespace or_internal {
|
||||
|
||||
// The TailArrayBuilder class template is not expected to be used by
|
||||
|
||||
@@ -633,9 +633,8 @@ TYPED_TEST(DebugStringEbertGraphTest, Test2) {
|
||||
template <typename GraphType>
|
||||
class DebugStringTestWithGraphBuildManager : public ::testing::Test {};
|
||||
|
||||
typedef ::testing::Types<
|
||||
EbertGraph<int16_t, int16_t>, ForwardEbertGraph<int16_t, int16_t>,
|
||||
ForwardStaticGraph<int16_t, int16_t>, ForwardStaticGraph<int16_t, int32_t> >
|
||||
typedef ::testing::Types<EbertGraph<int16_t, int16_t>,
|
||||
ForwardEbertGraph<int16_t, int16_t> >
|
||||
GraphTypesForDebugStringTestWithGraphBuildManager;
|
||||
|
||||
TYPED_TEST_SUITE(DebugStringTestWithGraphBuildManager,
|
||||
|
||||
@@ -510,19 +510,6 @@ class GenericMaxFlow : public MaxFlowStatusClass {
|
||||
mutable StatsGroup stats_;
|
||||
};
|
||||
|
||||
#if !SWIG
|
||||
|
||||
// Default instance MaxFlow that uses StarGraph. Note that we cannot just use a
|
||||
// typedef because of dependent code expecting MaxFlow to be a real class.
|
||||
// TODO(user): Modify this code and remove it.
|
||||
class MaxFlow : public GenericMaxFlow<StarGraph> {
|
||||
public:
|
||||
MaxFlow(const StarGraph* graph, NodeIndex source, NodeIndex target)
|
||||
: GenericMaxFlow(graph, source, target) {}
|
||||
};
|
||||
|
||||
#endif // SWIG
|
||||
|
||||
template <typename Element, typename IntegerPriority>
|
||||
bool PriorityQueueWithRestrictedPush<Element, IntegerPriority>::IsEmpty()
|
||||
const {
|
||||
|
||||
@@ -103,11 +103,9 @@ class LinearSumAssignmentTestWithGraphBuilder : public ::testing::Test {};
|
||||
|
||||
typedef ::testing::Types<
|
||||
EbertGraph<int16_t, int16_t>, ForwardEbertGraph<int16_t, int16_t>,
|
||||
ForwardStaticGraph<int16_t, int16_t>, EbertGraph<int16_t, ArcIndex>,
|
||||
ForwardEbertGraph<int16_t, ArcIndex>, ForwardStaticGraph<int16_t, ArcIndex>,
|
||||
EbertGraph<int16_t, ArcIndex>, ForwardEbertGraph<int16_t, ArcIndex>,
|
||||
EbertGraph<NodeIndex, int16_t>, ForwardEbertGraph<NodeIndex, int16_t>,
|
||||
ForwardStaticGraph<NodeIndex, int16_t>, StarGraph, ForwardStarGraph,
|
||||
util::ListGraph<>, util::ReverseArcListGraph<>>
|
||||
StarGraph, ForwardStarGraph, util::ListGraph<>, util::ReverseArcListGraph<>>
|
||||
GraphTypesForAssignmentTestingWithGraphBuilder;
|
||||
|
||||
TYPED_TEST_SUITE(LinearSumAssignmentTestWithGraphBuilder,
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/flags/flag.h"
|
||||
#include "absl/log/check.h"
|
||||
#include "absl/strings/str_format.h"
|
||||
@@ -30,8 +31,8 @@
|
||||
#include "ortools/graph/generic_max_flow.h"
|
||||
#include "ortools/graph/graph.h"
|
||||
#include "ortools/graph/graphs.h"
|
||||
#include "ortools/graph/max_flow.h"
|
||||
#include "ortools/util/saturated_arithmetic.h"
|
||||
#include "ortools/util/stats.h"
|
||||
|
||||
// TODO(user): Remove these flags and expose the parameters in the API.
|
||||
// New clients, please do not use these flags!
|
||||
@@ -360,8 +361,8 @@ bool GenericMinCostFlow<Graph, ArcFlowType,
|
||||
}
|
||||
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
FlowQuantity GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Flow(
|
||||
ArcIndex arc) const {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Flow(
|
||||
ArcIndex arc) const -> FlowQuantity {
|
||||
if (IsArcDirect(arc)) {
|
||||
return residual_arc_capacity_[Opposite(arc)];
|
||||
} else {
|
||||
@@ -371,9 +372,8 @@ FlowQuantity GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Flow(
|
||||
|
||||
// We use the equations given in the comment of residual_arc_capacity_.
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
FlowQuantity
|
||||
GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Capacity(
|
||||
ArcIndex arc) const {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Capacity(
|
||||
ArcIndex arc) const -> FlowQuantity {
|
||||
if (IsArcDirect(arc)) {
|
||||
return residual_arc_capacity_[arc] + residual_arc_capacity_[Opposite(arc)];
|
||||
} else {
|
||||
@@ -382,16 +382,16 @@ GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Capacity(
|
||||
}
|
||||
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
CostValue GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::UnitCost(
|
||||
ArcIndex arc) const {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::UnitCost(
|
||||
ArcIndex arc) const -> CostValue {
|
||||
DCHECK(IsArcValid(arc));
|
||||
DCHECK_EQ(uint64_t{1}, cost_scaling_factor_);
|
||||
return scaled_arc_unit_cost_[arc];
|
||||
}
|
||||
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
FlowQuantity GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Supply(
|
||||
NodeIndex node) const {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Supply(
|
||||
NodeIndex node) const -> FlowQuantity {
|
||||
DCHECK(graph_->IsNodeValid(node));
|
||||
return node_excess_[node];
|
||||
}
|
||||
@@ -417,16 +417,14 @@ bool GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::IsActive(
|
||||
}
|
||||
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
CostValue
|
||||
GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::ReducedCost(
|
||||
ArcIndex arc) const {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::ReducedCost(
|
||||
ArcIndex arc) const -> CostValue {
|
||||
return FastReducedCost(arc, node_potential_[Tail(arc)]);
|
||||
}
|
||||
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
CostValue
|
||||
GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::FastReducedCost(
|
||||
ArcIndex arc, CostValue tail_potential) const {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::FastReducedCost(
|
||||
ArcIndex arc, CostValue tail_potential) const -> CostValue {
|
||||
DCHECK_EQ(node_potential_[Tail(arc)], tail_potential);
|
||||
DCHECK(graph_->IsNodeValid(Tail(arc)));
|
||||
DCHECK(graph_->IsNodeValid(Head(arc)));
|
||||
@@ -440,9 +438,8 @@ GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::FastReducedCost(
|
||||
}
|
||||
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
typename GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::ArcIndex
|
||||
GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::
|
||||
GetFirstOutgoingOrOppositeIncomingArc(NodeIndex node) const {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::
|
||||
GetFirstOutgoingOrOppositeIncomingArc(NodeIndex node) const -> ArcIndex {
|
||||
OutgoingOrOppositeIncomingArcIterator arc_it(*graph_, node);
|
||||
return arc_it.Index();
|
||||
}
|
||||
@@ -478,8 +475,8 @@ bool GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::Solve() {
|
||||
}
|
||||
|
||||
template <typename Graph, typename ArcFlowType, typename ArcScaledCostType>
|
||||
CostValue
|
||||
GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::GetOptimalCost() {
|
||||
auto GenericMinCostFlow<Graph, ArcFlowType, ArcScaledCostType>::GetOptimalCost()
|
||||
-> CostValue {
|
||||
if (status_ != OPTIMAL) {
|
||||
return 0;
|
||||
}
|
||||
@@ -1034,10 +1031,9 @@ void SimpleMinCostFlow::SetNodeSupply(NodeIndex node, FlowQuantity supply) {
|
||||
node_supply_[node] = supply;
|
||||
}
|
||||
|
||||
ArcIndex SimpleMinCostFlow::AddArcWithCapacityAndUnitCost(NodeIndex tail,
|
||||
NodeIndex head,
|
||||
FlowQuantity capacity,
|
||||
CostValue unit_cost) {
|
||||
SimpleMinCostFlow::ArcIndex SimpleMinCostFlow::AddArcWithCapacityAndUnitCost(
|
||||
NodeIndex tail, NodeIndex head, FlowQuantity capacity,
|
||||
CostValue unit_cost) {
|
||||
ResizeNodeVectors(std::max(tail, head));
|
||||
const ArcIndex arc = arc_tail_.size();
|
||||
arc_tail_.push_back(tail);
|
||||
@@ -1047,7 +1043,7 @@ ArcIndex SimpleMinCostFlow::AddArcWithCapacityAndUnitCost(NodeIndex tail,
|
||||
return arc;
|
||||
}
|
||||
|
||||
ArcIndex SimpleMinCostFlow::PermutedArc(ArcIndex arc) {
|
||||
SimpleMinCostFlow::ArcIndex SimpleMinCostFlow::PermutedArc(ArcIndex arc) {
|
||||
return arc < arc_permutation_.size() ? arc_permutation_[arc] : arc;
|
||||
}
|
||||
|
||||
@@ -1171,31 +1167,46 @@ SimpleMinCostFlow::Status SimpleMinCostFlow::SolveWithPossibleAdjustment(
|
||||
return min_cost_flow.status();
|
||||
}
|
||||
|
||||
CostValue SimpleMinCostFlow::OptimalCost() const { return optimal_cost_; }
|
||||
SimpleMinCostFlow::CostValue SimpleMinCostFlow::OptimalCost() const {
|
||||
return optimal_cost_;
|
||||
}
|
||||
|
||||
FlowQuantity SimpleMinCostFlow::MaximumFlow() const { return maximum_flow_; }
|
||||
SimpleMinCostFlow::FlowQuantity SimpleMinCostFlow::MaximumFlow() const {
|
||||
return maximum_flow_;
|
||||
}
|
||||
|
||||
FlowQuantity SimpleMinCostFlow::Flow(ArcIndex arc) const {
|
||||
SimpleMinCostFlow::FlowQuantity SimpleMinCostFlow::Flow(ArcIndex arc) const {
|
||||
return arc_flow_[arc];
|
||||
}
|
||||
|
||||
NodeIndex SimpleMinCostFlow::NumNodes() const { return node_supply_.size(); }
|
||||
SimpleMinCostFlow::SimpleMinCostFlow::NodeIndex SimpleMinCostFlow::NumNodes()
|
||||
const {
|
||||
return node_supply_.size();
|
||||
}
|
||||
|
||||
ArcIndex SimpleMinCostFlow::NumArcs() const { return arc_tail_.size(); }
|
||||
SimpleMinCostFlow::ArcIndex SimpleMinCostFlow::NumArcs() const {
|
||||
return arc_tail_.size();
|
||||
}
|
||||
|
||||
ArcIndex SimpleMinCostFlow::Tail(ArcIndex arc) const { return arc_tail_[arc]; }
|
||||
SimpleMinCostFlow::ArcIndex SimpleMinCostFlow::Tail(ArcIndex arc) const {
|
||||
return arc_tail_[arc];
|
||||
}
|
||||
|
||||
ArcIndex SimpleMinCostFlow::Head(ArcIndex arc) const { return arc_head_[arc]; }
|
||||
SimpleMinCostFlow::ArcIndex SimpleMinCostFlow::Head(ArcIndex arc) const {
|
||||
return arc_head_[arc];
|
||||
}
|
||||
|
||||
FlowQuantity SimpleMinCostFlow::Capacity(ArcIndex arc) const {
|
||||
SimpleMinCostFlow::FlowQuantity SimpleMinCostFlow::Capacity(
|
||||
ArcIndex arc) const {
|
||||
return arc_capacity_[arc];
|
||||
}
|
||||
|
||||
CostValue SimpleMinCostFlow::UnitCost(ArcIndex arc) const {
|
||||
SimpleMinCostFlow::CostValue SimpleMinCostFlow::UnitCost(ArcIndex arc) const {
|
||||
return arc_cost_[arc];
|
||||
}
|
||||
|
||||
FlowQuantity SimpleMinCostFlow::Supply(NodeIndex node) const {
|
||||
SimpleMinCostFlow::FlowQuantity SimpleMinCostFlow::Supply(
|
||||
NodeIndex node) const {
|
||||
return node_supply_[node];
|
||||
}
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
// [START program]
|
||||
// From Bradley, Hax and Maganti, 'Applied Mathematical Programming', figure 8.1
|
||||
// [START import]
|
||||
#include <cstddef>
|
||||
#include <cstdint>
|
||||
#include <vector>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// This file contains functions to compute shortest paths on Ebert graphs using
|
||||
// This file contains functions to compute shortest paths on graphs using
|
||||
// Dijkstra's algorithm,
|
||||
// E.W. Dijkstra, "A note on two problems in connexion with graphs". Numerische
|
||||
// Mathematik 1:269–271, 1959. See for example:
|
||||
@@ -65,13 +65,21 @@
|
||||
#include <cstdint>
|
||||
#include <limits>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/container/flat_hash_map.h"
|
||||
#include "absl/functional/bind_front.h"
|
||||
#include "absl/log/check.h"
|
||||
#include "absl/types/span.h"
|
||||
#include "ortools/base/adjustable_priority_queue-inl.h"
|
||||
#include "ortools/base/adjustable_priority_queue.h"
|
||||
#include "ortools/base/logging.h"
|
||||
#include "ortools/graph/ebert_graph.h"
|
||||
#include "ortools/graph/graph.h"
|
||||
|
||||
#include "ortools/base/map_util.h"
|
||||
#include "ortools/base/stl_util.h"
|
||||
#include "ortools/base/threadpool.h"
|
||||
#include "ortools/base/timer.h"
|
||||
namespace operations_research {
|
||||
|
||||
// Storing distances on 32 bits to limit memory consumption of distance
|
||||
@@ -82,82 +90,112 @@ typedef uint32_t PathDistance;
|
||||
const PathDistance kDisconnectedPathDistance =
|
||||
std::numeric_limits<uint32_t>::max();
|
||||
|
||||
namespace internal {
|
||||
template <class NodeIndex, NodeIndex kNilNode>
|
||||
class PathContainerImpl;
|
||||
} // namespace internal
|
||||
|
||||
// Container class storing paths and distances along the paths. It is used in
|
||||
// shortest path computation functions to store resulting shortest paths.
|
||||
// Usage example iterating on the path between nodes from and to:
|
||||
// Usage example iterating on the path between nodes `from` and `to`:
|
||||
// PathContainer path_container;
|
||||
// PathContainer::BuildInMemoryCompactPathContainer(&path_container);
|
||||
// ... fill up container ...
|
||||
// const NodeIndex from =...;
|
||||
// NodeIndex to =...;
|
||||
// // ... fill up container ...
|
||||
// const PathContainer::NodeIndex from =...;
|
||||
// PathContainer::NodeIndex to =...;
|
||||
// while (to != from) {
|
||||
// LOG(INFO) << to;
|
||||
// to = path_container.GetPenultimateNodeInPath(from, to);
|
||||
// }
|
||||
class PathContainer {
|
||||
template <class GraphType>
|
||||
class GenericPathContainer {
|
||||
public:
|
||||
PathContainer();
|
||||
using NodeIndex = typename GraphType::NodeIndex;
|
||||
using Impl = internal::PathContainerImpl<NodeIndex, GraphType::kNilNode>;
|
||||
|
||||
// TODO(b/385094969): Remove this when all clients are migrated, and use
|
||||
// factory functions instead.
|
||||
GenericPathContainer();
|
||||
|
||||
// This type is neither copyable nor movable.
|
||||
PathContainer(const PathContainer&) = delete;
|
||||
PathContainer& operator=(const PathContainer&) = delete;
|
||||
GenericPathContainer(const GenericPathContainer&) = delete;
|
||||
GenericPathContainer& operator=(const GenericPathContainer&) = delete;
|
||||
|
||||
~PathContainer();
|
||||
~GenericPathContainer();
|
||||
|
||||
// Returns the distance between node 'from' and node 'to' following the path
|
||||
// out of 'from' and into 'to'. Note that if 'from' == 'to', the distance is
|
||||
// not necessarily 0 if the path out of 'to' and back into 'to' has a distance
|
||||
// Returns the distance between node `from` and node `to` following the path
|
||||
// out of `from` and into `to`. Note that if `from` == `to`, the distance is
|
||||
// not necessarily 0 if the path out of `to` and back into `to` has a distance
|
||||
// greater than 0. If you do require the distance to be 0 in this case, add to
|
||||
// the graph an arc from 'to' to itself with a length of 0.
|
||||
// If nodes are not connected, returns kDisconnectedPathDistance.
|
||||
// the graph an arc from `to` to itself with a length of 0.
|
||||
// If nodes are not connected, returns `kDisconnectedPathDistance`.
|
||||
PathDistance GetDistance(NodeIndex from, NodeIndex to) const;
|
||||
|
||||
// Returns the penultimate node on the path out of node 'from' into node 'to'
|
||||
// (the direct predecessor of node 'to' on the path).
|
||||
// If 'from' == 'to', the penultimate node is 'to' only if the shortest path
|
||||
// from 'to' to itself is composed of the arc ('to, 'to'), which might not be
|
||||
// Returns the penultimate node on the path out of node `from` into node `to`
|
||||
// (the direct predecessor of node `to` on the path).
|
||||
// If `from` == `to`, the penultimate node is `to` only if the shortest path
|
||||
// from `to` to itself is composed of the arc (`to, `to`), which might not be
|
||||
// the case if either this arc doesn't exist or if the length of this arc is
|
||||
// greater than the distance of an alternate path.
|
||||
// If nodes are not connected, returns StarGraph::kNilNode.
|
||||
// If nodes are not connected, returns `GraphType::kNilNode`.
|
||||
NodeIndex GetPenultimateNodeInPath(NodeIndex from, NodeIndex to) const;
|
||||
|
||||
// Returns path nodes from node "from" to node "to" in a ordered vector.
|
||||
// The vector starts with 'from' and ends with 'to', if both nodes are
|
||||
// Returns path nodes from node `from` to node `to` in the order in which they
|
||||
// appear along the path.
|
||||
// The vector starts with `from` and ends with `to`, if both nodes are
|
||||
// connected (otherwise an empty vector is returned).
|
||||
void GetPath(NodeIndex from, NodeIndex to,
|
||||
std::vector<NodeIndex>* path) const;
|
||||
|
||||
// For internal use only. Returns the internal container implementation.
|
||||
PathContainerImpl* GetImplementation() const;
|
||||
|
||||
// Builds a path container which only stores distances between path nodes.
|
||||
static void BuildPathDistanceContainer(PathContainer* path_container);
|
||||
static GenericPathContainer BuildPathDistanceContainer();
|
||||
|
||||
ABSL_DEPRECATED("Use factory function BuildPathDistanceContainer instead.")
|
||||
static void BuildPathDistanceContainer(GenericPathContainer* path_container);
|
||||
|
||||
// Builds a path container which stores explicit paths and distances between
|
||||
// path nodes in a memory-compact representation.
|
||||
// In this case GetPenultimateNodeInPath() is O(log(path_tree_size)),
|
||||
// path_tree_size being the size of a tree of paths from a source node (in
|
||||
// In this case `GetPenultimateNodeInPath()` is `O(log(path_tree_size))`,
|
||||
// `path_tree_size` being the size of a tree of paths from a source node (in
|
||||
// practice it is equal to the number of nodes in the graph if all nodes
|
||||
// are strongly connected).
|
||||
// GetPath is O(log(path_tree_size) + path_size), where path_size is the
|
||||
// `GetPath` is `O(log(path_tree_size) + path_size)`, where `path_size` is the
|
||||
// size of the resulting path; note this is faster than successive calls
|
||||
// to GetPenultimateNodeInPath() which would result in
|
||||
// O(log(path_tree_size) * path_size).
|
||||
static void BuildInMemoryCompactPathContainer(PathContainer* path_container);
|
||||
// to `GetPenultimateNodeInPath()` which would result in
|
||||
// `O(log(path_tree_size) * path_size)`.
|
||||
static GenericPathContainer BuildInMemoryCompactPathContainer();
|
||||
|
||||
ABSL_DEPRECATED(
|
||||
"Use factory function BuildInMemoryCompactPathContainer instead.")
|
||||
static void BuildInMemoryCompactPathContainer(
|
||||
GenericPathContainer* path_container);
|
||||
|
||||
// TODO(user): Add save-to-disk container.
|
||||
// TODO(user): Add BuildInMemoryFastPathContainer(), which does
|
||||
// GetPenultimateNodeInPath() in O(1).
|
||||
// TODO(user): Add `BuildInMemoryFastPathContainer()`, which does
|
||||
// `GetPenultimateNodeInPath()` in `O(1)`.
|
||||
|
||||
// For internal use only. Returns the internal container implementation.
|
||||
Impl* GetImplementation() const { return container_.get(); }
|
||||
|
||||
private:
|
||||
std::unique_ptr<PathContainerImpl> container_;
|
||||
explicit GenericPathContainer(std::unique_ptr<Impl> impl)
|
||||
: container_(std::move(impl)) {}
|
||||
|
||||
std::unique_ptr<Impl> container_;
|
||||
};
|
||||
|
||||
// TODO(b/385094969): Remove this alias when all clients are migrated.
|
||||
class LegacyIgnoredGraphType {
|
||||
public:
|
||||
using NodeIndex = int32_t;
|
||||
static constexpr NodeIndex kNilNode = std::numeric_limits<NodeIndex>::max();
|
||||
};
|
||||
using PathContainer = GenericPathContainer<LegacyIgnoredGraphType>;
|
||||
|
||||
// Utility function which returns a vector containing all nodes of a graph.
|
||||
template <class GraphType>
|
||||
void GetGraphNodes(const GraphType& graph, std::vector<NodeIndex>* nodes) {
|
||||
void GetGraphNodes(const GraphType& graph,
|
||||
std::vector<typename GraphType::NodeIndex>* nodes) {
|
||||
CHECK(nodes != nullptr);
|
||||
nodes->clear();
|
||||
nodes->reserve(graph.num_nodes());
|
||||
@@ -179,41 +217,43 @@ void GetGraphNodesFromGraph(const GraphType& graph,
|
||||
}
|
||||
|
||||
// In all the functions below the arc_lengths vector represents the lengths of
|
||||
// the arcs of the graph (arc_lengths[arc] is the length of arc).
|
||||
// Resulting shortest paths are stored in a path container 'path_container'.
|
||||
// the arcs of the graph (`arc_lengths[arc]` is the length of `arc`).
|
||||
// Resulting shortest paths are stored in a path container `path_container`.
|
||||
|
||||
// Computes shortest paths from the node 'source' to all nodes in the graph.
|
||||
// Computes shortest paths from the node `source` to all nodes in the graph.
|
||||
template <class GraphType>
|
||||
void ComputeOneToAllShortestPaths(const GraphType& graph,
|
||||
const std::vector<PathDistance>& arc_lengths,
|
||||
typename GraphType::NodeIndex source,
|
||||
PathContainer* const path_container) {
|
||||
void ComputeOneToAllShortestPaths(
|
||||
const GraphType& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
typename GraphType::NodeIndex source,
|
||||
GenericPathContainer<GraphType>* const path_container) {
|
||||
std::vector<typename GraphType::NodeIndex> all_nodes;
|
||||
GetGraphNodesFromGraph<GraphType>(graph, &all_nodes);
|
||||
ComputeOneToManyShortestPaths(graph, arc_lengths, source, all_nodes,
|
||||
path_container);
|
||||
}
|
||||
|
||||
// Computes shortest paths from the node 'source' to nodes in 'destinations'.
|
||||
template <class GraphType>
|
||||
// Computes shortest paths from the node `source` to nodes in `destinations`.
|
||||
// TODO(b/385094969): Remove second template parameter when all clients are
|
||||
// migrated.
|
||||
template <class GraphType, class PathContainerGraphType>
|
||||
void ComputeOneToManyShortestPaths(
|
||||
const GraphType& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
typename GraphType::NodeIndex source,
|
||||
const std::vector<typename GraphType::NodeIndex>& destinations,
|
||||
PathContainer* const path_container) {
|
||||
GenericPathContainer<PathContainerGraphType>* const path_container) {
|
||||
std::vector<typename GraphType::NodeIndex> sources(1, source);
|
||||
ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
graph, arc_lengths, sources, destinations, 1, path_container);
|
||||
}
|
||||
|
||||
// Computes the shortest path from the node 'source' to the node 'destination'
|
||||
// and returns that path as a vector of nodes. If there is no path from 'source'
|
||||
// to 'destination', the returned vector is empty.
|
||||
// Computes the shortest path from the node `source` to the node `destination`
|
||||
// and returns that path as a vector of nodes. If there is no path from `source`
|
||||
// to `destination`, the returned vector is empty.
|
||||
//
|
||||
// To get distance information, use ComputeOneToManyShortestPaths with a single
|
||||
// destination and a `PathContainer` built with `BuildPathDistanceContainer` (if
|
||||
// you just need the distance) or `BuildInMemoryCompactPathContainer`
|
||||
// (otherwise).
|
||||
// To get distance information, use `ComputeOneToManyShortestPaths` with a
|
||||
// single destination and a `PathContainer` built with
|
||||
// `BuildPathDistanceContainer` (if you just need the distance) or
|
||||
// `BuildInMemoryCompactPathContainer` (otherwise).
|
||||
template <class GraphType>
|
||||
std::vector<typename GraphType::NodeIndex> ComputeOneToOneShortestPath(
|
||||
const GraphType& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
@@ -222,8 +262,8 @@ std::vector<typename GraphType::NodeIndex> ComputeOneToOneShortestPath(
|
||||
std::vector<typename GraphType::NodeIndex> sources(1, source);
|
||||
std::vector<typename GraphType::NodeIndex> destinations(1, destination);
|
||||
|
||||
PathContainer path_container;
|
||||
PathContainer::BuildInMemoryCompactPathContainer(&path_container);
|
||||
auto path_container =
|
||||
GenericPathContainer<GraphType>::BuildInMemoryCompactPathContainer();
|
||||
|
||||
ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
graph, arc_lengths, sources, destinations, 1, &path_container);
|
||||
@@ -233,93 +273,561 @@ std::vector<typename GraphType::NodeIndex> ComputeOneToOneShortestPath(
|
||||
return path;
|
||||
}
|
||||
|
||||
// Computes shortest paths from the nodes in 'sources' to all nodes in the
|
||||
// Computes shortest paths from the nodes in `sources` to all nodes in the
|
||||
// graph.
|
||||
template <class GraphType>
|
||||
void ComputeManyToAllShortestPathsWithMultipleThreads(
|
||||
const GraphType& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
const std::vector<typename GraphType::NodeIndex>& sources, int num_threads,
|
||||
PathContainer* const path_container) {
|
||||
GenericPathContainer<GraphType>* const path_container) {
|
||||
std::vector<typename GraphType::NodeIndex> all_nodes;
|
||||
GetGraphNodesFromGraph<GraphType>(graph, &all_nodes);
|
||||
ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
graph, arc_lengths, sources, all_nodes, num_threads, path_container);
|
||||
}
|
||||
|
||||
// Computes shortest paths from the nodes in 'sources' to the nodes in
|
||||
// 'destinations'.
|
||||
template <class GraphType>
|
||||
void ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
const GraphType& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
const std::vector<typename GraphType::NodeIndex>& sources,
|
||||
const std::vector<typename GraphType::NodeIndex>& destinations,
|
||||
int num_threads, PathContainer* const path_container) {
|
||||
(void)graph;
|
||||
(void)arc_lengths;
|
||||
(void)sources;
|
||||
(void)destinations;
|
||||
(void)num_threads;
|
||||
(void)path_container;
|
||||
|
||||
LOG(DFATAL) << "Graph type not supported";
|
||||
}
|
||||
|
||||
// Specialization for supported graph classes.
|
||||
|
||||
using ::util::ListGraph;
|
||||
template <>
|
||||
void ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
const ListGraph<>& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
const std::vector<ListGraph<>::NodeIndex>& sources,
|
||||
const std::vector<ListGraph<>::NodeIndex>& destinations, int num_threads,
|
||||
PathContainer* path_container);
|
||||
|
||||
using ::util::StaticGraph;
|
||||
template <>
|
||||
void ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
const StaticGraph<>& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
const std::vector<StaticGraph<>::NodeIndex>& sources,
|
||||
const std::vector<StaticGraph<>::NodeIndex>& destinations, int num_threads,
|
||||
PathContainer* path_container);
|
||||
|
||||
using ::util::ReverseArcListGraph;
|
||||
template <>
|
||||
void ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
const ReverseArcListGraph<>& graph,
|
||||
const std::vector<PathDistance>& arc_lengths,
|
||||
const std::vector<ReverseArcListGraph<>::NodeIndex>& sources,
|
||||
const std::vector<ReverseArcListGraph<>::NodeIndex>& destinations,
|
||||
int num_threads, PathContainer* path_container);
|
||||
|
||||
using ::util::ReverseArcStaticGraph;
|
||||
template <>
|
||||
void ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
const ReverseArcStaticGraph<>& graph,
|
||||
const std::vector<PathDistance>& arc_lengths,
|
||||
const std::vector<ReverseArcStaticGraph<>::NodeIndex>& sources,
|
||||
const std::vector<ReverseArcStaticGraph<>::NodeIndex>& destinations,
|
||||
int num_threads, PathContainer* path_container);
|
||||
|
||||
using ::util::ReverseArcMixedGraph;
|
||||
template <>
|
||||
void ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
const ReverseArcMixedGraph<>& graph,
|
||||
const std::vector<PathDistance>& arc_lengths,
|
||||
const std::vector<ReverseArcMixedGraph<>::NodeIndex>& sources,
|
||||
const std::vector<ReverseArcMixedGraph<>::NodeIndex>& destinations,
|
||||
int num_threads, PathContainer* path_container);
|
||||
|
||||
// Computes shortest paths between all nodes of the graph.
|
||||
template <class GraphType>
|
||||
// TODO(b/385094969): Remove second template parameter when all clients are
|
||||
// migrated.
|
||||
template <class GraphType, class PathContainerGraphType>
|
||||
void ComputeAllToAllShortestPathsWithMultipleThreads(
|
||||
const GraphType& graph, const std::vector<PathDistance>& arc_lengths,
|
||||
int num_threads, PathContainer* const path_container) {
|
||||
int num_threads,
|
||||
GenericPathContainer<PathContainerGraphType>* const path_container) {
|
||||
std::vector<typename GraphType::NodeIndex> all_nodes;
|
||||
GetGraphNodesFromGraph<GraphType>(graph, &all_nodes);
|
||||
ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
graph, arc_lengths, all_nodes, all_nodes, num_threads, path_container);
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Implementation.
|
||||
// =============================================================================
|
||||
|
||||
namespace internal {
|
||||
|
||||
// Base path container implementation class. Defines virtual functions used to
|
||||
// fill the container (in particular from the shortest path computation
|
||||
// function).
|
||||
template <class NodeIndex, NodeIndex kNilNode>
|
||||
class PathContainerImpl {
|
||||
public:
|
||||
PathContainerImpl() = default;
|
||||
virtual ~PathContainerImpl() = default;
|
||||
|
||||
// Initializes the container on source and destination node vectors
|
||||
// (`num_nodes` is the total number of nodes in the graph containing source
|
||||
// and destination nodes).
|
||||
// Called before adding any paths to the container.
|
||||
virtual void Initialize(const std::vector<NodeIndex>& sources,
|
||||
const std::vector<NodeIndex>& destinations,
|
||||
NodeIndex num_nodes) = 0;
|
||||
|
||||
// Called when no more path will be added to the container.
|
||||
virtual void Finalize() {}
|
||||
|
||||
// Returns the distance between node `from` and node `to` following the path
|
||||
// out of `from` and into `to`. Note that if `from` == `to`, the distance is
|
||||
// not necessarily 0 if the path out of `to` and back into `to` has a distance
|
||||
// greater than 0. If you do require the distance to be 0 in this case, add to
|
||||
// the graph an arc from `to` to itself with a length of 0.
|
||||
// If nodes are not connected, returns `kDisconnectedPathDistance`.
|
||||
virtual PathDistance GetDistance(NodeIndex from, NodeIndex to) const = 0;
|
||||
|
||||
// Returns the penultimate node on the path out of node `from` into node `to`
|
||||
// (the direct predecessor of node `to` on the path).
|
||||
// If `from` == `to`, the penultimate node is `to` only if the shortest path
|
||||
// from `to` to itself is composed of the arc (`to, `to`), which might not be
|
||||
// the case if either this arc doesn't exist or if the length of this arc is
|
||||
// greater than the distance of an alternate path.
|
||||
// If nodes are not connected, returns `kNilNode`.
|
||||
virtual NodeIndex GetPenultimateNodeInPath(NodeIndex from,
|
||||
NodeIndex to) const = 0;
|
||||
|
||||
// Returns path nodes from node `from` to node `to` in a ordered vector.
|
||||
virtual void GetPath(NodeIndex from, NodeIndex to,
|
||||
std::vector<NodeIndex>* path) const = 0;
|
||||
|
||||
// Adds a path tree rooted at node `from`, and to a set of implicit
|
||||
// destinations:
|
||||
// - `predecessor_in_path_tree[node]` is the predecessor of node `node` in the
|
||||
// path from `from` to `node`, or `kNilNode` if there is no
|
||||
// predecessor (i.e. if `node` is not in the path tree);
|
||||
// - `distance_to_destination[i]` is the distance from `from` to the i-th
|
||||
// destination (see `Initialize()`).
|
||||
virtual void StoreSingleSourcePaths(
|
||||
NodeIndex from, const std::vector<NodeIndex>& predecessor_in_path_tree,
|
||||
const std::vector<PathDistance>& distance_to_destination) = 0;
|
||||
};
|
||||
|
||||
// Class designed to store the tree of paths from a root node to a set of nodes
|
||||
// in a very compact way (over performance).
|
||||
// Memory consumption is in `O(n)` (`n` being the size of the tree) where node
|
||||
// indices are "very" non-contiguous (extremely sparse node indices). It keeps
|
||||
// node-sorted arrays of node and parent pairs, which can be accessed in
|
||||
// `O(log(n))` with a binary search.
|
||||
// The creation of the tree is done in `O(n*log(n))` time.
|
||||
// Note that this class uses temporary memory for each call to `Initialize`
|
||||
// which is only an issue for massive parallel calls; in practice for shortest
|
||||
// paths computation, the number of threads calling `Initialize` is very small
|
||||
// compared to the total number of trees created.
|
||||
template <class NodeIndex, NodeIndex kNilNode>
|
||||
class PathTree {
|
||||
public:
|
||||
PathTree() : nodes_(), parents_() {}
|
||||
|
||||
void Initialize(absl::Span<const NodeIndex> paths,
|
||||
absl::Span<const NodeIndex> destinations);
|
||||
|
||||
// Returns the parent (predecessor) of `node` in the tree in
|
||||
// `O(log(path_tree_size))`, where `path_tree_size` is the size of `nodes_`.
|
||||
NodeIndex GetParent(NodeIndex node) const;
|
||||
|
||||
// Returns the path from node `from` to node `to` in the tree in
|
||||
// `O(log(path_tree_size) + path_size)`, where `path_tree_size` is the size of
|
||||
// `nodes_` and `path_size` is the size of the resulting path.
|
||||
void GetPath(NodeIndex from, NodeIndex to,
|
||||
std::vector<NodeIndex>* path) const;
|
||||
|
||||
private:
|
||||
std::vector<NodeIndex> nodes_;
|
||||
std::vector<int> parents_;
|
||||
};
|
||||
|
||||
// Initializes the tree from a non-sparse representation of the path tree
// represented by `paths`. The tree is reduced to the subtree in which nodes in
// `destinations` are the leafs.
template <class NodeIndex, NodeIndex kNilNode>
void PathTree<NodeIndex, kNilNode>::Initialize(
    absl::Span<const NodeIndex> paths,
    absl::Span<const NodeIndex> destinations) {
  std::vector<bool> node_explored(paths.size(), false);
  const int destination_size = destinations.size();
  typedef std::pair<NodeIndex, NodeIndex> NodeParent;
  std::vector<NodeParent> tree;
  // Walk up from each destination towards the root collecting (node, parent)
  // pairs. `node_explored` both terminates each walk (including at the root,
  // whose parent is kNilNode) and de-duplicates shared path prefixes.
  for (int i = 0; i < destination_size; ++i) {
    NodeIndex destination = destinations[i];
    while (!node_explored[destination]) {
      node_explored[destination] = true;
      tree.push_back(std::make_pair(destination, paths[destination]));
      if (paths[destination] != kNilNode) {
        destination = paths[destination];
      }
    }
  }
  // Sort by node id so GetParent()/GetPath() can binary-search `nodes_`.
  std::sort(tree.begin(), tree.end());
  const int num_nodes = tree.size();
  {
    // Temporary map from node id to its position in the sorted `tree`, used
    // to translate each parent's node id into an index into `nodes_`.
    absl::flat_hash_map<NodeIndex, int> node_indices;

    for (int i = 0; i < num_nodes; ++i) {
      node_indices[tree[i].first] = i;
    }
    parents_.resize(num_nodes, -1);
    // Roots (parent not in the tree) get kNilNode as parent index.
    for (int i = 0; i < num_nodes; ++i) {
      parents_[i] =
          ::gtl::FindWithDefault(node_indices, tree[i].second, kNilNode);
    }
  }
  nodes_.resize(num_nodes, kNilNode);
  for (int i = 0; i < num_nodes; ++i) {
    nodes_[i] = tree[i].first;
  }
}
|
||||
|
||||
template <class NodeIndex, NodeIndex kNilNode>
|
||||
NodeIndex PathTree<NodeIndex, kNilNode>::GetParent(NodeIndex node) const {
|
||||
const auto node_position = absl::c_lower_bound(nodes_, node);
|
||||
if (node_position != nodes_.end() && *node_position == node) {
|
||||
const int parent = parents_[node_position - nodes_.begin()];
|
||||
if (parent != kNilNode) {
|
||||
return nodes_[parent];
|
||||
}
|
||||
}
|
||||
return kNilNode;
|
||||
}
|
||||
|
||||
template <class NodeIndex, NodeIndex kNilNode>
|
||||
void PathTree<NodeIndex, kNilNode>::GetPath(
|
||||
NodeIndex from, NodeIndex to, std::vector<NodeIndex>* path) const {
|
||||
DCHECK(path != nullptr);
|
||||
path->clear();
|
||||
const auto to_position = absl::c_lower_bound(nodes_, to);
|
||||
if (to_position != nodes_.end() && *to_position == to) {
|
||||
int current_index = to_position - nodes_.begin();
|
||||
NodeIndex current_node = to;
|
||||
while (current_node != from) {
|
||||
path->push_back(current_node);
|
||||
current_index = parents_[current_index];
|
||||
// `from` and `to` are not connected.
|
||||
if (current_index == kNilNode) {
|
||||
path->clear();
|
||||
return;
|
||||
}
|
||||
current_node = nodes_[current_index];
|
||||
}
|
||||
path->push_back(current_node);
|
||||
std::reverse(path->begin(), path->end());
|
||||
}
|
||||
}
|
||||
|
||||
// Path container which only stores distances between path nodes.
template <class NodeIndex, NodeIndex kNilNode>
class DistanceContainer : public PathContainerImpl<NodeIndex, kNilNode> {
 public:
  DistanceContainer() : reverse_sources_(), distances_() {}

  // This type is neither copyable nor movable.
  DistanceContainer(const DistanceContainer&) = delete;
  DistanceContainer& operator=(const DistanceContainer&) = delete;
  ~DistanceContainer() override = default;
  // Builds node -> dense-index mappings for `sources` and `destinations`, and
  // allocates one (initially empty) distance row per source.
  void Initialize(const std::vector<NodeIndex>& sources,
                  const std::vector<NodeIndex>& destinations,
                  NodeIndex num_nodes) override {
    ComputeReverse(sources, num_nodes, &reverse_sources_);
    ComputeReverse(destinations, num_nodes, &reverse_destinations_);
    distances_.clear();
    distances_.resize(sources.size());
  }
  // Returns the stored distance; `from` must be one of the sources and `to`
  // one of the destinations passed to Initialize().
  PathDistance GetDistance(NodeIndex from, NodeIndex to) const override {
    return distances_[reverse_sources_[from]][reverse_destinations_[to]];
  }
  // This container stores no paths; calling either path accessor is a
  // programming error and aborts.
  NodeIndex GetPenultimateNodeInPath(NodeIndex, NodeIndex) const override {
    LOG(FATAL) << "Path not stored.";
    return kNilNode;
  }
  void GetPath(NodeIndex, NodeIndex, std::vector<NodeIndex>*) const override {
    LOG(FATAL) << "Path not stored.";
  }
  void StoreSingleSourcePaths(
      NodeIndex from,
      // `DistanceContainer` only stores distances and not predecessors.
      const std::vector<NodeIndex>&,
      const std::vector<PathDistance>& distance_to_destination) override {
    distances_[reverse_sources_[from]] = distance_to_destination;
  }

 protected:
  // reverse_sources_[node] is the index of `node` in the `sources` vector
  // passed to Initialize(), or -1 if `node` is not a source; likewise for
  // `reverse_destinations_` with destinations.
  std::vector<int> reverse_sources_;
  std::vector<int> reverse_destinations_;

 private:
  // Fills `reverse_nodes` so that (*reverse_nodes)[nodes[i]] == i; entries
  // for nodes absent from `nodes` are left at -1.
  static void ComputeReverse(absl::Span<const NodeIndex> nodes,
                             NodeIndex num_nodes,
                             std::vector<int>* reverse_nodes) {
    CHECK(reverse_nodes != nullptr);
    const int kUnassignedIndex = -1;
    reverse_nodes->clear();
    reverse_nodes->resize(num_nodes, kUnassignedIndex);
    for (int i = 0; i < nodes.size(); ++i) {
      reverse_nodes->at(nodes[i]) = i;
    }
  }

  // distances_[reverse_sources_[s]][reverse_destinations_[d]] is the shortest
  // distance from source s to destination d.
  std::vector<std::vector<PathDistance>> distances_;
};
|
||||
|
||||
// Path container which stores explicit paths and distances between path nodes.
template <class NodeIndex, NodeIndex kNilNode>
class InMemoryCompactPathContainer
    : public DistanceContainer<NodeIndex, kNilNode> {
 public:
  using Base = DistanceContainer<NodeIndex, kNilNode>;

  InMemoryCompactPathContainer() : trees_(), destinations_() {}

  // This type is neither copyable nor movable.
  InMemoryCompactPathContainer(const InMemoryCompactPathContainer&) = delete;
  InMemoryCompactPathContainer& operator=(const InMemoryCompactPathContainer&) =
      delete;
  ~InMemoryCompactPathContainer() override = default;
  // In addition to the base initialization, keeps a copy of `destinations`
  // (needed later to compact each path tree) and allocates one empty tree
  // per source.
  void Initialize(const std::vector<NodeIndex>& sources,
                  const std::vector<NodeIndex>& destinations,
                  NodeIndex num_nodes) override {
    Base::Initialize(sources, destinations, num_nodes);
    destinations_ = destinations;
    trees_.clear();
    trees_.resize(sources.size());
  }
  // Returns the node right before `to` on the path from `from`, i.e. the
  // parent of `to` in the path tree rooted at `from`.
  NodeIndex GetPenultimateNodeInPath(NodeIndex from,
                                     NodeIndex to) const override {
    return trees_[Base::reverse_sources_[from]].GetParent(to);
  }
  void GetPath(NodeIndex from, NodeIndex to,
               std::vector<NodeIndex>* path) const override {
    DCHECK(path != nullptr);
    trees_[Base::reverse_sources_[from]].GetPath(from, to, path);
  }
  // Stores both the distances (via the base class) and the path tree rooted
  // at `from`, compacted to the destinations of interest.
  void StoreSingleSourcePaths(
      NodeIndex from, const std::vector<NodeIndex>& predecessor_in_path_tree,
      const std::vector<PathDistance>& distance_to_destination) override {
    Base::StoreSingleSourcePaths(from, predecessor_in_path_tree,
                                 distance_to_destination);
    trees_[Base::reverse_sources_[from]].Initialize(predecessor_in_path_tree,
                                                    destinations_);
  }

 private:
  // One compact path tree per source, indexed through Base::reverse_sources_.
  std::vector<PathTree<NodeIndex, kNilNode>> trees_;
  // Destinations passed to Initialize(), kept to compact each path tree.
  std::vector<NodeIndex> destinations_;
};
|
||||
|
||||
// Priority queue node entry in the boundary of the Dijkstra algorithm.
// NOTE: the member layout is deliberate — InsertOrUpdateEntry() below
// static_asserts that sizeof(NodeEntry) == 16; keep it packed.
template <class NodeIndex, NodeIndex kNilNode>
class NodeEntry {
 public:
  NodeEntry()
      : heap_index_(-1),
        distance_(0),
        node_(kNilNode),
        settled_(false),
        is_destination_(false) {}
  // Comparison is inverted on purpose so the priority queue pops the entry
  // with the smallest distance first.
  bool operator<(const NodeEntry& other) const {
    return distance_ > other.distance_;
  }
  // Heap bookkeeping hooks used by AdjustablePriorityQueue.
  void SetHeapIndex(int h) {
    DCHECK_GE(h, 0);
    heap_index_ = h;
  }
  int GetHeapIndex() const { return heap_index_; }
  void set_distance(PathDistance distance) { distance_ = distance; }
  PathDistance distance() const { return distance_; }
  void set_node(NodeIndex node) { node_ = node; }
  NodeIndex node() const { return node_; }
  // "Settled" marks a node popped from the queue with its final distance.
  void set_settled(bool settled) { settled_ = settled; }
  bool settled() const { return settled_; }
  // Destination marking enables an early exit once all destinations settle.
  void set_is_destination(bool is_destination) {
    is_destination_ = is_destination;
  }
  bool is_destination() const { return is_destination_; }

 private:
  int heap_index_;
  PathDistance distance_;
  NodeIndex node_;
  bool settled_;
  bool is_destination_;
};
|
||||
|
||||
// Updates an entry with the given distance if it's shorter, and then inserts it
|
||||
// in the priority queue (or updates it if it's there already), if needed.
|
||||
// Returns true if the entry was modified, false otherwise.
|
||||
template <class NodeIndex, NodeIndex kNilNode>
|
||||
bool InsertOrUpdateEntry(
|
||||
PathDistance distance, NodeEntry<NodeIndex, kNilNode>* entry,
|
||||
AdjustablePriorityQueue<NodeEntry<NodeIndex, kNilNode>>* priority_queue) {
|
||||
// If one wants to use int64_t for either priority or NodeIndex, one should
|
||||
// consider using packed ints (putting the two bools with heap_index, for
|
||||
// example) in order to stay at 16 bytes instead of 24.
|
||||
static_assert(sizeof(NodeEntry<NodeIndex, kNilNode>) == 16,
|
||||
"node_entry_class_is_not_well_packed");
|
||||
|
||||
DCHECK(priority_queue != nullptr);
|
||||
DCHECK(entry != nullptr);
|
||||
if (!priority_queue->Contains(entry)) {
|
||||
entry->set_distance(distance);
|
||||
priority_queue->Add(entry);
|
||||
return true;
|
||||
} else if (distance < entry->distance()) {
|
||||
entry->set_distance(distance);
|
||||
priority_queue->NoteChangedPriority(entry);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Computes shortest paths from node `source` to nodes in `destinations`
// using a binary heap-based Dijkstra algorithm.
// TODO(user): Investigate alternate implementation which wouldn't use
// AdjustablePriorityQueue.
// TODO(b/385094969): Remove second template parameter when all clients are
// migrated.
template <class GraphType, class PathContainerGraphType>
void ComputeOneToManyOnGraph(
    const GraphType* const graph,
    const std::vector<PathDistance>* const arc_lengths,
    typename GraphType::NodeIndex source,
    const std::vector<typename GraphType::NodeIndex>* const destinations,
    typename GenericPathContainer<PathContainerGraphType>::Impl* const paths) {
  using NodeIndex = typename GraphType::NodeIndex;
  using ArcIndex = typename GraphType::ArcIndex;
  using NodeEntryT = NodeEntry<NodeIndex, GraphType::kNilNode>;
  CHECK(graph != nullptr);
  CHECK(arc_lengths != nullptr);
  CHECK(destinations != nullptr);
  CHECK(paths != nullptr);
  const int num_nodes = graph->num_nodes();
  // Predecessor of each node on its shortest path from `source`.
  std::vector<NodeIndex> predecessor(num_nodes, GraphType::kNilNode);
  AdjustablePriorityQueue<NodeEntryT> priority_queue;
  std::vector<NodeEntryT> entries(num_nodes);
  for (const NodeIndex node : graph->AllNodes()) {
    entries[node].set_node(node);
  }
  // Marking destination node. This is an optimization stopping the search
  // when all destinations have been reached.
  for (int i = 0; i < destinations->size(); ++i) {
    entries[(*destinations)[i]].set_is_destination(true);
  }
  // In this implementation the distance of a node to itself isn't necessarily
  // 0.
  // So we push successors of source in the queue instead of the source
  // directly which will avoid marking the source.
  for (const ArcIndex arc : graph->OutgoingArcs(source)) {
    const NodeIndex next = graph->Head(arc);
    if (InsertOrUpdateEntry(arc_lengths->at(arc), &entries[next],
                            &priority_queue)) {
      predecessor[next] = source;
    }
  }
  int destinations_remaining = destinations->size();
  // Main Dijkstra loop: settle the closest boundary node, then relax its
  // outgoing arcs.
  while (!priority_queue.IsEmpty()) {
    NodeEntryT* current = priority_queue.Top();
    const NodeIndex current_node = current->node();
    priority_queue.Pop();
    current->set_settled(true);
    if (current->is_destination()) {
      destinations_remaining--;
      if (destinations_remaining == 0) {
        break;
      }
    }
    const PathDistance current_distance = current->distance();
    for (const ArcIndex arc : graph->OutgoingArcs(current_node)) {
      const NodeIndex next = graph->Head(arc);
      NodeEntryT* const entry = &entries[next];
      if (!entry->settled()) {
        DCHECK_GE(current_distance, 0);
        const PathDistance arc_length = arc_lengths->at(arc);
        // Guards against overflowing into the disconnected sentinel value.
        DCHECK_LE(current_distance, kDisconnectedPathDistance - arc_length);
        if (InsertOrUpdateEntry(current_distance + arc_length, entry,
                                &priority_queue)) {
          predecessor[next] = current_node;
        }
      }
    }
  }
  // Destinations never settled stay at kDisconnectedPathDistance.
  const int destinations_size = destinations->size();
  std::vector<PathDistance> distances(destinations_size,
                                      kDisconnectedPathDistance);
  for (int i = 0; i < destinations_size; ++i) {
    NodeIndex node = destinations->at(i);
    if (entries[node].settled()) {
      distances[i] = entries[node].distance();
    }
  }
  paths->StoreSingleSourcePaths(source, predecessor, distances);
}
|
||||
|
||||
} // namespace internal
|
||||
|
||||
template <class GraphType>
GenericPathContainer<GraphType>::GenericPathContainer() = default;

template <class GraphType>
GenericPathContainer<GraphType>::~GenericPathContainer() = default;

// The accessors below forward to the implementation object; a container must
// have been built through one of the Build*Container() factories (so that
// `container_` is non-null) before any of them is called.
template <class GraphType>
PathDistance GenericPathContainer<GraphType>::GetDistance(NodeIndex from,
                                                          NodeIndex to) const {
  DCHECK(container_ != nullptr);
  return container_->GetDistance(from, to);
}

template <class GraphType>
typename GenericPathContainer<GraphType>::NodeIndex
GenericPathContainer<GraphType>::GetPenultimateNodeInPath(NodeIndex from,
                                                          NodeIndex to) const {
  DCHECK(container_ != nullptr);
  return container_->GetPenultimateNodeInPath(from, to);
}

template <class GraphType>
void GenericPathContainer<GraphType>::GetPath(
    NodeIndex from, NodeIndex to, std::vector<NodeIndex>* path) const {
  DCHECK(container_ != nullptr);
  DCHECK(path != nullptr);
  container_->GetPath(from, to, path);
}
|
||||
|
||||
// Out-parameter factory: resets `path_container` to an implementation that
// stores distances only (no paths).
template <class GraphType>
void GenericPathContainer<GraphType>::BuildPathDistanceContainer(
    GenericPathContainer* const path_container) {
  CHECK(path_container != nullptr);
  path_container->container_ = std::make_unique<
      internal::DistanceContainer<NodeIndex, GraphType::kNilNode>>();
}

// Out-parameter factory: resets `path_container` to an implementation that
// stores full paths in addition to distances.
template <class GraphType>
void GenericPathContainer<GraphType>::BuildInMemoryCompactPathContainer(
    GenericPathContainer* const path_container) {
  CHECK(path_container != nullptr);
  path_container->container_ = std::make_unique<
      internal::InMemoryCompactPathContainer<NodeIndex, GraphType::kNilNode>>();
}

// Value-returning factory: container that stores distances only.
template <class GraphType>
GenericPathContainer<GraphType>
GenericPathContainer<GraphType>::BuildPathDistanceContainer() {
  return GenericPathContainer(
      std::make_unique<
          internal::DistanceContainer<NodeIndex, GraphType::kNilNode>>());
}

// Value-returning factory: container that stores full paths and distances.
template <class GraphType>
GenericPathContainer<GraphType>
GenericPathContainer<GraphType>::BuildInMemoryCompactPathContainer() {
  return GenericPathContainer(
      std::make_unique<internal::InMemoryCompactPathContainer<
          NodeIndex, GraphType::kNilNode>>());
}
|
||||
|
||||
// TODO(b/385094969): Remove second template parameter when all clients are
// migrated.
template <class GraphType, class PathContainerGraphType>
void ComputeManyToManyShortestPathsWithMultipleThreads(
    const GraphType& graph, const std::vector<PathDistance>& arc_lengths,
    const std::vector<typename GraphType::NodeIndex>& sources,
    const std::vector<typename GraphType::NodeIndex>& destinations,
    int num_threads,
    GenericPathContainer<PathContainerGraphType>* const paths) {
  // While the two graph template parameters coexist, they must agree on the
  // node type and the nil-node sentinel.
  static_assert(std::is_same_v<typename GraphType::NodeIndex,
                               typename PathContainerGraphType::NodeIndex>,
                "use an explicit `GenericPathContainer<T>` instead of using "
                "`PathContainer`");
  static_assert(GraphType::kNilNode == PathContainerGraphType::kNilNode,
                "use an explicit `GenericPathContainer<T>` instead of using "
                "`PathContainer`");
  if (graph.num_nodes() > 0) {
    CHECK_EQ(graph.num_arcs(), arc_lengths.size())
        << "Number of arcs in graph must match arc length vector size";
    // Removing duplicate sources to allow mutex-free implementation (and it's
    // more efficient); same with destinations for efficiency reasons.
    std::vector<typename GraphType::NodeIndex> unique_sources = sources;
    ::gtl::STLSortAndRemoveDuplicates(&unique_sources);
    std::vector<typename GraphType::NodeIndex> unique_destinations =
        destinations;
    ::gtl::STLSortAndRemoveDuplicates(&unique_destinations);
    WallTimer timer;
    timer.Start();
    auto* const container = paths->GetImplementation();
    container->Initialize(unique_sources, unique_destinations,
                          graph.num_nodes());
    {
      std::unique_ptr<ThreadPool> pool(new ThreadPool(num_threads));
      pool->StartWorkers();
      // One single-source Dijkstra per unique source; each task writes to a
      // distinct per-source slot of the container, which is why duplicates
      // had to be removed above.
      for (int i = 0; i < unique_sources.size(); ++i) {
        pool->Schedule(absl::bind_front(
            &internal::ComputeOneToManyOnGraph<GraphType,
                                               PathContainerGraphType>,
            &graph, &arc_lengths, unique_sources[i], &unique_destinations,
            container));
      }
      // NOTE(review): this relies on ThreadPool's destructor joining all
      // workers at the end of this scope, before Finalize() — confirm that
      // contract in the ThreadPool implementation.
    }
    container->Finalize();
    VLOG(2) << "Elapsed time to compute shortest paths: " << timer.Get() << "s";
  }
}
|
||||
|
||||
} // namespace operations_research
|
||||
|
||||
#endif // OR_TOOLS_GRAPH_SHORTEST_PATHS_H_
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include <climits>
|
||||
#include <cstdint>
|
||||
#include <memory>
|
||||
#include <numeric>
|
||||
@@ -18,21 +19,20 @@
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/memory/memory.h"
|
||||
#include "absl/random/distributions.h"
|
||||
#include "benchmark/benchmark.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "isp/fiber/auto_design/utils/parallelizer.h"
|
||||
#include "ortools/base/gmock.h"
|
||||
#include "ortools/base/logging.h"
|
||||
#include "ortools/base/threadlocal.h"
|
||||
#include "ortools/graph/bounded_dijkstra.h"
|
||||
#include "ortools/graph/graph.h"
|
||||
#include "ortools/graph/shortest_paths.h"
|
||||
#include "ortools/graph/test_util.h"
|
||||
|
||||
namespace operations_research {
|
||||
namespace {
|
||||
|
||||
using Graph = StaticGraph<>;
|
||||
using Graph = ::util::StaticGraph<>;
|
||||
|
||||
enum Implementation {
|
||||
BOUNDED_DIJKSTRA = 1,
|
||||
@@ -106,8 +106,8 @@ std::vector<std::vector<uint32_t>> ManyToManyShortestPaths<SHORTEST_PATHS>(
|
||||
const Graph& graph, const std::vector<uint32_t>& arc_costs,
|
||||
const std::vector<int>& srcs, const std::vector<int>& dsts,
|
||||
int num_threads) {
|
||||
PathContainer path_container;
|
||||
PathContainer::BuildPathDistanceContainer(&path_container);
|
||||
auto path_container =
|
||||
GenericPathContainer<Graph>::BuildPathDistanceContainer();
|
||||
ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
graph, arc_costs, srcs, dsts, num_threads, &path_container);
|
||||
std::vector<std::vector<uint32_t>> distances(
|
||||
|
||||
@@ -22,16 +22,17 @@
|
||||
#include "absl/random/random.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "ortools/graph/ebert_graph.h"
|
||||
#include "ortools/graph/graph.h"
|
||||
#include "ortools/graph/strongly_connected_components.h"
|
||||
#include "ortools/util/zvector.h"
|
||||
|
||||
namespace operations_research {
|
||||
|
||||
void CheckPathDataPair(const PathContainer& container,
|
||||
const PathContainer& distance_container,
|
||||
PathDistance expected_distance,
|
||||
NodeIndex expected_predecessor, NodeIndex tail,
|
||||
NodeIndex head) {
|
||||
template <class GraphType>
|
||||
void CheckPathDataPair(
|
||||
const GenericPathContainer<GraphType>& container,
|
||||
const GenericPathContainer<GraphType>& distance_container,
|
||||
PathDistance expected_distance, NodeIndex expected_predecessor,
|
||||
NodeIndex tail, NodeIndex head) {
|
||||
EXPECT_EQ(expected_distance, container.GetDistance(tail, head));
|
||||
EXPECT_EQ(expected_distance, distance_container.GetDistance(tail, head));
|
||||
EXPECT_EQ(expected_predecessor,
|
||||
@@ -59,8 +60,9 @@ void CheckPathDataPair(const PathContainer& container,
|
||||
}
|
||||
|
||||
template <class GraphType>
|
||||
void CheckPathDataRow(const GraphType& graph, const PathContainer& container,
|
||||
const PathContainer& distance_container,
|
||||
void CheckPathDataRow(const GraphType& graph,
|
||||
const GenericPathContainer<GraphType>& container,
|
||||
const GenericPathContainer<GraphType>& distance_container,
|
||||
const NodeIndex expected_paths[],
|
||||
const PathDistance expected_distances[], NodeIndex tail) {
|
||||
int index = tail * graph.num_nodes();
|
||||
@@ -74,12 +76,11 @@ void CheckPathDataRow(const GraphType& graph, const PathContainer& container,
|
||||
}
|
||||
|
||||
template <class GraphType>
|
||||
void CheckPathDataRowFromGraph(const GraphType& graph,
|
||||
const PathContainer& container,
|
||||
const PathContainer& distance_container,
|
||||
const NodeIndex expected_paths[],
|
||||
const PathDistance expected_distances[],
|
||||
NodeIndex tail) {
|
||||
void CheckPathDataRowFromGraph(
|
||||
const GraphType& graph, const GenericPathContainer<GraphType>& container,
|
||||
const GenericPathContainer<GraphType>& distance_container,
|
||||
const NodeIndex expected_paths[], const PathDistance expected_distances[],
|
||||
NodeIndex tail) {
|
||||
int index = tail * graph.num_nodes();
|
||||
for (typename GraphType::NodeIndex head : graph.AllNodes()) {
|
||||
CheckPathDataPair(container, distance_container, expected_distances[index],
|
||||
@@ -89,8 +90,9 @@ void CheckPathDataRowFromGraph(const GraphType& graph,
|
||||
}
|
||||
|
||||
template <class GraphType>
|
||||
void CheckPathData(const GraphType& graph, const PathContainer& container,
|
||||
const PathContainer& distance_container,
|
||||
void CheckPathData(const GraphType& graph,
|
||||
const GenericPathContainer<GraphType>& container,
|
||||
const GenericPathContainer<GraphType>& distance_container,
|
||||
const NodeIndex expected_paths[],
|
||||
const PathDistance expected_distances[]) {
|
||||
for (typename GraphType::NodeIterator iterator(graph); iterator.Ok();
|
||||
@@ -102,22 +104,21 @@ void CheckPathData(const GraphType& graph, const PathContainer& container,
|
||||
}
|
||||
|
||||
template <class GraphType>
|
||||
void CheckPathDataFromGraph(const GraphType& graph,
|
||||
const PathContainer& container,
|
||||
const PathContainer& distance_container,
|
||||
const NodeIndex expected_paths[],
|
||||
const PathDistance expected_distances[]) {
|
||||
void CheckPathDataFromGraph(
|
||||
const GraphType& graph, const GenericPathContainer<GraphType>& container,
|
||||
const GenericPathContainer<GraphType>& distance_container,
|
||||
const NodeIndex expected_paths[], const PathDistance expected_distances[]) {
|
||||
for (typename GraphType::NodeIndex tail : graph.AllNodes()) {
|
||||
CheckPathDataRowFromGraph(graph, container, distance_container,
|
||||
expected_paths, expected_distances, tail);
|
||||
}
|
||||
}
|
||||
|
||||
#define BUILD_CONTAINERS() \
|
||||
PathContainer container; \
|
||||
PathContainer::BuildInMemoryCompactPathContainer(&container); \
|
||||
PathContainer distance_container; \
|
||||
PathContainer::BuildPathDistanceContainer(&distance_container)
|
||||
#define BUILD_CONTAINERS() \
|
||||
auto container = \
|
||||
GenericPathContainer<GraphType>::BuildInMemoryCompactPathContainer(); \
|
||||
auto distance_container = \
|
||||
GenericPathContainer<GraphType>::BuildPathDistanceContainer()
|
||||
|
||||
template <class GraphType>
|
||||
void TestShortestPathsFromGraph(const GraphType& graph,
|
||||
@@ -254,8 +255,9 @@ class GraphShortestPathsDeathTest : public testing::Test {};
|
||||
template <typename GraphType>
|
||||
class GraphShortestPathsTest : public testing::Test {};
|
||||
|
||||
typedef testing::Types<ListGraph<>, StaticGraph<>, ReverseArcListGraph<>,
|
||||
ReverseArcStaticGraph<>, ReverseArcMixedGraph<> >
|
||||
typedef testing::Types<
|
||||
::util::ListGraph<>, ::util::StaticGraph<>, ::util::ReverseArcListGraph<>,
|
||||
::util::ReverseArcStaticGraph<>, ::util::ReverseArcMixedGraph<>>
|
||||
GraphTypesForShortestPathsTesting;
|
||||
|
||||
TYPED_TEST_SUITE(GraphShortestPathsDeathTest,
|
||||
@@ -274,7 +276,7 @@ TYPED_TEST(GraphShortestPathsDeathTest, ShortestPathsEmptyGraph) {
|
||||
|
||||
// Test on a disconnected graph (set of nodes pointing to themselves).
|
||||
TYPED_TEST(GraphShortestPathsDeathTest, ShortestPathsAllDisconnected) {
|
||||
const typename TypeParam::NodeIndex kUnconnected = -1;
|
||||
const typename TypeParam::NodeIndex kUnconnected = TypeParam::kNilNode;
|
||||
const int kNodes = 3;
|
||||
const typename TypeParam::NodeIndex kArcs[][2] = {{0, 0}, {1, 1}, {2, 2}};
|
||||
const PathDistance kArcLengths[] = {0, 0, 0};
|
||||
@@ -349,8 +351,8 @@ TYPED_TEST(GraphShortestPathsDeathTest, MismatchedData) {
|
||||
graph.AddArc(0, 1);
|
||||
graph.AddArc(1, 0);
|
||||
std::vector<PathDistance> lengths = {0};
|
||||
PathContainer container;
|
||||
PathContainer::BuildInMemoryCompactPathContainer(&container);
|
||||
auto container =
|
||||
GenericPathContainer<TypeParam>::BuildInMemoryCompactPathContainer();
|
||||
EXPECT_DEATH(ComputeAllToAllShortestPathsWithMultipleThreads(graph, lengths,
|
||||
1, &container),
|
||||
"Number of arcs in graph must match arc length vector size");
|
||||
@@ -358,10 +360,13 @@ TYPED_TEST(GraphShortestPathsDeathTest, MismatchedData) {
|
||||
|
||||
// Test the case where some sources are not strongly connected to themselves.
|
||||
TYPED_TEST(GraphShortestPathsDeathTest, SourceNotConnectedToItself) {
|
||||
const typename TypeParam::NodeIndex kUnconnected = TypeParam::kNilNode;
|
||||
const int kNodes = 3;
|
||||
const typename TypeParam::NodeIndex kArcs[][2] = {{1, 2}, {2, 2}};
|
||||
const PathDistance kArcLengths[] = {1, 0};
|
||||
const int kExpectedPaths[] = {-1, -1, -1, -1, -1, 1, -1, -1, 2};
|
||||
const int kExpectedPaths[] = {kUnconnected, kUnconnected, kUnconnected,
|
||||
kUnconnected, kUnconnected, 1,
|
||||
kUnconnected, kUnconnected, 2};
|
||||
const PathDistance kExpectedDistances[] = {kDisconnectedPathDistance,
|
||||
kDisconnectedPathDistance,
|
||||
kDisconnectedPathDistance,
|
||||
@@ -414,10 +419,10 @@ TYPED_TEST(GraphShortestPathsTest, DISABLED_LargeRandomShortestPaths) {
|
||||
lengths.push_back(length);
|
||||
}
|
||||
}
|
||||
typename TypeParam::NodeIndex prev_index = -1;
|
||||
typename TypeParam::NodeIndex first_index = -1;
|
||||
typename TypeParam::NodeIndex prev_index = TypeParam::kNilNode;
|
||||
typename TypeParam::NodeIndex first_index = TypeParam::kNilNode;
|
||||
for (const typename TypeParam::NodeIndex node_index : graph.AllNodes()) {
|
||||
if (prev_index != -1) {
|
||||
if (prev_index != TypeParam::kNilNode) {
|
||||
graph.AddArc(prev_index, node_index);
|
||||
lengths.push_back(kConnectionArcLength);
|
||||
} else {
|
||||
@@ -430,7 +435,7 @@ TYPED_TEST(GraphShortestPathsTest, DISABLED_LargeRandomShortestPaths) {
|
||||
std::vector<typename TypeParam::ArcIndex> permutation;
|
||||
graph.Build(&permutation);
|
||||
util::Permute(permutation, &lengths);
|
||||
std::vector<std::vector<typename TypeParam::NodeIndex> > components;
|
||||
std::vector<std::vector<typename TypeParam::NodeIndex>> components;
|
||||
::FindStronglyConnectedComponents(graph.num_nodes(), graph, &components);
|
||||
CHECK_EQ(1, components.size());
|
||||
CHECK_EQ(kSize, components[0].size());
|
||||
@@ -441,18 +446,18 @@ TYPED_TEST(GraphShortestPathsTest, DISABLED_LargeRandomShortestPaths) {
|
||||
sources[i] = absl::Uniform(randomizer, 0, graph.num_nodes());
|
||||
}
|
||||
const int kThreads = 10;
|
||||
PathContainer container;
|
||||
PathContainer::BuildInMemoryCompactPathContainer(&container);
|
||||
auto container =
|
||||
GenericPathContainer<TypeParam>::BuildInMemoryCompactPathContainer();
|
||||
ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
graph, lengths, sources, sources, kThreads, &container);
|
||||
PathContainer distance_container;
|
||||
PathContainer::BuildPathDistanceContainer(&distance_container);
|
||||
auto distance_container =
|
||||
GenericPathContainer<TypeParam>::BuildPathDistanceContainer();
|
||||
ComputeManyToManyShortestPathsWithMultipleThreads(
|
||||
graph, lengths, sources, sources, kThreads, &distance_container);
|
||||
for (int tail = 0; tail < sources.size(); ++tail) {
|
||||
for (int head = 0; head < sources.size(); ++head) {
|
||||
EXPECT_NE(
|
||||
-1, container.GetPenultimateNodeInPath(sources[tail], sources[head]));
|
||||
EXPECT_NE(TypeParam::kNilNode, container.GetPenultimateNodeInPath(
|
||||
sources[tail], sources[head]));
|
||||
EXPECT_NE(kDisconnectedPathDistance,
|
||||
container.GetDistance(sources[tail], sources[head]));
|
||||
EXPECT_NE(kDisconnectedPathDistance,
|
||||
|
||||
Reference in New Issue
Block a user