diff --git a/ortools/glop/parameters.proto b/ortools/glop/parameters.proto index 2344ec5f27..b72999aba4 100644 --- a/ortools/glop/parameters.proto +++ b/ortools/glop/parameters.proto @@ -25,7 +25,7 @@ option java_package = "com.google.ortools.glop"; option java_multiple_files = true; option csharp_namespace = "Google.OrTools.Glop"; -// next id = 69 +// next id = 70 message GlopParameters { // Supported algorithms for scaling: // EQUILIBRATION - progressive scaling by row and column norms until the @@ -480,4 +480,8 @@ message GlopParameters { // evaluating large constraint with variables at their bound shouldn't cause // any overflow. optional double max_valid_magnitude = 199 [default = 1e30]; + + // On some problems like stp3d or pds-100 this makes a huge difference in + // speed and number of iterations of the dual simplex. + optional bool dual_price_prioritize_norm = 69 [default = false]; } diff --git a/ortools/glop/revised_simplex.cc b/ortools/glop/revised_simplex.cc index 6d9d710734..329672bc9e 100644 --- a/ortools/glop/revised_simplex.cc +++ b/ortools/glop/revised_simplex.cc @@ -2150,7 +2150,8 @@ Status RevisedSimplex::DualChooseLeavingVariableRow(RowIndex* leaving_row, // This is not supposed to happen, but better be safe. if (dual_prices_.Size() == 0) { - variable_values_.RecomputeDualPrices(); + variable_values_.RecomputeDualPrices( + parameters_.dual_price_prioritize_norm()); } // Return right away if there is no leaving variable. @@ -2391,12 +2392,16 @@ void RevisedSimplex::MakeBoxedVariableDualFeasible( // errors. Otherwise, this leads to cycling on many of the Netlib problems // since this is called at each iteration (because of the bound-flipping ratio // test). + // + // TODO(user): During an iteration, we might want to switch with a lower + // tolerance the bound flips that were deemed good so that we can more easily make + // progress?
+ const Fractional threshold = reduced_costs_.GetDualFeasibilityTolerance(); + const DenseRow& variable_values = variable_values_.GetDenseRow(); const DenseRow& reduced_costs = reduced_costs_.GetReducedCosts(); const DenseRow& lower_bounds = variables_info_.GetVariableLowerBounds(); const DenseRow& upper_bounds = variables_info_.GetVariableUpperBounds(); - const Fractional dual_feasibility_tolerance = - reduced_costs_.GetDualFeasibilityTolerance(); const VariableStatusRow& variable_status = variables_info_.GetStatusRow(); for (const ColIndex col : cols) { const Fractional reduced_cost = reduced_costs[col]; @@ -2407,12 +2412,11 @@ void RevisedSimplex::MakeBoxedVariableDualFeasible( DCHECK(variable_values[col] == lower_bounds[col] || variable_values[col] == upper_bounds[col] || status == VariableStatus::BASIC); - if (reduced_cost > dual_feasibility_tolerance && - status == VariableStatus::AT_UPPER_BOUND) { + if (reduced_cost > threshold && status == VariableStatus::AT_UPPER_BOUND) { variables_info_.UpdateToNonBasicStatus(col, VariableStatus::AT_LOWER_BOUND); changed_cols.push_back(col); - } else if (reduced_cost < -dual_feasibility_tolerance && + } else if (reduced_cost < -threshold && status == VariableStatus::AT_LOWER_BOUND) { variables_info_.UpdateToNonBasicStatus(col, VariableStatus::AT_UPPER_BOUND); @@ -3020,6 +3024,7 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, [this, time_limit]() { AdvanceDeterministicTime(time_limit); }); num_consecutive_degenerate_iterations_ = 0; bool refactorize = false; + RefactorizationReason reason = RefactorizationReason::DEFAULT; bound_flip_candidates_.clear(); @@ -3038,9 +3043,17 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, ScopedTimeDistributionUpdater timer(&iteration_stats_.total)); // Trigger a refactorization if one of the class we use request it. + // + // TODO(user): Estimate when variable values are imprecise and refactor too. 
const bool old_refactorize_value = refactorize; - if (reduced_costs_.NeedsBasisRefactorization()) refactorize = true; - if (dual_edge_norms_.NeedsBasisRefactorization()) refactorize = true; + if (reduced_costs_.NeedsBasisRefactorization()) { + if (!refactorize) reason = RefactorizationReason::RC; + refactorize = true; + } + if (dual_edge_norms_.NeedsBasisRefactorization()) { + if (!refactorize) reason = RefactorizationReason::NORM; + refactorize = true; + } GLOP_RETURN_IF_ERROR(RefactorizeBasisIfNeeded(&refactorize)); // If the basis is refactorized, we recompute all the values in order to @@ -3076,7 +3089,8 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, variables_info_.GetNonBasicBoxedVariables(), /*update_basic_values=*/false); variable_values_.RecomputeBasicVariableValues(); - variable_values_.RecomputeDualPrices(); + variable_values_.RecomputeDualPrices( + parameters_.dual_price_prioritize_norm()); // Computing the objective at each iteration takes time, so we just // check the limit when the basis is refactorized. @@ -3098,7 +3112,8 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, } } - DisplayIterationInfo(/*primal=*/false); + DisplayIterationInfo(/*primal=*/false, reason); + reason = RefactorizationReason::DEFAULT; } else { // Updates from the previous iteration that can be skipped if we // recompute everything (see other case above). @@ -3131,6 +3146,7 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, reduced_costs_.ClearAndRemoveCostShifts(); IF_STATS_ENABLED(timer.AlsoUpdate(&iteration_stats_.refactorize)); refactorize = true; + reason = RefactorizationReason::FINAL_CHECK; continue; } if (feasibility_phase) { @@ -3157,10 +3173,15 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, // We rechoose a potentially different leaving row. Note that if we choose // the same, we shouldn't go back here since the norm will now pass the // test. 
- const Fractional price = dual_pricing_vector_[leaving_row]; - const DenseColumn& squared_norms = dual_edge_norms_.GetEdgeSquaredNorms(); - dual_prices_.AddOrUpdate(leaving_row, - Square(price) / squared_norms[leaving_row]); + if (feasibility_phase) { + const Fractional price = dual_pricing_vector_[leaving_row]; + const DenseColumn& squared_norms = + dual_edge_norms_.GetEdgeSquaredNorms(); + dual_prices_.AddOrUpdate(leaving_row, + Square(price) / squared_norms[leaving_row]); + } else { + variable_values_.UpdateDualPrices({leaving_row}); + } continue; } update_row_.ComputeUpdateRow(leaving_row); @@ -3181,6 +3202,7 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, VLOG(1) << "No entering column. Double checking..."; IF_STATS_ENABLED(timer.AlsoUpdate(&iteration_stats_.refactorize)); refactorize = true; + reason = RefactorizationReason::FINAL_CHECK; continue; } DCHECK(basis_factorization_.IsRefactorized()); @@ -3212,6 +3234,7 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, VLOG(1) << "Trying not to pivot by " << entering_coeff; IF_STATS_ENABLED(timer.AlsoUpdate(&iteration_stats_.refactorize)); refactorize = true; + reason = RefactorizationReason::SMALL_PIVOT; continue; } @@ -3230,6 +3253,7 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, << direction_infinity_norm_; IF_STATS_ENABLED(timer.AlsoUpdate(&iteration_stats_.refactorize)); refactorize = true; + reason = RefactorizationReason::SMALL_PIVOT; continue; } } @@ -3300,12 +3324,6 @@ Status RevisedSimplex::DualMinimize(bool feasibility_phase, // correct non-basic variable value are needed at the end. variable_values_.SetNonBasicVariableValueFromStatus(leaving_col); - // This is slow, but otherwise we have a really bad precision on the - // variable values ... 
- if (std::abs(primal_step) * parameters_.primal_feasibility_tolerance() > - 1.0) { - refactorize = true; - } ++num_iterations_; } return Status::OK(); @@ -3547,10 +3565,39 @@ void RevisedSimplex::PropagateParameters() { update_row_.SetParameters(parameters_); } -void RevisedSimplex::DisplayIterationInfo(bool primal) { +void RevisedSimplex::DisplayIterationInfo(bool primal, + RefactorizationReason reason) { if (!logger_->LoggingIsEnabled()) return; const std::string first_word = primal ? "Primal " : "Dual "; + // We display the info on each re-factorization, and it is nice to show what + // triggered the issue. Note that we don't display normal refactorization when + // we decide that it is worth it for the solve time or we reach the fixed + // refactorization period. + std::string info; + if (reason != RefactorizationReason::DEFAULT) { + switch (reason) { + case RefactorizationReason::DEFAULT: + info = " [default]"; + break; + case RefactorizationReason::SMALL_PIVOT: + info = " [small pivot]"; + break; + case RefactorizationReason::NORM: + info = " [norm]"; + break; + case RefactorizationReason::RC: + info = " [rc]"; + break; + case RefactorizationReason::VAR_VALUES: + info = " [var values]"; + break; + case RefactorizationReason::FINAL_CHECK: + info = " [check]"; + break; + } + } + switch (phase_) { case Phase::FEASIBILITY: { const int64_t iter = num_iterations_; @@ -3572,7 +3619,7 @@ } SOLVER_LOG(logger_, first_word, "feasibility phase, iteration # ", iter, - ", ", name, " = ", absl::StrFormat("%.15E", objective)); + ", ", name, " = ", absl::StrFormat("%.15E", objective), info); break; } case Phase::OPTIMIZATION: { @@ -3584,7 +3631,7 @@ // The objective of the phase I is not the real objective, so we compute // the real one since the variable values // are the same.
const Fractional objective = ComputeInitialProblemObjectiveValue(); SOLVER_LOG(logger_, first_word, "optimization phase, iteration # ", iter, - ", objective = ", absl::StrFormat("%.15E", objective)); + ", objective = ", absl::StrFormat("%.15E", objective), info); break; } case Phase::PUSH: { @@ -3592,7 +3639,7 @@ void RevisedSimplex::DisplayIterationInfo(bool primal) { num_optimization_iterations_; SOLVER_LOG(logger_, first_word, "push phase, iteration # ", iter, ", remaining_variables_to_push = ", - ComputeNumberOfSuperBasicVariables()); + ComputeNumberOfSuperBasicVariables(), info); } } } diff --git a/ortools/glop/revised_simplex.h b/ortools/glop/revised_simplex.h index ff84adffe5..46706e206b 100644 --- a/ortools/glop/revised_simplex.h +++ b/ortools/glop/revised_simplex.h @@ -278,6 +278,15 @@ class RevisedSimplex { enum class Phase { FEASIBILITY, OPTIMIZATION, PUSH }; + enum class RefactorizationReason { + DEFAULT, + SMALL_PIVOT, + NORM, + RC, + VAR_VALUES, + FINAL_CHECK + }; + // Propagates parameters_ to all the other classes that need it. // // TODO(user): Maybe a better design is for them to have a reference to a @@ -304,7 +313,8 @@ class RevisedSimplex { std::string SimpleVariableInfo(ColIndex col) const; // Displays a short string with the current iteration and objective value. - void DisplayIterationInfo(bool primal); + void DisplayIterationInfo(bool primal, RefactorizationReason reason = + RefactorizationReason::DEFAULT); // Displays the error bounds of the current solution. 
void DisplayErrors(); diff --git a/ortools/glop/variable_values.cc b/ortools/glop/variable_values.cc index cc8638f880..83d3ed1563 100644 --- a/ortools/glop/variable_values.cc +++ b/ortools/glop/variable_values.cc @@ -225,45 +225,72 @@ void VariableValues::UpdateGivenNonBasicVariables( initially_all_zero_scratchpad_.non_zeros.clear(); } -void VariableValues::RecomputeDualPrices() { +void VariableValues::RecomputeDualPrices(bool put_more_importance_on_norm) { SCOPED_TIME_STAT(&stats_); const RowIndex num_rows = matrix_.num_rows(); dual_prices_->ClearAndResize(num_rows); dual_prices_->StartDenseUpdates(); + put_more_importance_on_norm_ = put_more_importance_on_norm; const Fractional tolerance = parameters_.primal_feasibility_tolerance(); const DenseColumn& squared_norms = dual_edge_norms_->GetEdgeSquaredNorms(); - for (RowIndex row(0); row < num_rows; ++row) { - const ColIndex col = basis_[row]; - const Fractional infeasibility = std::max(GetUpperBoundInfeasibility(col), - GetLowerBoundInfeasibility(col)); - if (infeasibility > tolerance) { - dual_prices_->DenseAddOrUpdate( - row, Square(infeasibility) / squared_norms[row]); + if (put_more_importance_on_norm) { + for (RowIndex row(0); row < num_rows; ++row) { + const ColIndex col = basis_[row]; + const Fractional infeasibility = std::max( + GetUpperBoundInfeasibility(col), GetLowerBoundInfeasibility(col)); + if (infeasibility > tolerance) { + dual_prices_->DenseAddOrUpdate( + row, std::abs(infeasibility) / squared_norms[row]); + } + } + } else { + for (RowIndex row(0); row < num_rows; ++row) { + const ColIndex col = basis_[row]; + const Fractional infeasibility = std::max( + GetUpperBoundInfeasibility(col), GetLowerBoundInfeasibility(col)); + if (infeasibility > tolerance) { + dual_prices_->DenseAddOrUpdate( + row, Square(infeasibility) / squared_norms[row]); + } } } } -void VariableValues::UpdateDualPrices(const std::vector& rows) { +void VariableValues::UpdateDualPrices(absl::Span rows) { if (dual_prices_->Size() != 
matrix_.num_rows()) { - RecomputeDualPrices(); + RecomputeDualPrices(put_more_importance_on_norm_); return; } - // Note(user): this is the same as the code in - // RecomputePrimalInfeasibilityInformation(), but we do need the clear part. + // Note(user): this is the same as the code in RecomputeDualPrices(), but we + // do need the clear part. SCOPED_TIME_STAT(&stats_); const Fractional tolerance = parameters_.primal_feasibility_tolerance(); const DenseColumn& squared_norms = dual_edge_norms_->GetEdgeSquaredNorms(); - for (const RowIndex row : rows) { - const ColIndex col = basis_[row]; - const Fractional infeasibility = std::max(GetUpperBoundInfeasibility(col), - GetLowerBoundInfeasibility(col)); - if (infeasibility > tolerance) { - dual_prices_->AddOrUpdate(row, - Square(infeasibility) / squared_norms[row]); - } else { - dual_prices_->Remove(row); + if (put_more_importance_on_norm_) { + for (const RowIndex row : rows) { + const ColIndex col = basis_[row]; + const Fractional infeasibility = std::max( + GetUpperBoundInfeasibility(col), GetLowerBoundInfeasibility(col)); + if (infeasibility > tolerance) { + dual_prices_->AddOrUpdate(row, + std::abs(infeasibility) / squared_norms[row]); + } else { + dual_prices_->Remove(row); + } + } + } else { + for (const RowIndex row : rows) { + const ColIndex col = basis_[row]; + const Fractional infeasibility = std::max( + GetUpperBoundInfeasibility(col), GetLowerBoundInfeasibility(col)); + if (infeasibility > tolerance) { + dual_prices_->AddOrUpdate(row, + Square(infeasibility) / squared_norms[row]); + } else { + dual_prices_->Remove(row); + } } } } diff --git a/ortools/glop/variable_values.h b/ortools/glop/variable_values.h index ce6f36ef22..154355a481 100644 --- a/ortools/glop/variable_values.h +++ b/ortools/glop/variable_values.h @@ -108,14 +108,21 @@ class VariableValues { // Functions dealing with the primal-infeasible basic variables. 
A basic // variable is primal-infeasible if its infeasibility is stricly greater than - // the primal feasibility tolerance. These are exactly the dual "prices" and - // are just used during the dual simplex. + // the primal feasibility tolerance. These are exactly the dual "prices" once + // rescaled by the norms. This is only used during the dual simplex. // // This information is only available after a call to RecomputeDualPrices() // and has to be kept in sync by calling UpdateDualPrices() for the rows that // changed values. - void RecomputeDualPrices(); - void UpdateDualPrices(const std::vector& row); + // + // TODO(user): On some problems like stp3d.mps or pds-100.mps, using a different + // price like abs(infeasibility) / squared_norms gives better results. Some + // solvers switch according to a criterion like: all entries are +1/-1, the columns + // have no more than 24 non-zeros and the average column size is no more than + // 6! Understand and implement some variant of this? I think the gain is + // mainly because of using sparser vectors? + void RecomputeDualPrices(bool put_more_importance_on_norm = false); + void UpdateDualPrices(absl::Span row); // The primal phase I objective is related to the primal infeasible // information above. The cost of a basic column will be 1 if the variable is @@ -152,6 +159,10 @@ class VariableValues { const VariablesInfo& variables_info_; const BasisFactorization& basis_factorization_; + // This is set by RecomputeDualPrices() so that UpdateDualPrices() uses + // the same formula. + bool put_more_importance_on_norm_ = false; + // The dual prices are a normalized version of the primal infeasibility. DualEdgeNorms* dual_edge_norms_; DynamicMaximum* dual_prices_;