Google OR-Tools v9.11
a fast and portable software suite for combinatorial optimization
bop_base.h
1// Copyright 2010-2024 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14#ifndef OR_TOOLS_BOP_BOP_BASE_H_
15#define OR_TOOLS_BOP_BOP_BASE_H_
16
17#include <cstdint>
18#include <limits>
19#include <ostream>
20#include <string>
21#include <vector>
22
23#include "absl/base/thread_annotations.h"
24#include "absl/strings/string_view.h"
25#include "absl/synchronization/mutex.h"
26#include "ortools/base/macros.h"
27#include "ortools/base/strong_vector.h"
28#include "ortools/bop/bop_parameters.pb.h"
29#include "ortools/bop/bop_solution.h"
30#include "ortools/bop/bop_types.h"
31#include "ortools/lp_data/lp_types.h"
32#include "ortools/sat/boolean_problem.pb.h"
33#include "ortools/sat/clause.h"
34#include "ortools/sat/sat_base.h"
35#include "ortools/util/stats.h"
36#include "ortools/util/time_limit.h"
37
38namespace operations_research {
39namespace bop {
40
41class ProblemState;
42// Forward declaration.
43struct LearnedInfo;
44
45// Base class used to optimize a ProblemState.
46// Optimizers implementing this class are used in a sort of portfolio and
47// are run sequentially or concurrently. See for instance BopRandomLNSOptimizer.
48class BopOptimizerBase {
49 public:
50 explicit BopOptimizerBase(absl::string_view name);
51 virtual ~BopOptimizerBase();
52
53 // Returns the name given at construction.
54 const std::string& name() const { return name_; }
55
56 // Returns true if this optimizer should be run on the given problem state.
57  // Some optimizers require a feasible solution to run, for instance.
58 //
59 // Note that a similar effect can be achieved if Optimize() returns ABORT
60  // right away. However, doing the latter will lower the chance of this
61  // optimizer being called again since it will count as a failure to improve
62 // the current state.
63 virtual bool ShouldBeRun(const ProblemState& problem_state) const = 0;
64
65 // Return status of the Optimize() function below.
66 //
67 // TODO(user): To redesign, some are not needed anymore thanks to the
68 // problem state, e.g. IsOptimal().
69  enum Status {
70    OPTIMAL_SOLUTION_FOUND,
71    SOLUTION_FOUND,
72    INFEASIBLE,
73    LIMIT_REACHED,
74
75    // Some information was learned and the problem state will need to be
76    // updated. This will trigger a new optimization round.
77    //
78    // TODO(user): replace by learned_info->IsEmpty()? but we will need to clear
79    // the BopSolution there first.
80    INFORMATION_FOUND,
81
82    // This optimizer didn't learn any information yet but can be called again
83    // on the same problem state to resume its work.
84    CONTINUE,
85
86    // There is no need to call this optimizer again on the same problem state.
87    ABORT
88  };
89
90 // Tries to infer more information about the problem state, i.e. reduces the
91 // gap by increasing the lower bound or finding a better solution.
92 // Returns SOLUTION_FOUND when a new solution with a better objective cost is
93 // found before a time limit.
94  // The learned information is cleared and then filled with any new information
95 // about the problem, e.g. a new lower bound.
96 //
97  // Preconditions: ShouldBeRun() must return true.
98 virtual Status Optimize(const BopParameters& parameters,
99 const ProblemState& problem_state,
100 LearnedInfo* learned_info, TimeLimit* time_limit) = 0;
101
102 // Returns a string describing the status.
103 static std::string GetStatusString(Status status);
104
105 protected:
106 const std::string name_;
107
108  mutable StatsGroup stats_;
109};
110
111inline std::ostream& operator<<(std::ostream& os,
112                                BopOptimizerBase::Status status) {
113  os << BopOptimizerBase::GetStatusString(status);
114  return os;
115}
116
117// This class represents the current state of the problem with all the
118// information that the solver learned about it at a given time.
119class ProblemState {
120 public:
121 explicit ProblemState(const sat::LinearBooleanProblem& problem);
122
123 // This type is neither copyable nor movable.
124 ProblemState(const ProblemState&) = delete;
125  ProblemState& operator=(const ProblemState&) = delete;
126
127 // Sets parameters, used for instance to get the tolerance, the gap limit...
128 void SetParameters(const BopParameters& parameters) {
129 parameters_ = parameters;
130 }
131
132 const BopParameters& GetParameters() const { return parameters_; }
133
134 // Sets an assignment preference for each variable.
135 // This is only used for warm start.
136 void set_assignment_preference(const std::vector<bool>& a) {
137 assignment_preference_ = a;
138 }
139 std::vector<bool> assignment_preference() const {
140 return assignment_preference_;
141 }
142
143 // Merges the learned information with the current problem state. For
144  // instance, if variables x and y are fixed in the current state, and z is
145 // learned to be fixed, the result of the merge will be x, y, and z being
146 // fixed in the problem state.
147 // Note that the LP values contained in the learned information (if any)
148 // will replace the LP values of the problem state, whatever the cost is.
149 // Returns true when the merge has changed the problem state.
150 bool MergeLearnedInfo(const LearnedInfo& learned_info,
151 BopOptimizerBase::Status optimization_status);
152
153 // Returns all the information learned so far.
154 // TODO(user): In the current implementation the learned information only
155 // contains binary clauses added since the last call to
156 // SynchronizationDone().
157 // Add an iterator on the sat::BinaryClauseManager.
158  LearnedInfo GetLearnedInfo() const;
159
160 // The stamp represents an upper bound on the number of times the problem
161  // state has been updated. If the stamp has changed since the last time one
162  // checked the state, it's worth trying again as the state might have
163  // changed (no guarantee).
164 static const int64_t kInitialStampValue;
165 int64_t update_stamp() const { return update_stamp_; }
166
167 // Marks the problem state as optimal.
168 void MarkAsOptimal();
169
170 // Marks the problem state as infeasible.
171 void MarkAsInfeasible();
172
173 // Returns true when the current state is proved to be optimal. In such a case
174 // solution() returns the optimal solution.
175 bool IsOptimal() const {
176 return solution_.IsFeasible() && solution_.GetCost() == lower_bound();
177 }
178
179 // Returns true when the problem is proved to be infeasible.
180 bool IsInfeasible() const { return lower_bound() > upper_bound(); }
181
182 // Returns true when the variable var is fixed in the current problem state.
183 // The value of the fixed variable is returned by GetVariableFixedValue(var).
184 bool IsVariableFixed(VariableIndex var) const { return is_fixed_[var]; }
185  const util_intops::StrongVector<VariableIndex, bool>& is_fixed() const {
186    return is_fixed_;
187 }
188
189  // Returns the value of the fixed variable var. Should only be called on fixed
190 // variables (CHECKed).
191 bool GetVariableFixedValue(VariableIndex var) const {
192 return fixed_values_[var];
193 }
194  const util_intops::StrongVector<VariableIndex, bool>& fixed_values() const {
195    return fixed_values_;
196 }
197
198 // Returns the values of the LP relaxation of the problem. Returns an empty
199 // vector when the LP has not been populated.
200 const glop::DenseRow& lp_values() const { return lp_values_; }
201
202 // Returns the solution to the current state problem.
203 // Note that the solution might not be feasible because until we find one, it
204 // will just be the all-false assignment.
205 const BopSolution& solution() const { return solution_; }
206
207 // Returns the original problem. Note that the current problem might be
208 // different, e.g. fixed variables, but equivalent, i.e. a solution to one
209 // should be a solution to the other too.
210 const sat::LinearBooleanProblem& original_problem() const {
211 return original_problem_;
212 }
213
214 // Returns the current lower (resp. upper) bound of the objective cost.
215 // For internal use only: this is the unscaled version of the lower (resp.
216 // upper) bound, and so should be compared only to the unscaled cost given by
217 // solution.GetCost().
218 int64_t lower_bound() const { return lower_bound_; }
219 int64_t upper_bound() const { return upper_bound_; }
220
221 // Returns the scaled lower bound of the original problem.
222 double GetScaledLowerBound() const {
223 return (lower_bound() + original_problem_.objective().offset()) *
224 original_problem_.objective().scaling_factor();
225 }
226
227  // Returns the newly added binary clauses since the last SynchronizationDone().
228 const std::vector<sat::BinaryClause>& NewlyAddedBinaryClauses() const;
229
230 // Resets what is considered "new" information. This is meant to be called
231  // once all the optimizers have been synchronized.
232 void SynchronizationDone();
233
234 private:
235 const sat::LinearBooleanProblem& original_problem_;
236 BopParameters parameters_;
237 int64_t update_stamp_;
238  util_intops::StrongVector<VariableIndex, bool> is_fixed_;
239  util_intops::StrongVector<VariableIndex, bool> fixed_values_;
240  glop::DenseRow lp_values_;
241 BopSolution solution_;
242 std::vector<bool> assignment_preference_;
243
244 int64_t lower_bound_;
245 int64_t upper_bound_;
246
247  // Manages the set of the problem's binary clauses (including the learned ones).
248 sat::BinaryClauseManager binary_clause_manager_;
249};
250
251// This struct represents what has been learned on the problem state by
252// running an optimizer. The goal is then to merge the learned information
253// with the problem state in order to get a more constrained problem to be used
254  // by the next optimizer to be called.
255struct LearnedInfo {
256  explicit LearnedInfo(const sat::LinearBooleanProblem& problem)
257 : fixed_literals(),
258 solution(problem, "AllZero"),
259 lower_bound(std::numeric_limits<int64_t>::min()),
260 lp_values(),
261 binary_clauses() {}
262
263  // Clears everything just as if the object were brand new. This can be used
264  // to reduce the number of object creations / deletions.
265 void Clear() {
266 fixed_literals.clear();
267 lower_bound = std::numeric_limits<int64_t>::min();
268 lp_values.clear();
269 binary_clauses.clear();
270 }
271
272 // Vector of all literals that have been fixed.
273 std::vector<sat::Literal> fixed_literals;
274
275 // New solution. Note that the solution might be infeasible.
276  BopSolution solution;
277
278 // A lower bound (for multi-threading purpose).
279 int64_t lower_bound;
280
281 // An assignment for the relaxed linear programming problem (can be empty).
282 // This is meant to be the optimal LP solution, but can just be a feasible
283 // solution or any floating point assignment if the LP solver didn't solve
284 // the relaxed problem optimally.
285  glop::DenseRow lp_values;
286
287 // New binary clauses.
288 std::vector<sat::BinaryClause> binary_clauses;
289};
290} // namespace bop
291} // namespace operations_research
292#endif // OR_TOOLS_BOP_BOP_BASE_H_
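
To make the contract above concrete, the following is a minimal sketch of a BopOptimizerBase subclass. The class MyClauseLearningOptimizer, its run policy and the driver fragment after it are hypothetical and only illustrate how ShouldBeRun(), Optimize(), LearnedInfo and ProblemState::MergeLearnedInfo() fit together; for real implementations see, for instance, the BopRandomLNSOptimizer referenced above.

// Hypothetical optimizer, for illustration only (not part of OR-Tools).
#include "ortools/bop/bop_base.h"
#include "ortools/util/time_limit.h"

namespace operations_research {
namespace bop {

class MyClauseLearningOptimizer : public BopOptimizerBase {
 public:
  MyClauseLearningOptimizer() : BopOptimizerBase("MyClauseLearningOptimizer") {}

  // Illustrative policy: only run once a feasible solution is known.
  bool ShouldBeRun(const ProblemState& problem_state) const override {
    return problem_state.solution().IsFeasible();
  }

  Status Optimize(const BopParameters& parameters,
                  const ProblemState& problem_state, LearnedInfo* learned_info,
                  TimeLimit* time_limit) override {
    // Per the contract above, start from a clean LearnedInfo.
    learned_info->Clear();
    if (time_limit->LimitReached()) return ABORT;

    // A real optimizer would fill learned_info->fixed_literals,
    // learned_info->binary_clauses, learned_info->lower_bound or
    // learned_info->solution here. This sketch learns nothing, so there is
    // no point in calling it again on the same problem state.
    return ABORT;
  }
};

}  // namespace bop
}  // namespace operations_research

A caller would then combine these pieces roughly as follows (a simplified fragment, not the actual BopSolver loop):

  // problem_state, parameters, learned_info and time_limit are assumed to be
  // live objects owned by the caller.
  MyClauseLearningOptimizer optimizer;
  if (optimizer.ShouldBeRun(problem_state)) {
    const BopOptimizerBase::Status status =
        optimizer.Optimize(parameters, problem_state, &learned_info, &time_limit);
    problem_state.MergeLearnedInfo(learned_info, status);
    problem_state.SynchronizationDone();
  }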