Google OR-Tools v9.11
a fast and portable software suite for combinatorial optimization
Loading...
Searching...
No Matches
encoding.h
Go to the documentation of this file.
1// Copyright 2010-2024 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14// Algorithms to encode constraints into their SAT representation. Currently,
15// this contains one possible encoding of a cardinality constraint as used by
16// the core-based optimization algorithm in optimization.h.
17//
18// This is also known as the incremental totalizer encoding in the literature.
19
20#ifndef OR_TOOLS_SAT_ENCODING_H_
21#define OR_TOOLS_SAT_ENCODING_H_
22
#include <cstdint>
#include <deque>
#include <functional>
#include <string>
#include <utility>
#include <vector>

#include "absl/log/check.h"
#include "absl/types/span.h"
#include "ortools/base/macros.h"
#include "ortools/base/types.h"
#include "ortools/sat/boolean_problem.pb.h"
#include "ortools/sat/clause.h"
#include "ortools/sat/model.h"
#include "ortools/sat/pb_constraint.h"
#include "ortools/sat/sat_base.h"
#include "ortools/sat/sat_parameters.pb.h"
#include "ortools/sat/sat_solver.h"
#include "ortools/sat/util.h"
40
41namespace operations_research {
42namespace sat {
43
44// This class represents a number in [0, ub]. The encoding uses ub binary
45// variables x_i with i in [0, ub) where x_i means that the number is > i. It is
46// called an EncodingNode, because it represents one node of the tree used to
47// encode a cardinality constraint.
48//
49// In practice, not all literals are explicitly created:
50// - Only the literals in [lb, current_ub) are "active" at a given time.
51// - The represented number is known to be >= lb.
52// - It may be greater than current_ub, but the extra literals will be only
53// created lazily. In all our solves, the literal current_ub - 1, will always
54// be assumed to false (i.e. the number will be <= current_ub - 1).
55// - Note that lb may increase and ub decrease as more information is learned
56// about this node by the sat solver.
57//
58// This is roughly based on the cardinality constraint encoding described in:
59// Bailleux and Yacine Boufkhad, "Efficient CNF Encoding of Boolean Cardinality
60// Constraints", In Proc. of CP 2003, pages 108-122, 2003.
62 public:
63 EncodingNode() = default;
64
65 // Static creation functions.
66 //
67 // The generic version constructs a node with value in [lb, ub].
68 // New literal "<=x" will be constructed using create_lit(x).
69 static EncodingNode ConstantNode(Coefficient weight);
70 static EncodingNode LiteralNode(Literal l, Coefficient weight);
71 static EncodingNode GenericNode(int lb, int ub,
72 std::function<Literal(int x)> create_lit,
73 Coefficient weight);
74
75 // Creates a "full" encoding node on n new variables, the represented number
76 // beeing in [lb, ub = lb + n). The variables are added to the given solver
77 // with the basic implications linking them:
78 // literal(0) >= ... >= literal(n-1)
80 SatSolver* solver);
81
82 // Creates a "lazy" encoding node representing the sum of a and b.
83 // Only one literals will be created by this operation. Note that no clauses
84 // linking it with a or b are added by this function.
88
89 // If we know that all the literals[0] of the given nodes are in "at most one"
90 // relationship, we can create a node that is the sum of them with a simple
91 // encoding. This does create linking implications.
92 void InitializeAmoNode(absl::Span<EncodingNode* const> nodes,
93 SatSolver* solver);
94
95 // Returns a literal with the meaning 'this node number is > i'.
96 // The given i must be in [lb_, current_ub).
97 Literal GreaterThan(int i) const { return literal(i - lb_); }
98
99 // Accessors to size() and literals in [lb, current_ub).
100 int size() const { return literals_.size(); }
101 Literal literal(int i) const {
102 CHECK_GE(i, 0);
103 CHECK_LT(i, literals_.size());
104 return literals_[i];
105 }
106
107 // Sort by decreasing depth first and then by increasing variable index.
108 // This is meant to be used by the priority queue in MergeAllNodesWithPQ().
109 bool operator<(const EncodingNode& other) const {
110 return depth_ > other.depth_ ||
111 (depth_ == other.depth_ && other.for_sorting_ > for_sorting_);
112 }
113
114 // Creates a new literals and increases current_ub.
115 // Returns false if we were already at the upper bound for this node.
116 bool IncreaseCurrentUB(SatSolver* solver);
117
118 // Indicate that the node cannot grow further than its current assumption.
119 void TransformToBoolean(SatSolver* solver);
120
121 // Removes the left-side literals fixed to 1. Note that this increases lb_ and
122 // reduces the number of active literals. It also removes any right-side
123 // literals fixed to 0. If such a literal exists, ub is updated accordingly.
124 //
125 // Return the overall weight increase.
126 Coefficient Reduce(const SatSolver& solver);
127
128 // GetAssumption() might need to create new literals.
129 bool AssumptionIs(Literal other) const;
131 bool HasNoWeight() const;
132 void IncreaseWeightLb();
133
134 // Fix any literal that would cause the weight of this node to go over the
135 // gap.
136 void ApplyWeightUpperBound(Coefficient gap, SatSolver* solver);
137
138 void set_weight(Coefficient w) {
139 weight_lb_ = lb_;
140 weight_ = w;
141 }
142 Coefficient weight() const { return weight_; }
143
144 // The depth is mainly used as an heuristic to decide which nodes to merge
145 // first. See the < operator.
146 void set_depth(int depth) { depth_ = depth; }
147 int depth() const { return depth_; }
148
149 int lb() const { return lb_; }
150 int weight_lb() const { return weight_lb_; }
151 int current_ub() const { return lb_ + literals_.size(); }
152 int ub() const { return ub_; }
153 EncodingNode* child_a() const { return child_a_; }
154 EncodingNode* child_b() const { return child_b_; }
155
156 // We use the solver to display the current values of the literals.
157 std::string DebugString(const VariablesAssignment& assignment) const;
158
159 private:
160 int depth_ = 0;
161 int lb_ = 0;
162 int ub_ = 1;
163 BooleanVariable for_sorting_;
164
165 // The weight is only applied for literal >= this lb.
166 int weight_lb_ = 0;
167
168 Coefficient weight_;
169 EncodingNode* child_a_ = nullptr;
170 EncodingNode* child_b_ = nullptr;
171
172 // If not null, will be used instead of creating new variable directly.
173 std::function<Literal(int x)> create_lit_ = nullptr;
174
175 // The literals of this node in order.
176 std::vector<Literal> literals_;
177};
178
179// Merges the two given EncodingNodes by creating a new node that corresponds to
180// the sum of the two given ones. Only the left-most binary variable is created
181// for the parent node, the other ones will be created later when needed.
182EncodingNode LazyMerge(EncodingNode* a, EncodingNode* b, SatSolver* solver);
183
184// Increases the size of the given node by one. To keep all the needed relations
185// with its children, we also need to increase their size by one, and so on
186// recursively. Also adds all the necessary clauses linking the newly added
187// literals.
188void IncreaseNodeSize(EncodingNode* node, SatSolver* solver);
189
190// Merges the two given EncodingNode by creating a new node that corresponds to
191// the sum of the two given ones. The given upper_bound is interpreted as a
192// bound on this sum, and allows creating fewer binary variables.
193EncodingNode FullMerge(Coefficient upper_bound, EncodingNode* a,
194 EncodingNode* b, SatSolver* solver);
195
196// Merges all the given nodes two by two until there is only one left. Returns
197// the final node which encodes the sum of all the given nodes.
198EncodingNode* MergeAllNodesWithDeque(Coefficient upper_bound,
199 const std::vector<EncodingNode*>& nodes,
200 SatSolver* solver,
201 std::deque<EncodingNode>* repository);
202
203// Same as MergeAllNodesWithDeque() but use a priority queue to merge in
204// priority nodes with smaller sizes. This also enforce that the sum of nodes
205// is greater than its lower bound.
207 Coefficient weight, const std::vector<EncodingNode*>& nodes,
208 SatSolver* solver, std::deque<EncodingNode>* repository);
209
210// Reduces the nodes using the now fixed literals, update the lower-bound, and
211// returns the set of assumptions for the next round of the core-based
212// algorithm. Returns an empty set of assumptions if everything is fixed.
213void ReduceNodes(Coefficient upper_bound, Coefficient* lower_bound,
214 std::vector<EncodingNode*>* nodes, SatSolver* solver);
215std::vector<Literal> ExtractAssumptions(Coefficient stratified_lower_bound,
216 const std::vector<EncodingNode*>& nodes,
217 SatSolver* solver);
218
219// Returns the minimum weight of the nodes in the core. Note that the literal in
220// the core must appear in the same order as the one in nodes.
221Coefficient ComputeCoreMinWeight(const std::vector<EncodingNode*>& nodes,
222 const std::vector<Literal>& core);
223
224// Returns the maximum node weight under the given upper_bound. Returns zero if
225// no such weight exist (note that a node weight is strictly positive, so this
226// make sense).
227Coefficient MaxNodeWeightSmallerThan(const std::vector<EncodingNode*>& nodes,
228 Coefficient upper_bound);
229
230// The class reponsible for processing cores and maintaining a Boolean encoding
231// of the linear objective.
233 public:
235 : params_(*model->GetOrCreate<SatParameters>()),
236 sat_solver_(model->GetOrCreate<SatSolver>()),
237 implications_(model->GetOrCreate<BinaryImplicationGraph>()),
238 random_(model->GetOrCreate<ModelRandomGenerator>()) {}
239
240 // Updates the encoding using the given core. The literals in the core must
241 // match the order in nodes. Returns false if the model become infeasible.
242 bool ProcessCore(absl::Span<const Literal> core, Coefficient min_weight,
243 Coefficient gap, std::string* info);
244
246 repository_.push_back(std::move(node));
247 nodes_.push_back(&repository_.back());
248 }
249
250 // TODO(user): Remove mutable version once refactoring is done.
251 const std::vector<EncodingNode*>& nodes() const { return nodes_; }
252 std::vector<EncodingNode*>* mutable_nodes() { return &nodes_; }
253
254 private:
255 // There is more than one way to create new assumptions and encode the
256 // information from this core. This is slightly different from ProcessCore()
257 // and follow the algorithm used by many of the top max-SAT solver under the
258 // name incremental OLL. This is described in: António Morgado, Carmine
259 // Dodaro, Joao Marques-Silva. "Core-Guided MaxSAT with Soft Cardinality
260 // Constraints". CP 2014. pp. 564-573. António Morgado, Alexey Ignatiev, Joao
261 // Marques-Silva. "MSCG: Robust Core-Guided MaxSAT Solving." JSAT 9. 2014. pp.
262 // 129-134.
263 //
264 // TODO(user): The last time this was tested, it was however not as good as
265 // the ProcessCore() version. That might change as we code/change more
266 // heuristic, so we keep it around.
267 const bool alternative_encoding_ = false;
268
269 // Nodes point into repository_.
270 std::vector<EncodingNode*> nodes_;
271 std::deque<EncodingNode> repository_;
272
273 const SatParameters& params_;
274 SatSolver* sat_solver_;
275 BinaryImplicationGraph* implications_;
276 ModelRandomGenerator* random_;
277};
278
279} // namespace sat
280} // namespace operations_research
281
282#endif // OR_TOOLS_SAT_ENCODING_H_
void InitializeLazyCoreNode(Coefficient weight, EncodingNode *a, EncodingNode *b)
Definition encoding.cc:142
Literal GreaterThan(int i) const
Definition encoding.h:97
bool AssumptionIs(Literal other) const
GetAssumption() might need to create new literals.
Definition encoding.cc:229
void InitializeFullNode(int n, EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition encoding.cc:79
Coefficient Reduce(const SatSolver &solver)
Definition encoding.cc:173
void InitializeLazyNode(EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition encoding.cc:126
int size() const
Accessors to size() and literals in [lb, current_ub).
Definition encoding.h:100
std::string DebugString(const VariablesAssignment &assignment) const
We use the solver to display the current values of the literals.
Definition encoding.cc:254
static EncodingNode GenericNode(int lb, int ub, std::function< Literal(int x)> create_lit, Coefficient weight)
Definition encoding.cc:61
static EncodingNode ConstantNode(Coefficient weight)
Definition encoding.cc:41
void ApplyWeightUpperBound(Coefficient gap, SatSolver *solver)
Definition encoding.cc:195
bool IncreaseCurrentUB(SatSolver *solver)
Definition encoding.cc:157
static EncodingNode LiteralNode(Literal l, Coefficient weight)
Definition encoding.cc:50
void TransformToBoolean(SatSolver *solver)
Indicate that the node cannot grow further than its current assumption.
Definition encoding.cc:209
EncodingNode * child_b() const
Definition encoding.h:154
EncodingNode * child_a() const
Definition encoding.h:153
void InitializeAmoNode(absl::Span< EncodingNode *const > nodes, SatSolver *solver)
Definition encoding.cc:99
bool operator<(const EncodingNode &other) const
Definition encoding.h:109
Literal GetAssumption(SatSolver *solver)
Definition encoding.cc:235
std::vector< EncodingNode * > * mutable_nodes()
Definition encoding.h:252
void AddBaseNode(EncodingNode node)
Definition encoding.h:245
const std::vector< EncodingNode * > & nodes() const
Definition encoding.h:251
bool ProcessCore(absl::Span< const Literal > core, Coefficient min_weight, Coefficient gap, std::string *info)
Definition encoding.cc:586
int64_t a
Definition table.cc:44
double upper_bound
double lower_bound
GRBmodel * model
int literal
EncodingNode * MergeAllNodesWithDeque(Coefficient upper_bound, const std::vector< EncodingNode * > &nodes, SatSolver *solver, std::deque< EncodingNode > *repository)
Definition encoding.cc:439
std::vector< Literal > ExtractAssumptions(Coefficient stratified_lower_bound, const std::vector< EncodingNode * > &nodes, SatSolver *solver)
Definition encoding.cc:547
void ReduceNodes(Coefficient upper_bound, Coefficient *lower_bound, std::vector< EncodingNode * > *nodes, SatSolver *solver)
Definition encoding.cc:501
EncodingNode LazyMerge(EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition encoding.cc:279
EncodingNode FullMerge(Coefficient upper_bound, EncodingNode *a, EncodingNode *b, SatSolver *solver)
Definition encoding.cc:388
EncodingNode * LazyMergeAllNodeWithPQAndIncreaseLb(Coefficient weight, const std::vector< EncodingNode * > &nodes, SatSolver *solver, std::deque< EncodingNode > *repository)
Definition encoding.cc:461
Coefficient ComputeCoreMinWeight(const std::vector< EncodingNode * > &nodes, const std::vector< Literal > &core)
Definition encoding.cc:560
void IncreaseNodeSize(EncodingNode *node, SatSolver *solver)
Definition encoding.cc:289
Coefficient MaxNodeWeightSmallerThan(const std::vector< EncodingNode * > &nodes, Coefficient upper_bound)
Definition encoding.cc:574
In SWIG mode, we don't want anything besides these top-level includes.
trees with all degrees equal w the current value of w
int64_t weight
Definition pack.cc:510
const Variable x
Definition qp_tests.cc:127
int nodes