linear_relaxation.h
// Copyright 2010-2024 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef OR_TOOLS_SAT_LINEAR_RELAXATION_H_
#define OR_TOOLS_SAT_LINEAR_RELAXATION_H_

#include <optional>
#include <vector>

#include "ortools/sat/cp_model.pb.h"
#include "ortools/sat/cuts.h"
#include "ortools/sat/integer.h"
#include "ortools/sat/model.h"

namespace operations_research {
namespace sat {

struct LinearRelaxation {
  std::vector<LinearConstraint> linear_constraints;
  std::vector<std::vector<Literal>> at_most_ones;
  std::vector<CutGenerator> cut_generators;
};

// Looks at all the encoding literals (li <=> var == value_i) that have a
// view and adds a linear relaxation of their relationship with var.
//
// If the encoding is full, we can just add:
// - Sum li == 1
// - var == min_value + Sum li * (value_i - min_value)
//
// When the set of such encoding literals does not cover the full domain of
// var, we do something a bit more involved. Let min_not_encoded and
// max_not_encoded be the min and max values of the domain of var that are NOT
// part of the encoding. We add:
// - Sum li <= 1
// - var >= (Sum li * value_i) + (1 - Sum li) * min_not_encoded
// - var <= (Sum li * value_i) + (1 - Sum li) * max_not_encoded
//
// Note that the special case where min_not_encoded == max_not_encoded
// essentially reduces to the full encoding, except with a different "rhs"
// value.
//
// We also increment the corresponding counter if we added something. We
// consider the relaxation "tight" if the encoding was full or if
// min_not_encoded == max_not_encoded.
void AppendRelaxationForEqualityEncoding(IntegerVariable var,
                                         const Model& model,
                                         LinearRelaxation* relaxation,
                                         int* num_tight, int* num_loose);
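
// As an illustration, assume var has domain [0, 5] and only the literals
// l2 <=> (var == 2) and l4 <=> (var == 4) have an integer view. Then
// min_not_encoded == 0, max_not_encoded == 5, and the relaxation above adds:
// - l2 + l4 <= 1
// - var >= 2 * l2 + 4 * l4 + 0 * (1 - l2 - l4)
// - var <= 2 * l2 + 4 * l4 + 5 * (1 - l2 - l4)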

// This is a different relaxation that uses a partial set of literals li such
// that (li <=> var >= xi). In this case we use the following encoding:
// - li >= l_{i+1} for all possible i. Note that the xi need to be sorted.
// - var >= min + l0 * (x0 - min) + Sum_{i>0} li * (xi - x_{i-1})
// - and the same as above for NegationOf(var) for the upper bound.
//
// Like for AppendRelaxationForEqualityEncoding(), we skip any li that does not
// have an integer view.
void AppendPartialGreaterThanEncodingRelaxation(IntegerVariable var,
                                                const Model& model,
                                                LinearRelaxation* relaxation);
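
// As an illustration, assume var has domain [0, 10] with sorted literals
// l0 <=> (var >= 3) and l1 <=> (var >= 7), both with an integer view. The
// encoding above adds:
// - l0 >= l1
// - var >= 0 + l0 * (3 - 0) + l1 * (7 - 3)
// and the same construction on NegationOf(var) gives the upper-bound part.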

// Returns a vector of new literals in an exactly-one relationship.
// In addition, this creates an IntegerView for all these literals and also
// adds the exactly-one constraint to the LinearRelaxation.
std::vector<Literal> CreateAlternativeLiteralsWithView(
    int num_literals, Model* model, LinearRelaxation* relaxation);
81
82void AppendBoolOrRelaxation(const ConstraintProto& ct, Model* model,
83 LinearRelaxation* relaxation);
84
85void AppendBoolAndRelaxation(const ConstraintProto& ct, Model* model,
86 LinearRelaxation* relaxation,
87 ActivityBoundHelper* activity_helper = nullptr);
88
89void AppendAtMostOneRelaxation(const ConstraintProto& ct, Model* model,
90 LinearRelaxation* relaxation);
91
92void AppendExactlyOneRelaxation(const ConstraintProto& ct, Model* model,
93 LinearRelaxation* relaxation);
94
// Adds linearization of int max constraints. Returns a vector of z vars such
// that: z_vars[l] == 1 <=> target = exprs[l].
//
// Consider the Lin Max constraint with d expressions and n variables in the
// form: target = max {exprs[l] = Sum (wli * xi + bl)}. l in {1,..,d}.
//   Li = lower bound of xi
//   Ui = upper bound of xi.
// Let zl be in {0,1} for all l in {1,..,d}.
// The target = exprs[l] when zl = 1.
//
// The following is a valid linearization for Lin Max:
//   target >= exprs[l], for all l in {1,..,d}
//   target <= Sum_i(wki * xi) + Sum_l((Nkl + bl) * zl), for all k in {1,..,d}
// where Nkl is a large number defined as:
//   Nkl = Sum_i(max((wli - wki) * Li, (wli - wki) * Ui))
//       = Sum (max corner difference for variable i, target expr k, max expr l)
//
// Reference: "Strong mixed-integer programming formulations for trained neural
// networks" by Ross Anderson et al. (https://arxiv.org/pdf/1811.01988.pdf).
//
// TODO(user): Support linear expression as target.
void AppendLinMaxRelaxationPart1(const ConstraintProto& ct, Model* model,
                                 LinearRelaxation* relaxation);
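
// As an illustration, take d = 2, a single variable x in [0, 4], and the two
// expressions e1 = x and e2 = -x + 2. Then
//   N11 = N22 = 0,
//   N12 = max((-1 - 1) * 0, (-1 - 1) * 4) = 0,
//   N21 = max((1 - (-1)) * 0, (1 - (-1)) * 4) = 8,
// and, in addition to target >= e1 and target >= e2, the relaxation adds:
//   target <= x + (N11 + 0) * z1 + (N12 + 2) * z2 = x + 2 * z2
//   target <= -x + (N21 + 0) * z1 + (N22 + 2) * z2 = -x + 8 * z1 + 2 * z2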

void AppendLinMaxRelaxationPart2(
    IntegerVariable target, const std::vector<Literal>& alternative_literals,
    const std::vector<LinearExpression>& exprs, Model* model,
    LinearRelaxation* relaxation);

// Note: This only works if all affine expressions share the same variable.
void AppendMaxAffineRelaxation(const ConstraintProto& ct, Model* model,
                               LinearRelaxation* relaxation);

// Appends linear constraints to the relaxation. This also handles the
// relaxation of linear constraints with enforcement literals.
// A linear constraint lb <= ax <= ub with enforcement literals {ei} is relaxed
// as follows:
//   lb <= (Sum Negated(ei) * (lb - implied_lb)) + ax <= inf
//   -inf <= (Sum Negated(ei) * (ub - implied_ub)) + ax <= ub
// where implied_lb and implied_ub are the trivial lower and upper bounds of
// the constraint.
void AppendLinearConstraintRelaxation(
    const ConstraintProto& ct, bool linearize_enforced_constraints,
    Model* model, LinearRelaxation* relaxation,
    ActivityBoundHelper* activity_helper = nullptr);
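
// As an illustration, take a single enforcement literal e and the constraint
// e => (1 <= x + y <= 2) with x, y in [0, 3]. The trivial bounds of x + y are
// implied_lb = 0 and implied_ub = 6, so the relaxation above adds:
//   1 <= (1 - e) * (1 - 0) + x + y
//   -inf <= (1 - e) * (2 - 6) + x + y <= 2
// Both constraints become vacuous when e is false and recover the original
// bounds when e is true.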

void AppendSquareRelaxation(const ConstraintProto& ct, Model* m,
                            LinearRelaxation* relaxation);

// Adds linearization of no overlap constraints.
// It adds an energetic equation linking the duration of all potential tasks to
// the actual span of the no overlap constraint.
void AppendNoOverlapRelaxationAndCutGenerator(const ConstraintProto& ct,
                                              Model* model,
                                              LinearRelaxation* relaxation);

// Adds linearization of cumulative constraints. The second part adds an
// energetic equation linking the duration of all potential tasks to the actual
// span * capacity of the cumulative constraint.
void AppendCumulativeRelaxationAndCutGenerator(const ConstraintProto& ct,
                                               Model* model,
                                               LinearRelaxation* relaxation);
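
// Roughly, the energetic part of this relaxation has the shape
//   Sum_t presence_t * size_t * demand_t <= capacity * (span_end - span_start)
// where the sum ranges over the tasks of the constraint. For instance, three
// always-present tasks of size 2 and demand 1 on a cumulative of capacity 2
// force span_end - span_start >= 3.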

// Cut generators.
void AddIntProdCutGenerator(const ConstraintProto& ct, int linearization_level,
                            Model* m, LinearRelaxation* relaxation);

void AddSquareCutGenerator(const ConstraintProto& ct, int linearization_level,
                           Model* m, LinearRelaxation* relaxation);

void AddAllDiffRelaxationAndCutGenerator(const ConstraintProto& ct,
                                         int linearization_level, Model* m,
                                         LinearRelaxation* relaxation);

void AddLinMaxCutGenerator(const ConstraintProto& ct, Model* m,
                           LinearRelaxation* relaxation);

// Routing relaxation and cut generators.

void AppendCircuitRelaxation(const ConstraintProto& ct, Model* model,
                             LinearRelaxation* relaxation);

void AppendRoutesRelaxation(const ConstraintProto& ct, Model* model,
                            LinearRelaxation* relaxation);

void AddCircuitCutGenerator(const ConstraintProto& ct, Model* m,
                            LinearRelaxation* relaxation);

void AddRoutesCutGenerator(const ConstraintProto& ct, Model* m,
                           LinearRelaxation* relaxation);

// Scheduling relaxations and cut generators.

// Adds linearization of cumulative constraints. The second part adds an
// energetic equation linking the duration of all potential tasks to the actual
// span * capacity of the cumulative constraint. It uses the makespan to
// compute the span of the constraint if defined.
void AddCumulativeRelaxation(const AffineExpression& capacity,
                             SchedulingConstraintHelper* helper,
                             SchedulingDemandHelper* demands,
                             const std::optional<AffineExpression>& makespan,
                             Model* model, LinearRelaxation* relaxation);

void AddCumulativeCutGenerator(const AffineExpression& capacity,
                               SchedulingConstraintHelper* helper,
                               SchedulingDemandHelper* demands,
                               const std::optional<AffineExpression>& makespan,
                               Model* m, LinearRelaxation* relaxation);

void AddNoOverlapCutGenerator(SchedulingConstraintHelper* helper,
                              const std::optional<AffineExpression>& makespan,
                              Model* m, LinearRelaxation* relaxation);

void AddNoOverlap2dCutGenerator(const ConstraintProto& ct, Model* m,
                                LinearRelaxation* relaxation);

// Adds linearization of different types of constraints.
void TryToLinearizeConstraint(const CpModelProto& model_proto,
                              const ConstraintProto& ct,
                              int linearization_level, Model* model,
                              LinearRelaxation* relaxation,
                              ActivityBoundHelper* helper = nullptr);

// Builds the linear relaxation of a CpModelProto.
LinearRelaxation ComputeLinearRelaxation(const CpModelProto& model_proto,
                                         Model* m);
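
// A minimal usage sketch (assuming `proto` is a CpModelProto and `m` is a
// Model into which the variables and constraints of `proto` have already been
// loaded by the CP-SAT solver):
//
//   const LinearRelaxation relaxation = ComputeLinearRelaxation(proto, &m);
//   // relaxation.linear_constraints, relaxation.at_most_ones and
//   // relaxation.cut_generators are typically used to set up the LP
//   // propagators of the search.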

}  // namespace sat
}  // namespace operations_research

#endif  // OR_TOOLS_SAT_LINEAR_RELAXATION_H_