Google OR-Tools v9.14
a fast and portable software suite for combinatorial optimization
primary_variables.cc
1// Copyright 2010-2025 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14#include "ortools/sat/primary_variables.h"
15
16#include <algorithm>
17#include <cstdint>
18#include <cstdlib>
19#include <deque>
20#include <limits>
21#include <tuple>
22#include <vector>
23
24#include "absl/algorithm/container.h"
25#include "absl/container/btree_map.h"
26#include "absl/container/btree_set.h"
27#include "absl/log/check.h"
28#include "absl/types/span.h"
29#include "ortools/sat/cp_model.pb.h"
30#include "ortools/sat/cp_model_utils.h"
31#include "ortools/util/bitset.h"
32#include "ortools/util/sorted_interval_list.h"
33
34namespace operations_research {
35namespace sat {
36
37// Declares that if all variables in input_vars and all but one of the
38// variables in deducible_vars are known, then we can deduce the missing one.
39// No variable should appear twice in the inputs.
40//
41// For example, if we have:
42// u + z + x = lin_max(x + 3, y)
43//
44// This function will assign:
45// deducible_vars = {u, z}
46// input_vars = {x, y}
47//
48// This declares that, for example, if x, y and u are all known, then z is
49// known. On the other hand, if everything is known except x, then we can't
50// deduce anything, since for some values of u, z and y, the constraint can be
51// simplified to x + 3 = x + 3.
52void GetRelationshipForConstraint(const ConstraintProto& ct,
53 absl::btree_set<int>* deducible_vars,
54 absl::btree_set<int>* input_vars,
55 int* preferred_to_deduce) {
56 deducible_vars->clear();
57 input_vars->clear();
58 *preferred_to_deduce = -1;
59 switch (ct.constraint_case()) {
60 case ConstraintProto::kLinear: {
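      // Only an unenforced linear equality (single-value domain) is usable:
      // any variable with a nonzero coefficient can then be deduced once all
      // the others are known.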
61 if (ReadDomainFromProto(ct.linear()).Size() != 1) {
62 return;
63 }
64 if (!ct.enforcement_literal().empty()) {
65 return;
66 }
67 for (int i = 0; i < ct.linear().vars_size(); ++i) {
68 if (ct.linear().coeffs(i) == 0) continue;
69 deducible_vars->insert(ct.linear().vars(i));
70 }
71 return;
72 }
73 case ConstraintProto::kLinMax: {
74 // We can deduce only the variables that are only in the target.
75 for (int i = 0; i < ct.lin_max().target().vars_size(); ++i) {
76 if (ct.lin_max().target().coeffs(i) == 0) continue;
77 deducible_vars->insert(ct.lin_max().target().vars(i));
78 }
79 for (const auto& expr : ct.lin_max().exprs()) {
80 for (const int var : expr.vars()) {
81 input_vars->insert(var);
82 }
83 }
84 for (const int var : *input_vars) {
85 deducible_vars->erase(var);
86 }
87 return;
88 }
89 case ConstraintProto::kIntProd: {
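      // For a product, only target variables that appear exactly once in the
      // whole constraint are deducible; operand variables are kept as inputs
      // (see the TODO about 0 = 0 * x below).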
90 absl::btree_map<int, int> variable_appearance_count;
91 for (const int var : ct.int_prod().target().vars()) {
92 variable_appearance_count[var]++;
93 }
94 for (const auto& expr : ct.int_prod().exprs()) {
95 for (const int var : expr.vars()) {
96 variable_appearance_count[var]++;
97 }
98 }
99 for (int i = 0; i < ct.int_prod().target().vars_size(); ++i) {
100 if (ct.int_prod().target().coeffs(i) == 0) continue;
101 const int var = ct.int_prod().target().vars(i);
102 if (variable_appearance_count[var] == 1) {
103 deducible_vars->insert(var);
104 }
105 }
106 for (const auto& expr : ct.int_prod().exprs()) {
107 for (int i = 0; i < expr.vars_size(); ++i) {
108 const int var = expr.vars(i);
109 if (variable_appearance_count[var] == 1) {
110 // We might be tempted to make the variable deducible if the
111 // coefficient is 1, but we risk trying to deduce x from 0 = 0 * x.
112 // TODO(user): do it when the target domain doesn't include 0,
113 // but use preferred_to_deduce to prefer the target.
114 input_vars->insert(var);
115 }
116 }
117 }
118 for (const auto& [var, count] : variable_appearance_count) {
119 if (count != 1) {
120 input_vars->insert(var);
121 }
122 }
123 return;
124 }
125 case ConstraintProto::kExactlyOne: {
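      // For an exactly_one, any single unknown literal is determined by the
      // others: it must be true iff all the others are false.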
126 for (const int lit : ct.exactly_one().literals()) {
127 deducible_vars->insert(PositiveRef(lit));
128 }
129 return;
130 }
131 default:
132 break;
133 }
134}
135
136void CreateLinMaxFromLinearsAndObjective(
137 const CpModelProto& model, int var_for_target,
138 absl::Span<const int> linear_constraint_indexes,
139 bool var_in_objective_is_negative, ConstraintProto* new_constraint) {
140 // A variable that is in the objective with a positive coefficient and only
141// appears in inequalities will be at the lowest value that is greater than
142// or equal to the variable domain lower bound and that does not violate any
143 // bound coming from the inequalities. A similar reasoning applies for a
144 // variable with a negative coefficient in the objective.
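  // For instance, with the usual minimization objective, if x has a positive
  // coefficient, a domain lower bound of 0 and appears only in 3 <= x + y,
  // then the best choice is x = max(0, 3 - y), which is the lin_max built
  // below.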
145 LinearArgumentProto& lin_max = *new_constraint->mutable_lin_max();
146 lin_max.mutable_target()->add_coeffs(var_in_objective_is_negative ? -1 : 1);
147 lin_max.mutable_target()->add_vars(var_for_target);
148 const Domain var_domain =
149 ReadDomainFromProto(model.variables(var_for_target));
150 // Add the bound coming from the variable domain.
151 if (var_in_objective_is_negative) {
152 lin_max.add_exprs()->set_offset(-var_domain.Max());
153 } else {
154 lin_max.add_exprs()->set_offset(var_domain.Min());
155 }
156
157 for (const int c : linear_constraint_indexes) {
158 const LinearConstraintProto& lin = model.constraints(c).linear();
159 int64_t coeff = 0;
160 for (int i = 0; i < lin.coeffs_size(); ++i) {
161 if (lin.vars(i) == var_for_target) {
162 coeff = lin.coeffs(i);
163 break;
164 }
165 }
166 LinearExpressionProto* expr = lin_max.add_exprs();
167 DCHECK_EQ(lin.domain_size(), 2);
168 const int64_t expr_min = lin.domain(0);
169 const int64_t expr_max = lin.domain(1);
170 if ((coeff < 0) == var_in_objective_is_negative) {
171 expr->set_offset(expr_min);
172 } else {
173 expr->set_offset(-expr_max);
174 }
175 const int64_t multiplier =
176 ((coeff < 0) == var_in_objective_is_negative) ? -1 : 1;
177 for (int i = 0; i < lin.coeffs_size(); ++i) {
178 if (lin.vars(i) == var_for_target) {
179 continue;
180 }
181 expr->add_vars(lin.vars(i));
182 expr->add_coeffs(multiplier * lin.coeffs(i));
183 }
184 }
185}
186
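// Returns true if the objective domain restricts the objective value beyond
// the bounds trivially implied by the variable domains.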
187bool IsObjectiveConstraining(const CpModelProto& model) {
188 if (!model.has_objective()) {
189 return false;
190 }
191 if (model.objective().domain().empty()) {
192 return false;
193 }
194 if (model.objective().domain_size() > 2) {
195 return true;
196 }
197 int64_t lb = 0;
198 int64_t ub = 0;
199 for (int i = 0; i < model.objective().vars_size(); ++i) {
200 const int var = model.objective().vars(i);
201 lb += model.objective().coeffs(i) * model.variables(var).domain(0);
202 ub += model.objective().coeffs(i) *
203 model.variables(var).domain(model.variables(var).domain_size() - 1);
204 }
205 if (model.objective().domain(0) > lb || model.objective().domain(1) < ub) {
206 return true;
207 }
208 return false;
209}
210
211VariableRelationships ComputeVariableRelationships(const CpModelProto& model) {
212  VariableRelationships result;
213  Bitset64<int> var_is_secondary(model.variables_size());
214 Bitset64<int> var_is_primary(model.variables_size());
215 std::vector<int> num_times_variable_appears_as_input(model.variables_size());
216 std::vector<int> num_times_variable_appears_as_deducible(
217 model.variables_size());
218 std::vector<int> num_times_variable_appears_as_preferred_to_deduce(
219 model.variables_size());
220
221 struct ConstraintData {
222 // These sets will hold only the "undecided" variables. When a variable
223 // is marked as primary or secondary, it will be removed.
224 absl::btree_set<int> deducible_vars;
225 absl::btree_set<int> input_vars;
226
227 int preferred_to_deduce;
228
229 // If a variable participates in the model only via linear inequalities and
230 // the objective, and *all* the other variables in those inequalities are
231 // already tagged as primary or secondary, then this variable can be flagged
232 // as a secondary variable and can be computed as a lin_max of the others.
233 bool is_linear_inequality;
234 };
235
236 std::vector<ConstraintData> constraint_data(model.constraints_size());
237 std::vector<std::vector<int>> constraints_per_var(model.variables_size());
238
239 Bitset64<int> var_appears_only_in_objective_and_linear(
240 model.variables_size());
241 Bitset64<int> var_in_objective_is_negative(model.variables_size());
242 if (!IsObjectiveConstraining(model)) {
243 // TODO(user): if we have a constraining objective, we can rewrite it as a
244 // non-constraining one + a linear constraint. But this will allow us to
245 // find at most one new secondary variable, since all the variables in the
246 // objective will be connected via this linear constraint.
247 for (int i = 0; i < model.objective().vars_size(); ++i) {
248 if (model.objective().coeffs(i) == 0) continue;
249 var_appears_only_in_objective_and_linear.Set(model.objective().vars(i));
250 if (model.objective().coeffs(i) < 0) {
251 var_in_objective_is_negative.Set(model.objective().vars(i));
252 }
253 }
254 }
255 for (int c = 0; c < model.constraints_size(); ++c) {
256 ConstraintData& data = constraint_data[c];
257 const ConstraintProto& ct = model.constraints(c);
258 data.is_linear_inequality = false;
259 GetRelationshipForConstraint(ct, &data.deducible_vars, &data.input_vars,
260 &data.preferred_to_deduce);
261 // Now prepare the data for handling the case of variables that only appear
262 // in the objective and linear inequalities. There are two cases:
263 // - either the constraint is one such linear inequality and we flag it as
264 // such;
265 // - if not, we flag all the variables used in this constraint as appearing
266 // in constraints that are not linear inequalities.
267 if (ct.constraint_case() == ConstraintProto::kLinear &&
268 data.deducible_vars.empty() && // Do not allow fixing a secondary var
269 // directly (i.e., an equality).
270 ct.enforcement_literal().empty() && ct.linear().domain_size() == 2) {
271 // This is the kind of inequality we might use for a lin_max
272 data.is_linear_inequality = true;
273 for (int i = 0; i < ct.linear().vars_size(); ++i) {
274 const int var = ct.linear().vars(i);
275 if (!var_appears_only_in_objective_and_linear.IsSet(var)) {
276 data.input_vars.insert(var);
277 continue;
278 }
279 if (ct.linear().coeffs(i) == 0) continue;
280 if (std::abs(ct.linear().coeffs(i)) == 1) {
281 data.deducible_vars.insert(var);
282 } else {
283 data.input_vars.insert(var);
284 // TODO(user): we can support non-unit coefficients to deduce a
285 // lin_max from the objective. It will become more difficult, since
286 // first we will need to compute the lcm of all coefficients (and
287 // avoid overflow). Then, we will need to build a constraint that will
288 // be div(target, lin_max(exprs) + lcm - 1, lcm).
289 var_appears_only_in_objective_and_linear.Set(var, false);
290 }
291 }
292 } else {
293 // For other kinds of constraints, tag those variables as "used elsewhere".
294 for (const int var : UsedVariables(ct)) {
295 var_appears_only_in_objective_and_linear.Set(var, false);
296 }
297 }
298 }
299
300 // In the loop above, we lazily set some variables as deducible from linear
301 // inequalities if they only appeared in the objective and linear inequalities
302 // when we saw the constraint, but we had not yet checked how they are used
303 // in subsequent constraints. Now remove those used in other constraints.
304 std::vector<int> deducible_vars_to_remove;
305 for (int c = 0; c < model.constraints_size(); ++c) {
306 deducible_vars_to_remove.clear();
307 ConstraintData& data = constraint_data[c];
308 if (!data.is_linear_inequality) {
309 continue;
310 }
311 for (const int var : data.deducible_vars) {
312 if (!var_appears_only_in_objective_and_linear.IsSet(var)) {
313 deducible_vars_to_remove.push_back(var);
314 data.input_vars.insert(var);
315 }
316 }
317 for (const int var : deducible_vars_to_remove) {
318 data.deducible_vars.erase(var);
319 }
320 if (data.deducible_vars.empty()) {
321 data.is_linear_inequality = false;
322 }
323 }
324
325 for (int c = 0; c < constraint_data.size(); ++c) {
326 ConstraintData& data = constraint_data[c];
327 if (data.deducible_vars.empty()) {
328 data.input_vars.clear();
329 continue;
330 }
331 if (data.preferred_to_deduce != -1) {
332 num_times_variable_appears_as_preferred_to_deduce
333 [data.preferred_to_deduce]++;
334 }
335 for (const int v : data.deducible_vars) {
336 constraints_per_var[v].push_back(c);
337 num_times_variable_appears_as_deducible[v]++;
338 }
339 for (const int v : data.input_vars) {
340 constraints_per_var[v].push_back(c);
341 num_times_variable_appears_as_input[v]++;
342 }
343 }
344
345 // Variables that cannot possibly be deduced using any constraint are
346 // primary. Flag them as such and strip them from our data structures.
347 for (int v = 0; v < model.variables_size(); ++v) {
348 if (num_times_variable_appears_as_deducible[v] != 0) {
349 continue;
350 }
351 num_times_variable_appears_as_input[v] = 0;
352 var_is_primary.Set(v);
353 for (const int c : constraints_per_var[v]) {
354 ConstraintData& data = constraint_data[c];
355 data.deducible_vars.erase(v);
356 data.input_vars.erase(v);
357 }
358 constraints_per_var[v].clear();
359 }
360
361 // Now, for variables that only appear in the objective and linear
362 // inequalities, we count how far we are from being able to deduce their
363 // value. In practice, we count the number of linear inequalities in which
364 // this variable appears alongside another variable that we have not decided
365 // to be primary or secondary yet. When this count reaches 0, it means we can
366 // create a lin_max constraint to deduce its value.
367 std::vector<int> count_of_unresolved_linear_inequalities_per_var(
368 model.variables_size());
369 for (int c = 0; c < model.constraints_size(); ++c) {
370 ConstraintData& data = constraint_data[c];
371 if (!data.is_linear_inequality) {
372 continue;
373 }
374 if (data.input_vars.size() + data.deducible_vars.size() > 1) {
375 for (const int v : data.input_vars) {
376 count_of_unresolved_linear_inequalities_per_var[v]++;
377 }
378 for (const int v : data.deducible_vars) {
379 count_of_unresolved_linear_inequalities_per_var[v]++;
380 }
381 }
382 }
383 // Now do a greedy heuristic: we will take each variable in a particular
384 // order, and if the variable can be deduced from other variables that we have
385 // already decided to declare as primary or secondary, we will mark it as
386 // secondary. Otherwise we will mark it as primary. In any case, after we do
387 // that, we will look at all the constraints that use this variable and see
388 // whether that allows us to deduce another variable. If yes, mark the
389 // variable that can be deduced as secondary, look at the constraints that
390 // use it, and so on until we reach a fixed point. The heuristic for the
391 // order is to try to process first the variables that are more "useful" to
392 // be marked as primary, which lets us mark more variables as secondary later.
393 std::deque<int> vars_queue;
394 for (int v = 0; v < model.variables_size(); ++v) {
395 if (var_is_primary.IsSet(v) || var_is_secondary.IsSet(v)) {
396 continue;
397 }
398 vars_queue.push_back(v);
399 }
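  // Heuristic order: variables that appear more often as inputs than as
  // deducible come first; deciding them early (typically as primary) tends to
  // unlock more secondary variables later.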
400 absl::c_stable_sort(vars_queue, [&](int a, int b) {
401 const int a_diff_usage = num_times_variable_appears_as_deducible[a] -
402 num_times_variable_appears_as_input[a];
403 const int b_diff_usage = num_times_variable_appears_as_deducible[b] -
404 num_times_variable_appears_as_input[b];
405 return std::make_tuple(
406 a_diff_usage,
407 -num_times_variable_appears_as_preferred_to_deduce[a],
408 -num_times_variable_appears_as_deducible[a]) <
409 std::make_tuple(
410 b_diff_usage,
411 -num_times_variable_appears_as_preferred_to_deduce[b],
412 -num_times_variable_appears_as_deducible[b]);
413 });
414 std::vector<int> constraints_to_check;
415 while (!vars_queue.empty()) {
416 const int v = vars_queue.front();
417 vars_queue.pop_front();
418 if (var_is_secondary.IsSet(v) || var_is_primary.IsSet(v)) {
419 continue;
420 }
421 // First, we will decide whether we should mark `v` as secondary or primary
422 // using the equality constraints.
423 for (const int c : constraints_per_var[v]) {
424 ConstraintData& data = constraint_data[c];
425 if (data.is_linear_inequality) {
426 continue;
427 }
428 if (data.deducible_vars.size() + data.input_vars.size() != 1) {
429 // This constraint either still involves other undecided variables or
430 // was already discarded, so we cannot deduce the value of `v` from it.
431 continue;
432 }
433
434 // There is a single undecided variable for this constraint. Thus `v` is
435 // either an input or a deducible variable. Let's check.
436 if (data.deducible_vars.empty()) {
437 // This is a strange case, like `z = lin_max(x, y)`, where `z` and `y`
438 // are primary (we cannot deduce `x`). Let's just flag this constraint
439 // as useless from now on.
440 data.input_vars.clear();
441 continue;
442 }
443 var_is_secondary.Set(v);
444 result.secondary_variables.push_back(v);
445 result.dependency_resolution_constraint.push_back(model.constraints(c));
446 result.redundant_constraint_indices.push_back(c);
447 break;
448 }
449
450 // We couldn't deduce the value of `v` from any constraint; check whether
451 // it only appears in linear inequalities.
452 if (!var_is_secondary.IsSet(v)) {
453 if (var_appears_only_in_objective_and_linear.IsSet(v) &&
454 count_of_unresolved_linear_inequalities_per_var[v] == 0) {
455 var_is_secondary.Set(v);
456 result.secondary_variables.push_back(v);
457 result.dependency_resolution_constraint.emplace_back();
458 CreateLinMaxFromLinearsAndObjective(
459 model, v, constraints_per_var[v],
460 var_in_objective_is_negative.IsSet(v),
461 &result.dependency_resolution_constraint.back());
462 // TODO(user): set result.redundant_constraint_indices?
463 } else {
463 // We can't deduce the value of `v` from what we have so far, so flag
464 // it as primary.
466 var_is_primary.Set(v);
467 }
468 }
469
470 // At this point we have decided to make `v` primary or secondary, so we
471 // can remove it from the model and maybe lazily deduce some variables.
472 auto update_constraints_after_var_is_decided = [&](int v) {
473 for (const int c : constraints_per_var[v]) {
474 ConstraintData& data = constraint_data[c];
475 data.deducible_vars.erase(v);
476 data.input_vars.erase(v);
477 if (data.input_vars.size() + data.deducible_vars.size() != 1) {
478 // Either everything in this constraint is already decided, or at
479 // least two variables are still undecided; in both cases we cannot
480 // deduce a new variable from it right now.
481 continue;
482 }
483 if (data.is_linear_inequality && data.deducible_vars.size() == 1) {
484 const int deducible_var = *data.deducible_vars.begin();
485 count_of_unresolved_linear_inequalities_per_var[deducible_var]--;
486 if (count_of_unresolved_linear_inequalities_per_var[deducible_var] ==
487 0) {
488 // Now we can deduce a new variable from linears, process it ASAP!
489 vars_queue.push_front(deducible_var);
490 }
491 } else {
492 if (data.deducible_vars.empty()) {
493 // Same as the test for the case `z = lin_max(x, y)` above.
494 data.input_vars.clear();
495 } else {
496 // This constraint fixes a secondary variable, enqueue it!
497 constraints_to_check.push_back(c);
498 }
499 }
500 }
501 };
502
503 // In any case, this variable is now decided, so we update the number of
504 // undecided variables in all the constraints that use it.
505 DCHECK(constraints_to_check.empty());
506 update_constraints_after_var_is_decided(v);
507
508 // Now, deduce everything that becomes trivially deducible until we reach a
509 // fixed point.
510 while (!constraints_to_check.empty()) {
511 const int c = constraints_to_check.back();
512 constraints_to_check.pop_back();
513 ConstraintData& data = constraint_data[c];
514 DCHECK_LE(data.deducible_vars.size(), 1);
515 if (data.deducible_vars.size() != 1) {
516 continue;
517 }
518 const int single_deducible_var = *data.deducible_vars.begin();
519 var_is_secondary.Set(single_deducible_var);
520 update_constraints_after_var_is_decided(single_deducible_var);
521 result.secondary_variables.push_back(single_deducible_var);
522 result.dependency_resolution_constraint.push_back(model.constraints(c));
523 result.redundant_constraint_indices.push_back(c);
524 }
525 }
526
527 for (int i = 0; i < result.secondary_variables.size(); ++i) {
528 const int var = result.secondary_variables[i];
529 ConstraintData data;
530 const ConstraintProto& ct = result.dependency_resolution_constraint[i];
531 GetRelationshipForConstraint(ct, &data.deducible_vars, &data.input_vars,
532 &data.preferred_to_deduce);
533 for (const int v : data.input_vars) {
534 if (var_is_secondary.IsSet(v)) {
535 result.variable_dependencies.push_back({var, v});
536 }
537 }
538 for (const int v : data.deducible_vars) {
539 if (var_is_secondary.IsSet(v) && v != var) {
540 result.variable_dependencies.push_back({var, v});
541 }
542 }
543 }
544 return result;
545}
546
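// Fills in the values of the secondary variables in `solution`, assuming all
// primary variables already have their final values. Secondary variables are
// processed in the order in which they were discovered, so each resolution
// constraint only reads values that are already known.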
547bool ComputeAllVariablesFromPrimaryVariables(
548 const CpModelProto& model, const VariableRelationships& relationships,
549 std::vector<int64_t>* solution) {
550 Bitset64<int> dependent_variables_set(model.variables_size());
551 for (const int var : relationships.secondary_variables) {
552 dependent_variables_set.Set(var);
553 }
554 for (int i = 0; i < relationships.secondary_variables.size(); ++i) {
555 const int var = relationships.secondary_variables[i];
556 const ConstraintProto& ct =
557 relationships.dependency_resolution_constraint[i];
558 switch (ct.constraint_case()) {
559 case ConstraintProto::kLinear: {
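        // Solve the linear equality for `var`:
        // var = (rhs - sum of the other terms) / coeff_of_var.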
560 const LinearConstraintProto& linear = ct.linear();
561 DCHECK_EQ(ReadDomainFromProto(linear).Size(), 1);
562 int64_t sum_of_free_variables = ReadDomainFromProto(linear).Min();
563 int64_t coeff_of_var = 1;
564 for (int j = 0; j < linear.vars_size(); ++j) {
565 if (linear.vars(j) == var) {
566 coeff_of_var = linear.coeffs(j);
567 continue;
568 }
569 DCHECK(!dependent_variables_set.IsSet(linear.vars(j)));
570 sum_of_free_variables -=
571 linear.coeffs(j) * (*solution)[linear.vars(j)];
572 }
573 (*solution)[var] = sum_of_free_variables / coeff_of_var;
574 } break;
575 case ConstraintProto::kLinMax: {
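        // Evaluate the max over all expressions, then solve the target
        // expression for `var`.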
576 int64_t max = std::numeric_limits<int64_t>::min();
577 for (const auto& expr : ct.lin_max().exprs()) {
578 int64_t expr_value = expr.offset();
579 for (int j = 0; j < expr.vars_size(); ++j) {
580 DCHECK(!dependent_variables_set.IsSet(expr.vars(j)));
581 expr_value += expr.coeffs(j) * (*solution)[expr.vars(j)];
582 }
583 max = std::max(max, expr_value);
584 }
585 const LinearExpressionProto& target = ct.lin_max().target();
586 int64_t coeff_of_var = 1;
587 for (int j = 0; j < target.vars_size(); ++j) {
588 if (target.vars(j) == var) {
589 coeff_of_var = target.coeffs(j);
590 continue;
591 }
592 DCHECK(!dependent_variables_set.IsSet(target.vars(j)));
593 max -= target.coeffs(j) * (*solution)[target.vars(j)];
594 }
595 max -= target.offset();
596 (*solution)[var] = max / coeff_of_var;
597 break;
598 }
599 case ConstraintProto::kIntProd: {
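        // Evaluate the product of all expressions, then solve the target
        // expression for `var`.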
600 int64_t product = 1;
601 for (const auto& expr : ct.int_prod().exprs()) {
602 int64_t expr_value = expr.offset();
603 for (int j = 0; j < expr.vars_size(); ++j) {
604 DCHECK(!dependent_variables_set.IsSet(expr.vars(j)));
605 expr_value += (*solution)[expr.vars(j)] * expr.coeffs(j);
606 }
607 product *= expr_value;
608 }
609 int64_t coeff_of_var = 1;
610 const LinearExpressionProto& target = ct.int_prod().target();
611 for (int j = 0; j < target.vars_size(); ++j) {
612 if (target.vars(j) == var) {
613 coeff_of_var = target.coeffs(j);
614 continue;
615 }
616 DCHECK(!dependent_variables_set.IsSet(target.vars(j)));
617 product -= target.coeffs(j) * (*solution)[target.vars(j)];
618 }
619 product -= target.offset();
620 (*solution)[var] = product / coeff_of_var;
621 } break;
622 case ConstraintProto::kExactlyOne: {
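        // Set `var` so that exactly one literal of the constraint is true.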
623 (*solution)[var] = 0;
624 int sum = 0;
625 for (const int lit : ct.exactly_one().literals()) {
626 const int positive_ref = PositiveRef(lit);
627 DCHECK(positive_ref == var ||
628 !dependent_variables_set.IsSet(positive_ref));
629 sum += RefIsPositive(lit) ? (*solution)[positive_ref]
630 : 1 - (*solution)[positive_ref];
631 }
632 if (sum != 1) {
633 (*solution)[var] ^= 1;
634 }
635 } break;
636 default:
637 break;
638 }
639 dependent_variables_set.Set(var, false);
640 }
641 return true;
642}
643
644} // namespace sat
645} // namespace operations_research
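Below is a minimal usage sketch (not part of the library) showing how the two entry points defined above could be combined by a caller. The header path and the wrapper function name are assumptions for illustration; only ComputeVariableRelationships, ComputeAllVariablesFromPrimaryVariables and the VariableRelationships struct come from this file.

#include <cstdint>
#include <vector>

#include "ortools/sat/cp_model.pb.h"
#include "ortools/sat/primary_variables.h"  // Assumed header for this file.

namespace operations_research::sat {

// Hypothetical helper: given a solution vector in which the primary variables
// already have their final values, compute the secondary variables.
bool FillSecondaryVariables(const CpModelProto& model,
                            std::vector<int64_t>* solution) {
  const VariableRelationships relationships =
      ComputeVariableRelationships(model);
  // Secondary variables are filled in discovery order, so each resolution
  // constraint only reads values that are already known.
  return ComputeAllVariablesFromPrimaryVariables(model, relationships,
                                                 solution);
}

}  // namespace operations_research::sat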