Google OR-Tools v9.12
a fast and portable software suite for combinatorial optimization
Loading...
Searching...
No Matches
cp_model_search.cc
Go to the documentation of this file.
1// Copyright 2010-2025 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
15
16#include <algorithm>
17#include <cstddef>
18#include <cstdint>
19#include <functional>
20#include <limits>
21#include <string>
22#include <utility>
23#include <vector>
24
25#include "absl/algorithm/container.h"
26#include "absl/container/flat_hash_map.h"
27#include "absl/container/flat_hash_set.h"
28#include "absl/log/check.h"
29#include "absl/random/distributions.h"
30#include "absl/strings/str_cat.h"
31#include "absl/strings/string_view.h"
32#include "absl/types/span.h"
34#include "ortools/sat/cp_model.pb.h"
37#include "ortools/sat/integer.h"
41#include "ortools/sat/model.h"
43#include "ortools/sat/sat_parameters.pb.h"
44#include "ortools/sat/util.h"
46
47namespace operations_research {
48namespace sat {
49
// CpModelView constructor (the signature line is elided in this listing;
// it takes the Model* used below). Caches references to the solver
// components owned by the model so the accessors below can translate
// proto variable indices to internal solver state.
51 : mapping_(*model->GetOrCreate<CpModelMapping>()),
52 boolean_assignment_(model->GetOrCreate<Trail>()->Assignment()),
53 integer_trail_(*model->GetOrCreate<IntegerTrail>()),
54 integer_encoder_(*model->GetOrCreate<IntegerEncoder>()),
55 random_(*model->GetOrCreate<ModelRandomGenerator>()) {}
56
57int CpModelView::NumVariables() const { return mapping_.NumProtoVariables(); }
58
59bool CpModelView::IsFixed(int var) const {
60 if (mapping_.IsBoolean(var)) {
61 return boolean_assignment_.VariableIsAssigned(
62 mapping_.Literal(var).Variable());
63 } else if (mapping_.IsInteger(var)) {
64 return integer_trail_.IsFixed(mapping_.Integer(var));
65 }
66 return true; // Default.
67}
68
69int64_t CpModelView::Min(int var) const {
70 if (mapping_.IsBoolean(var)) {
71 const Literal l = mapping_.Literal(var);
72 return boolean_assignment_.LiteralIsTrue(l) ? 1 : 0;
73 } else if (mapping_.IsInteger(var)) {
74 return integer_trail_.LowerBound(mapping_.Integer(var)).value();
75 }
76 return 0; // Default.
77}
78
79int64_t CpModelView::Max(int var) const {
80 if (mapping_.IsBoolean(var)) {
81 const Literal l = mapping_.Literal(var);
82 return boolean_assignment_.LiteralIsFalse(l) ? 0 : 1;
83 } else if (mapping_.IsInteger(var)) {
84 return integer_trail_.UpperBound(mapping_.Integer(var)).value();
85 }
86 return 0; // Default.
87}
88
// GreaterOrEqual(var, value): builds the branching decision "var >= value".
// NOTE(review): the signature line and the local `result` declaration are
// elided in this listing; `result` is presumably a default-constructed
// BooleanOrIntegerLiteral — confirm against the full source.
90 int64_t value) const {
91 DCHECK(!IsFixed(var));
93 if (mapping_.IsBoolean(var)) {
94 DCHECK(value == 0 || value == 1);
// For a Boolean, "var >= 1" is the positive literal; "var >= 0" is a
// no-op, so result is left unset in that case.
95 if (value == 1) {
96 result.boolean_literal_index = mapping_.Literal(var).Index();
97 }
98 } else if (mapping_.IsInteger(var)) {
// The call itself is elided here; only the arguments of the integer
// literal construction are visible.
100 mapping_.Integer(var), IntegerValue(value));
101 }
102 return result;
103}
104
// LowerOrEqual(var, value): builds the branching decision "var <= value".
// NOTE(review): the signature line and the local `result` declaration are
// elided in this listing — confirm against the full source.
106 int64_t value) const {
107 DCHECK(!IsFixed(var));
109 if (mapping_.IsBoolean(var)) {
110 DCHECK(value == 0 || value == 1);
// For a Boolean, "var <= 0" is the negated literal; "var <= 1" is a
// no-op, so result is left unset in that case.
111 if (value == 0) {
112 result.boolean_literal_index = mapping_.Literal(var).NegatedIndex();
113 }
114 } else if (mapping_.IsInteger(var)) {
115 result.integer_literal = IntegerLiteral::LowerOrEqual(mapping_.Integer(var),
116 IntegerValue(value));
117 }
118 return result;
119}
120
// MedianValue(var): builds a decision around the median of the variable's
// full domain encoding.
// NOTE(review): the signature line and the local `result` declaration are
// elided in this listing — confirm against the full source.
122 DCHECK(!IsFixed(var));
124 if (mapping_.IsBoolean(var)) {
// For a Boolean we branch on the negative literal.
125 result.boolean_literal_index = mapping_.Literal(var).NegatedIndex();
126 } else if (mapping_.IsInteger(var)) {
127 const IntegerVariable variable = mapping_.Integer(var);
// Requires the variable to be fully encoded: one literal per value.
128 const std::vector<ValueLiteralPair> encoding =
129 integer_encoder_.FullDomainEncoding(variable);
130
131 // 5 values -> returns the second.
132 // 4 values -> returns the second too.
133 // Array is 0 based.
134 const int target = (static_cast<int>(encoding.size()) + 1) / 2 - 1;
135 result.boolean_literal_index = encoding[target].literal.Index();
136 }
137 return result;
138}
139
// RandomSplit(var, lb, ub): with probability 1/2 branches on the lower half
// of [lb, ub], otherwise on the upper half (random polarity for Booleans).
// NOTE(review): the signature line, the `result` declaration, and the two
// IntegerLiteral construction calls are elided in this listing — only their
// argument lines are visible. Confirm against the full source.
141 int64_t ub) const {
142 DCHECK(!IsFixed(var));
144 if (mapping_.IsBoolean(var)) {
// Random polarity for a Boolean variable.
145 if (absl::Bernoulli(random_, 0.5)) {
146 result.boolean_literal_index = mapping_.Literal(var).Index();
147 } else {
148 result.boolean_literal_index = mapping_.Literal(var).NegatedIndex();
149 }
150 } else if (mapping_.IsInteger(var)) {
151 if (absl::Bernoulli(random_, 0.5)) {
153 mapping_.Integer(var), IntegerValue(lb + (ub - lb) / 2));
154 } else {
156 mapping_.Integer(var), IntegerValue(ub - (ub - lb) / 2));
157 }
158 }
159 return result;
160}
161
// Stores one variable and its strategy value.
struct VarValue {
  int ref;        // Variable reference.
  int64_t value;  // Strategy value associated with the variable.
};
167
168namespace {
169
170// TODO(user): Save this somewhere instead of recomputing it.
171bool ModelHasSchedulingConstraints(const CpModelProto& cp_model_proto) {
172 for (const ConstraintProto& ct : cp_model_proto.constraints()) {
173 if (ct.constraint_case() == ConstraintProto::kNoOverlap) return true;
174 if (ct.constraint_case() == ConstraintProto::kCumulative) return true;
175 }
176 return false;
177}
178
179void AddExtraSchedulingPropagators(SatParameters& new_params) {
180 new_params.set_exploit_all_precedences(true);
181 new_params.set_use_hard_precedences_in_cumulative(true);
182 new_params.set_use_overload_checker_in_cumulative(true);
183 new_params.set_use_strong_propagation_in_disjunctive(true);
184 new_params.set_use_timetable_edge_finding_in_cumulative(true);
185 new_params.set_use_conservative_scale_overload_checker(true);
186 new_params.set_max_pairs_pairwise_reasoning_in_no_overlap_2d(5000);
187 new_params.set_use_timetabling_in_no_overlap_2d(true);
188 new_params.set_use_energetic_reasoning_in_no_overlap_2d(true);
189 new_params.set_use_area_energetic_reasoning_in_no_overlap_2d(true);
190 new_params.set_use_try_edge_reasoning_in_no_overlap_2d(true);
191}
192
// We want a random tie breaking among variables with equivalent values.
struct NoisyInteger {
  int64_t value;
  double noise;

  // Lexicographic order on (value, noise): ties on `value` are broken by
  // `noise`.
  bool operator<=(const NoisyInteger& other) const {
    if (value != other.value) return value < other.value;
    return noise <= other.noise;
  }
  bool operator>(const NoisyInteger& other) const {
    if (value != other.value) return value > other.value;
    return noise > other.noise;
  }
};
206
207} // namespace
208
// ConstructUserSearchStrategy: builds a decision heuristic that follows the
// DecisionStrategyProto list of the model, or returns nullptr when none is
// specified. NOTE(review): the signature line (and the trailing fallback
// return of the lambda) are elided in this listing — confirm against the
// full source.
210 const CpModelProto& cp_model_proto, Model* model) {
211 if (cp_model_proto.search_strategy().empty()) return nullptr;
212
213 std::vector<DecisionStrategyProto> strategies;
214 for (const DecisionStrategyProto& proto : cp_model_proto.search_strategy()) {
215 strategies.push_back(proto);
216 }
217 const auto& view = *model->GetOrCreate<CpModelView>();
218 const auto& parameters = *model->GetOrCreate<SatParameters>();
219 auto* random = model->GetOrCreate<ModelRandomGenerator>();
220
221 // Note that we copy strategies to keep the return function validity
222 // independently of the life of the passed vector.
223 return [&view, &parameters, random, strategies]() {
224 for (const DecisionStrategyProto& strategy : strategies) {
225 int candidate_ref = -1;
226 int64_t candidate_value = std::numeric_limits<int64_t>::max();
227
228 // TODO(user): Improve the complexity if this becomes an issue which
229 // may be the case if we do a fixed_search.
230
231 // To store equivalent variables in randomized search.
232 const bool randomize_decision =
233 parameters.search_random_variable_pool_size() > 1;
234 TopN<int, NoisyInteger> top_variables(
235 randomize_decision ? parameters.search_random_variable_pool_size()
236 : 1);
237
238 for (const LinearExpressionProto& expr : strategy.exprs()) {
239 const int var = expr.vars(0);
240 if (view.IsFixed(var)) continue;
241
242 int64_t coeff = expr.coeffs(0);
243 int64_t offset = expr.offset();
244 int64_t lb = view.Min(var);
245 int64_t ub = view.Max(var);
246 int ref = var;
// Normalize to a positive coefficient by negating the reference and
// swapping/negating the bounds.
247 if (coeff < 0) {
248 lb = -view.Max(var);
249 ub = -view.Min(var);
250 coeff = -coeff;
251 ref = NegatedRef(var);
252 }
253
// Smaller `value` means "more attractive" for every strategy below.
254 int64_t value(0);
255 switch (strategy.variable_selection_strategy()) {
256 case DecisionStrategyProto::CHOOSE_FIRST:
257 break;
258 case DecisionStrategyProto::CHOOSE_LOWEST_MIN:
259 value = coeff * lb + offset;
260 break;
261 case DecisionStrategyProto::CHOOSE_HIGHEST_MAX:
262 value = -(coeff * ub + offset);
263 break;
264 case DecisionStrategyProto::CHOOSE_MIN_DOMAIN_SIZE:
265 // The size of the domain is not multiplied by the coeff.
266 value = ub - lb + 1;
267 break;
268 case DecisionStrategyProto::CHOOSE_MAX_DOMAIN_SIZE:
269 // The size of the domain is not multiplied by the coeff.
270 value = -(ub - lb + 1);
271 break;
272 default:
273 LOG(FATAL) << "Unknown VariableSelectionStrategy "
274 << strategy.variable_selection_strategy();
275 }
276
277 if (randomize_decision) {
278 // We need to use -value as we want the minimum valued variables.
279 // We add a random noise to get improve the entropy.
280 const double noise = absl::Uniform(*random, 0., 1.0);
281 top_variables.Add(ref, {-value, noise});
282 candidate_value = std::min(candidate_value, value);
283 } else if (value < candidate_value) {
284 candidate_ref = ref;
285 candidate_value = value;
286 }
287
288 // We can stop scanning if the variable selection strategy is to use the
289 // first unbound variable and no randomization is needed.
290 if (strategy.variable_selection_strategy() ==
291 DecisionStrategyProto::CHOOSE_FIRST &&
292 !randomize_decision) {
293 break;
294 }
295 }
296
297 // Check if one active variable has been found.
298 if (candidate_value == std::numeric_limits<int64_t>::max()) continue;
299
300 // Pick the winner when decisions are randomized.
301 if (randomize_decision) {
302 const std::vector<int>& candidates = top_variables.UnorderedElements();
303 candidate_ref = candidates[absl::Uniform(
304 *random, 0, static_cast<int>(candidates.size()))];
305 }
306
// A negated reference flips the meaning of the domain reduction, so
// mirror the selection before branching on the positive variable.
307 DecisionStrategyProto::DomainReductionStrategy selection =
308 strategy.domain_reduction_strategy();
309 if (!RefIsPositive(candidate_ref)) {
310 switch (selection) {
311 case DecisionStrategyProto::SELECT_MIN_VALUE:
312 selection = DecisionStrategyProto::SELECT_MAX_VALUE;
313 break;
314 case DecisionStrategyProto::SELECT_MAX_VALUE:
315 selection = DecisionStrategyProto::SELECT_MIN_VALUE;
316 break;
317 case DecisionStrategyProto::SELECT_LOWER_HALF:
318 selection = DecisionStrategyProto::SELECT_UPPER_HALF;
319 break;
320 case DecisionStrategyProto::SELECT_UPPER_HALF:
321 selection = DecisionStrategyProto::SELECT_LOWER_HALF;
322 break;
323 default:
324 break;
325 }
326 }
327
328 const int var = PositiveRef(candidate_ref);
329 const int64_t lb = view.Min(var);
330 const int64_t ub = view.Max(var);
331 switch (selection) {
332 case DecisionStrategyProto::SELECT_MIN_VALUE:
333 return view.LowerOrEqual(var, lb);
334 case DecisionStrategyProto::SELECT_MAX_VALUE:
335 return view.GreaterOrEqual(var, ub);
336 case DecisionStrategyProto::SELECT_LOWER_HALF:
337 return view.LowerOrEqual(var, lb + (ub - lb) / 2);
338 case DecisionStrategyProto::SELECT_UPPER_HALF:
339 return view.GreaterOrEqual(var, ub - (ub - lb) / 2);
340 case DecisionStrategyProto::SELECT_MEDIAN_VALUE:
341 return view.MedianValue(var);
342 case DecisionStrategyProto::SELECT_RANDOM_HALF:
343 return view.RandomSplit(var, lb, ub);
344 default:
345 LOG(FATAL) << "Unknown DomainReductionStrategy "
346 << strategy.domain_reduction_strategy();
347 }
348 }
// NOTE(review): the lambda's final fallback return statement is elided in
// this listing.
350 };
351}
352
353// TODO(user): Implement a routing search.
// ConstructHeuristicSearchStrategy: scheduling models get a dedicated
// (optionally precedence-augmented) scheduling heuristic; everything else
// falls back to pseudo-cost branching. NOTE(review): the signature line
// (354) and the GetOrCreate statement at line 375 are elided in this
// listing — confirm against the full source.
355 const CpModelProto& cp_model_proto, Model* model) {
356 if (ModelHasSchedulingConstraints(cp_model_proto)) {
357 std::vector<std::function<BooleanOrIntegerLiteral()>> heuristics;
358 const auto& params = *model->GetOrCreate<SatParameters>();
359 bool possible_new_constraints = false;
360 if (params.use_dynamic_precedence_in_disjunctive()) {
361 possible_new_constraints = true;
362 heuristics.push_back(DisjunctivePrecedenceSearchHeuristic(model));
363 }
364 if (params.use_dynamic_precedence_in_cumulative()) {
365 possible_new_constraints = true;
366 heuristics.push_back(CumulativePrecedenceSearchHeuristic(model));
367 }
368
369 // Tricky: we need to create this at level zero in case there are no linear
370 // constraint in the model at the beginning.
371 //
372 // TODO(user): Alternatively, support creation of SatPropagator at positive
373 // level.
374 if (possible_new_constraints && params.new_linear_propagation()) {
376 }
377
// The scheduling heuristic is the last (fallback) entry of the sequence.
378 heuristics.push_back(SchedulingSearchHeuristic(model));
379 return SequentialSearch(std::move(heuristics));
380 }
381 return PseudoCost(model);
382}
383
// Completion strategy used after the main search: instantiate all remaining
// variables at their minimum, prioritizing the objective direction.
// NOTE(review): the function-name line (385) is elided in this listing —
// this is the integer-completion strategy constructor; confirm against the
// full source.
384std::function<BooleanOrIntegerLiteral()>
386 absl::Span<const IntegerVariable> variable_mapping,
387 IntegerVariable objective_var, Model* model) {
388 const auto& params = *model->GetOrCreate<SatParameters>();
// When full instantiation is not requested, return a no-op heuristic.
389 if (!params.instantiate_all_variables()) {
390 return []() { return BooleanOrIntegerLiteral(); };
391 }
392
393 std::vector<IntegerVariable> decisions;
394 for (const IntegerVariable var : variable_mapping) {
395 if (var == kNoIntegerVariable) continue;
396
397 // Make sure we try to fix the objective to its lowest value first.
398 // TODO(user): we could also fix terms of the objective in the right
399 // direction.
400 if (var == NegationOf(objective_var)) {
401 decisions.push_back(objective_var);
402 } else {
403 decisions.push_back(var);
404 }
405 }
406 return FirstUnassignedVarAtItsMinHeuristic(decisions, model);
407}
408
409// Constructs a search strategy that follow the hint from the model.
// NOTE(review): the signature line (410) and the declaration of the local
// `var` (line 417, presumably a BooleanOrIntegerVariable) are elided in
// this listing — confirm against the full source.
411 const CpModelProto& cp_model_proto, CpModelMapping* mapping, Model* model) {
412 std::vector<BooleanOrIntegerVariable> vars;
413 std::vector<IntegerValue> values;
414 for (int i = 0; i < cp_model_proto.solution_hint().vars_size(); ++i) {
415 const int ref = cp_model_proto.solution_hint().vars(i);
416 CHECK(RefIsPositive(ref));
// Translate the proto reference to either a Boolean or an integer view.
418 if (mapping->IsBoolean(ref)) {
419 var.bool_var = mapping->Literal(ref).Variable();
420 } else {
421 var.int_var = mapping->Integer(ref);
422 }
423 vars.push_back(var);
424 values.push_back(IntegerValue(cp_model_proto.solution_hint().values(i)));
425 }
426 return FollowHint(vars, values, model);
427}
428
// Chains the three optional heuristics (user, then model-based, then
// integer completion) into a single sequential search. NOTE(review): the
// signature line (429) is elided in this listing — confirm against the full
// source.
430 std::function<BooleanOrIntegerLiteral()> user_search,
431 std::function<BooleanOrIntegerLiteral()> heuristic_search,
432 std::function<BooleanOrIntegerLiteral()> integer_completion) {
433 // We start by the user specified heuristic.
434 std::vector<std::function<BooleanOrIntegerLiteral()>> heuristics;
435 if (user_search != nullptr) {
436 heuristics.push_back(user_search);
437 }
438 if (heuristic_search != nullptr) {
439 heuristics.push_back(heuristic_search);
440 }
441 if (integer_completion != nullptr) {
442 heuristics.push_back(integer_completion);
443 }
444
445 return SequentialSearch(heuristics);
446}
447
// Debugging wrapper around a search strategy: logs each decision taken and
// the named-variable domains that changed since the previous call.
// NOTE(review): the signature line (448) is elided in this listing —
// confirm against the full source.
449 const CpModelProto& cp_model_proto,
450 absl::Span<const IntegerVariable> variable_mapping,
451 std::function<BooleanOrIntegerLiteral()> instrumented_strategy,
452 Model* model) {
// Only named variables that have an internal mapping are displayed,
// sorted by proto name for a stable output.
453 std::vector<int> ref_to_display;
454 for (int i = 0; i < cp_model_proto.variables_size(); ++i) {
455 if (variable_mapping[i] == kNoIntegerVariable) continue;
456 if (cp_model_proto.variables(i).name().empty()) continue;
457 ref_to_display.push_back(i);
458 }
459 std::sort(ref_to_display.begin(), ref_to_display.end(), [&](int i, int j) {
460 return cp_model_proto.variables(i).name() <
461 cp_model_proto.variables(j).name();
462 });
463
// `old_domains` is captured by copy and mutated across calls (mutable
// lambda) to report only the diff since the previous decision.
464 std::vector<std::pair<int64_t, int64_t>> old_domains(variable_mapping.size());
465 return [instrumented_strategy, model, variable_mapping, &cp_model_proto,
466 old_domains, ref_to_display]() mutable {
467 const BooleanOrIntegerLiteral decision = instrumented_strategy();
468 if (!decision.HasValue()) return decision;
469
470 if (decision.boolean_literal_index != kNoLiteralIndex) {
471 const Literal l = Literal(decision.boolean_literal_index);
472 LOG(INFO) << "Boolean decision " << l;
473 const auto& encoder = model->Get<IntegerEncoder>();
474 for (const IntegerLiteral i_lit : encoder->GetIntegerLiterals(l)) {
475 LOG(INFO) << " - associated with " << i_lit;
476 }
477 for (const auto [var, value] : encoder->GetEqualityLiterals(l)) {
478 LOG(INFO) << " - associated with " << var << " == " << value;
479 }
480 } else {
481 LOG(INFO) << "Integer decision " << decision.integer_literal;
482 }
483 const int level = model->Get<Trail>()->CurrentDecisionLevel();
484 std::string to_display =
485 absl::StrCat("Diff since last call, level=", level, "\n");
486 IntegerTrail* integer_trail = model->GetOrCreate<IntegerTrail>();
487 for (const int ref : ref_to_display) {
488 const IntegerVariable var = variable_mapping[ref];
489 const std::pair<int64_t, int64_t> new_domain(
490 integer_trail->LowerBound(var).value(),
491 integer_trail->UpperBound(var).value());
492 if (new_domain != old_domains[ref]) {
493 absl::StrAppend(&to_display, cp_model_proto.variables(ref).name(), " [",
494 old_domains[ref].first, ",", old_domains[ref].second,
495 "] -> [", new_domain.first, ",", new_domain.second,
496 "]\n");
497 old_domains[ref] = new_domain;
498 }
499 }
500 LOG(INFO) << to_display;
501 return decision;
502 };
503}
504
505absl::flat_hash_map<std::string, SatParameters> GetNamedParameters(
506 SatParameters base_params) {
507 absl::flat_hash_map<std::string, SatParameters> strategies;
508
509 // By default we disable the logging when we generate a set of parameter. It
510 // is possible to force it by setting it in the corresponding named parameter
511 // via the subsolver_params field.
512 base_params.set_log_search_progress(false);
513
514 // The "default" name can be used for the base_params unchanged.
515 strategies["default"] = base_params;
516
517 // Lp variations only.
518 {
519 SatParameters new_params = base_params;
520 new_params.set_linearization_level(0);
521 strategies["no_lp"] = new_params;
522 new_params.set_linearization_level(1);
523 strategies["default_lp"] = new_params;
524 new_params.set_linearization_level(2);
525 new_params.set_add_lp_constraints_lazily(false);
526 strategies["max_lp"] = new_params;
527 new_params.set_use_symmetry_in_lp(true);
528 strategies["max_lp_sym"] = new_params;
529 }
530
531 // Core. Note that we disable the lp here because it is faster on the minizinc
532 // benchmark.
533 //
534 // TODO(user): Do more experiments, the LP with core could be useful, but we
535 // probably need to incorporate the newly created integer variables from the
536 // core algorithm into the LP.
537 {
538 SatParameters new_params = base_params;
539 new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
540 new_params.set_optimize_with_core(true);
541 new_params.set_linearization_level(0);
542 strategies["core"] = new_params;
543 }
544
545 // It can be interesting to try core and lp.
546 {
547 SatParameters new_params = base_params;
548 new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
549 new_params.set_optimize_with_core(true);
550 new_params.set_linearization_level(1);
551 strategies["core_default_lp"] = new_params;
552 }
553
554 {
555 SatParameters new_params = base_params;
556 new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
557 new_params.set_optimize_with_core(true);
558 new_params.set_linearization_level(2);
559 strategies["core_max_lp"] = new_params;
560 }
561
562 {
563 SatParameters new_params = base_params;
564 new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
565 new_params.set_optimize_with_core(true);
566 new_params.set_optimize_with_max_hs(true);
567 strategies["max_hs"] = new_params;
568 }
569
570 {
571 SatParameters new_params = base_params;
572 new_params.set_optimize_with_lb_tree_search(true);
573 // We do not want to change the objective_var lb from outside as it gives
574 // better result to only use locally derived reason in that algo.
575 new_params.set_share_objective_bounds(false);
576
577 new_params.set_linearization_level(0);
578 strategies["lb_tree_search_no_lp"] = new_params;
579
580 new_params.set_linearization_level(2);
581 if (base_params.use_dual_scheduling_heuristics()) {
582 AddExtraSchedulingPropagators(new_params);
583 }
584 // We want to spend more time on the LP here.
585 new_params.set_add_lp_constraints_lazily(false);
586 new_params.set_root_lp_iterations(100'000);
587 strategies["lb_tree_search"] = new_params;
588 }
589
590 {
591 SatParameters new_params = base_params;
592 new_params.set_use_objective_lb_search(true);
593
594 new_params.set_linearization_level(0);
595 strategies["objective_lb_search_no_lp"] = new_params;
596
597 new_params.set_linearization_level(1);
598 strategies["objective_lb_search"] = new_params;
599
600 if (base_params.use_dual_scheduling_heuristics()) {
601 AddExtraSchedulingPropagators(new_params);
602 }
603 new_params.set_linearization_level(2);
604 strategies["objective_lb_search_max_lp"] = new_params;
605 }
606
607 {
608 SatParameters new_params = base_params;
609 new_params.set_use_objective_shaving_search(true);
610 new_params.set_cp_model_presolve(true);
611 new_params.set_cp_model_probing_level(0);
612 new_params.set_symmetry_level(0);
613 if (base_params.use_dual_scheduling_heuristics()) {
614 AddExtraSchedulingPropagators(new_params);
615 }
616
617 strategies["objective_shaving"] = new_params;
618
619 new_params.set_linearization_level(0);
620 strategies["objective_shaving_no_lp"] = new_params;
621
622 new_params.set_linearization_level(2);
623 strategies["objective_shaving_max_lp"] = new_params;
624 }
625
626 {
627 SatParameters new_params = base_params;
628 new_params.set_use_variables_shaving_search(true);
629 new_params.set_cp_model_presolve(true);
630 new_params.set_cp_model_probing_level(0);
631 new_params.set_symmetry_level(0);
632 new_params.set_share_objective_bounds(false);
633 new_params.set_share_level_zero_bounds(false);
634
635 strategies["variables_shaving"] = new_params;
636
637 new_params.set_linearization_level(0);
638 strategies["variables_shaving_no_lp"] = new_params;
639
640 if (base_params.use_dual_scheduling_heuristics()) {
641 AddExtraSchedulingPropagators(new_params);
642 }
643 new_params.set_linearization_level(2);
644 strategies["variables_shaving_max_lp"] = new_params;
645 }
646
647 {
648 SatParameters new_params = base_params;
649 new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
650 new_params.set_use_probing_search(true);
651 new_params.set_at_most_one_max_expansion_size(2);
652 if (base_params.use_dual_scheduling_heuristics()) {
653 AddExtraSchedulingPropagators(new_params);
654 }
655 strategies["probing"] = new_params;
656
657 new_params.set_linearization_level(0);
658 strategies["probing_no_lp"] = new_params;
659
660 new_params.set_linearization_level(2);
661 // We want to spend more time on the LP here.
662 new_params.set_add_lp_constraints_lazily(false);
663 new_params.set_root_lp_iterations(100'000);
664 strategies["probing_max_lp"] = new_params;
665 }
666
667 // Search variation.
668 {
669 SatParameters new_params = base_params;
670 new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
671 strategies["auto"] = new_params;
672
673 new_params.set_search_branching(SatParameters::FIXED_SEARCH);
674 new_params.set_use_dynamic_precedence_in_disjunctive(false);
675 new_params.set_use_dynamic_precedence_in_cumulative(false);
676 strategies["fixed"] = new_params;
677 }
678
679 // Quick restart.
680 {
681 // TODO(user): Experiment with search_random_variable_pool_size.
682 SatParameters new_params = base_params;
683 new_params.set_search_branching(
684 SatParameters::PORTFOLIO_WITH_QUICK_RESTART_SEARCH);
685 strategies["quick_restart"] = new_params;
686
687 new_params.set_linearization_level(0);
688 strategies["quick_restart_no_lp"] = new_params;
689
690 new_params.set_linearization_level(2);
691 strategies["quick_restart_max_lp"] = new_params;
692 }
693
694 {
695 SatParameters new_params = base_params;
696 new_params.set_linearization_level(2);
697 new_params.set_search_branching(SatParameters::LP_SEARCH);
698 if (base_params.use_dual_scheduling_heuristics()) {
699 AddExtraSchedulingPropagators(new_params);
700 }
701 strategies["reduced_costs"] = new_params;
702 }
703
704 {
705 // Note: no dual scheduling heuristics.
706 SatParameters new_params = base_params;
707 new_params.set_linearization_level(2);
708 new_params.set_search_branching(SatParameters::PSEUDO_COST_SEARCH);
709 new_params.set_exploit_best_solution(true);
710 strategies["pseudo_costs"] = new_params;
711 }
712
713 // Less encoding.
714 {
715 SatParameters new_params = base_params;
716 new_params.set_boolean_encoding_level(0);
717 strategies["less_encoding"] = new_params;
718 }
719
720 // Base parameters for shared tree worker.
721 {
722 SatParameters new_params = base_params;
723 new_params.set_use_shared_tree_search(true);
724 new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
725 new_params.set_linearization_level(0);
726
727 // These settings don't make sense with shared tree search, turn them off as
728 // they can break things.
729 new_params.set_optimize_with_core(false);
730 new_params.set_optimize_with_lb_tree_search(false);
731 new_params.set_optimize_with_max_hs(false);
732
733 // Given that each workers work on a different part of the subtree, it might
734 // not be a good idea to try to work on a global shared solution.
735 //
736 // TODO(user): Experiments more here, in particular we could follow it if
737 // it falls into the current subtree.
738 new_params.set_polarity_exploit_ls_hints(false);
739
740 strategies["shared_tree"] = new_params;
741 }
742
743 // Base parameters for LNS worker.
744 {
745 SatParameters lns_params = base_params;
746 lns_params.set_stop_after_first_solution(false);
747 lns_params.set_cp_model_presolve(true);
748
749 // We disable costly presolve/inprocessing.
750 lns_params.set_use_sat_inprocessing(false);
751 lns_params.set_cp_model_probing_level(0);
752 lns_params.set_symmetry_level(0);
753 lns_params.set_find_big_linear_overlap(false);
754
755 lns_params.set_log_search_progress(false);
756 lns_params.set_debug_crash_on_bad_hint(false); // Can happen in lns.
757 lns_params.set_solution_pool_size(1); // Keep the best solution found.
758 strategies["lns"] = lns_params;
759
760 // Note that we only do this for the derived parameters. The strategy "lns"
761 // will be handled along with the other ones.
762 auto it = absl::c_find_if(
763 base_params.subsolver_params(),
764 [](const SatParameters& params) { return params.name() == "lns"; });
765 if (it != base_params.subsolver_params().end()) {
766 lns_params.MergeFrom(*it);
767 }
768
769 SatParameters lns_params_base = lns_params;
770 lns_params_base.set_linearization_level(0);
771 lns_params_base.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
772 strategies["lns_base"] = lns_params_base;
773
774 SatParameters lns_params_stalling = lns_params;
775 lns_params_stalling.set_search_branching(SatParameters::PORTFOLIO_SEARCH);
776 lns_params_stalling.set_search_random_variable_pool_size(5);
777 strategies["lns_stalling"] = lns_params_stalling;
778
779 // For routing, the LP relaxation seems pretty important, so we prefer an
780 // high linearization level to solve LNS subproblems.
781 SatParameters lns_params_routing = lns_params;
782 lns_params_routing.set_linearization_level(2);
783 lns_params_routing.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
784 strategies["lns_routing"] = lns_params_routing;
785 }
786
787 // Add user defined ones.
788 // Note that this might be merged to our default ones.
789 for (const SatParameters& params : base_params.subsolver_params()) {
790 auto it = strategies.find(params.name());
791 if (it != strategies.end()) {
792 it->second.MergeFrom(params);
793 } else {
794 // Merge the named parameters with the base parameters to create the new
795 // parameters.
796 SatParameters new_params = base_params;
797 new_params.MergeFrom(params);
798 strategies[params.name()] = new_params;
799 }
800 }
801
802 // Fix names (we don't set them above).
803 for (auto& [name, params] : strategies) {
804 params.set_name(name);
805 }
806
807 return strategies;
808}
809
810// Note: in flatzinc setting, we know we always have a fixed search defined.
811//
812// Things to try:
813// - Specialize for purely boolean problems
814// - Disable linearization_level options for non linear problems
815// - Fast restart in randomized search
816// - Different propatation levels for scheduling constraints
817std::vector<SatParameters> GetFullWorkerParameters(
818 const SatParameters& base_params, const CpModelProto& cp_model,
819 int num_already_present, SubsolverNameFilter* filter) {
820 // Defines a set of named strategies so it is easier to read in one place
821 // the one that are used. See below.
822 const auto strategies = GetNamedParameters(base_params);
823
824 // We only use a "fixed search" worker if some strategy is specified or
825 // if we have a scheduling model.
826 //
827 // TODO(user): For scheduling, this is important to find good first solution
828 // but afterwards it is not really great and should probably be replaced by a
829 // LNS worker.
830 const bool use_fixed_strategy = !cp_model.search_strategy().empty() ||
831 ModelHasSchedulingConstraints(cp_model);
832
833 // Our current set of strategies
834 //
835 // TODO(user): Avoid launching two strategies if they are the same,
836 // like if there is no lp, or everything is already linearized at level 1.
837 std::vector<std::string> names;
838
839 // Starts by adding user specified ones.
840 for (const std::string& name : base_params.extra_subsolvers()) {
841 names.push_back(name);
842 }
843
844 // We use the default if empty.
845 if (base_params.subsolvers().empty()) {
846 // Note that the order is important as the list can be truncated.
847 names.push_back("default_lp");
848 names.push_back("fixed");
849 names.push_back("core");
850 names.push_back("no_lp");
851 if (cp_model.has_symmetry()) {
852 names.push_back("max_lp_sym");
853 } else {
854 // If there is no symmetry, max_lp_sym and max_lp are the same, but
855 // we prefer the less confusing name.
856 names.push_back("max_lp");
857 }
858 names.push_back("quick_restart");
859 names.push_back("reduced_costs");
860 names.push_back("quick_restart_no_lp");
861 names.push_back("pseudo_costs");
862 names.push_back("lb_tree_search");
863 names.push_back("probing");
864 names.push_back("objective_lb_search");
865 names.push_back("objective_shaving_no_lp");
866 names.push_back("objective_shaving_max_lp");
867 names.push_back("probing_max_lp");
868 names.push_back("probing_no_lp");
869 names.push_back("objective_lb_search_no_lp");
870 names.push_back("objective_lb_search_max_lp");
871 if (cp_model.has_symmetry()) {
872 names.push_back("max_lp");
873 }
874 } else {
875 for (const std::string& name : base_params.subsolvers()) {
876 // Hack for flatzinc. At the time of parameter setting, the objective is
877 // not expanded. So we do not know if core is applicable or not.
878 if (name == "core_or_no_lp") {
879 if (!cp_model.has_objective() ||
880 cp_model.objective().vars_size() <= 1) {
881 names.push_back("no_lp");
882 } else {
883 names.push_back("core");
884 }
885 } else {
886 names.push_back(name);
887 }
888 }
889 }
890
891 // Remove the names that should be ignored.
892 int new_size = 0;
893 for (const std::string& name : names) {
894 if (filter->Keep(name)) {
895 names[new_size++] = name;
896 }
897 }
898 names.resize(new_size);
899
900 // Creates the diverse set of parameters with names and seed.
901 std::vector<SatParameters> result;
902 for (const std::string& name : names) {
903 SatParameters params = strategies.at(name);
904
905 // Do some filtering.
906 if (!use_fixed_strategy &&
907 params.search_branching() == SatParameters::FIXED_SEARCH) {
908 continue;
909 }
910
911 // TODO(user): Enable probing_search in deterministic mode.
912 // Currently it timeouts on small problems as the deterministic time limit
913 // never hits the sharding limit.
914 if (params.use_probing_search() && params.interleave_search()) continue;
915
916 // TODO(user): Enable shaving search in interleave mode.
917 // Currently it do not respect ^C, and has no per chunk time limit.
918 if ((params.use_objective_shaving_search() ||
919 params.use_variables_shaving_search()) &&
920 params.interleave_search()) {
921 continue;
922 }
923
924 // In the corner case of empty variable, lets not schedule the probing as
925 // it currently just loop forever instead of returning right away.
926 if (params.use_probing_search() && cp_model.variables().empty()) continue;
927
928 if (cp_model.has_objective() && !cp_model.objective().vars().empty()) {
929 // Disable core search if there is only 1 term in the objective.
930 if (cp_model.objective().vars().size() == 1 &&
931 params.optimize_with_core()) {
932 continue;
933 }
934
935 if (name == "less_encoding") continue;
936
937 // Disable subsolvers that do not implement the deterministic mode.
938 //
939 // TODO(user): Enable lb_tree_search in deterministic mode.
940 if (params.interleave_search() &&
941 (params.optimize_with_lb_tree_search() ||
942 params.use_objective_lb_search())) {
943 continue;
944 }
945 } else {
946 // Remove subsolvers that require an objective.
947 if (params.optimize_with_lb_tree_search()) continue;
948 if (params.optimize_with_core()) continue;
949 if (params.use_objective_lb_search()) continue;
950 if (params.use_objective_shaving_search()) continue;
951 if (params.search_branching() == SatParameters::LP_SEARCH) continue;
952 if (params.search_branching() == SatParameters::PSEUDO_COST_SEARCH) {
953 continue;
954 }
955 }
956
957 // Add this strategy.
958 params.set_name(name);
959 params.set_random_seed(CombineSeed(
960 base_params.random_seed(), static_cast<int64_t>(result.size()) + 1));
961 result.push_back(params);
962 }
963
964 // In interleaved mode, we run all of them.
965 //
966 // TODO(user): Actually make sure the gap num_workers <-> num_heuristics is
967 // contained.
968 if (base_params.interleave_search()) return result;
969
970 // Apply the logic for how many we keep.
971 int num_to_keep = base_params.num_full_subsolvers();
972 if (num_to_keep == 0) {
973 // Derive some automatic number to leave room for LS/LNS and other
974 // strategies not taken into account here.
975 const int num_available =
976 std::max(0, base_params.num_workers() - num_already_present);
977
978 const auto heuristic_num_workers = [](int num_workers) {
979 DCHECK_GE(num_workers, 0);
980 if (num_workers == 1) return 1;
981 if (num_workers <= 4) return num_workers - 1;
982 if (num_workers <= 8) return num_workers - 2;
983 if (num_workers <= 16) return num_workers - (num_workers / 4 + 1);
984 return num_workers - (num_workers / 2 - 3);
985 };
986
987 num_to_keep = heuristic_num_workers(num_available);
988 }
989
990 if (result.size() > num_to_keep) {
991 result.resize(std::max(0, num_to_keep));
992 }
993 return result;
994}
995
996std::vector<SatParameters> GetFirstSolutionBaseParams(
997 const SatParameters& base_params) {
998 std::vector<SatParameters> result;
999
1000 const auto get_base = [&result, &base_params](bool fj) {
1001 SatParameters new_params = base_params;
1002 new_params.set_log_search_progress(false);
1003 new_params.set_use_feasibility_jump(fj);
1004
1005 const int base_seed = base_params.random_seed();
1006 new_params.set_random_seed(CombineSeed(base_seed, result.size()));
1007 return new_params;
1008 };
1009
1010 // Add one feasibility jump.
1011 if (base_params.use_feasibility_jump()) {
1012 SatParameters new_params = get_base(true);
1013 new_params.set_name("fj");
1014 new_params.set_feasibility_jump_linearization_level(0);
1015 result.push_back(new_params);
1016 }
1017
1018 // Random search.
1019 for (int i = 0; i < 2; ++i) {
1020 SatParameters new_params = get_base(false);
1021 new_params.set_search_random_variable_pool_size(5);
1022 new_params.set_search_branching(SatParameters::RANDOMIZED_SEARCH);
1023 if (i % 2 == 0) {
1024 new_params.set_name("fs_random_no_lp");
1025 new_params.set_linearization_level(0);
1026 } else {
1027 new_params.set_name("fs_random");
1028 }
1029 result.push_back(new_params);
1030 }
1031
1032 // Add a second feasibility jump.
1033 if (base_params.use_feasibility_jump()) {
1034 SatParameters new_params = get_base(true);
1035 new_params.set_name("fj");
1036 new_params.set_feasibility_jump_linearization_level(0);
1037 result.push_back(new_params);
1038 }
1039
1040 // Random quick restart.
1041 for (int i = 0; i < 2; ++i) {
1042 SatParameters new_params = get_base(false);
1043 new_params.set_search_random_variable_pool_size(5);
1044 new_params.set_search_branching(
1045 SatParameters::PORTFOLIO_WITH_QUICK_RESTART_SEARCH);
1046 if (i % 2 == 0) {
1047 new_params.set_name("fs_random_quick_restart_no_lp");
1048 new_params.set_linearization_level(0);
1049 } else {
1050 new_params.set_name("fs_random_quick_restart");
1051 }
1052 result.push_back(new_params);
1053 }
1054
1055 // Add a linear feasibility jump.
1056 // This one seems to perform worse, so we add only 1 for 2 normal LS, and we
1057 // add this late.
1058 if (base_params.use_feasibility_jump()) {
1059 SatParameters new_params = get_base(true);
1060 new_params.set_name("fj_lin");
1061 new_params.set_feasibility_jump_linearization_level(2);
1062 result.push_back(new_params);
1063 }
1064
1065 return result;
1066}
1067
1068std::vector<SatParameters> RepeatParameters(
1069 absl::Span<const SatParameters> base_params, int num_params_to_generate) {
1070 // Return if we are done.
1071 std::vector<SatParameters> result;
1072 result.assign(base_params.begin(), base_params.end());
1073 if (result.empty()) return result;
1074 if (result.size() >= num_params_to_generate) {
1075 result.resize(num_params_to_generate);
1076 return result;
1077 }
1078
1079 // Repeat parameters until we have enough.
1080 int i = 0;
1081 const int base_size = result.size();
1082 while (result.size() < num_params_to_generate) {
1083 result.push_back(result[i % base_size]);
1084 result.back().set_random_seed(CombineSeed(result.back().random_seed(), i));
1085 ++i;
1086 }
1087 return result;
1088}
1089
1090SubsolverNameFilter::SubsolverNameFilter(const SatParameters& params) {
1091 for (const auto& pattern : params.filter_subsolvers()) {
1092 filter_patterns_.push_back(pattern);
1093 }
1094 for (const auto& pattern : params.ignore_subsolvers()) {
1095 ignore_patterns_.push_back(pattern);
1096 }
1097
1098 // Hack for backward compatibility and easy of use.
1099 if (params.use_ls_only()) {
1100 filter_patterns_.push_back("ls*");
1101 filter_patterns_.push_back("fj*");
1102 }
1103
1104 if (params.use_lns_only()) {
1105 // Still add first solution solvers.
1106 filter_patterns_.push_back("fj*");
1107 filter_patterns_.push_back("fs*");
1108 filter_patterns_.push_back("*lns");
1109 }
1110}
1111
1112bool SubsolverNameFilter::Keep(absl::string_view name) {
1113 last_name_ = name;
1114 if (!filter_patterns_.empty()) {
1115 bool keep = false;
1116 for (const absl::string_view pattern : filter_patterns_) {
1117 if (FNMatch(pattern, name)) {
1118 keep = true;
1119 break;
1120 }
1121 }
1122 if (!keep) {
1123 ignored_.emplace_back(name);
1124 return false;
1125 }
1126 }
1127 for (const absl::string_view pattern : ignore_patterns_) {
1128 if (FNMatch(pattern, name)) {
1129 ignored_.emplace_back(name);
1130 return false;
1131 }
1132 }
1133 return true;
1134}
1135
1136bool SubsolverNameFilter::FNMatch(absl::string_view pattern,
1137 absl::string_view str) {
1138 bool in_wildcard_match = false;
1139 while (true) {
1140 if (pattern.empty()) {
1141 return in_wildcard_match || str.empty();
1142 }
1143 if (str.empty()) {
1144 return pattern.find_first_not_of('*') == pattern.npos;
1145 }
1146 switch (pattern.front()) {
1147 case '*':
1148 pattern.remove_prefix(1);
1149 in_wildcard_match = true;
1150 break;
1151 case '?':
1152 pattern.remove_prefix(1);
1153 str.remove_prefix(1);
1154 break;
1155 default:
1156 if (in_wildcard_match) {
1157 absl::string_view fixed_portion = pattern;
1158 const size_t end = fixed_portion.find_first_of("*?");
1159 if (end != fixed_portion.npos) {
1160 fixed_portion = fixed_portion.substr(0, end);
1161 }
1162 const size_t match = str.find(fixed_portion);
1163 if (match == str.npos) {
1164 return false;
1165 }
1166 pattern.remove_prefix(fixed_portion.size());
1167 str.remove_prefix(match + fixed_portion.size());
1168 in_wildcard_match = false;
1169 } else {
1170 if (pattern.front() != str.front()) {
1171 return false;
1172 }
1173 pattern.remove_prefix(1);
1174 str.remove_prefix(1);
1175 }
1176 break;
1177 }
1178 }
1179}
1180
1181} // namespace sat
1182} // namespace operations_research
IntegerVariable Integer(int ref) const
BooleanOrIntegerLiteral GreaterOrEqual(int var, int64_t value) const
Helpers to generate a decision.
BooleanOrIntegerLiteral LowerOrEqual(int var, int64_t value) const
BooleanOrIntegerLiteral MedianValue(int var) const
int NumVariables() const
The valid indices for the calls below are in [0, num_variables).
bool IsFixed(int var) const
Getters about the current domain of the given variable.
BooleanOrIntegerLiteral RandomSplit(int var, int64_t lb, int64_t ub) const
IntegerValue LowerBound(IntegerVariable i) const
Returns the current lower/upper bound of the given integer variable.
Definition integer.h:1301
IntegerValue UpperBound(IntegerVariable i) const
Definition integer.h:1305
BooleanVariable Variable() const
Definition sat_base.h:87
T Get(std::function< T(const Model &)> f) const
Similar to Add() but this is const.
Definition model.h:93
Simple class used to filter executed subsolver names.
SubsolverNameFilter(const SatParameters &params)
Warning, params must outlive the class and be constant.
bool Keep(absl::string_view name)
Shall we keep a parameter with given name?
void Add(Element e, Score score)
Definition util.h:681
const std::vector< Element > & UnorderedElements() const
Definition util.h:704
std::function< BooleanOrIntegerLiteral()> FirstUnassignedVarAtItsMinHeuristic(absl::Span< const IntegerVariable > vars, Model *model)
std::vector< SatParameters > GetFullWorkerParameters(const SatParameters &base_params, const CpModelProto &cp_model, int num_already_present, SubsolverNameFilter *filter)
const LiteralIndex kNoLiteralIndex(-1)
std::function< BooleanOrIntegerLiteral()> DisjunctivePrecedenceSearchHeuristic(Model *model)
std::function< BooleanOrIntegerLiteral()> ConstructIntegerCompletionSearchStrategy(absl::Span< const IntegerVariable > variable_mapping, IntegerVariable objective_var, Model *model)
Constructs an integer completion search strategy.
std::vector< IntegerVariable > NegationOf(absl::Span< const IntegerVariable > vars)
Returns the vector of the negated variables.
Definition integer.cc:52
int CombineSeed(int base_seed, int64_t delta)
We assume delta >= 0 and we only use the low bit of delta.
std::function< BooleanOrIntegerLiteral()> ConstructFixedSearchStrategy(std::function< BooleanOrIntegerLiteral()> user_search, std::function< BooleanOrIntegerLiteral()> heuristic_search, std::function< BooleanOrIntegerLiteral()> integer_completion)
std::function< BooleanOrIntegerLiteral()> ConstructHintSearchStrategy(const CpModelProto &cp_model_proto, CpModelMapping *mapping, Model *model)
Constructs a search strategy that follow the hint from the model.
const IntegerVariable kNoIntegerVariable(-1)
std::function< BooleanOrIntegerLiteral()> PseudoCost(Model *model)
std::function< BooleanOrIntegerLiteral()> ConstructHeuristicSearchStrategy(const CpModelProto &cp_model_proto, Model *model)
Constructs a search strategy tailored for the current model.
std::function< BooleanOrIntegerLiteral()> SchedulingSearchHeuristic(Model *model)
A simple heuristic for scheduling models.
std::vector< SatParameters > GetFirstSolutionBaseParams(const SatParameters &base_params)
std::vector< SatParameters > RepeatParameters(absl::Span< const SatParameters > base_params, int num_params_to_generate)
std::function< BooleanOrIntegerLiteral()> ConstructUserSearchStrategy(const CpModelProto &cp_model_proto, Model *model)
Constructs the search strategy specified in the given CpModelProto.
std::function< BooleanOrIntegerLiteral()> FollowHint(absl::Span< const BooleanOrIntegerVariable > vars, absl::Span< const IntegerValue > values, Model *model)
int NegatedRef(int ref)
Small utility functions to deal with negative variable/literal references.
absl::flat_hash_map< std::string, SatParameters > GetNamedParameters(SatParameters base_params)
std::function< BooleanOrIntegerLiteral()> InstrumentSearchStrategy(const CpModelProto &cp_model_proto, absl::Span< const IntegerVariable > variable_mapping, std::function< BooleanOrIntegerLiteral()> instrumented_strategy, Model *model)
std::function< BooleanOrIntegerLiteral()> CumulativePrecedenceSearchHeuristic(Model *model)
std::function< BooleanOrIntegerLiteral()> SequentialSearch(std::vector< std::function< BooleanOrIntegerLiteral()> > heuristics)
In SWIG mode, we don't want anything besides these top-level includes.
LinearRange operator<=(const LinearExpr &lhs, const LinearExpr &rhs)
static IntegerLiteral GreaterOrEqual(IntegerVariable i, IntegerValue bound)
static IntegerLiteral LowerOrEqual(IntegerVariable i, IntegerValue bound)
Stores one variable and its strategy value.