Google OR-Tools v9.11
a fast and portable software suite for combinatorial optimization
Loading...
Searching...
No Matches
cp_model_search.cc
Go to the documentation of this file.
1// Copyright 2010-2024 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
15
16#include <algorithm>
17#include <cstddef>
18#include <cstdint>
19#include <functional>
20#include <limits>
21#include <string>
22#include <utility>
23#include <vector>
24
25#include "absl/container/flat_hash_map.h"
26#include "absl/container/flat_hash_set.h"
27#include "absl/flags/flag.h"
28#include "absl/log/check.h"
29#include "absl/random/distributions.h"
30#include "absl/strings/str_cat.h"
31#include "absl/strings/string_view.h"
32#include "absl/types/span.h"
34#include "ortools/sat/cp_model.pb.h"
37#include "ortools/sat/integer.h"
40#include "ortools/sat/model.h"
42#include "ortools/sat/sat_parameters.pb.h"
43#include "ortools/sat/util.h"
45
46namespace operations_research {
47namespace sat {
48
50 : mapping_(*model->GetOrCreate<CpModelMapping>()),
51 boolean_assignment_(model->GetOrCreate<Trail>()->Assignment()),
52 integer_trail_(*model->GetOrCreate<IntegerTrail>()),
53 integer_encoder_(*model->GetOrCreate<IntegerEncoder>()) {}
54
// Returns the number of variables of the model proto, as known by the mapping.
int CpModelView::NumVariables() const { return mapping_.NumProtoVariables(); }
56
57bool CpModelView::IsFixed(int var) const {
58 if (mapping_.IsBoolean(var)) {
59 return boolean_assignment_.VariableIsAssigned(
60 mapping_.Literal(var).Variable());
61 } else if (mapping_.IsInteger(var)) {
62 return integer_trail_.IsFixed(mapping_.Integer(var));
63 }
64 return true; // Default.
65}
66
67int64_t CpModelView::Min(int var) const {
68 if (mapping_.IsBoolean(var)) {
69 const Literal l = mapping_.Literal(var);
70 return boolean_assignment_.LiteralIsTrue(l) ? 1 : 0;
71 } else if (mapping_.IsInteger(var)) {
72 return integer_trail_.LowerBound(mapping_.Integer(var)).value();
73 }
74 return 0; // Default.
75}
76
77int64_t CpModelView::Max(int var) const {
78 if (mapping_.IsBoolean(var)) {
79 const Literal l = mapping_.Literal(var);
80 return boolean_assignment_.LiteralIsFalse(l) ? 0 : 1;
81 } else if (mapping_.IsInteger(var)) {
82 return integer_trail_.UpperBound(mapping_.Integer(var)).value();
83 }
84 return 0; // Default.
85}
86
88 int64_t value) const {
89 DCHECK(!IsFixed(var));
91 if (mapping_.IsBoolean(var)) {
92 DCHECK(value == 0 || value == 1);
93 if (value == 1) {
94 result.boolean_literal_index = mapping_.Literal(var).Index();
95 }
96 } else if (mapping_.IsInteger(var)) {
98 mapping_.Integer(var), IntegerValue(value));
99 }
100 return result;
101}
102
104 int64_t value) const {
105 DCHECK(!IsFixed(var));
107 if (mapping_.IsBoolean(var)) {
108 DCHECK(value == 0 || value == 1);
109 if (value == 0) {
110 result.boolean_literal_index = mapping_.Literal(var).NegatedIndex();
111 }
112 } else if (mapping_.IsInteger(var)) {
114 IntegerValue(value));
115 }
116 return result;
117}
118
120 DCHECK(!IsFixed(var));
122 if (mapping_.IsBoolean(var)) {
123 result.boolean_literal_index = mapping_.Literal(var).NegatedIndex();
124 } else if (mapping_.IsInteger(var)) {
125 const IntegerVariable variable = mapping_.Integer(var);
126 const std::vector<ValueLiteralPair> encoding =
127 integer_encoder_.FullDomainEncoding(variable);
128
129 // 5 values -> returns the second.
130 // 4 values -> returns the second too.
131 // Array is 0 based.
132 const int target = (static_cast<int>(encoding.size()) + 1) / 2 - 1;
133 result.boolean_literal_index = encoding[target].literal.Index();
134 }
135 return result;
136}
137
// Stores one variable and its strategy value.
struct VarValue {
  int ref;        // Proto variable reference.
  int64_t value;  // Strategy value associated with this variable.
};
143
144namespace {
145
146// TODO(user): Save this somewhere instead of recomputing it.
147bool ModelHasSchedulingConstraints(const CpModelProto& cp_model_proto) {
148 for (const ConstraintProto& ct : cp_model_proto.constraints()) {
149 if (ct.constraint_case() == ConstraintProto::kNoOverlap) return true;
150 if (ct.constraint_case() == ConstraintProto::kCumulative) return true;
151 }
152 return false;
153}
154
// Enables a set of extra propagation options for scheduling constraints
// (disjunctive, cumulative, no_overlap_2d) on `new_params`. Applied to the
// named strategies below when base_params.use_dual_scheduling_heuristics()
// is set.
void AddDualSchedulingHeuristics(SatParameters& new_params) {
  new_params.set_exploit_all_precedences(true);
  new_params.set_use_hard_precedences_in_cumulative(true);
  new_params.set_use_overload_checker_in_cumulative(true);
  new_params.set_use_strong_propagation_in_disjunctive(true);
  new_params.set_use_timetable_edge_finding_in_cumulative(true);
  new_params.set_max_pairs_pairwise_reasoning_in_no_overlap_2d(5000);
  new_params.set_use_timetabling_in_no_overlap_2d(true);
  new_params.set_use_energetic_reasoning_in_no_overlap_2d(true);
  new_params.set_use_area_energetic_reasoning_in_no_overlap_2d(true);
  new_params.set_use_conservative_scale_overload_checker(true);
}
167
// We want a random tie breaking among variables with equivalent values.
// Both comparators implement the same lexicographic order on (value, noise),
// so that the noise only matters when the values are equal.
struct NoisyInteger {
  int64_t value;
  double noise;

  bool operator>(const NoisyInteger& other) const {
    if (value != other.value) return value > other.value;
    return noise > other.noise;
  }
  bool operator<=(const NoisyInteger& other) const { return !(*this > other); }
};
181
182} // namespace
183
185 const CpModelProto& cp_model_proto, Model* model) {
186 if (cp_model_proto.search_strategy().empty()) return nullptr;
187
188 std::vector<DecisionStrategyProto> strategies;
189 for (const DecisionStrategyProto& proto : cp_model_proto.search_strategy()) {
190 strategies.push_back(proto);
191 }
192 const auto& view = *model->GetOrCreate<CpModelView>();
193 const auto& parameters = *model->GetOrCreate<SatParameters>();
194 auto* random = model->GetOrCreate<ModelRandomGenerator>();
195
196 // Note that we copy strategies to keep the return function validity
197 // independently of the life of the passed vector.
198 return [&view, &parameters, random, strategies]() {
199 for (const DecisionStrategyProto& strategy : strategies) {
200 int candidate_ref = -1;
201 int64_t candidate_value = std::numeric_limits<int64_t>::max();
202
203 // TODO(user): Improve the complexity if this becomes an issue which
204 // may be the case if we do a fixed_search.
205
206 // To store equivalent variables in randomized search.
207 const bool randomize_decision =
208 parameters.search_random_variable_pool_size() > 1;
209 TopN<int, NoisyInteger> top_variables(
210 randomize_decision ? parameters.search_random_variable_pool_size()
211 : 1);
212
213 for (const LinearExpressionProto& expr : strategy.exprs()) {
214 const int var = expr.vars(0);
215 if (view.IsFixed(var)) continue;
216
217 int64_t coeff = expr.coeffs(0);
218 int64_t offset = expr.offset();
219 int64_t lb = view.Min(var);
220 int64_t ub = view.Max(var);
221 int ref = var;
222 if (coeff < 0) {
223 lb = -view.Max(var);
224 ub = -view.Min(var);
225 coeff = -coeff;
226 ref = NegatedRef(var);
227 }
228
229 int64_t value(0);
230 switch (strategy.variable_selection_strategy()) {
231 case DecisionStrategyProto::CHOOSE_FIRST:
232 break;
233 case DecisionStrategyProto::CHOOSE_LOWEST_MIN:
234 value = coeff * lb + offset;
235 break;
236 case DecisionStrategyProto::CHOOSE_HIGHEST_MAX:
237 value = -(coeff * ub + offset);
238 break;
239 case DecisionStrategyProto::CHOOSE_MIN_DOMAIN_SIZE:
240 // The size of the domain is not multiplied by the coeff.
241 value = ub - lb + 1;
242 break;
243 case DecisionStrategyProto::CHOOSE_MAX_DOMAIN_SIZE:
244 // The size of the domain is not multiplied by the coeff.
245 value = -(ub - lb + 1);
246 break;
247 default:
248 LOG(FATAL) << "Unknown VariableSelectionStrategy "
249 << strategy.variable_selection_strategy();
250 }
251
252 if (randomize_decision) {
253 // We need to use -value as we want the minimum valued variables.
254 // We add a random noise to get improve the entropy.
255 const double noise = absl::Uniform(*random, 0., 1.0);
256 top_variables.Add(ref, {-value, noise});
257 candidate_value = std::min(candidate_value, value);
258 } else if (value < candidate_value) {
259 candidate_ref = ref;
260 candidate_value = value;
261 }
262
263 // We can stop scanning if the variable selection strategy is to use the
264 // first unbound variable and no randomization is needed.
265 if (strategy.variable_selection_strategy() ==
266 DecisionStrategyProto::CHOOSE_FIRST &&
267 !randomize_decision) {
268 break;
269 }
270 }
271
272 // Check if one active variable has been found.
273 if (candidate_value == std::numeric_limits<int64_t>::max()) continue;
274
275 // Pick the winner when decisions are randomized.
276 if (randomize_decision) {
277 const std::vector<int>& candidates = top_variables.UnorderedElements();
278 candidate_ref = candidates[absl::Uniform(
279 *random, 0, static_cast<int>(candidates.size()))];
280 }
281
282 DecisionStrategyProto::DomainReductionStrategy selection =
283 strategy.domain_reduction_strategy();
284 if (!RefIsPositive(candidate_ref)) {
285 switch (selection) {
286 case DecisionStrategyProto::SELECT_MIN_VALUE:
287 selection = DecisionStrategyProto::SELECT_MAX_VALUE;
288 break;
289 case DecisionStrategyProto::SELECT_MAX_VALUE:
290 selection = DecisionStrategyProto::SELECT_MIN_VALUE;
291 break;
292 case DecisionStrategyProto::SELECT_LOWER_HALF:
293 selection = DecisionStrategyProto::SELECT_UPPER_HALF;
294 break;
295 case DecisionStrategyProto::SELECT_UPPER_HALF:
296 selection = DecisionStrategyProto::SELECT_LOWER_HALF;
297 break;
298 default:
299 break;
300 }
301 }
302
303 const int var = PositiveRef(candidate_ref);
304 const int64_t lb = view.Min(var);
305 const int64_t ub = view.Max(var);
306 switch (selection) {
307 case DecisionStrategyProto::SELECT_MIN_VALUE:
308 return view.LowerOrEqual(var, lb);
309 case DecisionStrategyProto::SELECT_MAX_VALUE:
310 return view.GreaterOrEqual(var, ub);
311 case DecisionStrategyProto::SELECT_LOWER_HALF:
312 return view.LowerOrEqual(var, lb + (ub - lb) / 2);
313 case DecisionStrategyProto::SELECT_UPPER_HALF:
314 return view.GreaterOrEqual(var, ub - (ub - lb) / 2);
315 case DecisionStrategyProto::SELECT_MEDIAN_VALUE:
316 return view.MedianValue(var);
317 default:
318 LOG(FATAL) << "Unknown DomainReductionStrategy "
319 << strategy.domain_reduction_strategy();
320 }
321 }
323 };
324}
325
326// TODO(user): Implement a routing search.
328 const CpModelProto& cp_model_proto, Model* model) {
329 if (ModelHasSchedulingConstraints(cp_model_proto)) {
330 std::vector<std::function<BooleanOrIntegerLiteral()>> heuristics;
331 const auto& params = *model->GetOrCreate<SatParameters>();
332 bool possible_new_constraints = false;
333 if (params.use_dynamic_precedence_in_disjunctive()) {
334 possible_new_constraints = true;
335 heuristics.push_back(DisjunctivePrecedenceSearchHeuristic(model));
336 }
337 if (params.use_dynamic_precedence_in_cumulative()) {
338 possible_new_constraints = true;
339 heuristics.push_back(CumulativePrecedenceSearchHeuristic(model));
340 }
341
342 // Tricky: we need to create this at level zero in case there are no linear
343 // constraint in the model at the beginning.
344 //
345 // TODO(user): Alternatively, support creation of SatPropagator at positive
346 // level.
347 if (possible_new_constraints && params.new_linear_propagation()) {
348 model->GetOrCreate<LinearPropagator>();
349 }
350
351 heuristics.push_back(SchedulingSearchHeuristic(model));
352 return SequentialSearch(std::move(heuristics));
353 }
354 return PseudoCost(model);
355}
356
357std::function<BooleanOrIntegerLiteral()>
359 const std::vector<IntegerVariable>& variable_mapping,
360 IntegerVariable objective_var, Model* model) {
361 const auto& params = *model->GetOrCreate<SatParameters>();
362 if (!params.instantiate_all_variables()) {
363 return []() { return BooleanOrIntegerLiteral(); };
364 }
365
366 std::vector<IntegerVariable> decisions;
367 for (const IntegerVariable var : variable_mapping) {
368 if (var == kNoIntegerVariable) continue;
369
370 // Make sure we try to fix the objective to its lowest value first.
371 // TODO(user): we could also fix terms of the objective in the right
372 // direction.
373 if (var == NegationOf(objective_var)) {
374 decisions.push_back(objective_var);
375 } else {
376 decisions.push_back(var);
377 }
378 }
380}
381
382// Constructs a search strategy that follow the hint from the model.
384 const CpModelProto& cp_model_proto, CpModelMapping* mapping, Model* model) {
385 std::vector<BooleanOrIntegerVariable> vars;
386 std::vector<IntegerValue> values;
387 for (int i = 0; i < cp_model_proto.solution_hint().vars_size(); ++i) {
388 const int ref = cp_model_proto.solution_hint().vars(i);
389 CHECK(RefIsPositive(ref));
391 if (mapping->IsBoolean(ref)) {
392 var.bool_var = mapping->Literal(ref).Variable();
393 } else {
394 var.int_var = mapping->Integer(ref);
395 }
396 vars.push_back(var);
397 values.push_back(IntegerValue(cp_model_proto.solution_hint().values(i)));
398 }
399 return FollowHint(vars, values, model);
400}
401
403 std::function<BooleanOrIntegerLiteral()> user_search,
404 std::function<BooleanOrIntegerLiteral()> heuristic_search,
405 std::function<BooleanOrIntegerLiteral()> integer_completion) {
406 // We start by the user specified heuristic.
407 std::vector<std::function<BooleanOrIntegerLiteral()>> heuristics;
408 if (user_search != nullptr) {
409 heuristics.push_back(user_search);
410 }
411 if (heuristic_search != nullptr) {
412 heuristics.push_back(heuristic_search);
413 }
414 if (integer_completion != nullptr) {
415 heuristics.push_back(integer_completion);
416 }
417
418 return SequentialSearch(heuristics);
419}
420
422 const CpModelProto& cp_model_proto,
423 const std::vector<IntegerVariable>& variable_mapping,
424 std::function<BooleanOrIntegerLiteral()> instrumented_strategy,
425 Model* model) {
426 std::vector<int> ref_to_display;
427 for (int i = 0; i < cp_model_proto.variables_size(); ++i) {
428 if (variable_mapping[i] == kNoIntegerVariable) continue;
429 if (cp_model_proto.variables(i).name().empty()) continue;
430 ref_to_display.push_back(i);
431 }
432 std::sort(ref_to_display.begin(), ref_to_display.end(), [&](int i, int j) {
433 return cp_model_proto.variables(i).name() <
434 cp_model_proto.variables(j).name();
435 });
436
437 std::vector<std::pair<int64_t, int64_t>> old_domains(variable_mapping.size());
438 return [instrumented_strategy, model, &variable_mapping, &cp_model_proto,
439 old_domains, ref_to_display]() mutable {
440 const BooleanOrIntegerLiteral decision = instrumented_strategy();
441 if (!decision.HasValue()) return decision;
442
443 if (decision.boolean_literal_index != kNoLiteralIndex) {
444 const Literal l = Literal(decision.boolean_literal_index);
445 LOG(INFO) << "Boolean decision " << l;
446 const auto& encoder = model->Get<IntegerEncoder>();
447 for (const IntegerLiteral i_lit : encoder->GetIntegerLiterals(l)) {
448 LOG(INFO) << " - associated with " << i_lit;
449 }
450 for (const auto [var, value] : encoder->GetEqualityLiterals(l)) {
451 LOG(INFO) << " - associated with " << var << " == " << value;
452 }
453 } else {
454 LOG(INFO) << "Integer decision " << decision.integer_literal;
455 }
456 const int level = model->Get<Trail>()->CurrentDecisionLevel();
457 std::string to_display =
458 absl::StrCat("Diff since last call, level=", level, "\n");
459 IntegerTrail* integer_trail = model->GetOrCreate<IntegerTrail>();
460 for (const int ref : ref_to_display) {
461 const IntegerVariable var = variable_mapping[ref];
462 const std::pair<int64_t, int64_t> new_domain(
463 integer_trail->LowerBound(var).value(),
464 integer_trail->UpperBound(var).value());
465 if (new_domain != old_domains[ref]) {
466 absl::StrAppend(&to_display, cp_model_proto.variables(ref).name(), " [",
467 old_domains[ref].first, ",", old_domains[ref].second,
468 "] -> [", new_domain.first, ",", new_domain.second,
469 "]\n");
470 old_domains[ref] = new_domain;
471 }
472 }
473 LOG(INFO) << to_display;
474 return decision;
475 };
476}
477
// Builds a map from strategy name (e.g. "default_lp", "core", "probing") to a
// fully configured SatParameters derived from `base_params`. User-provided
// entries in base_params.subsolver_params() are merged on top of (or added to)
// the predefined ones, and every returned parameter gets its name field set.
// Note that logging is disabled on all generated parameters; it can be
// re-enabled per subsolver via the subsolver_params field.
absl::flat_hash_map<std::string, SatParameters> GetNamedParameters(
    SatParameters base_params) {
  absl::flat_hash_map<std::string, SatParameters> strategies;

  // By default we disable the logging when we generate a set of parameter. It
  // is possible to force it by setting it in the corresponding named parameter
  // via the subsolver_params field.
  base_params.set_log_search_progress(false);

  // The "default" name can be used for the base_params unchanged.
  strategies["default"] = base_params;

  // Lp variations only.
  {
    SatParameters new_params = base_params;
    new_params.set_linearization_level(0);
    strategies["no_lp"] = new_params;
    new_params.set_linearization_level(1);
    strategies["default_lp"] = new_params;
    new_params.set_linearization_level(2);
    new_params.set_add_lp_constraints_lazily(false);
    strategies["max_lp"] = new_params;
  }

  // Core. Note that we disable the lp here because it is faster on the minizinc
  // benchmark.
  //
  // TODO(user): Do more experiments, the LP with core could be useful, but we
  // probably need to incorporate the newly created integer variables from the
  // core algorithm into the LP.
  {
    SatParameters new_params = base_params;
    new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
    new_params.set_optimize_with_core(true);
    new_params.set_linearization_level(0);
    strategies["core"] = new_params;
  }

  // It can be interesting to try core and lp.
  {
    SatParameters new_params = base_params;
    new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
    new_params.set_optimize_with_core(true);
    new_params.set_linearization_level(1);
    strategies["core_default_lp"] = new_params;
  }

  {
    SatParameters new_params = base_params;
    new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
    new_params.set_optimize_with_core(true);
    new_params.set_linearization_level(2);
    strategies["core_max_lp"] = new_params;
  }

  // Core with the max-hs variant.
  {
    SatParameters new_params = base_params;
    new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
    new_params.set_optimize_with_core(true);
    new_params.set_optimize_with_max_hs(true);
    strategies["max_hs"] = new_params;
  }

  {
    SatParameters new_params = base_params;
    new_params.set_optimize_with_lb_tree_search(true);
    // We do not want to change the objective_var lb from outside as it gives
    // better result to only use locally derived reason in that algo.
    new_params.set_share_objective_bounds(false);

    new_params.set_linearization_level(0);
    strategies["lb_tree_search_no_lp"] = new_params;

    new_params.set_linearization_level(2);
    if (base_params.use_dual_scheduling_heuristics()) {
      AddDualSchedulingHeuristics(new_params);
    }
    // We want to spend more time on the LP here.
    new_params.set_add_lp_constraints_lazily(false);
    new_params.set_root_lp_iterations(100'000);
    strategies["lb_tree_search"] = new_params;
  }

  {
    SatParameters new_params = base_params;
    new_params.set_use_objective_lb_search(true);

    new_params.set_linearization_level(0);
    strategies["objective_lb_search_no_lp"] = new_params;

    new_params.set_linearization_level(1);
    strategies["objective_lb_search"] = new_params;

    if (base_params.use_dual_scheduling_heuristics()) {
      AddDualSchedulingHeuristics(new_params);
    }
    new_params.set_linearization_level(2);
    strategies["objective_lb_search_max_lp"] = new_params;
  }

  {
    SatParameters new_params = base_params;
    new_params.set_use_objective_shaving_search(true);
    new_params.set_cp_model_presolve(true);
    new_params.set_cp_model_probing_level(0);
    new_params.set_symmetry_level(0);
    if (base_params.use_dual_scheduling_heuristics()) {
      AddDualSchedulingHeuristics(new_params);
    }

    strategies["objective_shaving"] = new_params;

    new_params.set_linearization_level(0);
    strategies["objective_shaving_no_lp"] = new_params;

    new_params.set_linearization_level(2);
    strategies["objective_shaving_max_lp"] = new_params;
  }

  {
    SatParameters new_params = base_params;
    new_params.set_use_variables_shaving_search(true);
    new_params.set_cp_model_presolve(true);
    new_params.set_cp_model_probing_level(0);
    new_params.set_symmetry_level(0);
    new_params.set_share_objective_bounds(false);
    new_params.set_share_level_zero_bounds(false);

    strategies["variables_shaving"] = new_params;

    new_params.set_linearization_level(0);
    strategies["variables_shaving_no_lp"] = new_params;

    if (base_params.use_dual_scheduling_heuristics()) {
      AddDualSchedulingHeuristics(new_params);
    }
    new_params.set_linearization_level(2);
    strategies["variables_shaving_max_lp"] = new_params;
  }

  {
    SatParameters new_params = base_params;
    new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
    new_params.set_use_probing_search(true);
    new_params.set_at_most_one_max_expansion_size(2);
    if (base_params.use_dual_scheduling_heuristics()) {
      AddDualSchedulingHeuristics(new_params);
    }
    strategies["probing"] = new_params;

    new_params.set_linearization_level(0);
    strategies["probing_no_lp"] = new_params;

    new_params.set_linearization_level(2);
    // We want to spend more time on the LP here.
    new_params.set_add_lp_constraints_lazily(false);
    new_params.set_root_lp_iterations(100'000);
    strategies["probing_max_lp"] = new_params;
  }

  // Search variation.
  {
    SatParameters new_params = base_params;
    new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);
    strategies["auto"] = new_params;

    new_params.set_search_branching(SatParameters::FIXED_SEARCH);
    new_params.set_use_dynamic_precedence_in_disjunctive(false);
    new_params.set_use_dynamic_precedence_in_cumulative(false);
    strategies["fixed"] = new_params;
  }

  // Quick restart.
  {
    // TODO(user): Experiment with search_random_variable_pool_size.
    SatParameters new_params = base_params;
    new_params.set_search_branching(
        SatParameters::PORTFOLIO_WITH_QUICK_RESTART_SEARCH);
    strategies["quick_restart"] = new_params;

    new_params.set_linearization_level(0);
    strategies["quick_restart_no_lp"] = new_params;

    new_params.set_linearization_level(2);
    strategies["quick_restart_max_lp"] = new_params;
  }

  {
    SatParameters new_params = base_params;
    new_params.set_linearization_level(2);
    new_params.set_search_branching(SatParameters::LP_SEARCH);
    if (base_params.use_dual_scheduling_heuristics()) {
      AddDualSchedulingHeuristics(new_params);
    }
    strategies["reduced_costs"] = new_params;
  }

  {
    // Note: no dual scheduling heuristics.
    SatParameters new_params = base_params;
    new_params.set_linearization_level(2);
    new_params.set_search_branching(SatParameters::PSEUDO_COST_SEARCH);
    new_params.set_exploit_best_solution(true);
    strategies["pseudo_costs"] = new_params;
  }

  // Less encoding.
  {
    SatParameters new_params = base_params;
    new_params.set_boolean_encoding_level(0);
    strategies["less_encoding"] = new_params;
  }

  // Base parameters for shared tree worker.
  {
    SatParameters new_params = base_params;
    new_params.set_use_shared_tree_search(true);
    new_params.set_search_branching(SatParameters::AUTOMATIC_SEARCH);

    // These settings don't make sense with shared tree search, turn them off as
    // they can break things.
    new_params.set_optimize_with_core(false);
    new_params.set_optimize_with_lb_tree_search(false);
    new_params.set_optimize_with_max_hs(false);

    strategies["shared_tree"] = new_params;
  }

  // Base parameters for LNS worker.
  {
    SatParameters new_params = base_params;
    new_params.set_stop_after_first_solution(false);
    new_params.set_cp_model_presolve(true);

    // We disable costly presolve/inprocessing.
    new_params.set_use_sat_inprocessing(false);
    new_params.set_cp_model_probing_level(0);
    new_params.set_symmetry_level(0);
    new_params.set_find_big_linear_overlap(false);

    new_params.set_log_search_progress(false);
    new_params.set_debug_crash_on_bad_hint(false);  // Can happen in lns.
    new_params.set_solution_pool_size(1);  // Keep the best solution found.
    strategies["lns"] = new_params;
  }

  // Add user defined ones.
  // Note that this might be merged to our default ones.
  for (const SatParameters& params : base_params.subsolver_params()) {
    auto it = strategies.find(params.name());
    if (it != strategies.end()) {
      // Known name: overlay the user fields on the predefined strategy.
      it->second.MergeFrom(params);
    } else {
      // Merge the named parameters with the base parameters to create the new
      // parameters.
      SatParameters new_params = base_params;
      new_params.MergeFrom(params);
      strategies[params.name()] = new_params;
    }
  }

  // Fix names (we don't set them above).
  for (auto& [name, params] : strategies) {
    params.set_name(name);
  }

  return strategies;
}
746
// Note: in flatzinc setting, we know we always have a fixed search defined.
//
// Things to try:
//   - Specialize for purely boolean problems
//   - Disable linearization_level options for non linear problems
//   - Fast restart in randomized search
//   - Different propagation levels for scheduling constraints
//
// Returns the list of "full search" subsolver parameters to run. The list is
// built from the named strategies of GetNamedParameters(): user-specified
// subsolvers (or a default ordering), filtered by `filter` and by model
// properties (objective presence, scheduling constraints, interleave mode),
// given deterministic per-entry seeds, and finally truncated either to
// num_full_subsolvers() or to an automatic count derived from num_workers()
// minus `num_already_present`.
std::vector<SatParameters> GetFullWorkerParameters(
    const SatParameters& base_params, const CpModelProto& cp_model,
    int num_already_present, SubsolverNameFilter* filter) {
  // Defines a set of named strategies so it is easier to read in one place
  // the one that are used. See below.
  const auto strategies = GetNamedParameters(base_params);

  // We only use a "fixed search" worker if some strategy is specified or
  // if we have a scheduling model.
  //
  // TODO(user): For scheduling, this is important to find good first solution
  // but afterwards it is not really great and should probably be replaced by a
  // LNS worker.
  const bool use_fixed_strategy = !cp_model.search_strategy().empty() ||
                                  ModelHasSchedulingConstraints(cp_model);

  // Our current set of strategies
  //
  // TODO(user): Avoid launching two strategies if they are the same,
  // like if there is no lp, or everything is already linearized at level 1.
  std::vector<std::string> names;

  // Starts by adding user specified ones.
  for (const std::string& name : base_params.extra_subsolvers()) {
    names.push_back(name);
  }

  // We use the default if empty.
  if (base_params.subsolvers().empty()) {
    // Note that the order is important as the list can be truncated.
    names.push_back("default_lp");
    names.push_back("fixed");
    names.push_back("core");
    names.push_back("no_lp");
    names.push_back("max_lp");
    names.push_back("quick_restart");
    names.push_back("reduced_costs");
    names.push_back("quick_restart_no_lp");
    names.push_back("pseudo_costs");
    names.push_back("lb_tree_search");
    names.push_back("probing");
    names.push_back("objective_lb_search");
    names.push_back("objective_shaving_no_lp");
    names.push_back("objective_shaving_max_lp");
    names.push_back("probing_max_lp");
    names.push_back("probing_no_lp");
    names.push_back("objective_lb_search_no_lp");
    names.push_back("objective_lb_search_max_lp");
  } else {
    for (const std::string& name : base_params.subsolvers()) {
      // Hack for flatzinc. At the time of parameter setting, the objective is
      // not expanded. So we do not know if core is applicable or not.
      if (name == "core_or_no_lp") {
        if (!cp_model.has_objective() ||
            cp_model.objective().vars_size() <= 1) {
          names.push_back("no_lp");
        } else {
          names.push_back("core");
        }
      } else {
        names.push_back(name);
      }
    }
  }

  // Remove the names that should be ignored.
  // In-place compaction keeping only the names accepted by the filter.
  int new_size = 0;
  for (const std::string& name : names) {
    if (filter->Keep(name)) {
      names[new_size++] = name;
    }
  }
  names.resize(new_size);

  // Creates the diverse set of parameters with names and seed.
  std::vector<SatParameters> result;
  for (const std::string& name : names) {
    SatParameters params = strategies.at(name);

    // Do some filtering.
    if (!use_fixed_strategy &&
        params.search_branching() == SatParameters::FIXED_SEARCH) {
      continue;
    }

    // TODO(user): Enable probing_search in deterministic mode.
    // Currently it timeouts on small problems as the deterministic time limit
    // never hits the sharding limit.
    if (params.use_probing_search() && params.interleave_search()) continue;

    // TODO(user): Enable shaving search in interleave mode.
    // Currently it do not respect ^C, and has no per chunk time limit.
    if ((params.use_objective_shaving_search() ||
         params.use_variables_shaving_search()) &&
        params.interleave_search()) {
      continue;
    }

    // In the corner case of empty variable, lets not schedule the probing as
    // it currently just loop forever instead of returning right away.
    if (params.use_probing_search() && cp_model.variables().empty()) continue;

    if (cp_model.has_objective() && !cp_model.objective().vars().empty()) {
      // Disable core search if there is only 1 term in the objective.
      if (cp_model.objective().vars().size() == 1 &&
          params.optimize_with_core()) {
        continue;
      }

      if (name == "less_encoding") continue;

      // Disable subsolvers that do not implement the deterministic mode.
      //
      // TODO(user): Enable lb_tree_search in deterministic mode.
      if (params.interleave_search() &&
          (params.optimize_with_lb_tree_search() ||
           params.use_objective_lb_search())) {
        continue;
      }
    } else {
      // Remove subsolvers that require an objective.
      if (params.optimize_with_lb_tree_search()) continue;
      if (params.optimize_with_core()) continue;
      if (params.use_objective_lb_search()) continue;
      if (params.use_objective_shaving_search()) continue;
      if (params.search_branching() == SatParameters::LP_SEARCH) continue;
      if (params.search_branching() == SatParameters::PSEUDO_COST_SEARCH) {
        continue;
      }
    }

    // Add this strategy.
    // The seed depends on the position in `result` so each kept subsolver
    // gets a distinct, deterministic seed.
    params.set_name(name);
    params.set_random_seed(CombineSeed(
        base_params.random_seed(), static_cast<int64_t>(result.size()) + 1));
    result.push_back(params);
  }

  // In interleaved mode, we run all of them.
  //
  // TODO(user): Actually make sure the gap num_workers <-> num_heuristics is
  // contained.
  if (base_params.interleave_search()) return result;

  // Apply the logic for how many we keep.
  int num_to_keep = base_params.num_full_subsolvers();
  if (num_to_keep == 0) {
    // Derive some automatic number to leave room for LS/LNS and other
    // strategies not taken into account here.
    const int num_available =
        std::max(0, base_params.num_workers() - num_already_present);

    // Maps the number of available workers to the number of full subsolvers
    // to run, reserving progressively more workers for other uses.
    const auto heuristic_num_workers = [](int num_workers) {
      DCHECK_GE(num_workers, 0);
      if (num_workers == 1) return 1;
      if (num_workers <= 4) return num_workers - 1;
      if (num_workers <= 8) return num_workers - 2;
      if (num_workers <= 16) return num_workers - (num_workers / 4 + 1);
      return num_workers - (num_workers / 2 - 3);
    };

    num_to_keep = heuristic_num_workers(num_available);
  }

  // NOTE(review): result.size() (unsigned) is compared against the signed
  // num_to_keep; if num_to_keep were ever negative, the implicit conversion
  // would make the comparison false and no truncation would occur. The
  // std::max(0, ...) below suggests clamping to 0 was intended — confirm
  // num_full_subsolvers() is always non-negative.
  if (result.size() > num_to_keep) {
    result.resize(std::max(0, num_to_keep));
  }
  return result;
}
923
924std::vector<SatParameters> GetFirstSolutionBaseParams(
925 const SatParameters& base_params) {
926 std::vector<SatParameters> result;
927
928 const auto get_base = [&result, &base_params](bool fj) {
929 SatParameters new_params = base_params;
930 new_params.set_log_search_progress(false);
931 new_params.set_use_feasibility_jump(fj);
932
933 const int base_seed = base_params.random_seed();
934 new_params.set_random_seed(CombineSeed(base_seed, result.size()));
935 return new_params;
936 };
937
938 // Add one feasibility jump.
939 if (base_params.use_feasibility_jump()) {
940 SatParameters new_params = get_base(true);
941 new_params.set_name("fj");
942 new_params.set_feasibility_jump_linearization_level(0);
943 result.push_back(new_params);
944 }
945
946 // Random search.
947 for (int i = 0; i < 2; ++i) {
948 SatParameters new_params = get_base(false);
949 new_params.set_search_random_variable_pool_size(5);
950 new_params.set_search_branching(SatParameters::RANDOMIZED_SEARCH);
951 if (i % 2 == 0) {
952 new_params.set_name("fs_random_no_lp");
953 new_params.set_linearization_level(0);
954 } else {
955 new_params.set_name("fs_random");
956 }
957 result.push_back(new_params);
958 }
959
960 // Add a second feasibility jump.
961 if (base_params.use_feasibility_jump()) {
962 SatParameters new_params = get_base(true);
963 new_params.set_name("fj");
964 new_params.set_feasibility_jump_linearization_level(0);
965 result.push_back(new_params);
966 }
967
968 // Random quick restart.
969 for (int i = 0; i < 2; ++i) {
970 SatParameters new_params = get_base(false);
971 new_params.set_search_random_variable_pool_size(5);
972 new_params.set_search_branching(
973 SatParameters::PORTFOLIO_WITH_QUICK_RESTART_SEARCH);
974 if (i % 2 == 0) {
975 new_params.set_name("fs_random_quick_restart_no_lp");
976 new_params.set_linearization_level(0);
977 } else {
978 new_params.set_name("fs_random_quick_restart");
979 }
980 result.push_back(new_params);
981 }
982
983 // Add a linear feasibility jump.
984 // This one seems to perform worse, so we add only 1 for 2 normal LS, and we
985 // add this late.
986 if (base_params.use_feasibility_jump()) {
987 SatParameters new_params = get_base(true);
988 new_params.set_name("fj_lin");
989 new_params.set_feasibility_jump_linearization_level(2);
990 result.push_back(new_params);
991 }
992
993 return result;
994}
995
996std::vector<SatParameters> RepeatParameters(
997 absl::Span<const SatParameters> base_params, int num_params_to_generate) {
998 // Return if we are done.
999 std::vector<SatParameters> result;
1000 result.assign(base_params.begin(), base_params.end());
1001 if (result.empty()) return result;
1002 if (result.size() >= num_params_to_generate) {
1003 result.resize(num_params_to_generate);
1004 return result;
1005 }
1006
1007 // Repeat parameters until we have enough.
1008 int i = 0;
1009 const int base_size = result.size();
1010 while (result.size() < num_params_to_generate) {
1011 result.push_back(result[i % base_size]);
1012 result.back().set_random_seed(CombineSeed(result.back().random_seed(), i));
1013 ++i;
1014 }
1015 return result;
1016}
1017
1018SubsolverNameFilter::SubsolverNameFilter(const SatParameters& params) {
1019 for (const auto& pattern : params.filter_subsolvers()) {
1020 filter_patterns_.push_back(pattern);
1021 }
1022 for (const auto& pattern : params.ignore_subsolvers()) {
1023 ignore_patterns_.push_back(pattern);
1024 }
1025
1026 // Hack for backward compatibility and easy of use.
1027 if (params.use_ls_only()) {
1028 filter_patterns_.push_back("ls*");
1029 filter_patterns_.push_back("fj*");
1030 }
1031
1032 if (params.use_lns_only()) {
1033 // Still add first solution solvers.
1034 filter_patterns_.push_back("fj*");
1035 filter_patterns_.push_back("fs*");
1036 filter_patterns_.push_back("*lns");
1037 }
1038}
1039
1040bool SubsolverNameFilter::Keep(absl::string_view name) {
1041 last_name_ = name;
1042 if (!filter_patterns_.empty()) {
1043 bool keep = false;
1044 for (const absl::string_view pattern : filter_patterns_) {
1045 if (FNMatch(pattern, name)) {
1046 keep = true;
1047 break;
1048 }
1049 }
1050 if (!keep) {
1051 ignored_.emplace_back(name);
1052 return false;
1053 }
1054 }
1055 for (const absl::string_view pattern : ignore_patterns_) {
1056 if (FNMatch(pattern, name)) {
1057 ignored_.emplace_back(name);
1058 return false;
1059 }
1060 }
1061 return true;
1062}
1063
1064bool SubsolverNameFilter::FNMatch(absl::string_view pattern,
1065 absl::string_view str) {
1066 bool in_wildcard_match = false;
1067 while (true) {
1068 if (pattern.empty()) {
1069 return in_wildcard_match || str.empty();
1070 }
1071 if (str.empty()) {
1072 return pattern.find_first_not_of('*') == pattern.npos;
1073 }
1074 switch (pattern.front()) {
1075 case '*':
1076 pattern.remove_prefix(1);
1077 in_wildcard_match = true;
1078 break;
1079 case '?':
1080 pattern.remove_prefix(1);
1081 str.remove_prefix(1);
1082 break;
1083 default:
1084 if (in_wildcard_match) {
1085 absl::string_view fixed_portion = pattern;
1086 const size_t end = fixed_portion.find_first_of("*?");
1087 if (end != fixed_portion.npos) {
1088 fixed_portion = fixed_portion.substr(0, end);
1089 }
1090 const size_t match = str.find(fixed_portion);
1091 if (match == str.npos) {
1092 return false;
1093 }
1094 pattern.remove_prefix(fixed_portion.size());
1095 str.remove_prefix(match + fixed_portion.size());
1096 in_wildcard_match = false;
1097 } else {
1098 if (pattern.front() != str.front()) {
1099 return false;
1100 }
1101 pattern.remove_prefix(1);
1102 str.remove_prefix(1);
1103 }
1104 break;
1105 }
1106 }
1107}
1108
1109} // namespace sat
1110} // namespace operations_research
int NumProtoVariables() const
Returns the number of variables in the loaded proto.
IntegerVariable Integer(int ref) const
BooleanOrIntegerLiteral GreaterOrEqual(int var, int64_t value) const
Helpers to generate a decision.
BooleanOrIntegerLiteral LowerOrEqual(int var, int64_t value) const
BooleanOrIntegerLiteral MedianValue(int var) const
int NumVariables() const
The valid indices for the calls below are in [0, num_variables).
bool IsFixed(int var) const
Getters about the current domain of the given variable.
const std::vector< ValueLiteralPair > & FullDomainEncoding(IntegerVariable var) const
Definition integer.cc:145
IntegerValue LowerBound(IntegerVariable i) const
Returns the current lower/upper bound of the given integer variable.
Definition integer.h:1717
IntegerValue UpperBound(IntegerVariable i) const
Definition integer.h:1721
bool IsFixed(IntegerVariable i) const
Checks if the variable is fixed.
Definition integer.h:1725
LiteralIndex NegatedIndex() const
Definition sat_base.h:92
LiteralIndex Index() const
Definition sat_base.h:91
BooleanVariable Variable() const
Definition sat_base.h:87
Simple class used to filter executed subsolver names.
SubsolverNameFilter(const SatParameters &params)
Warning, params must outlive the class and be constant.
bool Keep(absl::string_view name)
Shall we keep a parameter with given name?
void Add(Element e, Score score)
Definition util.h:641
const std::vector< Element > & UnorderedElements() const
Definition util.h:664
bool VariableIsAssigned(BooleanVariable var) const
Returns true iff the given variable is assigned.
Definition sat_base.h:196
bool LiteralIsFalse(Literal literal) const
Definition sat_base.h:185
bool LiteralIsTrue(Literal literal) const
Definition sat_base.h:188
SatParameters parameters
CpModelProto proto
The output proto.
double noise
const std::string name
A name for logging purposes.
const Constraint * ct
int64_t value
IntVar * var
GRBmodel * model
std::vector< SatParameters > GetFullWorkerParameters(const SatParameters &base_params, const CpModelProto &cp_model, int num_already_present, SubsolverNameFilter *filter)
const LiteralIndex kNoLiteralIndex(-1)
std::function< BooleanOrIntegerLiteral()> DisjunctivePrecedenceSearchHeuristic(Model *model)
std::vector< IntegerVariable > NegationOf(const std::vector< IntegerVariable > &vars)
Returns the vector of the negated variables.
Definition integer.cc:51
int CombineSeed(int base_seed, int64_t delta)
We assume delta >= 0 and we only use the low bit of delta.
std::function< BooleanOrIntegerLiteral()> ConstructFixedSearchStrategy(std::function< BooleanOrIntegerLiteral()> user_search, std::function< BooleanOrIntegerLiteral()> heuristic_search, std::function< BooleanOrIntegerLiteral()> integer_completion)
std::function< BooleanOrIntegerLiteral()> ConstructHintSearchStrategy(const CpModelProto &cp_model_proto, CpModelMapping *mapping, Model *model)
Constructs a search strategy that follow the hint from the model.
const IntegerVariable kNoIntegerVariable(-1)
std::function< BooleanOrIntegerLiteral()> PseudoCost(Model *model)
std::function< BooleanOrIntegerLiteral()> ConstructHeuristicSearchStrategy(const CpModelProto &cp_model_proto, Model *model)
Constructs a search strategy tailored for the current model.
std::function< BooleanOrIntegerLiteral()> SchedulingSearchHeuristic(Model *model)
A simple heuristic for scheduling models.
std::function< BooleanOrIntegerLiteral()> InstrumentSearchStrategy(const CpModelProto &cp_model_proto, const std::vector< IntegerVariable > &variable_mapping, std::function< BooleanOrIntegerLiteral()> instrumented_strategy, Model *model)
std::vector< SatParameters > GetFirstSolutionBaseParams(const SatParameters &base_params)
std::function< BooleanOrIntegerLiteral()> FirstUnassignedVarAtItsMinHeuristic(const std::vector< IntegerVariable > &vars, Model *model)
std::function< BooleanOrIntegerLiteral()> ConstructIntegerCompletionSearchStrategy(const std::vector< IntegerVariable > &variable_mapping, IntegerVariable objective_var, Model *model)
Constructs an integer completion search strategy.
std::vector< SatParameters > RepeatParameters(absl::Span< const SatParameters > base_params, int num_params_to_generate)
std::function< BooleanOrIntegerLiteral()> ConstructUserSearchStrategy(const CpModelProto &cp_model_proto, Model *model)
Constructs the search strategy specified in the given CpModelProto.
int NegatedRef(int ref)
Small utility functions to deal with negative variable/literal references.
absl::flat_hash_map< std::string, SatParameters > GetNamedParameters(SatParameters base_params)
std::function< BooleanOrIntegerLiteral()> CumulativePrecedenceSearchHeuristic(Model *model)
std::function< BooleanOrIntegerLiteral()> FollowHint(const std::vector< BooleanOrIntegerVariable > &vars, const std::vector< IntegerValue > &values, Model *model)
std::function< BooleanOrIntegerLiteral()> SequentialSearch(std::vector< std::function< BooleanOrIntegerLiteral()> > heuristics)
In SWIG mode, we don't want anything besides these top-level includes.
LinearRange operator<=(const LinearExpr &lhs, const LinearExpr &rhs)
std::optional< int64_t > end
static IntegerLiteral GreaterOrEqual(IntegerVariable i, IntegerValue bound)
Definition integer.h:1667
static IntegerLiteral LowerOrEqual(IntegerVariable i, IntegerValue bound)
Definition integer.h:1673
Stores one variable and its strategy value.