Google OR-Tools v9.11
a fast and portable software suite for combinatorial optimization
Loading...
Searching...
No Matches
lp_parameter_tests.cc
Go to the documentation of this file.
1// Copyright 2010-2024 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14// TODO(b/180024054): the following parameters are not tested:
15// * time_limit
16// * threads
17// * scaling
18//
19// The following parameters are under-tested:
20// * lp_algorithm
21//
22// Note that cuts and heuristics do not apply for LP. enable_output is tested
23// in generic_tests.h.
25
#include <algorithm>
#include <cmath>
#include <optional>
#include <ostream>
#include <string>
#include <vector>

#include "absl/container/flat_hash_set.h"
#include "absl/log/check.h"
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/string_view.h"
#include "gtest/gtest.h"
#include "ortools/base/gmock.h"
#include "ortools/base/status_macros.h"
#include "ortools/math_opt/cpp/matchers.h"
#include "ortools/math_opt/cpp/math_opt.h"
#include "ortools/math_opt/solver_tests/lp_parameter_tests.h"
45
46namespace operations_research {
47namespace math_opt {
48
49using ::testing::HasSubstr;
50using ::testing::status::IsOkAndHolds;
51using ::testing::status::StatusIs;
52
53std::ostream& operator<<(std::ostream& out,
54 const LpParameterTestParams& params) {
55 out << "{ solver_type: " << params.solver_type
56 << " supports_simplex: " << params.supports_simplex
57 << " supports_barrier: " << params.supports_barrier
58 << " supports_random_seed: " << params.supports_random_seed
59 << " supports_presolve: " << params.supports_presolve
60 << " supports_cutoff: " << params.supports_cutoff << " }";
61 return out;
62}
63
64namespace {
65
// Builds the LP relaxation of an n x n assignment problem:
//   max   sum_{i,j} x[i][j]
//   s.t.  sum_j x[i][j] <= 1   for all i
//         sum_i x[i][j] <= 1   for all j
//         x[i][j] in [0, 1].
// The LP has many optimal extreme points (one per perfect matching), which
// makes it useful for observing the effect of the random seed parameter.
class AssignmentProblem {
 public:
  explicit AssignmentProblem(const int n) : n_(n) {
    // Variables x_i_j, stored row-major in vars_.
    for (int i = 0; i < n_; ++i) {
      vars_.emplace_back();
      for (int j = 0; j < n_; ++j) {
        vars_.back().push_back(
            model_.AddVariable(0.0, 1.0, false, absl::StrCat("x_", i, "_", j)));
      }
    }
    LinearExpression obj;
    for (int i = 0; i < n_; ++i) {
      obj += Sum(vars_[i]);
    }
    model_.Maximize(obj);
    // Row constraints: each i is assigned at most once.
    for (int i = 0; i < n_; ++i) {
      model_.AddLinearConstraint(Sum(vars_[i]) <= 1.0);
    }
    // Column constraints: each j is assigned at most once.
    for (int j = 0; j < n_; ++j) {
      LinearExpression j_vars;
      for (int i = 0; i < n_; ++i) {
        j_vars += vars_[i][j];
      }
      model_.AddLinearConstraint(j_vars <= 1.0);
    }
  }

  // Forces every off-diagonal variable to zero, so that presolve alone can
  // fix all variables (the only optimal solution is the identity matching).
  void MakePresolveOptimal() {
    for (int i = 0; i < n_; ++i) {
      LinearExpression terms;
      for (int j = 0; j < n_; ++j) {
        if (i != j) {
          terms += vars_[i][j];
        }
      }
      model_.AddLinearConstraint(terms == 0.0);
    }
  }

  const Model& model() { return model_; }

  // Returns the sorted names of the variables taking value one in `result`.
  // CHECK-fails if any variable value is fractional (farther than 0.01 from
  // an integer): fingerprints are only meaningful for vertex solutions.
  std::vector<std::string> SolutionFingerprint(
      const SolveResult& result) const {
    std::vector<std::string> fingerprint;
    for (int i = 0; i < n_; ++i) {
      for (int j = 0; j < n_; ++j) {
        const double val = result.variable_values().at(vars_[i][j]);
        CHECK(val <= 0.01 || val >= 0.99)
            << "i: " << i << " j: " << j << " val: " << val;
        if (val > 0.5) {
          fingerprint.push_back(std::string(vars_[i][j].name()));
        }
      }
    }
    // Sort so fingerprints compare equal independently of discovery order.
    std::sort(fingerprint.begin(), fingerprint.end());
    return fingerprint;
  }

 private:
  const int n_;
  Model model_;
  std::vector<std::vector<Variable>> vars_;
};
129
130std::vector<std::string> LPAssignment(const SolverType solver_type,
131 const SolveArguments& args) {
132 constexpr int n = 5;
133 AssignmentProblem assignment(n);
134 const SolveResult result =
135 Solve(assignment.model(), solver_type, args).value();
136 CHECK_OK(result.termination.EnsureIsOptimal());
137 CHECK_LE(std::abs(result.objective_value() - n), 1e-4)
138 << result.objective_value();
139 return assignment.SolutionFingerprint(result);
140}
141
// Checks that (a) a fixed random seed gives deterministic solutions across
// repeated solves, and (b) varying the seed produces several distinct optimal
// vertices of a degenerate assignment LP.
TEST_P(LpParameterTest, RandomSeedLp) {
  if (!SupportsRandomSeed()) {
    GTEST_SKIP() << "Random seed not supported. Ignoring this test.";
  }
  absl::flat_hash_set<std::vector<std::string>> solutions_seen;
  for (int seed = 10; seed < 200; seed += 10) {
    std::vector<std::string> result_for_seed;
    for (int trial = 0; trial < 10; ++trial) {
      SolveArguments args;
      args.parameters.random_seed = seed;
      // When the problem is solved in presolve, solvers typically give the
      // same solution every time, regardless of the seed.
      args.parameters.presolve = Emphasis::kOff;
      std::vector<std::string> result = LPAssignment(TestedSolver(), args);
      if (trial == 0) {
        result_for_seed = result;
        solutions_seen.insert(result_for_seed);
      } else {
        // The same seed must reproduce the same solution exactly.
        // Fix: the original message misspelled "trial" as "trail".
        ASSERT_EQ(result_for_seed, result)
            << "seed: " << seed << " trial: " << trial;
      }
    }
  }
  // Drawing 19 seeds from a very large solution set with replacement, the
  // probability of getting at least 3 unique solutions is very high.
  EXPECT_GE(solutions_seen.size(), 3);
}
169
170SolveStats LPForPresolve(SolverType solver_type, Emphasis presolve_emphasis) {
171 AssignmentProblem assignment_problem(6);
172 assignment_problem.MakePresolveOptimal();
173 SolveArguments args;
174 args.parameters.presolve = presolve_emphasis;
175 const SolveResult result =
176 Solve(assignment_problem.model(), solver_type, args).value();
177 CHECK_OK(result.termination.EnsureIsOptimal());
178 return result.solve_stats;
179}
180
181TEST_P(LpParameterTest, PresolveOff) {
182 if (!SupportsPresolve()) {
183 GTEST_SKIP() << "Presolve emphasis not supported. Ignoring this test.";
184 }
185 const SolveStats stats = LPForPresolve(TestedSolver(), Emphasis::kOff);
186 EXPECT_GT(stats.simplex_iterations + stats.barrier_iterations +
187 stats.first_order_iterations,
188 0);
189}
190
191TEST_P(LpParameterTest, PresolveOn) {
192 if (!SupportsPresolve()) {
193 GTEST_SKIP() << "Presolve emphasis not supported. Ignoring this test.";
194 }
195 const SolveStats stats = LPForPresolve(TestedSolver(), Emphasis::kMedium);
196 EXPECT_EQ(stats.simplex_iterations + stats.barrier_iterations +
197 stats.first_order_iterations,
198 0);
199}
200
// Solves a 6x6 assignment problem with the requested LP algorithm and
// returns the solve stats, or an error if the solve fails or does not
// terminate optimal.
//
// This test doesn't really distinguish between primal and dual simplex, a
// better test is possible.
absl::StatusOr<SolveStats> SolveForLpAlgorithm(SolverType solver_type,
                                               LPAlgorithm algorithm) {
  AssignmentProblem assignment_problem(6);
  SolveArguments args;
  args.parameters.lp_algorithm = algorithm;
  // Make sure that the underlying solver doesn't use an ensemble of LP
  // algorithms.
  // TODO(b/271098533): use solver capabilities instead. Note that HiGHS only
  // lets you control the number of threads by setting a global that is not
  // synchronized, so we disable it here.
  if (solver_type != SolverType::kHighs) {
    args.parameters.threads = 1;
  }
  ASSIGN_OR_RETURN(const SolveResult result,
                   Solve(assignment_problem.model(), solver_type, args));
  RETURN_IF_ERROR(result.termination.EnsureIsOptimal());
  return result.solve_stats;
}
221
// Forcing primal simplex must either error cleanly (when unsupported) or
// report only simplex iterations.
// NOTE(review): the scraped source dropped the `EXPECT_THAT(` and
// `ASSERT_OK_AND_ASSIGN(` lines; restored here to mirror LPAlgorithmDual.
TEST_P(LpParameterTest, LPAlgorithmPrimal) {
  if (!SupportsSimplex()) {
    EXPECT_THAT(
        SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kPrimalSimplex),
        StatusIs(absl::StatusCode::kInvalidArgument,
                 AnyOf(HasSubstr("LP_ALGORITHM_PRIMAL_SIMPLEX"),
                       HasSubstr("lp_algorithm"))));
    return;
  }
  ASSERT_OK_AND_ASSIGN(
      const SolveStats stats,
      SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kPrimalSimplex));

  EXPECT_GT(stats.simplex_iterations, 0);
  EXPECT_EQ(stats.barrier_iterations, 0);
  EXPECT_EQ(stats.first_order_iterations, 0);
}
239
// Forcing dual simplex must either error cleanly (when unsupported) or
// report only simplex iterations.
// NOTE(review): restored the `ASSERT_OK_AND_ASSIGN(` line dropped by the
// scrape (the orphaned `const SolveStats stats,` argument remained).
TEST_P(LpParameterTest, LPAlgorithmDual) {
  if (!SupportsSimplex()) {
    EXPECT_THAT(SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kDualSimplex),
                StatusIs(absl::StatusCode::kInvalidArgument,
                         AnyOf(HasSubstr("LP_ALGORITHM_DUAL_SIMPLEX"),
                               HasSubstr("lp_algorithm"))));
    return;
  }
  ASSERT_OK_AND_ASSIGN(
      const SolveStats stats,
      SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kDualSimplex));

  EXPECT_GT(stats.simplex_iterations, 0);
  EXPECT_EQ(stats.barrier_iterations, 0);
  EXPECT_EQ(stats.first_order_iterations, 0);
}
256
// Forcing barrier must either error cleanly (when unsupported) or report
// barrier iterations; simplex iterations may appear from crossover.
// NOTE(review): restored the `ASSERT_OK_AND_ASSIGN(` line dropped by the
// scrape.
TEST_P(LpParameterTest, LPAlgorithmBarrier) {
  if (!SupportsBarrier()) {
    EXPECT_THAT(SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kBarrier),
                StatusIs(absl::StatusCode::kInvalidArgument,
                         AnyOf(HasSubstr("LP_ALGORITHM_BARRIER"),
                               HasSubstr("lp_algorithm"))));
    return;
  }
  ASSERT_OK_AND_ASSIGN(
      const SolveStats stats,
      SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kBarrier));
  // As of 2023-11-30 ecos_solver does not set the iteration count.
  if (GetParam().solver_type != SolverType::kEcos) {
    EXPECT_GT(stats.barrier_iterations, 0);
  }
  // We make no assertions on simplex iterations, we do not specify if
  // crossover takes place.
}
275
// Forcing a first-order method must either error cleanly (when unsupported)
// or report only first-order iterations.
// NOTE(review): restored the `ASSERT_OK_AND_ASSIGN(` line dropped by the
// scrape.
TEST_P(LpParameterTest, LPAlgorithmFirstOrder) {
  if (!SupportsFirstOrder()) {
    EXPECT_THAT(SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kFirstOrder),
                StatusIs(absl::StatusCode::kInvalidArgument,
                         AnyOf(HasSubstr("LP_ALGORITHM_FIRST_ORDER"),
                               HasSubstr("lp_algorithm"))));
    return;
  }
  ASSERT_OK_AND_ASSIGN(
      const SolveStats stats,
      SolveForLpAlgorithm(TestedSolver(), LPAlgorithm::kFirstOrder));
  EXPECT_EQ(stats.simplex_iterations, 0);
  EXPECT_EQ(stats.barrier_iterations, 0);
  EXPECT_GT(stats.first_order_iterations, 0);
}
291
// Builds an LP that cannot be solved in a single iteration and solves it with
// iteration_limit = 1, so the solver is forced to stop at the limit.
//
// `algorithm` may be nullopt to leave the algorithm choice to the solver;
// presolve is turned off (when supported) so the solve cannot be completed
// during presolve with zero iterations.
absl::StatusOr<SolveResult> LPForIterationLimit(
    const SolverType solver_type, const std::optional<LPAlgorithm> algorithm,
    const int n, const bool supports_presolve) {
  // The unique optimal solution to this problem is x[i] = 1/2 for all i, with
  // an objective value of n/2.
  Model model("Iteration limit LP");
  std::vector<Variable> x;
  for (int i = 0; i < n; ++i) {
    x.push_back(model.AddContinuousVariable(0.0, 1.0));
  }
  // Pairwise constraints x[i] + x[j] <= 1 for all i < j.
  for (int i = 0; i < n; ++i) {
    for (int j = i + 1; j < n; ++j) {
      model.AddLinearConstraint(x[i] + x[j] <= 1);
    }
  }
  model.Maximize(Sum(x));
  SolveArguments args;
  args.parameters.lp_algorithm = algorithm;
  if (supports_presolve) {
    args.parameters.presolve = Emphasis::kOff;
  }
  args.parameters.iteration_limit = 1;
  return Solve(model, solver_type, args);
}
316
317TEST_P(LpParameterTest, IterationLimitPrimalSimplex) {
318 if (!SupportsSimplex()) {
319 GTEST_SKIP() << "Simplex not supported. Ignoring this test.";
320 }
322 const SolveResult result,
323 LPForIterationLimit(TestedSolver(), LPAlgorithm::kPrimalSimplex, 3,
324 SupportsPresolve()));
325 EXPECT_THAT(result,
328 /*allow_limit_undetermined=*/!GetParam().reports_limits));
329}
330
331TEST_P(LpParameterTest, IterationLimitDualSimplex) {
332 if (!SupportsSimplex()) {
333 GTEST_SKIP() << "Simplex not supported. Ignoring this test.";
334 }
336 const SolveResult result,
337 LPForIterationLimit(TestedSolver(), LPAlgorithm::kDualSimplex, 3,
338 SupportsPresolve()));
339 EXPECT_THAT(result,
342 /*allow_limit_undetermined=*/!GetParam().reports_limits));
343}
344
345TEST_P(LpParameterTest, IterationLimitBarrier) {
346 if (!SupportsBarrier()) {
347 GTEST_SKIP() << "Barrier not supported. Ignoring this test.";
348 }
350 const SolveResult result,
351 LPForIterationLimit(TestedSolver(), LPAlgorithm::kBarrier, 3,
352 SupportsPresolve()));
353 EXPECT_THAT(result,
356 /*allow_limit_undetermined=*/!GetParam().reports_limits));
357}
358
359TEST_P(LpParameterTest, IterationLimitFirstOrder) {
360 if (!SupportsFirstOrder()) {
361 GTEST_SKIP() << "First order methods not supported. Ignoring this test.";
362 }
364 const SolveResult result,
365 LPForIterationLimit(TestedSolver(), LPAlgorithm::kFirstOrder, 3,
366 SupportsPresolve()));
367 EXPECT_THAT(result,
370 /*allow_limit_undetermined=*/!GetParam().reports_limits));
371}
372
373TEST_P(LpParameterTest, IterationLimitUnspecified) {
375 const SolveResult result,
376 LPForIterationLimit(TestedSolver(), std::nullopt, 3, SupportsPresolve()));
377 EXPECT_THAT(result,
380 /*allow_limit_undetermined=*/!GetParam().reports_limits));
381}
382
383// This test is a little fragile as we do not set an initial basis, perhaps
384// worth reconsidering if it becomes an issue.
// This test is a little fragile as we do not set an initial basis, perhaps
// worth reconsidering if it becomes an issue.
// NOTE(review): restored the `IsOkAndHolds(TerminatesWithReasonFeasible(`
// matcher lines dropped by the scrape (the trailing
// /*allow_limit_undetermined=*/ argument with `)));` remained) — confirm the
// Limit value against matchers.h.
TEST_P(LpParameterTest, ObjectiveLimitMaximization) {
  // We only expect this to work for primal simplex.
  if (!GetParam().supports_simplex) {
    return;
  }
  // max 10x + 9y + 8z
  // s.t. x + y <= 1
  //      x + z <= 1
  //      x, y, z in [0, 1].
  //
  // The optimal solution is (0, 1, 1), objective value 17.
  Model model;
  const Variable x = model.AddContinuousVariable(0.0, 1.0);
  const Variable y = model.AddContinuousVariable(0.0, 1.0);
  const Variable z = model.AddContinuousVariable(0.0, 1.0);
  model.AddLinearConstraint(x + y <= 1.0);
  model.AddLinearConstraint(x + z <= 1.0);
  model.Maximize(10 * x + 9 * y + 8 * z);

  // We can stop as soon as we find a solution with objective at least -0.5,
  // i.e. on any feasible solution.
  SolveParameters params = {.objective_limit = -0.5,
                            .lp_algorithm = LPAlgorithm::kPrimalSimplex,
                            .presolve = Emphasis::kOff};
  const absl::StatusOr<SolveResult> result =
      Solve(model, GetParam().solver_type, {.parameters = params});
  if (!GetParam().supports_objective_limit) {
    EXPECT_THAT(result, StatusIs(absl::StatusCode::kInvalidArgument,
                                 HasSubstr("objective_limit")));
    return;
  }
  EXPECT_THAT(result,
              IsOkAndHolds(TerminatesWithReasonFeasible(
                  Limit::kObjective,
                  /*allow_limit_undetermined=*/!GetParam().reports_limits)));
  // When the optimal solution is worse than objective_limit, the parameter
  // has no effect on the returned SolveResult and we return the optimal
  // solution.
  params.objective_limit = 18.0;
  EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
              IsOkAndHolds(IsOptimal(17.0)));
}
427
428// This test is a little fragile as we do not set an initial basis, perhaps
429// worth reconsidering if it becomes an issue.
430TEST_P(LpParameterTest, ObjectiveLimitMinimization) {
431 if (!GetParam().supports_objective_limit) {
432 // We have already tested the solver errors in ObjectiveLimitMaximization.
433 return;
434 }
435 // We only expect this to work for primal simplex.
436 if (!GetParam().supports_simplex) {
437 return;
438 }
439 // min 10x + 9y + 8z
440 // s.t. x + y >= 1
441 // x + z >= 1
442 // x, y, z in [0, 1].
443 //
444 // The optimal solution is (1, 0, 0), objective value 10.
445 Model model;
446 const Variable x = model.AddContinuousVariable(0.0, 1.0);
447 const Variable y = model.AddContinuousVariable(0.0, 1.0);
448 const Variable z = model.AddContinuousVariable(0.0, 1.0);
449 model.AddLinearConstraint(x + y >= 1.0);
450 model.AddLinearConstraint(x + z >= 1.0);
451 model.Minimize(10 * x + 9 * y + 8 * z);
452
453 // We can stop as soon as we find a solution with objective at most 30.0,
454 // i.e. on any feasible solution.
455 SolveParameters params = {.objective_limit = 30.0,
456 .lp_algorithm = LPAlgorithm::kPrimalSimplex,
457 .presolve = Emphasis::kOff};
458 EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
461 /*allow_limit_undetermined=*/!GetParam().reports_limits)));
462 // When the optimal solution is worse than objective_limit, the parameter
463 // has no effect on the returned SolveResult and we return the optimal
464 // solution.
465 params.objective_limit = 7.0;
466 EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
467 IsOkAndHolds(IsOptimal(10.0)));
468}
469
470// This test is a little fragile as we do not set an initial basis, perhaps
471// worth reconsidering if it becomes an issue.
// This test is a little fragile as we do not set an initial basis, perhaps
// worth reconsidering if it becomes an issue.
// NOTE(review): restored the
// `IsOkAndHolds(TerminatesWithReasonNoSolutionFound(` matcher lines dropped
// by the scrape — confirm the Limit value against matchers.h.
TEST_P(LpParameterTest, BestBoundLimitMaximize) {
  // We only expect this to work for dual simplex.
  if (!GetParam().supports_simplex) {
    return;
  }
  if (GetParam().solver_type == SolverType::kHighs) {
    // TODO(b/272312674): bug in HiGHS breaks this test.
    GTEST_SKIP() << "TODO(b/272312674): Highs appears to have a bug where "
                    "best_bound_limit is only supported for minimization.";
  }
  // max 3x + 2y + z
  // s.t. x + y + z <= 1.5
  //      x, y, in [0, 1]
  //
  // The optimal solution is (1, 0.5, 0) with objective value 4.
  Model model;
  const Variable x = model.AddContinuousVariable(0.0, 1.0);
  const Variable y = model.AddContinuousVariable(0.0, 1.0);
  const Variable z = model.AddContinuousVariable(0.0, 1.0);
  model.AddLinearConstraint(x + y + z <= 1.5);
  model.Maximize(3 * x + 2 * y + z);

  // With best bound limit of 6.5, we will find a dual feasible solution with
  // dual objective better (smaller) than 3.5 before finding the optimal
  // solution (e.g. (x, y, z) = (1, 1, 1), objective = 6).
  SolveParameters params = {.best_bound_limit = 6.5,
                            .lp_algorithm = LPAlgorithm::kDualSimplex,
                            .presolve = Emphasis::kOff};
  const absl::StatusOr<SolveResult> result =
      Solve(model, GetParam().solver_type, {.parameters = params});
  if (!GetParam().supports_best_bound_limit) {
    EXPECT_THAT(result, StatusIs(absl::StatusCode::kInvalidArgument,
                                 HasSubstr("best_bound_limit")));
    return;
  }
  EXPECT_THAT(result,
              IsOkAndHolds(TerminatesWithReasonNoSolutionFound(
                  Limit::kObjective,
                  /*allow_limit_undetermined=*/!GetParam().reports_limits)));
  // When the optimal solution is better than best_bound_limit, the parameter
  // has no effect on the returned SolveResult and we return the optimal
  // solution.
  params.best_bound_limit = 3.5;
  EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
              IsOkAndHolds(IsOptimal(4.0)));
}
518
519// This test is a little fragile as we do not set an initial basis, perhaps
520// worth reconsidering if it becomes an issue.
521TEST_P(LpParameterTest, BestBoundLimitMinimize) {
522 if (!GetParam().supports_objective_limit) {
523 // We have already tested the solver errors in BestBoundLimitMaximize.
524 return;
525 }
526 // We only expect this to work for dual simplex.
527 if (!GetParam().supports_simplex) {
528 return;
529 }
530 // min 2x + y
531 // s.t. x + y >= 1
532 // x, y, in [0, 1]
533 //
534 // The optimal solution is (0, 1) with objective value 1.
535 Model model;
536 const Variable x = model.AddContinuousVariable(0.0, 1.0);
537 const Variable y = model.AddContinuousVariable(0.0, 1.0);
538 model.AddLinearConstraint(x + y >= 1.0);
539 model.Minimize(2 * x + y);
540
541 // With best bound limit of -0.5, we will find a dual feasible solution with
542 // dual objective better (larger) than -0.5 before finding the optimal
543 // solution (e.g. (x, y) = (0, 0), objective = 0).
544 SolveParameters params = {.best_bound_limit = -0.5,
545 .lp_algorithm = LPAlgorithm::kDualSimplex,
546 .presolve = Emphasis::kOff};
547 EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
550 /*allow_limit_undetermined=*/!GetParam().reports_limits)));
551 // When the optimal solution is better than best_bound_limit, the parameter
552 // has no effect on the returned SolveResult and we return the optimal
553 // solution.
554 params.best_bound_limit = 1.5;
555 EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
556 IsOkAndHolds(IsOptimal(1.0)));
557}
558
// NOTE(review): restored the closing `}` of the unsupported-parameter branch
// and the `EXPECT_THAT(...TerminatesWithReasonNoSolutionFound(...))` check
// dropped by the scrape — confirm the exact matcher against upstream.
TEST_P(LpParameterTest, CutoffLimitMaximize) {
  // max 2x + y
  // s.t. x + y <= 1
  //      x, y, in [0, 1]
  //
  // The optimal solution is (1, 0) with objective value 2.
  Model model;
  const Variable x = model.AddContinuousVariable(0.0, 1.0);
  const Variable y = model.AddContinuousVariable(0.0, 1.0);
  model.AddLinearConstraint(x + y <= 1.0);
  model.Maximize(2 * x + y);
  // When the optimal solution is worse than cutoff, no solution information
  // is returned and we return Limit::kCutoff.
  SolveParameters params = {.cutoff_limit = 3.5, .presolve = Emphasis::kOff};
  const absl::StatusOr<SolveResult> result =
      Solve(model, GetParam().solver_type, {.parameters = params});
  if (!GetParam().supports_cutoff) {
    EXPECT_THAT(result, StatusIs(absl::StatusCode::kInvalidArgument,
                                 HasSubstr("cutoff_limit")));
    return;
  }
  EXPECT_THAT(result, IsOkAndHolds(TerminatesWithReasonNoSolutionFound(
                          Limit::kCutoff)));
  // When the optimal solution is better than cutoff, the parameter has no
  // effect on the returned SolveResult (at least for problems with a unique
  // solution, it may change the nodes visited still) and we return the
  // optimal solution.
  params.cutoff_limit = 1.5;
  EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
              IsOkAndHolds(IsOptimal(2.0)));
}
590
591TEST_P(LpParameterTest, CutoffLimitMinimize) {
592 if (!GetParam().supports_cutoff) {
593 // We have already tested the solver errors in CutoffLimitMaximize.
594 return;
595 }
596 // min 2x + y
597 // s.t. x + y >= 1
598 // x, y, in [0, 1]
599 //
600 // The optimal solution is (0, 1) with objective value 1.
601 Model model;
602 const Variable x = model.AddContinuousVariable(0.0, 1.0);
603 const Variable y = model.AddContinuousVariable(0.0, 1.0);
604 model.AddLinearConstraint(x + y >= 1.0);
605 model.Minimize(2 * x + y);
606 // When the optimal solution is worse than cutoff, no solution information is
607 // returned and we return Limit::kCutoff.
608 SolveParameters params = {.cutoff_limit = -0.5, .presolve = Emphasis::kOff};
610 Solve(model, GetParam().solver_type, {.parameters = params}),
612 // When the optimal solution is better than cutoff, the parameter has no
613 // effect on the returned SolveResult (at least for problems with a unique
614 // solution, it may change the nodes visited still) and we return the optimal
615 // solution.
616 params.cutoff_limit = 1.5;
617 EXPECT_THAT(Solve(model, GetParam().solver_type, {.parameters = params}),
618 IsOkAndHolds(IsOptimal(1.0)));
619}
620
621// TODO(b/272268188): test the interaction between cutoff and primal + dual
622// infeasibility.
623
624} // namespace
625} // namespace math_opt
626} // namespace operations_research
IntegerValue y
const std::vector< IntVar * > vars_
#define ASSIGN_OR_RETURN(lhs, rexpr)
#define RETURN_IF_ERROR(expr)
const std::string name
A name for logging purposes.
GRBmodel * model
EXPECT_THAT(ComputeInfeasibleSubsystem(model, GetParam().solver_type), IsOkAndHolds(IsInfeasible(true, ModelSubset{ .variable_bounds={{x, ModelSubset::Bounds{.lower=false,.upper=true}}},.linear_constraints={ {c, ModelSubset::Bounds{.lower=true,.upper=false}}}})))
TEST_P(InfeasibleSubsystemTest, CanComputeInfeasibleSubsystem)
SolverType
The solvers supported by MathOpt.
Definition parameters.h:42
<=x<=1 IncrementalMipTest::IncrementalMipTest() :model_("incremental_solve_test"), x_(model_.AddContinuousVariable(0.0, 1.0, "x")), y_(model_.AddIntegerVariable(0.0, 2.0, "y")), c_(model_.AddLinearConstraint(0<=x_+y_<=1.5, "c")) { model_.Maximize(3.0 *x_+2.0 *y_+0.1);solver_=NewIncrementalSolver(&model_, TestedSolver()).value();const SolveResult first_solve=solver_->Solve().value();CHECK(first_solve.has_primal_feasible_solution());CHECK_LE(std::abs(first_solve.objective_value() - 3.6), kTolerance)<< first_solve.objective_value();} namespace { TEST_P(SimpleMipTest, OneVarMax) { Model model;const Variable x=model.AddVariable(0.0, 4.0, false, "x");model.Maximize(2.0 *x);ASSERT_OK_AND_ASSIGN(const SolveResult result, Solve(model, GetParam().solver_type));ASSERT_THAT(result, IsOptimal(8.0));EXPECT_THAT(result.variable_values(), IsNear({{x, 4.0}}));} TEST_P(SimpleMipTest, OneVarMin) { Model model;const Variable x=model.AddVariable(-2.4, 4.0, false, "x");model.Minimize(2.0 *x);ASSERT_OK_AND_ASSIGN(const SolveResult result, Solve(model, GetParam().solver_type));ASSERT_THAT(result, IsOptimal(-4.8));EXPECT_THAT(result.variable_values(), IsNear({{x, -2.4}}));} TEST_P(SimpleMipTest, OneIntegerVar) { Model model;const Variable x=model.AddVariable(0.0, 4.5, true, "x");model.Maximize(2.0 *x);ASSERT_OK_AND_ASSIGN(const SolveResult result, Solve(model, GetParam().solver_type));ASSERT_THAT(result, IsOptimal(8.0));EXPECT_THAT(result.variable_values(), IsNear({{x, 4.0}}));} TEST_P(SimpleMipTest, SimpleLinearConstraint) { Model model;const Variable x=model.AddBinaryVariable("x");const Variable y=model.AddBinaryVariable("y");model.Maximize(2.0 *x+y);model.AddLinearConstraint(0.0<=x+y<=1.5, "c");ASSERT_OK_AND_ASSIGN(const SolveResult result, Solve(model, GetParam().solver_type));ASSERT_THAT(result, IsOptimal(2.0));EXPECT_THAT(result.variable_values(), IsNear({{x, 1}, {y, 0}}));} TEST_P(SimpleMipTest, Unbounded) { Model model;const Variable x=model.AddVariable(0.0, kInf, true, 
"x");model.Maximize(2.0 *x);ASSERT_OK_AND_ASSIGN(const SolveResult result, Solve(model, GetParam().solver_type));if(GetParam().report_unboundness_correctly) { ASSERT_THAT(result, TerminatesWithOneOf({TerminationReason::kUnbounded, TerminationReason::kInfeasibleOrUnbounded}));} else { ASSERT_THAT(result, TerminatesWith(TerminationReason::kOtherError));} } TEST_P(SimpleMipTest, Infeasible) { Model model;const Variable x=model.AddVariable(0.0, 3.0, true, "x");model.Maximize(2.0 *x);model.AddLinearConstraint(x >=4.0);ASSERT_OK_AND_ASSIGN(const SolveResult result, Solve(model, GetParam().solver_type));ASSERT_THAT(result, TerminatesWith(TerminationReason::kInfeasible));} TEST_P(SimpleMipTest, FractionalBoundsContainNoInteger) { if(GetParam().solver_type==SolverType::kGurobi) { GTEST_SKIP()<< "TODO(b/272298816): Gurobi bindings are broken here.";} Model model;const Variable x=model.AddIntegerVariable(0.5, 0.6, "x");model.Maximize(x);EXPECT_THAT(Solve(model, GetParam().solver_type), IsOkAndHolds(TerminatesWith(TerminationReason::kInfeasible)));} TEST_P(IncrementalMipTest, EmptyUpdate) { ASSERT_THAT(solver_->Update(), IsOkAndHolds(DidUpdate()));ASSERT_OK_AND_ASSIGN(const SolveResult result, solver_->SolveWithoutUpdate());ASSERT_THAT(result, IsOptimal(3.6));EXPECT_THAT(result.variable_values(), IsNear({{x_, 0.5}, {y_, 1.0}}));} TEST_P(IncrementalMipTest, MakeContinuous) { model_.set_continuous(y_);ASSERT_THAT(solver_->Update(), IsOkAndHolds(DidUpdate()));ASSERT_OK_AND_ASSIGN(const SolveResult result, solver_->SolveWithoutUpdate());ASSERT_THAT(result, IsOptimal(4.1));EXPECT_THAT(result.variable_values(), IsNear({{x_, 1.0}, {y_, 0.5}}));} TEST_P(IncrementalMipTest, DISABLED_MakeContinuousWithNonIntegralBounds) { solver_.reset();Model model("bounds");const Variable x=model.AddIntegerVariable(0.5, 1.5, "x");model.Maximize(x);ASSERT_OK_AND_ASSIGN(const auto solver, NewIncrementalSolver(&model, TestedSolver()));ASSERT_THAT(solver->Solve(), 
IsOkAndHolds(IsOptimal(1.0)));model.set_continuous(x);ASSERT_THAT(solver->Update(), IsOkAndHolds(DidUpdate()));ASSERT_THAT(solver-> IsOkAndHolds(IsOptimal(1.5)))
Emphasis
never give an error, and will map onto their best match.
Definition parameters.h:179
LPAlgorithm
Selects an algorithm for solving linear programs.
Definition parameters.h:128
LinearExpression Sum(const Iterable &items)
absl::StatusOr< SolveResult > Solve(const Model &model, const SolverType solver_type, const SolveArguments &solve_args, const SolverInitArguments &init_args)
Definition solve.cc:62
std::ostream & operator<<(std::ostream &ostr, const IndicatorConstraint &constraint)
testing::Matcher< SolveResult > TerminatesWithReasonFeasible(const Limit expected, const bool allow_limit_undetermined)
Definition matchers.cc:657
testing::Matcher< SolveResult > TerminatesWithLimit(const Limit expected, const bool allow_limit_undetermined)
Definition matchers.cc:648
Matcher< SolveResult > IsOptimal(const std::optional< double > expected_primal_objective, const double tolerance)
Definition matchers.cc:762
testing::Matcher< SolveResult > TerminatesWithReasonNoSolutionFound(const Limit expected, const bool allow_limit_undetermined)
Definition matchers.cc:665
In SWIG mode, we don't want anything besides these top-level includes.
internal::StatusIsMatcher StatusIs(CodeMatcher code_matcher, MessageMatcher message_matcher)
#define ASSERT_OK_AND_ASSIGN(lhs, rexpr)
Parameters for the LpParameterTest suite below.
bool supports_presolve
Indicates if the solver supports setting the presolve emphasis.
bool supports_cutoff
Indicates if the solver supports a cutoff value.
bool supports_barrier
Indicates if the solver supports barrier as an algorithm.
bool supports_random_seed
Indicates if the solver supports setting the random seed.
bool supports_simplex
Indicates if the solver supports simplex as an algorithm (primal and dual).