Google OR-Tools v9.11
a fast and portable software suite for combinatorial optimization
clause.cc
1// Copyright 2010-2024 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14#include "ortools/sat/clause.h"
15
16#include <stddef.h>
17
18#include <algorithm>
19#include <cstdint>
20#include <deque>
21#include <queue>
22#include <string>
23#include <utility>
24#include <vector>
25
26#include "absl/container/flat_hash_map.h"
27#include "absl/container/flat_hash_set.h"
28#include "absl/container/inlined_vector.h"
29#include "absl/log/check.h"
30#include "absl/random/bit_gen_ref.h"
31#include "absl/random/distributions.h"
32#include "absl/types/span.h"
36#include "ortools/base/timer.h"
40#include "ortools/sat/model.h"
42#include "ortools/sat/util.h"
43#include "ortools/util/bitset.h"
44#include "ortools/util/stats.h"
47namespace operations_research {
48namespace sat {
49
50namespace {
51
52// Returns true if the given watcher list contains the given clause.
53template <typename Watcher>
54bool WatcherListContains(const std::vector<Watcher>& list,
55 const SatClause& candidate) {
56 for (const Watcher& watcher : list) {
57 if (watcher.clause == &candidate) return true;
58 }
59 return false;
60}
61
62// A simple wrapper to simplify the erase(std::remove_if()) pattern.
63template <typename Container, typename Predicate>
64void RemoveIf(Container c, Predicate p) {
65 c->erase(std::remove_if(c->begin(), c->end(), p), c->end());
66}
67
68} // namespace
69
70// ----- ClauseManager -----
71
72ClauseManager::ClauseManager(Model* model)
73 : SatPropagator("ClauseManager"),
74 implication_graph_(model->GetOrCreate<BinaryImplicationGraph>()),
75 trail_(model->GetOrCreate<Trail>()),
76 num_inspected_clauses_(0),
77 num_inspected_clause_literals_(0),
78 num_watched_clauses_(0),
79 stats_("ClauseManager") {
80 trail_->RegisterPropagator(this);
81}
82
83ClauseManager::~ClauseManager() {
84 gtl::STLDeleteElements(&clauses_);
85 IF_STATS_ENABLED(LOG(INFO) << stats_.StatString());
86}
87
88void ClauseManager::Resize(int num_variables) {
89 DCHECK(is_clean_);
90 watchers_on_false_.resize(num_variables << 1);
91 reasons_.resize(num_variables);
92 needs_cleaning_.Resize(LiteralIndex(num_variables << 1));
93}
94
95// Note that this is the only place where we add Watcher so the DCHECK
96// guarantees that there are no duplicates.
97void ClauseManager::AttachOnFalse(Literal literal, Literal blocking_literal,
98 SatClause* clause) {
99 SCOPED_TIME_STAT(&stats_);
100 DCHECK(is_clean_);
101 DCHECK(!WatcherListContains(watchers_on_false_[literal], *clause));
102 watchers_on_false_[literal].push_back(Watcher(clause, blocking_literal));
103}
104
105bool ClauseManager::PropagateOnFalse(Literal false_literal, Trail* trail) {
106 SCOPED_TIME_STAT(&stats_);
107 DCHECK(is_clean_);
108 std::vector<Watcher>& watchers = watchers_on_false_[false_literal];
109 const auto assignment = AssignmentView(trail->Assignment());
110
111 // Note(user): It seems better to inspect the list in order because small
112 // clauses like binary or ternary clauses will often propagate and thus stay
113 // at the beginning of the list.
114 auto new_it = watchers.begin();
115 const auto end = watchers.end();
116 while (new_it != end && assignment.LiteralIsTrue(new_it->blocking_literal)) {
117 ++new_it;
118 }
119 for (auto it = new_it; it != end; ++it) {
120 // Don't even look at the clause memory if the blocking literal is true.
121 if (assignment.LiteralIsTrue(it->blocking_literal)) {
122 *new_it++ = *it;
123 continue;
124 }
125 ++num_inspected_clauses_;
126
127 // If the other watched literal is true, just change the blocking literal.
128 // Note that we use the fact that the first two literals of the clause are
129 // the ones currently watched.
130 Literal* literals = it->clause->literals();
131 const Literal other_watched_literal(
132 LiteralIndex(literals[0].Index().value() ^ literals[1].Index().value() ^
133 false_literal.Index().value()));
134 if (assignment.LiteralIsTrue(other_watched_literal)) {
135 *new_it = *it;
136 new_it->blocking_literal = other_watched_literal;
137 ++new_it;
138 ++num_inspected_clause_literals_;
139 continue;
140 }
141
142 // Look for another literal to watch. We go through the list in a cyclic
143 // fashion from start. The first two literals can be ignored as they are the
144 // watched ones.
145 {
146 const int start = it->start_index;
147 const int size = it->clause->size();
148 DCHECK_GE(start, 2);
149
150 int i = start;
151 while (i < size && assignment.LiteralIsFalse(literals[i])) ++i;
152 num_inspected_clause_literals_ += i - start + 2;
153 if (i >= size) {
154 i = 2;
155 while (i < start && assignment.LiteralIsFalse(literals[i])) ++i;
156 num_inspected_clause_literals_ += i - 2;
157 if (i >= start) i = size;
158 }
159 if (i < size) {
160 // literals[i] is unassigned or true, it's now the new literal to watch.
161 // Note that by convention, we always keep the two watched literals at
162 // the beginning of the clause.
163 literals[0] = other_watched_literal;
164 literals[1] = literals[i];
165 literals[i] = false_literal;
166 watchers_on_false_[literals[1]].emplace_back(
167 it->clause, other_watched_literal, i + 1);
168 continue;
169 }
170 }
171
172 // At this point other_watched_literal is either false or unassigned, all
173 // other literals are false.
174 if (assignment.LiteralIsFalse(other_watched_literal)) {
175 // Conflict: All literals of it->clause are false.
176 //
177 // Note(user): we could avoid a copy here, but the conflict analysis
178 // complexity will be a lot higher than this anyway.
179 trail->MutableConflict()->assign(it->clause->begin(), it->clause->end());
180 trail->SetFailingSatClause(it->clause);
181 num_inspected_clause_literals_ += it - watchers.begin() + 1;
182 watchers.erase(new_it, it);
183 return false;
184 } else {
185 // Propagation: other_watched_literal is unassigned, set it to true and
186 // put it at position 0. Note that the position 0 is important because
187 // we will need later to recover the literal that was propagated from the
188 // clause using this convention.
189 literals[0] = other_watched_literal;
190 literals[1] = false_literal;
191 reasons_[trail->Index()] = it->clause;
192 trail->Enqueue(other_watched_literal, propagator_id_);
193 *new_it++ = *it;
194 }
195 }
196 num_inspected_clause_literals_ += watchers.size(); // The blocking ones.
197 watchers.erase(new_it, end);
198 return true;
199}
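// [Editor's note] A minimal standalone sketch (not part of clause.cc) of the
// XOR trick used above to recover the other watched literal: literals[0] and
// literals[1] are always the two watched literals, so XOR-ing their indices
// with the index of the literal known to be false yields the remaining one.

#include <cstdint>

constexpr uint32_t OtherWatch(uint32_t watch0, uint32_t watch1,
                              uint32_t false_index) {
  // Since false_index is one of {watch0, watch1}, a ^ b ^ a == b gives the
  // other one without any branch.
  return watch0 ^ watch1 ^ false_index;
}
static_assert(OtherWatch(7, 12, 7) == 12, "recovers the second watch");
static_assert(OtherWatch(7, 12, 12) == 7, "recovers the first watch");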
200
201bool ClauseManager::Propagate(Trail* trail) {
202 const int old_index = trail->Index();
203 while (trail->Index() == old_index && propagation_trail_index_ < old_index) {
204 const Literal literal = (*trail)[propagation_trail_index_++];
205 if (!PropagateOnFalse(literal.Negated(), trail)) return false;
206 }
207 return true;
208}
209
210absl::Span<const Literal> ClauseManager::Reason(const Trail& /*trail*/,
211 int trail_index,
212 int64_t /*conflict_id*/) const {
213 return reasons_[trail_index]->PropagationReason();
214}
215
216SatClause* ClauseManager::ReasonClause(int trail_index) const {
217 return reasons_[trail_index];
218}
219
220bool ClauseManager::AddClause(absl::Span<const Literal> literals) {
221 return AddClause(literals, trail_, -1);
222}
223
224bool ClauseManager::AddClause(absl::Span<const Literal> literals, Trail* trail,
225 int lbd) {
226 SatClause* clause = SatClause::Create(literals);
227 clauses_.push_back(clause);
228 if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, literals);
229 return AttachAndPropagate(clause, trail);
230}
231
232SatClause* ClauseManager::AddRemovableClause(absl::Span<const Literal> literals,
233 Trail* trail, int lbd) {
234 SatClause* clause = SatClause::Create(literals);
235 clauses_.push_back(clause);
236 if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, literals);
237 CHECK(AttachAndPropagate(clause, trail));
238 return clause;
239}
240
241// Sets up the 2-watchers data structure. It selects two non-false literals
242// and attaches the clause to the event: one of the watched literals becomes
243// false. It returns false if the clause only contains literals assigned to
244// false. If only one literal is not false, it propagates it to true if it
245// is not already assigned.
246bool ClauseManager::AttachAndPropagate(SatClause* clause, Trail* trail) {
247 SCOPED_TIME_STAT(&stats_);
248
249 const int size = clause->size();
250 Literal* literals = clause->literals();
251
252 // Select the first two literals that are not assigned to false and put them
253 // on position 0 and 1.
254 int num_literal_not_false = 0;
255 for (int i = 0; i < size; ++i) {
256 if (!trail->Assignment().LiteralIsFalse(literals[i])) {
257 std::swap(literals[i], literals[num_literal_not_false]);
258 ++num_literal_not_false;
259 if (num_literal_not_false == 2) {
260 break;
261 }
262 }
263 }
264
265 // Returns false if all the literals were false.
266 // This should only happen on an UNSAT problem, and there is no need to attach
267 // the clause in this case.
268 if (num_literal_not_false == 0) return false;
269
270 if (num_literal_not_false == 1) {
271 // To maintain the validity of the 2-watcher algorithm, we need to watch
272 // the false literal with the highest decision level.
273 int max_level = trail->Info(literals[1].Variable()).level;
274 for (int i = 2; i < size; ++i) {
275 const int level = trail->Info(literals[i].Variable()).level;
276 if (level > max_level) {
277 max_level = level;
278 std::swap(literals[1], literals[i]);
279 }
280 }
281
282 // Propagates literals[0] if it is unassigned.
283 if (!trail->Assignment().LiteralIsTrue(literals[0])) {
284 reasons_[trail->Index()] = clause;
285 trail->Enqueue(literals[0], propagator_id_);
286 }
287 }
288
289 ++num_watched_clauses_;
290 AttachOnFalse(literals[0], literals[1], clause);
291 AttachOnFalse(literals[1], literals[0], clause);
292 return true;
293}
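// [Editor's note] The selection loop above is a small partial partition: it
// swaps the first two non-false literals to positions 0 and 1 and stops early.
// A standalone sketch of the same pattern on plain ints (is_false is a
// hypothetical predicate standing in for trail->Assignment()):

#include <utility>
#include <vector>

template <typename Predicate>
int MoveTwoNonFalseToFront(std::vector<int>* literals, Predicate is_false) {
  int num_not_false = 0;
  for (int i = 0; i < static_cast<int>(literals->size()); ++i) {
    if (is_false((*literals)[i])) continue;
    std::swap((*literals)[i], (*literals)[num_not_false]);
    if (++num_not_false == 2) break;
  }
  // 0: conflict (all false), 1: unit propagation, 2: clause can be watched.
  return num_not_false;
}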
294
295void ClauseManager::Attach(SatClause* clause, Trail* trail) {
296 Literal* literals = clause->literals();
297 DCHECK(!trail->Assignment().LiteralIsAssigned(literals[0]));
298 DCHECK(!trail->Assignment().LiteralIsAssigned(literals[1]));
299
300 ++num_watched_clauses_;
301 AttachOnFalse(literals[0], literals[1], clause);
302 AttachOnFalse(literals[1], literals[0], clause);
303}
304
305void ClauseManager::InternalDetach(SatClause* clause) {
306 --num_watched_clauses_;
307 const size_t size = clause->size();
308 if (drat_proof_handler_ != nullptr && size > 2) {
309 drat_proof_handler_->DeleteClause({clause->begin(), size});
310 }
311 clauses_info_.erase(clause);
312 clause->Clear();
313}
314
315void ClauseManager::LazyDetach(SatClause* clause) {
316 InternalDetach(clause);
317 is_clean_ = false;
318 needs_cleaning_.Set(clause->FirstLiteral());
319 needs_cleaning_.Set(clause->SecondLiteral());
320}
321
322void ClauseManager::Detach(SatClause* clause) {
323 InternalDetach(clause);
324 for (const Literal l : {clause->FirstLiteral(), clause->SecondLiteral()}) {
325 needs_cleaning_.Clear(l);
326 RemoveIf(&(watchers_on_false_[l]), [](const Watcher& watcher) {
327 return watcher.clause->IsRemoved();
328 });
329 }
330}
331
332void ClauseManager::DetachAllClauses() {
333 if (!all_clauses_are_attached_) return;
334 all_clauses_are_attached_ = false;
335
336 // This is easy, and it allows us to reclaim memory if some watcher lists were
337 // really long at some point.
338 is_clean_ = true;
339 num_watched_clauses_ = 0;
340 watchers_on_false_.clear();
341}
342
343void ClauseManager::AttachAllClauses() {
344 if (all_clauses_are_attached_) return;
345 all_clauses_are_attached_ = true;
346
347 needs_cleaning_.ClearAll(); // This doesn't resize it.
348 watchers_on_false_.resize(needs_cleaning_.size().value());
349
351 for (SatClause* clause : clauses_) {
352 ++num_watched_clauses_;
353 DCHECK_GE(clause->size(), 2);
354 AttachOnFalse(clause->FirstLiteral(), clause->SecondLiteral(), clause);
355 AttachOnFalse(clause->SecondLiteral(), clause->FirstLiteral(), clause);
356 }
357}
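// [Editor's note] A hedged usage sketch, not from this file: DetachAllClauses()
// and AttachAllClauses() are intended to bracket an inprocessing phase, where
// watchers are dropped wholesale, clauses are rewritten or removed, and the
// 2-watcher structure is rebuilt once at the end. Assuming a ClauseManager*
// manager obtained from a Model and a hypothetical RewriteSomeClauses() step:
//
//   manager->DetachAllClauses();
//   RewriteSomeClauses(manager);  // e.g. InprocessingRewriteClause() calls.
//   manager->AttachAllClauses();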
358
359// This one does not need the clause to be detached.
360bool ClauseManager::InprocessingFixLiteral(Literal true_literal) {
361 DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
362 if (drat_proof_handler_ != nullptr) {
363 drat_proof_handler_->AddClause({true_literal});
364 }
365 // TODO(user): remove the test when the DRAT issue with fixed literal is
366 // resolved.
367 if (!trail_->Assignment().LiteralIsTrue(true_literal)) {
368 trail_->EnqueueWithUnitReason(true_literal);
369
370 // Even when all clauses are detached, we can propagate the implication
371 // graph and we do that right away.
372 return implication_graph_->Propagate(trail_);
373 }
374 return true;
375}
376
377// TODO(user): We could do something slower if the clauses are attached like
378// we do for InprocessingRewriteClause().
379void ClauseManager::InprocessingRemoveClause(SatClause* clause) {
380 DCHECK(!all_clauses_are_attached_);
381 if (drat_proof_handler_ != nullptr) {
382 drat_proof_handler_->DeleteClause(clause->AsSpan());
383 }
384 clauses_info_.erase(clause);
385 clause->Clear();
386}
387
388bool ClauseManager::InprocessingRewriteClause(
389 SatClause* clause, absl::Span<const Literal> new_clause) {
390 if (new_clause.empty()) return false; // UNSAT.
391
392 if (DEBUG_MODE) {
393 for (const Literal l : new_clause) {
394 DCHECK(!trail_->Assignment().LiteralIsAssigned(l));
395 }
396 }
397
398 if (new_clause.size() == 1) {
399 if (!InprocessingFixLiteral(new_clause[0])) return false;
401 return true;
402 }
403
404 if (new_clause.size() == 2) {
405 CHECK(implication_graph_->AddBinaryClause(new_clause[0], new_clause[1]));
407 return true;
408 }
409
410 if (drat_proof_handler_ != nullptr) {
411 // We must write the new clause before we delete the old one.
412 drat_proof_handler_->AddClause(new_clause);
413 drat_proof_handler_->DeleteClause(clause->AsSpan());
414 }
415
416 if (all_clauses_are_attached_) {
417 // We can still rewrite the clause, but it is inefficient. We first
418 // detach it in a non-lazy way.
419 --num_watched_clauses_;
420 clause->Clear();
421 for (const Literal l : {clause->FirstLiteral(), clause->SecondLiteral()}) {
422 needs_cleaning_.Clear(l);
423 RemoveIf(&(watchers_on_false_[l]), [](const Watcher& watcher) {
424 return watcher.clause->IsRemoved();
425 });
426 }
427 }
428
429 clause->Rewrite(new_clause);
430
431 // And we reattach it.
432 if (all_clauses_are_attached_) Attach(clause, trail_);
433 return true;
434}
435
436SatClause* ClauseManager::InprocessingAddClause(
437 absl::Span<const Literal> new_clause) {
438 DCHECK(!new_clause.empty());
439 DCHECK(!all_clauses_are_attached_);
440 if (DEBUG_MODE) {
441 for (const Literal l : new_clause) {
442 DCHECK(!trail_->Assignment().LiteralIsAssigned(l));
443 }
444 }
445
446 if (new_clause.size() == 1) {
447 // TODO(user): We should return false...
448 if (!InprocessingFixLiteral(new_clause[0])) return nullptr;
449 return nullptr;
450 }
451
452 if (new_clause.size() == 2) {
453 implication_graph_->AddBinaryClause(new_clause[0], new_clause[1]);
454 return nullptr;
455 }
456
457 SatClause* clause = SatClause::Create(new_clause);
458 clauses_.push_back(clause);
459 return clause;
460}
461
462void ClauseManager::CleanUpWatchers() {
463 SCOPED_TIME_STAT(&stats_);
464 for (const LiteralIndex index : needs_cleaning_.PositionsSetAtLeastOnce()) {
465 DCHECK(needs_cleaning_[index]);
466 RemoveIf(&(watchers_on_false_[index]), [](const Watcher& watcher) {
467 return watcher.clause->IsRemoved();
468 });
469 needs_cleaning_.Clear(index);
470 }
471 needs_cleaning_.NotifyAllClear();
472 is_clean_ = true;
473}
474
475// We also update to_minimize_index_/to_probe_index_ correctly.
476//
477// TODO(user): If more indices are needed, generalize the code to a vector of
478// indices.
479void ClauseManager::DeleteRemovedClauses() {
480 DCHECK(is_clean_);
481
482 int new_size = 0;
483 const int old_size = clauses_.size();
484 for (int i = 0; i < old_size; ++i) {
485 if (i == to_minimize_index_) to_minimize_index_ = new_size;
486 if (i == to_probe_index_) to_probe_index_ = new_size;
487 if (clauses_[i]->IsRemoved()) {
488 delete clauses_[i];
489 } else {
490 clauses_[new_size++] = clauses_[i];
491 }
492 }
493 clauses_.resize(new_size);
494
495 if (to_minimize_index_ > new_size) to_minimize_index_ = new_size;
496 if (to_probe_index_ > new_size) to_probe_index_ = new_size;
497}
498
499// ----- BinaryImplicationGraph -----
500
501void BinaryImplicationGraph::Resize(int num_variables) {
502 SCOPED_TIME_STAT(&stats_);
503 implications_.resize(num_variables << 1);
504 implies_something_.resize(num_variables << 1);
505 might_have_dups_.resize(num_variables << 1);
506 is_redundant_.resize(implications_.size());
507 is_removed_.resize(implications_.size(), false);
508 estimated_sizes_.resize(implications_.size(), 0);
509 in_direct_implications_.resize(implications_.size(), false);
510 reasons_.resize(num_variables);
511}
512
513void BinaryImplicationGraph::NotifyPossibleDuplicate(Literal a) {
514 if (might_have_dups_[a.Index()]) return;
515 might_have_dups_[a.Index()] = true;
516 to_clean_.push_back(a);
517}
518
519void BinaryImplicationGraph::RemoveDuplicates() {
520 for (const Literal l : to_clean_) {
521 might_have_dups_[l.Index()] = false;
522 gtl::STLSortAndRemoveDuplicates(&implications_[l.Index()]);
523 }
524 to_clean_.clear();
525}
526
527// TODO(user): Not all of the solver knows about representative literals; should
528// we use them here to maintain the invariant? Explore this when we start
529// cleaning our clauses using equivalences during search. We can easily do it
530// for every conflict we learn instead of here.
531bool BinaryImplicationGraph::AddBinaryClause(Literal a, Literal b) {
532 SCOPED_TIME_STAT(&stats_);
533
534 // Tricky: If this is the first clause, the propagator will be added and
535 // assumed to be in a "propagated" state. This makes sure this is the case.
536 if (IsEmpty()) propagation_trail_index_ = trail_->Index();
537
538 if (drat_proof_handler_ != nullptr) {
539 // TODO(user): This way we will duplicate all binary clauses from the
540 // problem. However, this leads to a simpler API (since we don't need to
541 // special-case the loading of the original clauses) and we mainly use DRAT
542 // proofs for testing anyway.
543 drat_proof_handler_->AddClause({a, b});
544 }
545
546 if (is_redundant_[a.Index()]) a = Literal(representative_of_[a.Index()]);
547 if (is_redundant_[b.Index()]) b = Literal(representative_of_[b.Index()]);
548 if (a == b.Negated()) return true;
549 if (a == b) return FixLiteral(a);
550
551 DCHECK(!is_removed_[a]);
552 DCHECK(!is_removed_[b]);
553 estimated_sizes_[a.NegatedIndex()]++;
554 estimated_sizes_[b.NegatedIndex()]++;
555 implications_[a.NegatedIndex()].push_back(b);
556 implications_[b.NegatedIndex()].push_back(a);
557 implies_something_.Set(a.NegatedIndex());
558 implies_something_.Set(b.NegatedIndex());
559 NotifyPossibleDuplicate(a);
560 NotifyPossibleDuplicate(b);
561 is_dag_ = false;
562 num_implications_ += 2;
563
564 if (enable_sharing_ && add_binary_callback_ != nullptr) {
565 add_binary_callback_(a, b);
566 }
567
568 const auto& assignment = trail_->Assignment();
569 if (trail_->CurrentDecisionLevel() == 0) {
570 DCHECK(!assignment.LiteralIsAssigned(a));
571 DCHECK(!assignment.LiteralIsAssigned(b));
572 } else {
573 if (assignment.LiteralIsFalse(a)) {
574 if (assignment.LiteralIsAssigned(b)) {
575 if (assignment.LiteralIsFalse(b)) return false;
576 } else {
577 reasons_[trail_->Index()] = a;
578 trail_->Enqueue(b, propagator_id_);
579 }
580 } else if (assignment.LiteralIsFalse(b)) {
581 if (!assignment.LiteralIsAssigned(a)) {
582 reasons_[trail_->Index()] = b;
583 trail_->Enqueue(a, propagator_id_);
584 }
585 }
586 }
587
588 return true;
589}
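// [Editor's note] A standalone sketch (not part of clause.cc) of the encoding
// used above: the clause (a v b) is stored as the two implications
// not(a) => b and not(b) => a, each appended to the adjacency list of the
// negated literal. Literals are encoded here as 2*var and 2*var+1 so that
// negation is just an XOR with 1.

#include <vector>

void AddBinaryClauseSketch(std::vector<std::vector<int>>* implications, int a,
                           int b) {
  (*implications)[a ^ 1].push_back(b);  // not(a) => b
  (*implications)[b ^ 1].push_back(a);  // not(b) => a
}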
590
591bool BinaryImplicationGraph::AddAtMostOne(
592 absl::Span<const Literal> at_most_one) {
593 DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
594 if (at_most_one.size() <= 1) return true;
595
596 // Temporarily copy the at_most_one constraint at the end of
597 // at_most_one_buffer_. It will be cleaned up and added by
598 // CleanUpAndAddAtMostOnes().
599 const int base_index = at_most_one_buffer_.size();
600 at_most_one_buffer_.push_back(Literal(LiteralIndex(at_most_one.size())));
601 at_most_one_buffer_.insert(at_most_one_buffer_.end(), at_most_one.begin(),
602 at_most_one.end());
603
604 is_dag_ = false;
605 return CleanUpAndAddAtMostOnes(base_index);
606}
607
608// TODO(user): remove dupl with ClauseManager::InprocessingFixLiteral().
609//
610// Note that we currently do not support calling this at a positive level since
611// we might lose the fixing on backtrack otherwise.
612bool BinaryImplicationGraph::FixLiteral(Literal true_literal) {
613 CHECK_EQ(trail_->CurrentDecisionLevel(), 0);
614 if (trail_->Assignment().LiteralIsTrue(true_literal)) return true;
615 if (trail_->Assignment().LiteralIsFalse(true_literal)) return false;
616
617 if (drat_proof_handler_ != nullptr) {
618 drat_proof_handler_->AddClause({true_literal});
619 }
620
621 trail_->EnqueueWithUnitReason(true_literal);
622 return Propagate(trail_);
623}
624
625// This works by doing a linear scan on the at_most_one_buffer_ and
626// cleaning/copying the at most ones on the fly to the beginning of the same
627// buffer.
628bool BinaryImplicationGraph::CleanUpAndAddAtMostOnes(int base_index) {
629 const VariablesAssignment& assignment = trail_->Assignment();
630 int local_end = base_index;
631 const int buffer_size = at_most_one_buffer_.size();
632 for (int i = base_index; i < buffer_size;) {
633 // Process a new at most one.
634 // It will be copied into buffer[local_start + 1, local_end].
635 // With its size at buffer[local_start].
636 const int local_start = local_end;
637
638 // Update the iterator now. Even if the current at_most_one is reduced away,
639 // local_start will still point to the next one, or to the end of the
640 // buffer.
641 if (i == at_most_one_iterator_) {
642 at_most_one_iterator_ = local_start;
643 }
644
645 // We have an at_most_one starting at i, and we increment i to the next one.
646 const absl::Span<const Literal> initial_amo = AtMostOne(i);
647 i += initial_amo.size() + 1;
648
649 // Reserve space for size.
650 local_end++;
651 bool set_all_left_to_false = false;
652 for (const Literal l : initial_amo) {
653 if (assignment.LiteralIsFalse(l)) continue;
654 if (is_removed_[l]) continue;
655 if (!set_all_left_to_false && assignment.LiteralIsTrue(l)) {
656 set_all_left_to_false = true;
657 continue;
658 }
659 at_most_one_buffer_[local_end++] = RepresentativeOf(l);
660 }
661 at_most_one_buffer_[local_start] =
662 Literal(LiteralIndex(local_end - local_start - 1));
663
664 // Deal with duplicates.
665 // Any duplicate in an "at most one" must be false.
666 bool some_duplicates = false;
667 if (!set_all_left_to_false) {
668 // Sort the copied amo.
669 // We only want to expose a const AtMostOne() so we sort directly here.
670 Literal* pt = &at_most_one_buffer_[local_start + 1];
671 std::sort(pt, pt + AtMostOne(local_start).size());
672
673 LiteralIndex previous = kNoLiteralIndex;
674 bool remove_previous = false;
675 int new_local_end = local_start + 1;
676 for (const Literal l : AtMostOne(local_start)) {
677 if (l.Index() == previous) {
678 if (assignment.LiteralIsTrue(l)) return false;
679 if (!assignment.LiteralIsFalse(l)) {
680 if (!FixLiteral(l.Negated())) return false;
681 }
682 remove_previous = true;
683 some_duplicates = true;
684 continue;
685 }
686
687 // We need to pay attention to triplets or more of equal elements, which
688 // is why we need this boolean and can't just remove them right away.
689 if (remove_previous) {
690 --new_local_end;
691 remove_previous = false;
692 }
693 previous = l.Index();
694 at_most_one_buffer_[new_local_end++] = l;
695 }
696 if (remove_previous) --new_local_end;
697
698 // Update local end and the amo size.
699 local_end = new_local_end;
700 at_most_one_buffer_[local_start] =
701 Literal(LiteralIndex(local_end - local_start - 1));
702 }
703
704 // If there were some duplicates, we need to rescan to see whether a literal
705 // became true because its negation appeared twice!
706 if (some_duplicates) {
707 int new_local_end = local_start + 1;
708 for (const Literal l : AtMostOne(local_start)) {
709 if (assignment.LiteralIsFalse(l)) continue;
710 if (!set_all_left_to_false && assignment.LiteralIsTrue(l)) {
711 set_all_left_to_false = true;
712 continue;
713 }
714 at_most_one_buffer_[new_local_end++] = l;
715 }
716
717 // Update local end and the amo size.
718 local_end = new_local_end;
719 at_most_one_buffer_[local_start] =
720 Literal(LiteralIndex(local_end - local_start - 1));
721 }
722
723 // Deal with all false.
724 if (set_all_left_to_false) {
725 for (const Literal l : AtMostOne(local_start)) {
726 if (assignment.LiteralIsFalse(l)) continue;
727 if (assignment.LiteralIsTrue(l)) return false;
728 if (!FixLiteral(l.Negated())) return false;
729 }
730
731 // Erase this at_most_one.
732 local_end = local_start;
733 continue;
734 }
735
736 // Create a Span<> to simplify the code below.
737 const absl::Span<const Literal> at_most_one = AtMostOne(local_start);
738
739 // We expand small sizes into implications.
740 // TODO(user): Investigate what the best threshold is.
741 if (at_most_one.size() <= std::max(2, at_most_one_max_expansion_size_)) {
742 for (const Literal a : at_most_one) {
743 for (const Literal b : at_most_one) {
744 if (a == b) continue;
745 implications_[a].push_back(b.Negated());
746 implies_something_.Set(a);
747 NotifyPossibleDuplicate(a);
748 }
749 }
750 num_implications_ += at_most_one.size() * (at_most_one.size() - 1);
751
752 // This will erase the at_most_one from the buffer.
753 local_end = local_start;
754 continue;
755 }
756
757 // Index the new at most one.
758 for (const Literal l : at_most_one) {
759 if (l.Index() >= at_most_ones_.size()) {
760 at_most_ones_.resize(l.Index().value() + 1);
761 }
762 DCHECK(!is_redundant_[l]);
763 at_most_ones_[l].push_back(local_start);
764 implies_something_.Set(l);
765 }
766 }
767
768 at_most_one_buffer_.resize(local_end);
769 return true;
770}
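// [Editor's note] A standalone sketch (not part of clause.cc) of the flat
// buffer layout used by at_most_one_buffer_: each at-most-one is stored inline
// as a size "header" followed by its literals, and read back as a span, which
// is what the AtMostOne(start) accessor does.

#include <vector>
#include "absl/types/span.h"

int AppendAmo(std::vector<int>* buffer, absl::Span<const int> amo) {
  const int start = buffer->size();
  buffer->push_back(static_cast<int>(amo.size()));        // Size header.
  buffer->insert(buffer->end(), amo.begin(), amo.end());  // Literals.
  return start;  // Index to pass to ReadAmo() below.
}

absl::Span<const int> ReadAmo(const std::vector<int>& buffer, int start) {
  return absl::MakeSpan(&buffer[start + 1], buffer[start]);
}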
771
772bool BinaryImplicationGraph::PropagateOnTrue(Literal true_literal,
773 Trail* trail) {
774 SCOPED_TIME_STAT(&stats_);
775
776 const auto assignment = AssignmentView(trail->Assignment());
777 DCHECK(assignment.LiteralIsTrue(true_literal));
778
779 // Note(user): This update is not exactly correct because in case of conflict
780 // we don't inspect that many clauses. But doing ++num_inspections_ inside the
781 // loop does slow down the code by a few percent.
782 num_inspections_ += implications_[true_literal].size();
783
784 for (const Literal literal : implications_[true_literal]) {
785 if (assignment.LiteralIsTrue(literal)) {
786 // Note(user): I tried to update the reason here if the literal was
787 // enqueued after the true_literal on the trail. This property is
788 // important for ComputeFirstUIPConflict() to work since it needs the
789 // trail order to be a topological order for the deduction graph.
790 // But the performance was not too good...
791 continue;
792 }
793
794 ++num_propagations_;
795 if (assignment.LiteralIsFalse(literal)) {
796 // Conflict.
797 *(trail->MutableConflict()) = {true_literal.Negated(), literal};
798 return false;
799 } else {
800 // Propagation.
801 reasons_[trail->Index()] = true_literal.Negated();
802 trail->FastEnqueue(literal);
803 }
804 }
805
806 // Propagate the at_most_one constraints.
807 if (true_literal.Index() < at_most_ones_.size()) {
808 for (const int start : at_most_ones_[true_literal]) {
809 bool seen = false;
810 for (const Literal literal : AtMostOne(start)) {
811 ++num_inspections_;
812 if (literal == true_literal) {
813 if (DEBUG_MODE) {
814 DCHECK(!seen);
815 seen = true;
816 }
817 continue;
818 }
819 if (assignment.LiteralIsFalse(literal)) continue;
820
821 ++num_propagations_;
822 if (assignment.LiteralIsTrue(literal)) {
823 // Conflict.
824 *(trail->MutableConflict()) = {true_literal.Negated(),
825 literal.Negated()};
826 return false;
827 } else {
828 // Propagation.
829 reasons_[trail->Index()] = true_literal.Negated();
830 trail->FastEnqueue(literal.Negated());
831 }
832 }
833 }
834 }
835
836 return true;
837}
838
839bool BinaryImplicationGraph::Propagate(Trail* trail) {
840 if (IsEmpty()) {
841 propagation_trail_index_ = trail->Index();
842 return true;
843 }
844 trail->SetCurrentPropagatorId(propagator_id_);
845 while (propagation_trail_index_ < trail->Index()) {
846 const Literal literal = (*trail)[propagation_trail_index_++];
847 if (!PropagateOnTrue(literal, trail)) return false;
848 }
849 return true;
850}
851
852absl::Span<const Literal> BinaryImplicationGraph::Reason(
853 const Trail& /*trail*/, int trail_index, int64_t /*conflict_id*/) const {
854 return {&reasons_[trail_index], 1};
855}
856
857// Here, we remove all the literals whose negation is implied by the negation of
858// the 1-UIP literal (which always appears first in the given conflict). Note
859// that this algorithm is "optimal" in the sense that it leads to a minimized
860// conflict with a backjump level as low as possible. However, not all possible
861// literals are removed.
862//
863// TODO(user): Also consider at most one?
864void BinaryImplicationGraph::MinimizeConflictWithReachability(
865 std::vector<Literal>* conflict) {
866 SCOPED_TIME_STAT(&stats_);
867 dfs_stack_.clear();
868
869 // Compute the reachability from the literal "not(conflict->front())" using
870 // an iterative dfs.
871 const LiteralIndex root_literal_index = conflict->front().NegatedIndex();
872 is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
873 is_marked_.Set(root_literal_index);
874
875 // TODO(user): This sounds like a good idea, but somehow it seems better not
876 // to do that even though it is almost for free. Investigate more.
877 //
878 // The idea here is that since we already compute the reachability from the
879 // root literal, we can use this computation to remove any implication
880 // root_literal => b if there is already root_literal => a and b is reachable
881 // from a.
882 const bool also_prune_direct_implication_list = false;
883
884 // We treat the direct implications differently so we can also remove the
885 // redundant implications from this list at the same time.
886 auto& direct_implications = implications_[root_literal_index];
887 for (const Literal l : direct_implications) {
888 if (is_marked_[l]) continue;
889 dfs_stack_.push_back(l);
890 while (!dfs_stack_.empty()) {
891 const LiteralIndex index = dfs_stack_.back().Index();
892 dfs_stack_.pop_back();
893 if (!is_marked_[index]) {
894 is_marked_.Set(index);
895 for (const Literal implied : implications_[index]) {
896 if (!is_marked_[implied]) dfs_stack_.push_back(implied);
897 }
898 }
899 }
900
901 // The "trick" is to unmark 'l'. This way, if we explore it twice, it means
902 // that this l is reachable from some other 'l' from the direct implication
903 // list. Remarks:
904 // - We don't lose too much complexity when this happens since a literal
905 // can be unmarked only once, so in the worst case we loop twice over its
906 // children. Moreover, this literal will be pruned for later calls.
907 // - This is correct, i.e. we can't prune too many literals because of a
908 // strongly connected component. Proof by contradiction: If we take the
909 // first (in direct_implications) literal from a removed SCC, it must
910 // have marked all the others. But because they are marked, they will not
911 // be explored again and so can't mark the first literal.
912 if (also_prune_direct_implication_list) {
913 is_marked_.Clear(l);
914 }
915 }
916
917 // Now we can prune the direct implications list and make sure the
918 // literals there are marked.
919 if (also_prune_direct_implication_list) {
920 int new_size = 0;
921 for (const Literal l : direct_implications) {
922 if (!is_marked_[l]) {
923 is_marked_.Set(l);
924 direct_implications[new_size] = l;
925 ++new_size;
926 }
927 }
928 if (new_size < direct_implications.size()) {
929 num_redundant_implications_ += direct_implications.size() - new_size;
930 direct_implications.resize(new_size);
931 }
932 }
933
934 RemoveRedundantLiterals(conflict);
935}
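// [Editor's note] A worked micro-instance of the minimization above: with the
// implications not(u) => not(a) and not(a) => not(b), a learned conflict
// {u, a, b, c} shrinks to {u, c}, since (u v a) and (u v b) are entailed by the
// binary clauses. The pruning itself is the usual "keep if negation is not
// reachable" filter; a standalone sketch with plain ints, where
// negation_reachable is assumed to be the DFS result computed as above:

#include <vector>

void DropImpliedLiterals(const std::vector<bool>& negation_reachable,
                         std::vector<int>* conflict) {
  int new_size = 1;  // conflict[0] is the 1-UIP literal and is always kept.
  for (int i = 1; i < static_cast<int>(conflict->size()); ++i) {
    const int lit = (*conflict)[i];
    if (!negation_reachable[lit]) (*conflict)[new_size++] = lit;
  }
  conflict->resize(new_size);
}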
936
937// Same as MinimizeConflictWithReachability() but also mark (in the given
938// SparseBitset) the reachable literals already assigned to false. These literals
939// will be implied if the 1-UIP literal is assigned to false, and the classic
940// minimization algorithm can take advantage of that.
941void BinaryImplicationGraph::MinimizeConflictFirst(
942 const Trail& trail, std::vector<Literal>* conflict,
943 SparseBitset<BooleanVariable>* marked) {
944 SCOPED_TIME_STAT(&stats_);
945 DCHECK(!conflict->empty());
946 is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
947 MarkDescendants(conflict->front().Negated());
948 for (const LiteralIndex i : is_marked_.PositionsSetAtLeastOnce()) {
949 // TODO(user): if this is false, then we actually have a conflict of size 2.
950 // This can only happen if the binary clause was not propagated properly
951 // if for instance we do chronological backtracking without re-enqueuing the
952 // consequence of a binary clause.
953 if (trail.Assignment().LiteralIsTrue(Literal(i))) {
954 marked->Set(Literal(i).Variable());
955 }
956 }
957 RemoveRedundantLiterals(conflict);
958}
959
960// Same as MinimizeConflictFirst() but takes advantage of this reachability
961// computation to remove redundant implications in the implication list of the
962// first UIP conflict.
963void BinaryImplicationGraph::MinimizeConflictFirstWithTransitiveReduction(
964 const Trail& /*trail*/, std::vector<Literal>* conflict,
965 absl::BitGenRef random) {
966 SCOPED_TIME_STAT(&stats_);
967 const LiteralIndex root_literal_index = conflict->front().NegatedIndex();
968 is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
969 is_marked_.Set(root_literal_index);
970
971 int new_size = 0;
972 auto& direct_implications = implications_[root_literal_index];
973
974 // The randomization allows us to find more redundant implications since to
975 // find a => b and remove b, a must come before b in direct_implications. Note
976 // that a std::reverse() could work too. But randomization seems to work
977 // better, probably because it has other impacts on the search tree.
978 std::shuffle(direct_implications.begin(), direct_implications.end(), random);
979 dfs_stack_.clear();
980 for (const Literal l : direct_implications) {
981 if (is_marked_[l]) {
982 // The literal is already marked! So it must be implied by one of the
983 // previous literals in the direct_implications list. We can safely remove
984 // it.
985 continue;
986 }
987 direct_implications[new_size++] = l;
988 dfs_stack_.push_back(l);
989 while (!dfs_stack_.empty()) {
990 const LiteralIndex index = dfs_stack_.back().Index();
991 dfs_stack_.pop_back();
992 if (!is_marked_[index]) {
993 is_marked_.Set(index);
994 for (const Literal implied : implications_[index]) {
995 if (!is_marked_[implied]) dfs_stack_.push_back(implied);
996 }
997 }
998 }
999 }
1000 if (new_size < direct_implications.size()) {
1001 num_redundant_implications_ += direct_implications.size() - new_size;
1002 direct_implications.resize(new_size);
1003 }
1004 RemoveRedundantLiterals(conflict);
1005}
1006
1007void BinaryImplicationGraph::RemoveRedundantLiterals(
1008 std::vector<Literal>* conflict) {
1009 SCOPED_TIME_STAT(&stats_);
1010 int new_index = 1;
1011 for (int i = 1; i < conflict->size(); ++i) {
1012 if (!is_marked_[(*conflict)[i].NegatedIndex()]) {
1013 (*conflict)[new_index] = (*conflict)[i];
1014 ++new_index;
1015 }
1016 }
1017 if (new_index < conflict->size()) {
1018 ++num_minimization_;
1019 num_literals_removed_ += conflict->size() - new_index;
1020 conflict->resize(new_index);
1021 }
1022}
1023
1024// TODO(user): Also consider at most one?
1025void BinaryImplicationGraph::MinimizeConflictExperimental(
1026 const Trail& trail, std::vector<Literal>* conflict) {
1027 SCOPED_TIME_STAT(&stats_);
1028 is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
1029 is_simplified_.ClearAndResize(LiteralIndex(implications_.size()));
1030 for (const Literal lit : *conflict) {
1031 is_marked_.Set(lit);
1032 }
1033
1034 // Identify and remove the redundant literals from the given conflict.
1035 // 1/ If a -> b then a can be removed from the conflict clause.
1036 // This is because not b -> not a.
1037 // 2/ a -> b can only happen if level(a) <= level(b).
1038 // 3/ Because of 2/, cycles can appear only at the same level.
1039 // The vector is_simplified_ is used to avoid removing all elements of a
1040 // cycle. Note that this is not optimal in the sense that we may not remove
1041 // a literal that can be removed.
1042 //
1043 // Note that there is no need to explore the unique literal of the highest
1044 // decision level since it can't be removed. Because this is a conflict, such
1045 // literal is always at position 0, so we start directly at 1.
1046 int index = 1;
1047 for (int i = 1; i < conflict->size(); ++i) {
1048 const Literal lit = (*conflict)[i];
1049 const int lit_level = trail.Info(lit.Variable()).level;
1050 bool keep_literal = true;
1051 for (const Literal implied : implications_[lit]) {
1052 if (is_marked_[implied]) {
1053 DCHECK_LE(lit_level, trail.Info(implied.Variable()).level);
1054 if (lit_level == trail.Info(implied.Variable()).level &&
1055 is_simplified_[implied]) {
1056 continue;
1057 }
1058 keep_literal = false;
1059 break;
1060 }
1061 }
1062 if (keep_literal) {
1063 (*conflict)[index] = lit;
1064 ++index;
1065 } else {
1066 is_simplified_.Set(lit);
1067 }
1068 }
1069 if (index < conflict->size()) {
1070 ++num_minimization_;
1071 num_literals_removed_ += conflict->size() - index;
1072 conflict->erase(conflict->begin() + index, conflict->end());
1073 }
1074}
1075
1076void BinaryImplicationGraph::RemoveFixedVariables() {
1077 SCOPED_TIME_STAT(&stats_);
1078 DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
1079 if (IsEmpty()) return;
1080
1081 // Nothing to do if nothing changed since last call.
1082 const int new_num_fixed = trail_->Index();
1083 DCHECK_EQ(propagation_trail_index_, new_num_fixed);
1084 if (num_processed_fixed_variables_ == new_num_fixed) return;
1085
1086 const VariablesAssignment& assignment = trail_->Assignment();
1087 is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
1088 for (; num_processed_fixed_variables_ < new_num_fixed;
1089 ++num_processed_fixed_variables_) {
1090 const Literal true_literal = (*trail_)[num_processed_fixed_variables_];
1091 if (DEBUG_MODE) {
1092 // The code assumes that everything is already propagated.
1093 // Otherwise we will remove implications that didn't propagate yet!
1094 for (const Literal lit : implications_[true_literal]) {
1095 DCHECK(trail_->Assignment().LiteralIsTrue(lit));
1096 }
1097 }
1098
1099 // If b is true and a -> b then because not b -> not a, all the
1100 // implication lists that contain b will be marked by this process.
1101 // And the ones that contain not(b) should correspond to a false literal!
1102 //
1103 // TODO(user): This might not be true if we remove implication by
1104 // transitive reduction and the process was aborted due to the computation
1105 // limit. I think it will be good to maintain that invariant though,
1106 // otherwise fixed literals might never be removed from these lists...
1107 for (const Literal lit : implications_[true_literal.NegatedIndex()]) {
1108 if (lit.NegatedIndex() < representative_of_.size() &&
1109 representative_of_[lit.Negated()] != kNoLiteralIndex) {
1110 // We mark its representative instead.
1111 is_marked_.Set(representative_of_[lit.Negated()]);
1112 } else {
1113 is_marked_.Set(lit.NegatedIndex());
1114 }
1115 }
1116 gtl::STLClearObject(&(implications_[true_literal]));
1117 gtl::STLClearObject(&(implications_[true_literal.NegatedIndex()]));
1118
1119 if (true_literal.Index() < at_most_ones_.size()) {
1120 gtl::STLClearObject(&(at_most_ones_[true_literal]));
1121 }
1122 if (true_literal.NegatedIndex() < at_most_ones_.size()) {
1123 gtl::STLClearObject(&(at_most_ones_[true_literal.NegatedIndex()]));
1124 }
1125 }
1126 for (const LiteralIndex i : is_marked_.PositionsSetAtLeastOnce()) {
1127 RemoveIf(&implications_[i], [&assignment](const Literal& lit) {
1128 return assignment.LiteralIsTrue(lit);
1129 });
1130 }
1131
1132 // TODO(user): This might be a bit slow. Do not call all the time if needed,
1133 // this shouldn't change the correctness of the code.
1134 at_most_ones_.clear();
1135 CleanUpAndAddAtMostOnes(/*base_index=*/0);
1136 DCHECK(InvariantsAreOk());
1137}
1138
1139class SccGraph {
1140 public:
1148
1149 SccGraph(SccFinder* finder, Implications* graph,
1150 AtMostOnes* at_most_ones,
1151 std::vector<Literal>* at_most_one_buffer)
1152 : finder_(*finder),
1153 implications_(*graph),
1154 at_most_ones_(*at_most_ones),
1155 at_most_one_buffer_(*at_most_one_buffer) {}
1156
1157 const std::vector<int32_t>& operator[](int32_t node) const {
1158 tmp_.clear();
1159 for (const Literal l : implications_[LiteralIndex(node)]) {
1160 tmp_.push_back(l.Index().value());
1161 if (finder_.NodeIsInCurrentDfsPath(l.NegatedIndex().value())) {
1162 to_fix_.push_back(l);
1163 }
1164 }
1165 if (node < at_most_ones_.size()) {
1166 for (const int start : at_most_ones_[LiteralIndex(node)]) {
1167 if (start >= at_most_one_already_explored_.size()) {
1168 at_most_one_already_explored_.resize(start + 1, false);
1169 previous_node_to_explore_at_most_one_.resize(start + 1);
1170 }
1171
1172 // In the presence of at_most_ones_ constraints, expanding them
1173 // implicitly to implications in the SCC computation can result in a
1174 // quadratic complexity rather than a linear one in terms of the input
1175 // data structure size. So this test here is critical on problems with
1176 // large at_most_ones like the "ivu06-big.mps.gz" where, without it, the
1177 // full FindStronglyConnectedComponents() takes more than an hour instead
1178 // of less than a second!
1179 if (at_most_one_already_explored_[start]) {
1180 // We never expand a node twice.
1181 const int first_node = previous_node_to_explore_at_most_one_[start];
1182 DCHECK_NE(node, first_node);
1183
1184 if (finder_.NodeIsInCurrentDfsPath(first_node)) {
1185 // If the first node is not settled, then we do explore the
1186 // at_most_one constraint again. In "Mixed-Integer-Programming:
1187 // Analyzing 12 years of progress", Tobias Achterberg and Roland
1188 // Wunderling explain that an at most one needs to be looped over at
1189 // most twice. I am not sure exactly how that works, so for now we
1190 // are not fully linear, but on actual instances, we only rarely
1191 // run into this case.
1192 //
1193 // Note that we change the previous node to explore at most one
1194 // since the current node will be settled before the old ones.
1195 //
1196 // TODO(user): avoid looping more than twice on the same at most one
1197 // constraints? Note that the second time we loop we have x => y =>
1198 // not(x), so we can already detect that x must be false which we
1199 // detect below.
1200 previous_node_to_explore_at_most_one_[start] = node;
1201 } else {
1202 // The first node is already settled and so are all its children. Only
1203 // not(first_node) might still need exploring.
1204 tmp_.push_back(
1205 Literal(LiteralIndex(first_node)).NegatedIndex().value());
1206 continue;
1207 }
1208 } else {
1209 at_most_one_already_explored_[start] = true;
1210 previous_node_to_explore_at_most_one_[start] = node;
1211 }
1212
1213 const absl::Span<const Literal> amo =
1214 absl::MakeSpan(&at_most_one_buffer_[start + 1],
1215 at_most_one_buffer_[start].Index().value());
1216 for (const Literal l : amo) {
1217 if (l.Index() == node) continue;
1218 tmp_.push_back(l.NegatedIndex().value());
1219 if (finder_.NodeIsInCurrentDfsPath(l.Index().value())) {
1220 to_fix_.push_back(l.Negated());
1221 }
1222 }
1223 }
1224 }
1225 work_done_ += tmp_.size();
1226 return tmp_;
1227 }
1228
1229 // All these literals were detected to be true during the SCC computation.
1230 mutable std::vector<Literal> to_fix_;
1231
1232 // For the deterministic time.
1233 mutable int64_t work_done_ = 0;
1234
1235 private:
1236 const SccFinder& finder_;
1237 const Implications& implications_;
1238 const AtMostOnes& at_most_ones_;
1239 const std::vector<Literal>& at_most_one_buffer_;
1240
1241 mutable std::vector<int32_t> tmp_;
1242
1243 // Used to get a non-quadratic complexity in the presence of at most ones.
1244 mutable std::vector<bool> at_most_one_already_explored_;
1245 mutable std::vector<int> previous_node_to_explore_at_most_one_;
1246};
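// [Editor's note] A standalone sketch (not part of clause.cc) of the lazy
// at-most-one expansion performed by SccGraph::operator[] above: if node x
// belongs to the at-most-one {x, y, z}, then x implies not(y) and not(z), so
// those successors are produced on the fly instead of materializing the
// quadratic number of binary implications. Literals are encoded as 2*var and
// 2*var+1 so that negation is an XOR with 1.

#include <vector>
#include "absl/types/span.h"

void AppendAmoSuccessors(absl::Span<const int> amo, int node,
                         std::vector<int>* successors) {
  for (const int l : amo) {
    if (l == node) continue;
    successors->push_back(l ^ 1);  // node => not(l).
  }
}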
1247
1248bool BinaryImplicationGraph::DetectEquivalences(bool log_info) {
1249 // This was already called, and no new constraints were added. Note that newly
1250 // fixed variables cannot create new equivalences; only new binary clauses do.
1251 if (is_dag_) return true;
1252 WallTimer wall_timer;
1253 wall_timer.Start();
1254 log_info |= VLOG_IS_ON(1);
1255
1256 // Let's remove all fixed variables first.
1257 if (!Propagate(trail_)) return false;
1258 RemoveFixedVariables();
1259 const VariablesAssignment& assignment = trail_->Assignment();
1260 DCHECK(InvariantsAreOk());
1261
1262 // TODO(user): We could just do it directly though.
1263 const int32_t size(implications_.size());
1265 scc.reserve(size);
1266 double dtime = 0.0;
1267 {
1268 SccGraph::SccFinder finder;
1269 SccGraph graph(&finder, &implications_, &at_most_ones_,
1270 &at_most_one_buffer_);
1271 finder.FindStronglyConnectedComponents(size, graph, &scc);
1272 dtime += 4e-8 * graph.work_done_;
1273
1274 for (const Literal l : graph.to_fix_) {
1275 if (assignment.LiteralIsFalse(l)) return false;
1276 if (assignment.LiteralIsTrue(l)) continue;
1277 if (!FixLiteral(l)) return false;
1278 }
1279 }
1280
1281 // The old values will still be valid.
1282 representative_of_.resize(size, kNoLiteralIndex);
1283 is_redundant_.resize(size);
1284
1285 int num_equivalences = 0;
1286 reverse_topological_order_.clear();
1287 for (int index = 0; index < scc.size(); ++index) {
1288 const absl::Span<int32_t> component = scc[index];
1289
1290 // If one is fixed then all must be fixed. Note that the reason why the
1291 // propagation didn't already do that and we don't always get fixed
1292 // components of size 1 is because of the potential newly fixed literals
1293 // above.
1294 //
1295 // In any case, all fixed literals are marked as redundant.
1296 {
1297 bool all_fixed = false;
1298 bool all_true = false;
1299 for (const int32_t i : component) {
1300 const Literal l = Literal(LiteralIndex(i));
1301 if (trail_->Assignment().LiteralIsAssigned(l)) {
1302 all_fixed = true;
1303 all_true = trail_->Assignment().LiteralIsTrue(l);
1304 break;
1305 }
1306 }
1307 if (all_fixed) {
1308 for (const int32_t i : component) {
1309 const Literal l = Literal(LiteralIndex(i));
1310 if (!is_redundant_[l]) {
1311 ++num_redundant_literals_;
1312 is_redundant_.Set(l);
1313 representative_of_[l] = l.Index();
1314 }
1315 const Literal to_fix = all_true ? l : l.Negated();
1316 if (assignment.LiteralIsFalse(to_fix)) return false;
1317 if (assignment.LiteralIsTrue(to_fix)) continue;
1318 if (!FixLiteral(l)) return false;
1319 }
1320
1321 // Next component.
1322 continue;
1323 }
1324 }
1325
1326 // We ignore variables that appear in no constraints.
1327 if (component.size() == 1 && is_removed_[LiteralIndex(component[0])]) {
1328 continue;
1329 }
1330
1331 // We always take the smallest literal index (which also corresponds to the
1332 // smallest BooleanVariable index) as a representative. This makes sure that
1333 // the representative of a literal l and the one of not(l) will be the
1334 // negation of each other. There is also reason to think that it is
1335 // heuristically better to use a BooleanVariable that was created first.
1336 std::sort(component.begin(), component.end());
1337 const LiteralIndex representative(component[0]);
1338 reverse_topological_order_.push_back(representative);
1339
1340 if (component.size() == 1) {
1341 // Note that because we process list in reverse topological order, this
1342 // is only needed if there is any equivalence before this point.
1343 if (num_equivalences > 0) {
1344 auto& representative_list = implications_[representative];
1345 for (Literal& ref : representative_list) {
1346 const LiteralIndex rep = representative_of_[ref];
1347 if (rep == representative) continue;
1348 if (rep == kNoLiteralIndex) continue;
1349 ref = Literal(rep);
1350 }
1351 gtl::STLSortAndRemoveDuplicates(&representative_list);
1352 }
1353 continue;
1354 }
1355
1356 // Sets the representative.
1357 for (int i = 1; i < component.size(); ++i) {
1358 const Literal literal = Literal(LiteralIndex(component[i]));
1359 if (!is_redundant_[literal]) {
1360 ++num_redundant_literals_;
1361 is_redundant_.Set(literal);
1362 }
1363 representative_of_[literal] = representative;
1364
1365 // Detect if x <=> not(x) which means unsat. Note that we rely on the
1366 // fact that when sorted, they will both be consecutive in the list.
1367 if (Literal(LiteralIndex(component[i - 1])).Negated() == literal) {
1368 LOG_IF(INFO, log_info) << "Trivially UNSAT in DetectEquivalences()";
1369 return false;
1370 }
1371 }
1372
1373 // Merge all the lists in implications_[representative].
1374 // Note that we do not want representative in its own list.
1375 auto& representative_list = implications_[representative];
1376 int new_size = 0;
1377 for (const Literal l : representative_list) {
1378 const Literal rep = RepresentativeOf(l);
1379 if (rep.Index() == representative) continue;
1380 representative_list[new_size++] = rep;
1381 }
1382 representative_list.resize(new_size);
1383 for (int i = 1; i < component.size(); ++i) {
1384 const Literal literal = Literal(LiteralIndex(component[i]));
1385 auto& ref = implications_[literal];
1386 for (const Literal l : ref) {
1387 const Literal rep = RepresentativeOf(l);
1388 if (rep.Index() != representative) representative_list.push_back(rep);
1389 }
1390
1391 // Add representative <=> literal.
1392 //
1393 // Remark: this relation does not need to be added to a DRAT proof since
1394 // the redundant variables should never be used again for a pure SAT
1395 // problem.
1396 representative_list.push_back(literal);
1397 ref.clear();
1398 ref.push_back(Literal(representative));
1399 }
1400 gtl::STLSortAndRemoveDuplicates(&representative_list);
1401 num_equivalences += component.size() - 1;
1402 }
1403
1404 is_dag_ = true;
1405 if (num_equivalences != 0) {
1406 // Remap all at most ones. Remove fixed variables, process duplicates. Note
1407 // that this might result in more implications when we expand small at most
1408 // one.
1409 at_most_ones_.clear();
1410 CleanUpAndAddAtMostOnes(/*base_index=*/0);
1411
1412 num_implications_ = 0;
1413 for (LiteralIndex i(0); i < size; ++i) {
1414 num_implications_ += implications_[i].size();
1415 }
1416 dtime += 2e-8 * num_implications_;
1417 }
1418
1419 time_limit_->AdvanceDeterministicTime(dtime);
1420 const int num_fixed_during_scc =
1421 trail_->Index() - num_processed_fixed_variables_;
1423 DCHECK(InvariantsAreOk());
1424 LOG_IF(INFO, log_info) << "SCC. " << num_equivalences
1425 << " redundant equivalent literals. "
1426 << num_fixed_during_scc << " fixed. "
1427 << num_implications_ << " implications left. "
1428 << implications_.size() << " literals."
1429 << " size of at_most_one buffer = "
1430 << at_most_one_buffer_.size() << "."
1431 << " dtime: " << dtime
1432 << " wtime: " << wall_timer.Get();
1433 return true;
1434}
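// [Editor's note] A worked micro-instance of the equivalence detection above:
// the binary clauses (not(a) v b) and (not(b) v a) give a => b and b => a, so
// {a, b} forms one strongly connected component and {not(a), not(b)} forms its
// mirror. Both collapse onto their smallest literal index, which is why the
// representative of not(l) is always the negation of the representative of l.
// Remapping a clause onto representatives is then a single pass; a standalone
// sketch assuming a hypothetical `rep` table with rep[l] == l for
// representatives:

#include <vector>

void RemapToRepresentatives(const std::vector<int>& rep,
                            std::vector<int>* clause) {
  for (int& lit : *clause) lit = rep[lit];
}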
1435
1436// Note that as a side effect this also does a full "failed literal probing"
1437// using the binary implication graph only.
1438//
1439// TODO(user): Track which literals have new implications, and only process
1440// the antecedents of these.
1441bool BinaryImplicationGraph::ComputeTransitiveReduction(bool log_info) {
1442 DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
1443 if (!DetectEquivalences()) return false;
1444
1445 // TODO(user): the situation with fixed variable is not really "clean".
1446 // Simplify the code so we are sure we don't run into issue or have to deal
1447 // with any of that here.
1448 if (!Propagate(trail_)) return false;
1449 RemoveFixedVariables();
1450 DCHECK(InvariantsAreOk());
1451
1452 log_info |= VLOG_IS_ON(1);
1453 WallTimer wall_timer;
1454 wall_timer.Start();
1455
1456 int64_t num_fixed = 0;
1457 int64_t num_new_redundant_implications = 0;
1458 bool aborted = false;
1459 tmp_removed_.clear();
1460 work_done_in_mark_descendants_ = 0;
1461 int marked_index = 0;
1462
1463 // For each node we do a graph traversal and only keep the literals
1464 // at maximum distance 1. This only works because we have a DAG when ignoring
1465 // the "redundant" literal marked by DetectEquivalences(). Note that we also
1466 // need no duplicates in the implications list for correctness which is also
1467 // guaranteed by DetectEquivalences().
1468 //
1469 // TODO(user): We should be able to reuse some propagation like it is done for
1470 // tree-look. Once a node is processed, we just need to process a node that
1471 // implies it. Test if we can make this faster. Alternatively, only clear
1472 // a part of is_marked_ (after the first child of root in reverse topo order).
1473 //
1474 // TODO(user): Can we exploit the fact that the implication graph is a
1475 // skew-symmetric graph (isomorphic to its transposed) so that we do less
1476 // work?
1477 const LiteralIndex size(implications_.size());
1478 LiteralIndex previous = kNoLiteralIndex;
1479 for (const LiteralIndex root : reverse_topological_order_) {
1480 // In most situations reverse_topological_order_ contains no redundant, fixed,
1481 // or removed variables. But the reverse_topological_order_ is only
1482 // recomputed when new binary clauses are added to the graph, not when new
1483 // variables are fixed.
1484 if (is_redundant_[root]) continue;
1485 if (trail_->Assignment().LiteralIsAssigned(Literal(root))) continue;
1486
1487 auto& direct_implications = implications_[root];
1488 if (direct_implications.empty()) continue;
1489
1490 // This is a "poor" version of the tree look stuff, but it does show good
1491 // improvement. If we just processed one of the child of root, we don't
1492 // need to re-explore it.
1493 //
1494 // TODO(user): Another optim we can do is that we never need to expand
1495 // any node with a reverse topo order smaller or equal to the min of the
1496 // ones in this list.
1497 bool clear_previous_reachability = true;
1498 for (const Literal direct_child : direct_implications) {
1499 if (direct_child.Index() == previous) {
1500 clear_previous_reachability = false;
1501 is_marked_.Clear(previous);
1502 break;
1503 }
1504 }
1505 if (clear_previous_reachability) {
1506 is_marked_.ClearAndResize(size);
1507 marked_index = 0;
1508 }
1509 previous = root;
1510
1511 for (const Literal direct_child : direct_implications) {
1512 if (is_redundant_[direct_child]) continue;
1513 if (is_marked_[direct_child]) continue;
1514
1515 // This is a corner case where, because of equivalent literals, root
1516 // appears in implications_[root]; we will remove it below.
1517 if (direct_child.Index() == root) continue;
1518
1519 // When this happens, then root must be false, we handle this just after
1520 // the loop.
1521 if (direct_child.NegatedIndex() == root) {
1522 is_marked_.Set(direct_child);
1523 break;
1524 }
1525
1526 MarkDescendants(direct_child);
1527
1528 // We have a DAG, so direct_child could only be marked first.
1529 is_marked_.Clear(direct_child);
1530 }
1531 DCHECK(!is_marked_[root])
1532 << "DetectEquivalences() should have removed cycles!";
1533 is_marked_.Set(root);
1534
1535 // Also mark all the ones reachable through the root AMOs.
1536 if (root < at_most_ones_.size()) {
1537 for (const int start : at_most_ones_[root]) {
1538 for (const Literal l : AtMostOne(start)) {
1539 if (l.Index() == root) continue;
1540 if (!is_marked_[l.Negated()] && !is_redundant_[l.Negated()]) {
1541 is_marked_.SetUnsafe(l.Negated());
1542 MarkDescendants(l.Negated());
1543 }
1544 }
1545 }
1546 }
1547
1548 // Failed literal probing. If both x and not(x) are marked then root must be
1549 // false. Note that because we process "roots" in reverse topological order,
1550 // we will fix the LCA of x and not(x) first.
1551 const auto& marked_positions = is_marked_.PositionsSetAtLeastOnce();
1552 for (; marked_index < marked_positions.size(); ++marked_index) {
1553 const LiteralIndex i = marked_positions[marked_index];
1554 if (is_marked_[Literal(i).NegatedIndex()]) {
1555 // We tested that at the beginning.
1556 DCHECK(!trail_->Assignment().LiteralIsAssigned(Literal(root)));
1557
1558 // We propagate right away the binary implications so that we do not
1559 // need to consider all antecedents of root in the transitive
1560 // reduction.
1561 ++num_fixed;
1562 if (!FixLiteral(Literal(root).Negated())) return false;
1563 break;
1564 }
1565 }
1566
1567 // Note that direct_implications will be cleared by
1568 // RemoveFixedVariables() that will need to inspect it to completely
1569 // remove Literal(root) from all lists.
1570 if (trail_->Assignment().LiteralIsAssigned(Literal(root))) continue;
1571
1572 // Only keep the non-marked literals (and the redundant ones which are never
1573 // marked). We mark root to remove it in the corner case where it was
1574 // there.
1575 int new_size = 0;
1576 for (const Literal l : direct_implications) {
1577 if (!is_marked_[l]) {
1578 direct_implications[new_size++] = l;
1579 } else {
1580 tmp_removed_.push_back({Literal(root), l});
1581 DCHECK(!is_redundant_[l]);
1582 }
1583 }
1584 const int diff = direct_implications.size() - new_size;
1585 direct_implications.resize(new_size);
1586 direct_implications.shrink_to_fit();
1587 num_new_redundant_implications += diff;
1588 num_implications_ -= diff;
1589
1590 // Abort if the computation involved is too big.
1591 if (work_done_in_mark_descendants_ > 1e8) {
1592 aborted = true;
1593 break;
1594 }
1595 }
1596
1597 is_marked_.ClearAndResize(size);
1598
1599 // If we aborted early, we might no longer have both a=>b and not(b)=>not(a).
1600 // This is not desirable as some algorithms rely on this invariant. We fix
1601 // this here.
1602 if (aborted) {
1603 absl::flat_hash_set<std::pair<LiteralIndex, LiteralIndex>> removed;
1604 for (const auto [a, b] : tmp_removed_) {
1605 removed.insert({a.Index(), b.Index()});
1606 }
1607 for (LiteralIndex i(0); i < implications_.size(); ++i) {
1608 int new_size = 0;
1609 const LiteralIndex negated_i = Literal(i).NegatedIndex();
1610 auto& implication = implications_[i];
1611 for (const Literal l : implication) {
1612 if (removed.contains({l.NegatedIndex(), negated_i})) continue;
1613 implication[new_size++] = l;
1614 }
1615 implication.resize(new_size);
1616 }
1617 }
1618 if (num_fixed > 0) {
1619 RemoveFixedVariables();
1620 }
1621 DCHECK(InvariantsAreOk());
1622
1623 gtl::STLClearObject(&tmp_removed_);
1624 const double dtime = 1e-8 * work_done_in_mark_descendants_;
1625 time_limit_->AdvanceDeterministicTime(dtime);
1626 num_redundant_implications_ += num_new_redundant_implications;
1627 LOG_IF(INFO, log_info) << "Transitive reduction removed "
1628 << num_new_redundant_implications << " literals. "
1629 << num_fixed << " fixed. " << num_implications_
1630 << " implications left. " << implications_.size()
1631 << " literals." << " dtime: " << dtime
1632 << " wtime: " << wall_timer.Get()
1633 << (aborted ? " Aborted." : "");
1634 return true;
1635}
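// Editor's note: an illustrative usage sketch, not part of the original
// clause.cc. It shows the intended effect of ComputeTransitiveReduction() on a
// tiny chain, assuming the three Boolean variables were already registered
// with the owning solver (e.g. via SatSolver::SetNumVariables(), which resizes
// this class and the Trail).
//
//   Model model;
//   auto* graph = model.GetOrCreate<BinaryImplicationGraph>();
//   const Literal a(BooleanVariable(0), true);
//   const Literal b(BooleanVariable(1), true);
//   const Literal c(BooleanVariable(2), true);
//   graph->AddBinaryClause(a.Negated(), b);  // a => b
//   graph->AddBinaryClause(b.Negated(), c);  // b => c
//   graph->AddBinaryClause(a.Negated(), c);  // a => c, transitively implied.
//   if (graph->ComputeTransitiveReduction()) {
//     // The redundant arc a => c has been removed: it is already given by
//     // a => b => c, so only the two chain arcs (and their contrapositives)
//     // remain.
//   }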
1636
1637namespace {
1638
1639int ElementInIntersectionOrMinusOne(absl::Span<const int> a,
1640 absl::Span<const int> b) {
1641 DCHECK(std::is_sorted(a.begin(), a.end()));
1642 DCHECK(std::is_sorted(b.begin(), b.end()));
1643 if (a.empty() || b.empty()) return -1;
1644 int i = 0;
1645 int j = 0;
1646 while (true) {
1647 if (a[i] == b[j]) return a[i];
1648 if (a[i] < b[j]) {
1649 if (++i == a.size()) return -1;
1650 } else {
1651 if (++j == b.size()) return -1;
1652 }
1653 }
1654}
1655
1656} // namespace
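// Editor's note: a worked example, not part of the original file. The helper
// above is a standard two-pointer scan over two sorted ranges, so with
// a = {1, 4, 7} and b = {2, 4, 9} the pointers visit (1,2), (4,2), (4,4) and
// the function returns 4, while a = {1, 3} against b = {2, 4} exhausts one
// range and returns -1. TransformIntoMaxCliques() below uses it to test in
// O(|a| + |b|) whether a size-2 clique is already covered by some indexed
// maximal clique.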
1657
1658bool BinaryImplicationGraph::TransformIntoMaxCliques(
1659 std::vector<std::vector<Literal>>* at_most_ones,
1660 int64_t max_num_explored_nodes) {
1661 // The code below assumes a DAG.
1662 if (!DetectEquivalences()) return false;
1663 work_done_in_mark_descendants_ = 0;
1664
1665 int num_extended = 0;
1666 int num_removed = 0;
1667 int num_added = 0;
1668
1669 // Data to detect inclusion of base amo into extend amo.
1670 std::vector<int> detector_clique_index;
1671 CompactVectorVector<int, Literal> storage;
1672 InclusionDetector detector(storage);
1673 detector.SetWorkLimit(1e9);
1674
1675 std::vector<int> dense_index_to_index;
1676 util_intops::StrongVector<LiteralIndex, std::vector<int>>
1677 max_cliques_containing(implications_.size());
1678
1679 // We start by processing larger constraints first.
1680 // But we want the output order to be stable.
1681 std::vector<std::pair<int, int>> index_size_vector;
1682 index_size_vector.reserve(at_most_ones->size());
1683 for (int index = 0; index < at_most_ones->size(); ++index) {
1684 std::vector<Literal>& clique = (*at_most_ones)[index];
1685 if (clique.size() <= 1) continue;
1686
1687 // Note(user): Because we always use the literal with the smallest variable
1688 // index as representative, this makes sure that, if possible, we express
1689 // the clique in terms of user-provided variables (which are always created
1690 // first).
1691 //
1692 // Remap the clique to only use representatives.
1693 for (Literal& ref : clique) {
1694 DCHECK_LT(ref.Index(), representative_of_.size());
1695 const LiteralIndex rep = representative_of_[ref];
1696 if (rep == kNoLiteralIndex) continue;
1697 ref = Literal(rep);
1698 }
1699
1700 // We skip anything that can be presolved further, as the code below does
1701 // not handle duplicates well.
1702 //
1703 // TODO(user): Shall we presolve it here?
1704 bool skip = false;
1705 std::sort(clique.begin(), clique.end());
1706 for (int i = 1; i < clique.size(); ++i) {
1707 if (clique[i] == clique[i - 1] || clique[i] == clique[i - 1].Negated()) {
1708 skip = true;
1709 break;
1710 }
1711 }
1712 if (skip) continue;
1713
1714 index_size_vector.push_back({index, clique.size()});
1715 }
1716 std::stable_sort(
1717 index_size_vector.begin(), index_size_vector.end(),
1718 [](const std::pair<int, int> a, const std::pair<int, int>& b) {
1719 return a.second > b.second;
1720 });
1721
1722 absl::flat_hash_set<int> cannot_be_removed;
1723 std::vector<bool> was_extended(at_most_ones->size(), false);
1724 for (const auto& [index, old_size] : index_size_vector) {
1725 std::vector<Literal>& clique = (*at_most_ones)[index];
1726 if (time_limit_->LimitReached()) break;
1727
1728 // Special case for cliques of size 2: we don't expand them if they
1729 // are included in an already added clique.
1730 if (clique.size() == 2) {
1731 DCHECK_NE(clique[0], clique[1]);
1732 const int dense_index = ElementInIntersectionOrMinusOne(
1733 max_cliques_containing[clique[0]], max_cliques_containing[clique[1]]);
1734 if (dense_index >= 0) {
1735 const int superset_index = dense_index_to_index[dense_index];
1736 if (was_extended[superset_index])
1737 cannot_be_removed.insert(superset_index);
1738 ++num_removed;
1739 clique.clear();
1740 continue;
1741 }
1742 }
1743
1744 // Save the non-extended version as a possible subset.
1745 // TODO(user): Detect on the fly if a superset already exists.
1746 detector_clique_index.push_back(index);
1747 detector.AddPotentialSubset(storage.AddLiterals(clique));
1748
1749 // We only expand the clique as long as we didn't spend too much time.
1750 if (work_done_in_mark_descendants_ < max_num_explored_nodes) {
1751 clique = ExpandAtMostOne(clique, max_num_explored_nodes);
1752 }
1753
1754 // Save the extended version as a possible superset.
1755 detector_clique_index.push_back(index);
1756 detector.AddPotentialSuperset(storage.AddLiterals(clique));
1757
1758 // Also index the clique for the size-2 quick lookup.
1759 const int dense_index = dense_index_to_index.size();
1760 dense_index_to_index.push_back(index);
1761 for (const Literal l : clique) {
1762 max_cliques_containing[l].push_back(dense_index);
1763 }
1764
1765 if (clique.size() > old_size) {
1766 was_extended[index] = true;
1767 ++num_extended;
1768 }
1769 ++num_added;
1770 }
1771
1772 // Remove cliques (before extension) that are included in an extended one.
1773 detector.DetectInclusions([&](int subset, int superset) {
1774 const int subset_index = detector_clique_index[subset];
1775 const int superset_index = detector_clique_index[superset];
1776 if (subset_index == superset_index) return;
1777
1778 // Abort if one was already deleted.
1779 if ((*at_most_ones)[subset_index].empty()) return;
1780 if ((*at_most_ones)[superset_index].empty()) return;
1781
1782 // If an extended clique already covers a deleted one, we cannot try to
1783 // remove it by looking at its non-extended version.
1784 if (cannot_be_removed.contains(subset_index)) return;
1785
1786 ++num_removed;
1787 (*at_most_ones)[subset_index].clear();
1788 if (was_extended[superset_index]) cannot_be_removed.insert(superset_index);
1789 });
1790
1791 if (num_extended > 0 || num_removed > 0 || num_added > 0) {
1792 VLOG(1) << "Clique Extended: " << num_extended
1793 << " Removed: " << num_removed << " Added: " << num_added
1794 << (work_done_in_mark_descendants_ > max_num_explored_nodes
1795 ? " (Aborted)"
1796 : "");
1797 }
1798 return true;
1799}
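// Editor's note: a usage sketch, not part of the original file; x, y, z are
// placeholder literals. Assume the graph already contains the constraints
// themselves (say via AddAtMostOne({y, z}) and AddBinaryClause(x.Negated(),
// z.Negated()) for x => not(z)), and that we pass the two small cliques below:
//
//   std::vector<std::vector<Literal>> amos = {{x, y}, {y, z}};
//   if (graph->TransformIntoMaxCliques(&amos)) {
//     // Possible outcome: {x, y} was expanded in place to {x, y, z} since
//     // every member implies the negation of z, and {y, z} was then cleared
//     // because it is included in that extended clique.
//   }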
1800
1801template <bool use_weight>
1802std::vector<Literal> BinaryImplicationGraph::ExpandAtMostOneWithWeight(
1803 const absl::Span<const Literal> at_most_one,
1804 const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
1805 const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values) {
1806 std::vector<Literal> clique(at_most_one.begin(), at_most_one.end());
1807 std::vector<LiteralIndex> intersection;
1808 double clique_weight = 0.0;
1809 const int64_t old_work = work_done_in_mark_descendants_;
1810 if (use_weight) {
1811 for (const Literal l : clique) {
1812 clique_weight += expanded_lp_values[l];
1813 }
1814 }
1815 for (int i = 0; i < clique.size(); ++i) {
1816 // Do not spend too much time here.
1817 if (work_done_in_mark_descendants_ - old_work > 1e8) break;
1818
1819 is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
1820 MarkDescendants(clique[i]);
1821 if (i == 0) {
1822 for (const LiteralIndex index : is_marked_.PositionsSetAtLeastOnce()) {
1823 if (can_be_included[Literal(index).NegatedIndex()]) {
1824 intersection.push_back(index);
1825 }
1826 }
1827 for (const Literal l : clique) is_marked_.Clear(l.NegatedIndex());
1828 }
1829
1830 int new_size = 0;
1831 double intersection_weight = 0.0;
1832 is_marked_.Clear(clique[i]);
1833 is_marked_.Clear(clique[i].NegatedIndex());
1834 for (const LiteralIndex index : intersection) {
1835 if (!is_marked_[index]) continue;
1836 intersection[new_size++] = index;
1837 if (use_weight) {
1838 intersection_weight += expanded_lp_values[index];
1839 }
1840 }
1841 intersection.resize(new_size);
1842 if (intersection.empty()) break;
1843
1844 // We can't generate a violated cut this way. This is because intersection
1845 // contains all the possible ways to extend the current clique.
1846 if (use_weight && clique_weight + intersection_weight <= 1.0) {
1847 clique.clear();
1848 return clique;
1849 }
1850
1851 // Expand? The negation of any literal in the intersection is a valid way
1852 // to extend the clique.
1853 if (i + 1 == clique.size()) {
1854 // Heuristic: use the literal with the largest lp value. We randomize slightly.
1855 int index = -1;
1856 double max_lp = 0.0;
1857 for (int j = 0; j < intersection.size(); ++j) {
1858 // If we don't use weights, we prefer variables that come first.
1859 const double lp =
1860 use_weight ? 1.0 - expanded_lp_values[intersection[j]] +
1861 absl::Uniform<double>(*random_, 0.0, 1e-4)
1862 : can_be_included.size() - intersection[j].value();
1863 if (index == -1 || lp > max_lp) {
1864 index = j;
1865 max_lp = lp;
1866 }
1867 }
1868 if (index != -1) {
1869 clique.push_back(Literal(intersection[index]).Negated());
1870 std::swap(intersection.back(), intersection[index]);
1871 intersection.pop_back();
1872 if (use_weight) {
1873 clique_weight += expanded_lp_values[clique.back()];
1874 }
1875 }
1876 }
1877 }
1878 return clique;
1879}
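// Editor's note: a small numeric illustration, not part of the original file.
// In the selection step above, if the surviving intersection holds literals
// l1 and l2 with lp values 0.2 and 0.9, then their negations have values 0.8
// and 0.1, so the weighted version extends the clique with not(l1): its score
// 1.0 - 0.2 (plus a random perturbation below 1e-4 that mostly breaks ties) is
// the largest, which keeps the lp weight of the resulting at-most-one as high
// as possible.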
1880
1881 // Make sure both versions are compiled.
1882template std::vector<Literal>
1883BinaryImplicationGraph::ExpandAtMostOneWithWeight<true>(
1884 const absl::Span<const Literal> at_most_one,
1885 const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
1886 const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values);
1887template std::vector<Literal>
1888BinaryImplicationGraph::ExpandAtMostOneWithWeight<false>(
1889 const absl::Span<const Literal> at_most_one,
1890 const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
1891 const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values);
1892
1893const std::vector<std::vector<Literal>>&
1894BinaryImplicationGraph::GenerateAtMostOnesWithLargeWeight(
1895 const std::vector<Literal>& literals,
1896 const std::vector<double>& lp_values) {
1897 // We only want to generate a cut with literals from the LP, not extra ones.
1898 const int num_literals = implications_.size();
1899 util_intops::StrongVector<LiteralIndex, bool> can_be_included(num_literals,
1900 false);
1901 util_intops::StrongVector<LiteralIndex, double> expanded_lp_values(
1902 num_literals, 0.0);
1903 const int size = literals.size();
1904 for (int i = 0; i < size; ++i) {
1905 const Literal l = literals[i];
1906 can_be_included[l] = true;
1907 can_be_included[l.NegatedIndex()] = true;
1908
1909 const double value = lp_values[i];
1910 expanded_lp_values[l] = value;
1911 expanded_lp_values[l.NegatedIndex()] = 1.0 - value;
1912 }
1913
1914 // We want highest sum first.
1915 struct Candidate {
1916 Literal a;
1917 Literal b;
1918 double sum;
1919 bool operator<(const Candidate& other) const { return sum > other.sum; }
1920 };
1921 std::vector<Candidate> candidates;
1922
1923 // First heuristic. Currently we only consider violated at-most-ones of size
1924 // 2 and extend them. Right now, the code is a bit too slow to try many of
1925 // them at every LP node, which is why we are defensive like this. Note also
1926 // that because we currently still statically add the initial implications,
1927 // this will only add cuts based on newly learned binary clauses, or the ones
1928 // that were not added to the relaxation in the first place.
1929 for (int i = 0; i < size; ++i) {
1930 Literal current_literal = literals[i];
1931 double current_value = lp_values[i];
1932 if (trail_->Assignment().LiteralIsAssigned(current_literal)) continue;
1933 if (is_redundant_[current_literal]) continue;
1934
1935 if (current_value < 0.5) {
1936 current_literal = current_literal.Negated();
1937 current_value = 1.0 - current_value;
1938 }
1939
1940 // We consider only one candidate for each current_literal.
1941 LiteralIndex best = kNoLiteralIndex;
1942 double best_value = 0.0;
1943 for (const Literal l : implications_[current_literal]) {
1944 if (!can_be_included[l]) continue;
1945 const double activity =
1946 current_value + expanded_lp_values[l.NegatedIndex()];
1947 if (activity <= 1.01) continue;
1948 const double v = activity + absl::Uniform<double>(*random_, 0.0, 1e-4);
1949 if (best == kNoLiteralIndex || v > best_value) {
1950 best_value = v;
1951 best = l.NegatedIndex();
1952 }
1953 }
1954 if (best != kNoLiteralIndex) {
1955 const double activity = current_value + expanded_lp_values[best];
1956 candidates.push_back({current_literal, Literal(best), activity});
1957 }
1958 }
1959
1960 // Do not generate too many cuts at once.
1961 const int kMaxNumberOfCutPerCall = 10;
1962 std::sort(candidates.begin(), candidates.end());
1963 if (candidates.size() > kMaxNumberOfCutPerCall) {
1964 candidates.resize(kMaxNumberOfCutPerCall);
1965 }
1966
1967 // Expand each candidate to a maximal at-most-one before returning them.
1968 // Note that we only expand using literals from the LP.
1969 tmp_cuts_.clear();
1970 std::vector<Literal> at_most_one;
1971 for (const Candidate& candidate : candidates) {
1972 at_most_one = ExpandAtMostOneWithWeight(
1973 {candidate.a, candidate.b}, can_be_included, expanded_lp_values);
1974 if (!at_most_one.empty()) tmp_cuts_.push_back(at_most_one);
1975 }
1976 return tmp_cuts_;
1977}
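// Editor's note: an illustrative walk-through, not part of the original file;
// x and y are placeholder literals. Suppose the LP relaxation sets x = 0.7 and
// y = 0.6 and the graph contains x => not(y). Then the pair (x, y) has
// activity 0.7 + 0.6 = 1.3 > 1.01, so it becomes one of the (at most 10)
// candidates above, and ExpandAtMostOneWithWeight({x, y}, ...) tries to grow
// it using only literals that appear in the LP. The returned at-most-one
// "x + y + ... <= 1" is violated by the current LP point by at least 0.3,
// which is what makes it a useful cut.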
1978
1979// TODO(user): Use deterministic limit.
1980// TODO(user): Explore the graph instead of just looking at full amo, especially
1981// since we expand small ones.
1982std::vector<absl::Span<const Literal>>
1983BinaryImplicationGraph::HeuristicAmoPartition(std::vector<Literal>* literals) {
1984 std::vector<absl::Span<const Literal>> result;
1985
1986 util_intops::StrongVector<LiteralIndex, bool> to_consider(
1987 implications_.size(), false);
1988 for (const Literal l : *literals) to_consider[l] = true;
1989
1990 // Priority queue of (intersection_size, start_of_amo).
1991 std::priority_queue<std::pair<int, int>> pq;
1992
1993 // Compute, for each at-most-one that might overlap, its overlapping size
1994 // and store its start in the at_most_one_buffer_.
1995 //
1996 // This is in O(number of literals in the amos).
1997 absl::flat_hash_set<int> explored_amo;
1998 for (const Literal l : *literals) {
1999 if (l.Index() >= at_most_ones_.size()) continue;
2000 for (const int start : at_most_ones_[l]) {
2001 const auto [_, inserted] = explored_amo.insert(start);
2002 if (!inserted) continue;
2003
2004 int intersection_size = 0;
2005 for (const Literal l : AtMostOne(start)) {
2006 if (to_consider[l]) ++intersection_size;
2007 }
2008 if (intersection_size > 1) {
2009 pq.push({intersection_size, start});
2010 }
2011
2012 // Abort early if we are done.
2013 if (intersection_size == literals->size()) break;
2014 }
2015 }
2016
2017 // Consume AMOs, update size.
2018 int num_processed = 0;
2019 while (!pq.empty()) {
2020 const auto [old_size, start] = pq.top();
2021 pq.pop();
2022
2023 // Recompute size.
2024 int intersection_size = 0;
2025 for (const Literal l : AtMostOne(start)) {
2026 if (to_consider[l]) ++intersection_size;
2027 }
2028 if (intersection_size != old_size) {
2029 // re-add with new size.
2030 if (intersection_size > 1) {
2031 pq.push({intersection_size, start});
2032 }
2033 continue;
2034 }
2035
2036 // Mark the literals of that at-most-one as no longer to be considered.
2037 for (const Literal l : AtMostOne(start)) {
2038 to_consider[l] = false;
2039 }
2040
2041 // Extract the intersection by moving it in
2042 // [num_processed, num_processed + intersection_size).
2043 const int span_start = num_processed;
2044 for (int i = num_processed; i < literals->size(); ++i) {
2045 if (to_consider[(*literals)[i]]) continue;
2046 std::swap((*literals)[num_processed], (*literals)[i]);
2047 ++num_processed;
2048 }
2049 result.push_back(absl::MakeSpan(literals->data() + span_start,
2050 num_processed - span_start));
2051 }
2052 return result;
2053}
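// Editor's note: a usage sketch, not part of the original file; a, b, c, d are
// placeholder literals. If a linear constraint mentions {a, b, c, d} and the
// graph stores the at-most-ones {a, b, c} and {c, d}, the greedy loop above
// first consumes the larger overlap:
//
//   std::vector<Literal> lits = {a, b, c, d};
//   const auto parts = graph->HeuristicAmoPartition(&lits);
//   // parts.size() == 1: one span covering a, b, c (now moved to the front of
//   // lits, in some order). When {c, d} is popped its remaining overlap is
//   // only {d}, so no second group is produced and d stays unpartitioned.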
2054
2055void BinaryImplicationGraph::MarkDescendants(Literal root) {
2056 bfs_stack_.resize(implications_.size());
2057 auto* const stack = bfs_stack_.data();
2058 const int amo_size = static_cast<int>(at_most_ones_.size());
2059 auto is_marked = is_marked_.const_view();
2060 auto is_redundant = is_redundant_.const_view();
2061 if (is_redundant[root]) return;
2062
2063 int stack_size = 1;
2064 stack[0] = root;
2065 is_marked_.Set(root);
2066 for (int j = 0; j < stack_size; ++j) {
2067 const Literal current = stack[j];
2068 if (!implies_something_[current]) continue;
2069
2070 for (const Literal l : implications_[current]) {
2071 if (!is_marked[l] && !is_redundant[l]) {
2072 is_marked_.SetUnsafe(l);
2073 stack[stack_size++] = l;
2074 }
2075 }
2076
2077 if (current.Index() >= amo_size) continue;
2078 for (const int start : at_most_ones_[current]) {
2079 for (const Literal l : AtMostOne(start)) {
2080 if (l == current) continue;
2081 if (!is_marked[l.NegatedIndex()] && !is_redundant[l.NegatedIndex()]) {
2082 is_marked_.SetUnsafe(l.NegatedIndex());
2083 stack[stack_size++] = l.Negated();
2084 }
2085 }
2086 }
2087 }
2088 work_done_in_mark_descendants_ += stack_size;
2089}
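// Editor's note: an illustrative trace, not part of the original file; a, b,
// c, d are placeholder literals. With the arcs a => b and b => c plus the
// stored at-most-one {b, d}, MarkDescendants(a) marks a, then b, then c, and
// also not(d): b being in the at-most-one means b => not(d), which is exactly
// the extra edge followed by the inner loop over at_most_ones_[current].
// work_done_in_mark_descendants_ then grows by the four stacked literals,
// which is the quantity the deterministic-time accounting is based on.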
2090
2091std::vector<Literal> BinaryImplicationGraph::ExpandAtMostOne(
2092 const absl::Span<const Literal> at_most_one,
2093 int64_t max_num_explored_nodes) {
2094 std::vector<Literal> clique(at_most_one.begin(), at_most_one.end());
2095
2096 // Optim.
2097 for (int i = 0; i < clique.size(); ++i) {
2098 if (implications_[clique[i]].empty() || is_redundant_[clique[i]]) {
2099 return clique;
2100 }
2101 }
2102
2103 // TODO(user): Improve the algorithm. If we do a dfs, we can know if a
2104 // literal has no descendant in the current intersection. We can keep such
2105 // literals marked.
2106 std::vector<LiteralIndex> intersection;
2107 for (int i = 0; i < clique.size(); ++i) {
2108 if (work_done_in_mark_descendants_ > max_num_explored_nodes) break;
2109 is_marked_.ClearAndResize(LiteralIndex(implications_.size()));
2110 MarkDescendants(clique[i]);
2111
2112 if (i == 0) {
2113 intersection = is_marked_.PositionsSetAtLeastOnce();
2114 for (const Literal l : clique) is_marked_.Clear(l.NegatedIndex());
2115 }
2116
2117 int new_size = 0;
2118 is_marked_.Clear(clique[i].NegatedIndex()); // TODO(user): explain.
2119 for (const LiteralIndex index : intersection) {
2120 if (is_marked_[index]) intersection[new_size++] = index;
2121 }
2122 intersection.resize(new_size);
2123 if (intersection.empty()) break;
2124
2125 // TODO(user): If the intersection is small compared to the members of the
2126 // clique left to explore, we could look at the descendants of the negated
2127 // intersection instead.
2128
2129 // Expand?
2130 if (i + 1 == clique.size()) {
2131 clique.push_back(Literal(intersection.back()).Negated());
2132 intersection.pop_back();
2133 }
2134 }
2135 return clique;
2136}
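// Editor's note: an illustrative sketch, not part of the original file; x, y,
// z, w are placeholder literals. Starting from the at-most-one {x, y} with the
// arcs x => not(z), y => not(z) and x => not(w) in the graph, the first
// MarkDescendants(x) call yields the candidate set {not(z), not(w)};
// intersecting with the descendants of y keeps only {not(z)}. Once the last
// clique member has been processed, the clique is extended with the negation
// of a surviving candidate, here z, giving the larger at-most-one {x, y, z}.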
2137
2138// TODO(user): lazy cleanup the lists on is_removed_?
2139// TODO(user): Mark fixed variable as is_removed_ for faster iteration?
2140const std::vector<Literal>& BinaryImplicationGraph::DirectImplications(
2141 Literal literal) {
2142 DCHECK(!is_removed_[literal]);
2143
2144 // Clear old state.
2145 for (const Literal l : direct_implications_) {
2146 in_direct_implications_[l] = false;
2147 }
2148 direct_implications_.clear();
2149
2150 // Fill new state.
2151 const VariablesAssignment& assignment = trail_->Assignment();
2152 DCHECK(!assignment.LiteralIsAssigned(literal));
2153 for (const Literal l : implications_[literal]) {
2154 if (l == literal) continue;
2155 if (assignment.LiteralIsAssigned(l)) continue;
2156 if (!is_removed_[l] && !in_direct_implications_[l]) {
2157 in_direct_implications_[l] = true;
2158 direct_implications_.push_back(l);
2159 }
2160 }
2161 if (literal.Index() < at_most_ones_.size()) {
2162 if (is_redundant_[literal]) {
2163 DCHECK(at_most_ones_[literal].empty());
2164 }
2165 for (const int start : at_most_ones_[literal]) {
2166 for (const Literal l : AtMostOne(start)) {
2167 if (l == literal) continue;
2168 if (assignment.LiteralIsAssigned(l)) continue;
2169 if (!is_removed_[l] && !in_direct_implications_[l.NegatedIndex()]) {
2170 in_direct_implications_[l.NegatedIndex()] = true;
2171 direct_implications_.push_back(l.Negated());
2172 }
2173 }
2174 }
2175 }
2176 estimated_sizes_[literal] = direct_implications_.size();
2177 return direct_implications_;
2178}
2179
2180absl::Span<const Literal> BinaryImplicationGraph::AtMostOne(int start) const {
2181 const int size = at_most_one_buffer_[start].Index().value();
2182 return absl::MakeSpan(&at_most_one_buffer_[start + 1], size);
2183}
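// Editor's note: a layout illustration, not part of the original file. Each
// at-most-one is stored inline in at_most_one_buffer_ as a header entry whose
// Index() encodes the length, followed by the literals themselves:
//
//   [ header(3) | a | b | c | header(2) | d | e ]
//     ^ start = 0, size 3    ^ start = 4, size 2
//
// AtMostOne(start) reads the header and returns a span over the next `size`
// entries, and consumers advance with start += amo.size() + 1, as done in
// InvariantsAreOk() and NextAtMostOne() below.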
2184
2185LiteralIndex BinaryImplicationGraph::RandomImpliedLiteral(Literal lhs) {
2186 const int size1 = implications_[lhs].size();
2187 const int size2 =
2188 lhs.Index() < at_most_ones_.size() ? at_most_ones_[lhs].size() : 0;
2189 if (size1 + size2 == 0) return kNoLiteralIndex;
2190
2191 const int choice = absl::Uniform<int>(*random_, 0, size1 + size2);
2192 if (choice < size1) {
2193 return implications_[lhs][choice].Index();
2194 }
2195
2196 const absl::Span<const Literal> amo =
2197 AtMostOne(at_most_ones_[lhs][choice - size1]);
2198 CHECK_GE(amo.size(), 2);
2199 const int first_choice = absl::Uniform<int>(*random_, 0, amo.size());
2200 const Literal lit = amo[first_choice];
2201 if (lit != lhs) return lit.NegatedIndex();
2202
2203 // We are unlucky and just picked the wrong literal: take a different one.
2204 int next_choice = absl::Uniform<int>(*random_, 0, amo.size() - 1);
2205 if (next_choice >= first_choice) {
2206 next_choice += 1;
2207 }
2208 CHECK_NE(amo[next_choice], lhs);
2209 return amo[next_choice].NegatedIndex();
2210}
2211
2212bool BinaryImplicationGraph::FindFailedLiteralAroundVar(BooleanVariable var,
2213 bool* is_unsat) {
2214 const int saved_index = propagation_trail_index_;
2215 DCHECK_EQ(propagation_trail_index_, trail_->Index()); // Propagation done.
2216
2217 const VariablesAssignment& assignment = trail_->Assignment();
2218 if (assignment.VariableIsAssigned(var)) return false;
2219
2220 const Literal literal(var, true);
2221 direct_implications_of_negated_literal_ =
2222 DirectImplications(literal.Negated());
2223 DirectImplications(literal); // Fill in_direct_implications_.
2224 for (const Literal l : direct_implications_of_negated_literal_) {
2225 if (in_direct_implications_[l]) {
2226 // not(l) => literal => l.
2227 if (!FixLiteral(l)) {
2228 *is_unsat = true;
2229 return false;
2230 }
2231 }
2232 }
2233
2234 return propagation_trail_index_ > saved_index;
2235}
2236
2237int64_t BinaryImplicationGraph::NumImplicationOnVariableRemoval(
2238 BooleanVariable var) {
2239 const Literal literal(var, true);
2240 int64_t result = 0;
2241 direct_implications_of_negated_literal_ =
2242 DirectImplications(literal.Negated());
2243 const int64_t s1 = DirectImplications(literal).size();
2244 for (const Literal l : direct_implications_of_negated_literal_) {
2245 result += s1;
2246
2247 // We should have dealt with that in FindFailedLiteralAroundVar().
2248 DCHECK(!in_direct_implications_[l]);
2249
2250 // l => literal => l: equivalent variable!
2251 if (in_direct_implications_[l.NegatedIndex()]) result--;
2252 }
2253 return result;
2254}
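// Editor's note: a worked example, not part of the original file. If literal x
// directly implies {a, b} and not(x) directly implies {c}, then eliminating
// the variable of x by resolution creates one arc per pair, i.e. 2 * 1 = 2 new
// implications (not(c) => a and not(c) => b, since not(x) => c is the same as
// not(c) => x). The function returns exactly that product, minus one for every
// literal l with not(x) => l and x => not(l): the corresponding resolvent
// not(l) => not(l) is trivial because x and not(l) are in fact equivalent.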
2255
2256// For all possible a => var => b, add a => b.
2257void BinaryImplicationGraph::RemoveBooleanVariable(
2258 BooleanVariable var, std::deque<std::vector<Literal>>* postsolve_clauses) {
2259 const Literal literal(var, true);
2260 DCHECK(!is_removed_[literal.Index()]);
2261 DCHECK(!is_redundant_[literal.Index()]);
2262
2263 direct_implications_of_negated_literal_ =
2264 DirectImplications(literal.Negated());
2265 for (const Literal b : DirectImplications(literal)) {
2266 if (is_removed_[b]) continue;
2267 DCHECK(!is_redundant_[b]);
2268 estimated_sizes_[b.NegatedIndex()]--;
2269 for (const Literal a_negated : direct_implications_of_negated_literal_) {
2270 if (a_negated.Negated() == b) continue;
2271 if (is_removed_[a_negated]) continue;
2272 AddImplication(a_negated.Negated(), b);
2273 }
2274 }
2275 for (const Literal a_negated : direct_implications_of_negated_literal_) {
2276 if (is_removed_[a_negated]) continue;
2277 DCHECK(!is_redundant_[a_negated]);
2278 estimated_sizes_[a_negated.NegatedIndex()]--;
2279 }
2280
2281 // Notify the proof checker and the postsolve of the deletion.
2282 // Note that we want var first in these clauses for the postsolve.
2283 for (const Literal b : direct_implications_) {
2284 if (drat_proof_handler_ != nullptr) {
2285 drat_proof_handler_->DeleteClause({Literal(var, false), b});
2286 }
2287 postsolve_clauses->push_back({Literal(var, false), b});
2288 }
2289 for (const Literal a_negated : direct_implications_of_negated_literal_) {
2290 if (drat_proof_handler_ != nullptr) {
2291 drat_proof_handler_->DeleteClause({Literal(var, true), a_negated});
2292 }
2293 postsolve_clauses->push_back({Literal(var, true), a_negated});
2294 }
2295
2296 // We need to remove any occurrence of var in our implication lists; this
2297 // will be delayed to the CleanupAllRemovedVariables() call.
2298 for (const LiteralIndex index : {literal.Index(), literal.NegatedIndex()}) {
2299 is_removed_[index] = true;
2300 implications_[index].clear();
2301 if (!is_redundant_[index]) {
2302 ++num_redundant_literals_;
2303 is_redundant_.Set(index);
2304 }
2305 }
2306}
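// Editor's note: an illustrative sketch, not part of the original file; a, b
// are placeholder literals and x stands for Literal(var, true). With only
// a => x and x => b in the graph, the elimination above adds the resolvent
// a => b, pushes the clauses {not(x), b} and {x, not(a)} (variable first) onto
// postsolve_clauses so that x can be assigned a feasible value once a and b
// are known, and finally flags both x and not(x) as removed so that the
// remaining occurrences get cleaned up later.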
2307
2308void BinaryImplicationGraph::RemoveAllRedundantVariables(
2309 std::deque<std::vector<Literal>>* postsolve_clauses) {
2310 for (LiteralIndex a(0); a < implications_.size(); ++a) {
2311 if (is_redundant_[a] && !is_removed_[a]) {
2312 postsolve_clauses->push_back(
2313 {Literal(a), RepresentativeOf(Literal(a)).Negated()});
2314 is_removed_[a] = true;
2315 gtl::STLClearObject(&(implications_[a]));
2316 continue;
2317 }
2318
2319 int new_size = 0;
2320 auto& implication = implications_[a];
2321 for (const Literal l : implication) {
2322 if (!is_redundant_[l]) {
2323 implication[new_size++] = l;
2324 }
2325 }
2326 implication.resize(new_size);
2327 }
2328}
2329
2330void BinaryImplicationGraph::CleanupAllRemovedAndFixedVariables() {
2331 const VariablesAssignment& assignment = trail_->Assignment();
2332 for (LiteralIndex a(0); a < implications_.size(); ++a) {
2333 if (is_removed_[a] || assignment.LiteralIsAssigned(Literal(a))) {
2334 if (DEBUG_MODE && assignment.LiteralIsTrue(Literal(a))) {
2335 // The code assumes that everything is already propagated.
2336 // Otherwise we will remove implications that didn't propagate yet!
2337 for (const Literal lit : implications_[a]) {
2338 DCHECK(trail_->Assignment().LiteralIsTrue(lit));
2339 }
2340 }
2341
2342 gtl::STLClearObject(&(implications_[a]));
2343 continue;
2344 }
2345
2346 int new_size = 0;
2347 auto& implication = implications_[a];
2348 for (const Literal l : implication) {
2349 if (!is_removed_[l] && !assignment.LiteralIsTrue(l)) {
2350 implication[new_size++] = l;
2351 }
2352 }
2353 implication.resize(new_size);
2354 }
2355
2356 // Clean-up at most ones.
2357 at_most_ones_.clear();
2358 CleanUpAndAddAtMostOnes(/*base_index=*/0);
2359
2360 // Note that to please the invariant check we also removed fixed literals above.
2361 DCHECK(InvariantsAreOk());
2362}
2363
2364bool BinaryImplicationGraph::InvariantsAreOk() {
2365 // We check that if a => b then not(b) => not(a).
2366 absl::flat_hash_set<std::pair<LiteralIndex, LiteralIndex>> seen;
2367 int num_redundant = 0;
2368 int num_fixed = 0;
2369 for (LiteralIndex a_index(0); a_index < implications_.size(); ++a_index) {
2370 if (trail_->Assignment().LiteralIsAssigned(Literal(a_index))) {
2371 ++num_fixed;
2372 if (!implications_[a_index].empty()) {
2373 LOG(ERROR) << "Fixed literal has non-cleared implications";
2374 return false;
2375 }
2376 continue;
2377 }
2378 if (is_removed_[a_index]) {
2379 if (!implications_[a_index].empty()) {
2380 LOG(ERROR) << "Removed literal has non-cleared implications";
2381 return false;
2382 }
2383 continue;
2384 }
2385 if (is_redundant_[a_index]) {
2386 ++num_redundant;
2387 if (implications_[a_index].size() != 1) {
2388 LOG(ERROR)
2389 << "Redundant literal should only point to its representative "
2390 << Literal(a_index) << " => " << implications_[a_index];
2391 return false;
2392 }
2393 }
2394 for (const Literal b : implications_[a_index]) {
2395 seen.insert({a_index, b.Index()});
2396 }
2397 }
2398
2399 // Check that reverse topo order is correct.
2400 util_intops::StrongVector<LiteralIndex, int> lit_to_order;
2401 if (is_dag_) {
2402 lit_to_order.assign(implications_.size(), -1);
2403 for (int i = 0; i < reverse_topological_order_.size(); ++i) {
2404 lit_to_order[reverse_topological_order_[i]] = i;
2405 }
2406 }
2407
2408 VLOG(2) << "num_redundant " << num_redundant;
2409 VLOG(2) << "num_fixed " << num_fixed;
2410 for (LiteralIndex a_index(0); a_index < implications_.size(); ++a_index) {
2411 const LiteralIndex not_a_index = Literal(a_index).NegatedIndex();
2412 for (const Literal b : implications_[a_index]) {
2413 if (is_removed_[b]) {
2414 LOG(ERROR) << "A removed literal still appear! " << Literal(a_index)
2415 << " => " << b;
2416 return false;
2417 }
2418
2419 if (!seen.contains({b.NegatedIndex(), not_a_index})) {
2420 LOG(ERROR) << "We have " << Literal(a_index) << " => " << b
2421 << " but not the reverse implication!";
2422 LOG(ERROR) << "redundant[a]: " << is_redundant_[a_index]
2423 << " assigned[a]: "
2424 << trail_->Assignment().LiteralIsAssigned(Literal(a_index))
2425 << " removed[a]: " << is_removed_[a_index]
2426 << " redundant[b]: " << is_redundant_[b] << " assigned[b]: "
2427 << trail_->Assignment().LiteralIsAssigned(b)
2428 << " removed[b]: " << is_removed_[b];
2429
2430 return false;
2431 }
2432
2433 // Test that if we have a dag, our topo order is correct.
2434 if (is_dag_ && !is_redundant_[b] && !is_redundant_[a_index]) {
2435 DCHECK_NE(lit_to_order[b], -1);
2436 DCHECK_LE(lit_to_order[b], lit_to_order[a_index]);
2437 }
2438 }
2439 }
2440
2441 // Check the at-most ones.
2442 absl::flat_hash_set<std::pair<LiteralIndex, int>> lit_to_start;
2443 for (LiteralIndex i(0); i < at_most_ones_.size(); ++i) {
2444 for (const int start : at_most_ones_[i]) {
2445 lit_to_start.insert({i, start});
2446 }
2447 }
2448
2449 for (int start = 0; start < at_most_one_buffer_.size();) {
2450 const absl::Span<const Literal> amo = AtMostOne(start);
2451 for (const Literal l : amo) {
2452 if (is_removed_[l]) {
2453 LOG(ERROR) << "A removed literal still appear in an amo " << l;
2454 return false;
2455 }
2456 if (!lit_to_start.contains({l, start})) {
2457 return false;
2458 }
2459 }
2460 start += amo.size() + 1;
2461 }
2462
2463 return true;
2464}
2465
2466absl::Span<const Literal> BinaryImplicationGraph::NextAtMostOne() {
2467 if (at_most_one_iterator_ >= at_most_one_buffer_.size()) {
2468 return absl::Span<const Literal>();
2469 }
2470
2471 const absl::Span<const Literal> result = AtMostOne(at_most_one_iterator_);
2472 at_most_one_iterator_ += result.size() + 1;
2473 return result;
2474}
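// Editor's note: a usage sketch, not part of the original file. A consumer
// drains all stored at-most-ones with the empty span as end marker:
//
//   for (absl::Span<const Literal> amo = graph->NextAtMostOne(); !amo.empty();
//        amo = graph->NextAtMostOne()) {
//     // Export or re-encode the constraint sum(amo) <= 1.
//   }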
2475
2476// ----- SatClause -----
2477
2478// static
2479SatClause* SatClause::Create(absl::Span<const Literal> literals) {
2480 DCHECK_GE(literals.size(), 2);
2481 SatClause* clause = reinterpret_cast<SatClause*>(
2482 ::operator new(sizeof(SatClause) + literals.size() * sizeof(Literal)));
2483 clause->size_ = literals.size();
2484 for (int i = 0; i < literals.size(); ++i) {
2485 clause->literals_[i] = literals[i];
2486 }
2487 return clause;
2488}
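// Editor's note: an implementation note, not part of the original file. The
// single over-sized allocation above places the literals directly after the
// SatClause header, in the style of a flexible array member: sizeof(SatClause)
// bytes for the fixed fields plus literals.size() * sizeof(Literal) bytes for
// the payload. A clause therefore occupies one contiguous block, which keeps
// the watcher propagation loops cache friendly; the trade-off is that clauses
// must be allocated through this raw ::operator new call rather than a plain
// new expression.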
2489
2490 // Note that for an attached clause, removing fixed literals is okay because
2491 // if any of the watched literals is assigned, then the clause is necessarily true.
2492bool SatClause::RemoveFixedLiteralsAndTestIfTrue(
2493 const VariablesAssignment& assignment) {
2494 DCHECK(!IsRemoved());
2495 if (assignment.VariableIsAssigned(literals_[0].Variable()) ||
2496 assignment.VariableIsAssigned(literals_[1].Variable())) {
2497 DCHECK(IsSatisfied(assignment));
2498 return true;
2499 }
2500 int j = 2;
2501 while (j < size_ && !assignment.VariableIsAssigned(literals_[j].Variable())) {
2502 ++j;
2503 }
2504 for (int i = j; i < size_; ++i) {
2505 if (assignment.VariableIsAssigned(literals_[i].Variable())) {
2506 if (assignment.LiteralIsTrue(literals_[i])) return true;
2507 } else {
2508 std::swap(literals_[j], literals_[i]);
2509 ++j;
2510 }
2511 }
2512 size_ = j;
2513 return false;
2514}
2515
2516bool SatClause::IsSatisfied(const VariablesAssignment& assignment) const {
2517 for (const Literal literal : *this) {
2518 if (assignment.LiteralIsTrue(literal)) return true;
2519 }
2520 return false;
2521}
2522
2523std::string SatClause::DebugString() const {
2524 std::string result;
2525 for (const Literal literal : *this) {
2526 if (!result.empty()) result.append(" ");
2527 result.append(literal.DebugString());
2528 }
2529 return result;
2530}
2531
2532} // namespace sat
2533} // namespace operations_research