Google OR-Tools v9.15
a fast and portable software suite for combinatorial optimization
Loading...
Searching...
No Matches
clause.cc
Go to the documentation of this file.
1// Copyright 2010-2025 Google LLC
2// Licensed under the Apache License, Version 2.0 (the "License");
3// you may not use this file except in compliance with the License.
4// You may obtain a copy of the License at
5//
6// http://www.apache.org/licenses/LICENSE-2.0
7//
8// Unless required by applicable law or agreed to in writing, software
9// distributed under the License is distributed on an "AS IS" BASIS,
10// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11// See the License for the specific language governing permissions and
12// limitations under the License.
13
14#include "ortools/sat/clause.h"
15
16#include <stddef.h>
17
18#include <algorithm>
19#include <cstdint>
20#include <deque>
21#include <optional>
22#include <queue>
23#include <stack>
24#include <string>
25#include <utility>
26#include <vector>
27
28#include "absl/algorithm/container.h"
29#include "absl/container/flat_hash_map.h"
30#include "absl/container/flat_hash_set.h"
31#include "absl/container/inlined_vector.h"
32#include "absl/functional/function_ref.h"
33#include "absl/log/check.h"
34#include "absl/log/log.h"
35#include "absl/log/vlog_is_on.h"
36#include "absl/random/distributions.h"
37#include "absl/types/span.h"
42#include "ortools/base/timer.h"
47#include "ortools/sat/model.h"
49#include "ortools/sat/util.h"
50#include "ortools/util/bitset.h"
51#include "ortools/util/stats.h"
54
55namespace operations_research {
56namespace sat {
57
58namespace {
59
60// Returns true if the given watcher list contains the given clause.
61template <typename Watcher>
62bool WatcherListContains(const std::vector<Watcher>& list,
63 const SatClause& candidate) {
64 for (const Watcher& watcher : list) {
65 if (watcher.clause == &candidate) return true;
66 }
67 return false;
68}
69
70bool WatchersAreValid(const Trail& trail, absl::Span<const Literal> literals) {
71 int min_watcher_level = trail.CurrentDecisionLevel();
72 for (Literal w : literals.subspan(0, 2)) {
73 if (trail.Assignment().LiteralIsAssigned(w)) {
74 min_watcher_level = std::min(min_watcher_level, trail.AssignmentLevel(w));
75 }
76 }
77 for (Literal l : literals.subspan(2)) {
78 if (trail.Assignment().LiteralIsFalse(l) &&
79 trail.AssignmentLevel(l) > min_watcher_level) {
80 return false;
81 }
82 }
83 return true;
84}
85
86bool ClauseIsSatisfiedAtRoot(const Trail& trail,
87 absl::Span<const Literal> literals) {
88 return absl::c_any_of(literals, [&](Literal l) {
89 return trail.Assignment().LiteralIsTrue(l) && trail.AssignmentLevel(l) == 0;
90 });
91}
92
93bool LiteralsAreFixedAtRoot(const Trail& trail,
94 absl::Span<const Literal> literals) {
95 return absl::c_any_of(literals, [&](Literal l) {
96 return trail.Assignment().LiteralIsAssigned(l) &&
97 trail.AssignmentLevel(l) == 0;
98 });
99}
100
101} // namespace
102
// ----- ClauseManager -----

// Constructor. NOTE(review): the signature line (doc line 105) is missing
// from this extract — presumably `ClauseManager::ClauseManager(Model* model)`;
// confirm against clause.h. The initializer list fetches the model singletons
// this propagator needs, then the body registers the propagator on the trail
// and sizes the per-source deletion counters.
    : SatPropagator("ClauseManager"),
      clause_id_generator_(model->GetOrCreate<ClauseIdGenerator>()),
      parameters_(*model->GetOrCreate<SatParameters>()),
      assignment_(model->GetOrCreate<Trail>()->Assignment()),
      implication_graph_(model->GetOrCreate<BinaryImplicationGraph>()),
      trail_(model->GetOrCreate<Trail>()),
      num_inspected_clauses_(0),
      num_inspected_clause_literals_(0),
      num_watched_clauses_(0),
      stats_("ClauseManager"),
      lrat_proof_handler_(model->Mutable<LratProofHandler>()) {
  trail_->RegisterPropagator(this);
  // One counter slot per DeletionSourceForStat enumerator.
  deletion_counters_.resize(
      static_cast<int>(DeletionSourceForStat::LastElement));
}
121
// Destructor body. NOTE(review): the `ClauseManager::~ClauseManager() {` line
// (doc line 122) is missing from this extract — confirm against the original.
// The manager owns the SatClause objects in clauses_; delete them all and
// dump timing statistics when stats are compiled in.
  gtl::STLDeleteElements(&clauses_);
  IF_STATS_ENABLED(LOG(INFO) << stats_.StatString());
}
126
127void ClauseManager::Resize(int num_variables) {
128 watchers_on_false_.resize(num_variables << 1);
129 reasons_.resize(num_variables);
130 needs_cleaning_.Resize(LiteralIndex(num_variables << 1));
131}
132
133// Note that this is the only place where we add Watcher so the DCHECK
134// guarantees that there are no duplicates.
135void ClauseManager::AttachOnFalse(Literal literal, Literal blocking_literal,
136 SatClause* clause) {
137 SCOPED_TIME_STAT(&stats_);
138 DCHECK(!WatcherListContains(watchers_on_false_[literal], *clause));
139 watchers_on_false_[literal].push_back(Watcher(clause, blocking_literal));
140}
141
// Clause propagation via the 2-watched-literal scheme.
// NOTE(review): two lines are missing from this extract: the signature (doc
// line 142, presumably `bool ClauseManager::Propagate(Trail* trail) {`) and
// doc line 144 which declares the `helper` object used throughout — confirm
// both against the original file.
  SCOPED_TIME_STAT(&stats_);

  // Process pending trail entries as long as no other propagator enqueued a
  // new literal (trail->Index() unchanged).
  const int old_index = trail->Index();
  while (trail->Index() == old_index && propagation_trail_index_ < old_index) {
    const Literal false_literal =
        (*trail)[propagation_trail_index_++].Negated();
    std::vector<Watcher>& watchers = watchers_on_false_[false_literal];

    // Note(user): It sounds better to inspect the list in order, this is
    // because small clauses like binary or ternary clauses will often propagate
    // and thus stay at the beginning of the list.
    auto new_it = watchers.begin();
    const auto end = watchers.end();
    while (new_it != end && helper.LiteralIsTrue(new_it->blocking_literal)) {
      ++new_it;
    }
    for (auto it = new_it; it != end; ++it) {
      // Don't even look at the clause memory if the blocking literal is true.
      if (helper.LiteralIsTrue(it->blocking_literal)) {
        *new_it++ = *it;
        continue;
      }
      ++num_inspected_clauses_;

      const int size = it->clause->size();
      // If this clause has been lazily detached, skip and remove the watcher.
      if (size == 0) continue;

      // If the other watched literal is true, just change the blocking literal.
      // Note that we use the fact that the first two literals of the clause are
      // the ones currently watched.
      Literal* literals = it->clause->literals();
      // XOR trick: literals[0] ^ literals[1] ^ false_literal yields the
      // watched literal that is not false_literal.
      const Literal other_watched_literal(LiteralIndex(
          literals[0].Index().value() ^ literals[1].Index().value() ^
          false_literal.Index().value()));
      if (helper.LiteralIsTrue(other_watched_literal)) {
        *new_it = *it;
        new_it->blocking_literal = other_watched_literal;
        ++new_it;
        ++num_inspected_clause_literals_;
        continue;
      }

      // Look for another literal to watch. We go through the list in a cyclic
      // fashion from start. The first two literals can be ignored as they are
      // the watched ones.
      {
        const int start = it->start_index;
        DCHECK_GE(start, 2);

        int i = start;
        while (i < size && helper.LiteralIsFalse(literals[i])) ++i;
        num_inspected_clause_literals_ += i - start + 2;
        if (i >= size) {
          i = 2;
          while (i < start && helper.LiteralIsFalse(literals[i])) ++i;
          num_inspected_clause_literals_ += i - 2;
          if (i >= start) i = size;
        }
        if (i < size) {
          // literal[i] is unassigned or true, it's now the new literal to
          // watch. Note that by convention, we always keep the two watched
          // literals at the beginning of the clause.
          literals[0] = other_watched_literal;
          literals[1] = literals[i];
          literals[i] = false_literal;
          watchers_on_false_[literals[1]].emplace_back(
              it->clause, other_watched_literal, i + 1);
          continue;
        }
      }

      // At this point other_watched_literal is either false or unassigned, all
      // other literals are false.
      if (helper.LiteralIsFalse(other_watched_literal)) {
        // Conflict: All literals of it->clause are false.
        //
        // Note(user): we could avoid a copy here, but the conflict analysis
        // complexity will be a lot higher than this anyway.
        trail->MutableConflict()->assign(it->clause->begin(),
                                         it->clause->end());
        trail->SetFailingSatClause(it->clause);
        num_inspected_clause_literals_ += it - watchers.begin() + 1;
        watchers.erase(new_it, it);
        return false;
      } else {
        // Propagation: other_watched_literal is unassigned, set it to true and
        // put it at position 0. Note that the position 0 is important because
        // we will need later to recover the literal that was propagated from
        // the clause using this convention.
        literals[0] = other_watched_literal;
        literals[1] = false_literal;

        int propagation_level = trail->CurrentDecisionLevel();
        // NOTE(review): doc line 238 is missing here; it presumably opens a
        // conditional block (closed by the second closing brace below) under
        // which the exact assignment level of the propagation is computed as
        // the max level over the false literals — confirm against the
        // original file.
          const int size = it->clause->size();
          propagation_level = trail->AssignmentLevel(false_literal);
          for (int i = 2; i < size; ++i) {
            propagation_level = std::max<int>(
                propagation_level, trail->AssignmentLevel(literals[i]));
          }
        }

        if (propagation_level == 0) {
          // A level-zero propagation is a new unit clause; derive its LRAT
          // justification from the unit ids of the other (fixed) literals.
          if (lrat_proof_handler_ != nullptr) {
            std::vector<ClauseId>& unit_ids = clause_ids_scratchpad_;
            unit_ids.clear();
            const int size = it->clause->size();
            for (int i = 1; i < size; ++i) {
              unit_ids.push_back(
                  trail_->GetUnitClauseId(literals[i].Variable()));
            }
            unit_ids.push_back(GetClauseId(it->clause));
            const ClauseId new_clause_id = clause_id_generator_->GetNextId();
            lrat_proof_handler_->AddInferredClause(
                new_clause_id, {other_watched_literal}, unit_ids);
            helper.EnqueueWithUnitReason(other_watched_literal, new_clause_id);
          } else {
            trail_->EnqueueWithUnitReason(other_watched_literal);
          }
        } else {
          // Record the clause as the reason of the enqueued literal.
          reasons_[trail->Index()] = it->clause;
          helper.EnqueueAtLevel(other_watched_literal, propagation_level);
        }
        *new_it++ = *it;
      }
    }
    num_inspected_clause_literals_ += watchers.size();  // The blocking ones.
    watchers.erase(new_it, end);
  }
  return true;
}
276
277absl::Span<const Literal> ClauseManager::Reason(const Trail& /*trail*/,
278 int trail_index,
279 int64_t /*conflict_id*/) const {
280 return reasons_[trail_index]->PropagationReason();
281}
282
283void ClauseManager::Reimply(Trail* trail, int old_trail_index) {
284 const Literal literal = (*trail)[old_trail_index];
285 const int level = trail->AssignmentLevel(literal);
286 CHECK_LE(trail->Index(), old_trail_index);
287 reasons_[trail->Index()] = reasons_[old_trail_index];
288 DCHECK(absl::c_all_of(
289 reasons_[trail->Index()]->PropagationReason(),
290 [&](Literal l) { return trail->AssignmentLevel(l) <= level; }));
291 DCHECK_EQ(reasons_[trail->Index()]->FirstLiteral(), literal);
292 trail->EnqueueAtLevel(literal, propagator_id_, level);
293}
294
295SatClause* ClauseManager::ReasonClause(int trail_index) const {
296 return reasons_[trail_index];
297}
298
299SatClause* ClauseManager::ReasonClauseOrNull(BooleanVariable var) const {
300 if (!trail_->Assignment().VariableIsAssigned(var)) return nullptr;
301 if (trail_->AssignmentType(var) != propagator_id_) return nullptr;
302 SatClause* result = reasons_[trail_->Info(var).trail_index];
303 DCHECK(result != nullptr) << trail_->Info(var).DebugString();
304
305 // Tricky: In some corner case, that clause was subsumed, so we don't want
306 // to check it nor use it.
307 if (result->size() == 0) return nullptr;
308 DCHECK_EQ(trail_->Reason(var), result->PropagationReason());
309 return result;
310}
311
// NOTE(review): the signature line (doc line 312) is missing from this
// extract; from the call site in InprocessingRewriteClause this is presumably
// `bool ClauseManager::ClauseIsUsedAsReason(SatClause* clause) const` —
// confirm against clause.h. Returns true iff `clause` is the currently
// registered reason of the literal it propagated.
  DCHECK(clause != nullptr);
  if (clause->empty()) return false;
  return clause == ReasonClauseOrNull(clause->PropagatedLiteral().Variable());
}
317
318bool ClauseManager::AddClause(absl::Span<const Literal> literals) {
319 return AddClause(kNoClauseId, literals, trail_, -1);
320}
321
322bool ClauseManager::AddClause(ClauseId id, absl::Span<const Literal> literals,
323 Trail* trail, int lbd) {
324 SatClause* clause = SatClause::Create(literals);
325 clauses_.push_back(clause);
326 if (id != kNoClauseId) {
327 clause_id_[clause] = id;
328 }
329 if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, id, literals);
330 return AttachAndPropagate(clause, trail);
331}
332
// NOTE(review): the first signature line (doc line 333) is missing from this
// extract; presumably `SatClause* ClauseManager::AddRemovableClause(ClauseId
// id,` — confirm against clause.h. Same as AddClause() but the clause is
// registered in clauses_info_ so it can later be deleted by the cleanup
// policy; attachment must succeed (CHECK).
                                              absl::Span<const Literal> literals,
                                              Trail* trail, int lbd) {
  SatClause* clause = SatClause::Create(literals);
  clauses_.push_back(clause);
  if (id != kNoClauseId) {
    clause_id_[clause] = id;
  }
  if (add_clause_callback_ != nullptr) add_clause_callback_(lbd, id, literals);
  CHECK(AttachAndPropagate(clause, trail));

  // Create an entry in clauses_info_ to mark that clause as removable.
  clauses_info_[clause].lbd = lbd;
  return clause;
}
348
349// Sets up the 2-watchers data structure. It selects two non-false literals
350// and attaches the clause to the event: one of the watched literals become
351// false. It returns false if the clause only contains literals assigned to
352// false. If only one literals is not false, it propagates it to true if it
353// is not already assigned.
354bool ClauseManager::AttachAndPropagate(SatClause* clause, Trail* trail) {
355 SCOPED_TIME_STAT(&stats_);
356
357 const int size = clause->size();
358 Literal* literals = clause->literals();
359
360 // Select the first two literals that are not assigned to false and put them
361 // on position 0 and 1.
362 int num_literal_not_false = 0;
363 for (int i = 0; i < size; ++i) {
364 if (!trail->Assignment().LiteralIsFalse(literals[i])) {
365 std::swap(literals[i], literals[num_literal_not_false]);
366 ++num_literal_not_false;
367 if (num_literal_not_false == 2) {
368 break;
369 }
370 }
371 }
372
373 // Returns false if all the literals were false.
374 // This should only happen on an UNSAT problem, and there is no need to attach
375 // the clause in this case.
376 if (num_literal_not_false == 0) return false;
377
378 if (num_literal_not_false == 1) {
379 // To maintain the validity of the 2-watcher algorithm, we need to watch
380 // the false literal with the highest decision level.
381 int max_level = trail->AssignmentLevel(literals[1]);
382 for (int i = 2; i < size; ++i) {
383 const int level = trail->AssignmentLevel(literals[i]);
384 if (level > max_level) {
385 max_level = level;
386 std::swap(literals[1], literals[i]);
387 }
388 }
389
390 // Propagates literals[0] if it is unassigned.
391 if (!trail->Assignment().LiteralIsTrue(literals[0])) {
392 DCHECK(absl::c_all_of(clause->PropagationReason(), [&](Literal l) {
393 return trail->AssignmentLevel(l) <= max_level &&
394 trail->Assignment().LiteralIsFalse(l);
395 }));
396 reasons_[trail->Index()] = clause;
397 trail->EnqueueAtLevel(literals[0], propagator_id_, max_level);
398 }
399 }
400
401 ++num_watched_clauses_;
402 AttachOnFalse(literals[0], literals[1], clause);
403 AttachOnFalse(literals[1], literals[0], clause);
404 return true;
405}
406
407void ClauseManager::Attach(SatClause* clause, Trail* trail) {
408 Literal* literals = clause->literals();
409 DCHECK(!trail->Assignment().LiteralIsAssigned(literals[0]));
410 DCHECK(!trail->Assignment().LiteralIsAssigned(literals[1]));
411
412 ++num_watched_clauses_;
413 AttachOnFalse(literals[0], literals[1], clause);
414 AttachOnFalse(literals[1], literals[0], clause);
415}
416
417void ClauseManager::InternalDetach(SatClause* clause,
418 DeletionSourceForStat source) {
419 // Double-deletion.
420 // TODO(user): change that to a check?
421 if (clause->size() == 0) return;
422
423 --num_watched_clauses_;
424 if (lrat_proof_handler_ != nullptr) {
425 const auto it = clause_id_.find(clause);
426 if (it != clause_id_.end()) {
427 lrat_proof_handler_->DeleteClause(it->second, clause->AsSpan());
428 clause_id_.erase(it);
429 }
430 }
431 deletion_counters_[static_cast<int>(source)]++;
432 clauses_info_.erase(clause);
433 clause->Clear();
434}
435
// NOTE(review): the first signature line (doc line 436) is missing from this
// extract; presumably `void ClauseManager::LazyDetach(SatClause* clause,` —
// confirm against clause.h. Clears the clause and, when watchers are
// attached, flags both watched literals so CleanUpWatchers() later removes
// the stale watchers.
                               DeletionSourceForStat source) {
  InternalDetach(clause, source);
  if (all_clauses_are_attached_) {
    is_clean_ = false;
    needs_cleaning_.Set(clause->FirstLiteral());
    needs_cleaning_.Set(clause->SecondLiteral());
  }
}
445
// NOTE(review): the signature line (doc line 446) is missing from this
// extract; presumably `void ClauseManager::DetachAllClauses() {` — confirm.
// Detaches everything at once by dropping the watcher lists entirely.
  if (!all_clauses_are_attached_) return;
  all_clauses_are_attached_ = false;

  // This is easy, and this allows to reset memory if some watcher lists where
  // really long at some point.
  is_clean_ = true;
  num_watched_clauses_ = 0;
  watchers_on_false_.clear();
}
456
// NOTE(review): the signature line (doc line 457) is missing from this
// extract; presumably `void ClauseManager::AttachAllClauses() {`. Doc lines
// 463-464 (between the resize and the loop) are also missing — confirm
// against the original file. Rebuilds all watcher lists from clauses_.
  if (all_clauses_are_attached_) return;
  all_clauses_are_attached_ = true;

  needs_cleaning_.ResetAllToFalse();  // This doesn't resize it.
  watchers_on_false_.resize(needs_cleaning_.size().value());

  for (SatClause* clause : clauses_) {
    ++num_watched_clauses_;
    DCHECK_GE(clause->size(), 2);
    AttachOnFalse(clause->FirstLiteral(), clause->SecondLiteral(), clause);
    AttachOnFalse(clause->SecondLiteral(), clause->FirstLiteral(), clause);
  }
}
472
473bool ClauseManager::InprocessingAddUnitClause(ClauseId unit_clause_id,
474 Literal true_literal) {
475 DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
476 if (trail_->Assignment().LiteralIsTrue(true_literal)) return true;
477 if (trail_->Assignment().LiteralIsFalse(true_literal)) {
478 if (lrat_proof_handler_ != nullptr) {
479 lrat_proof_handler_->AddInferredClause(
480 clause_id_generator_->GetNextId(), {},
481 {unit_clause_id, trail_->GetUnitClauseId(true_literal.Variable())});
482 }
483 return false;
484 }
485
486 trail_->EnqueueWithUnitReason(unit_clause_id, true_literal);
487
488 // Even when all clauses are detached, we can propagate the implication
489 // graph and we do that right away.
490 return implication_graph_->Propagate(trail_);
491}
492
// NOTE(review): the signature line (doc line 493) is missing from this
// extract; given the parameters it is presumably
// `bool ClauseManager::InprocessingFixLiteral(` — confirm against clause.h.
// Fixes the literal at the root, forwarding the LRAT antecedents.
    Literal true_literal, absl::Span<const ClauseId> clause_ids) {
  return implication_graph_->FixLiteral(true_literal, clause_ids);
}
497
498void ClauseManager::ChangeLbdIfBetter(SatClause* clause, int new_lbd) {
499 auto it = clauses_info_.find(clause);
500 if (it == clauses_info_.end()) return;
501
502 // Always take the min.
503 if (new_lbd > it->second.lbd) return;
504
505 ++num_lbd_promotions_;
506 if (new_lbd <= parameters_.clause_cleanup_lbd_bound()) {
507 // We keep the clause forever.
508 clauses_info_.erase(it);
509 } else {
510 it->second.lbd = new_lbd;
511 }
512}
513
// NOTE(review): the signature line (doc line 514) is missing from this
// extract, as is the statement after the comment below (doc line 517,
// presumably a detach of the now-satisfied clause) — confirm against the
// original file. Removes root-fixed literals from the clause; returns true
// iff the clause is satisfied at the root (and thus detached).
  if (clause->RemoveFixedLiteralsAndTestIfTrue(assignment_)) {
    // The clause is always true, detach it.
    return true;
  }

  // We should have dealt with unit and unsat clause before this.
  CHECK_GE(clause->size(), 2);
  ChangeLbdIfBetter(clause, clause->size());
  return false;
}
526
// NOTE(review): the signature line (doc line 527) is missing from this
// extract; presumably `bool ClauseManager::InprocessingRewriteClause(` —
// confirm against clause.h. Rewrites `clause` into `new_clause` (with LRAT
// antecedents `clause_ids`), dispatching to the trail for units and to the
// implication graph for binaries. Returns false iff the problem is UNSAT.
    SatClause* clause, absl::Span<const Literal> new_clause,
    absl::Span<const ClauseId> clause_ids) {
  ClauseId new_clause_id = kNoClauseId;
  if (lrat_proof_handler_ != nullptr) {
    new_clause_id = clause_id_generator_->GetNextId();
    lrat_proof_handler_->AddInferredClause(new_clause_id, new_clause,
                                           clause_ids);
  }
  const bool is_reason = ClauseIsUsedAsReason(clause);

  // A reason clause must keep its propagated literal in position 0.
  CHECK(!is_reason || new_clause[0] == clause->PropagatedLiteral())
      << new_clause << " old " << clause->AsSpan();

  if (new_clause.empty()) return false;  // UNSAT.

  if (new_clause.size() == 1) {
    if (!InprocessingAddUnitClause(new_clause_id, new_clause[0])) return false;
    // NOTE(review): doc line 545 is missing here (presumably the detach of
    // the old, now-subsumed clause) — confirm against the original file.
    return true;
  }

  DCHECK(WatchersAreValid(*trail_, new_clause));
  DCHECK(!ClauseIsSatisfiedAtRoot(*trail_, new_clause));
  DCHECK(!LiteralsAreFixedAtRoot(*trail_, new_clause));

  if (new_clause.size() == 2) {
    if (is_reason) {
      CHECK(implication_graph_->AddBinaryClauseAndChangeReason(
          new_clause_id, new_clause[0], new_clause[1]));
    } else {
      CHECK(implication_graph_->AddBinaryClause(new_clause_id, new_clause[0],
                                                new_clause[1]));
    }
    // NOTE(review): doc line 561 is missing here (presumably the detach of
    // the old clause) — confirm against the original file.
    return true;
  }

  if (lrat_proof_handler_ != nullptr) {
    const auto it = clause_id_.find(clause);
    if (it != clause_id_.end()) {
      lrat_proof_handler_->DeleteClause(it->second, clause->AsSpan());
    }
    SetClauseId(clause, new_clause_id);
  }

  if (all_clauses_are_attached_) {
    // We must eagerly detach the clause
    // TODO(user): If we were to create a totally new clause instead of
    // reusing the memory we could use LazyDelete. Investigate.
    clause->Clear();
    for (const Literal l : {clause->FirstLiteral(), clause->SecondLiteral()}) {
      needs_cleaning_.Clear(l);
      OpenSourceEraseIf(watchers_on_false_[l], [](const Watcher& watcher) {
        return watcher.clause->IsRemoved();
      });
    }
  }

  clause->Rewrite(new_clause);
  ChangeLbdIfBetter(clause, new_clause.size());

  // And we reattach it.
  if (all_clauses_are_attached_) {
    AttachOnFalse(new_clause[0], new_clause[1], clause);
    AttachOnFalse(new_clause[1], new_clause[0], clause);
  }

  // We need to change the reason now that the clause size changed because
  // we cache the span into the reason data directly.
  if (is_reason) {
    trail_->ChangeReason(trail_->Info(new_clause[0].Variable()).trail_index,
    // NOTE(review): doc line 599 (the remaining argument(s) of this
    // ChangeReason() call) is missing from this extract — confirm against
    // the original file.
  }

  return true;
}
604
// NOTE(review): the first signature line (doc line 605) is missing from this
// extract; presumably `SatClause* ClauseManager::InprocessingAddClause(` —
// confirm against clause.h. Adds a clause while all clauses are detached;
// units are fixed on the trail and binaries go to the implication graph (both
// returning nullptr); longer clauses are created but not attached.
    absl::Span<const Literal> new_clause) {
  DCHECK(!new_clause.empty());
  DCHECK(!all_clauses_are_attached_);
  if (DEBUG_MODE) {
    for (const Literal l : new_clause) {
      DCHECK(!trail_->Assignment().LiteralIsAssigned(l));
    }
  }

  if (new_clause.size() == 1) {
    // TODO(user): We should return false...
    if (!InprocessingFixLiteral(new_clause[0])) return nullptr;
    return nullptr;
  }

  if (new_clause.size() == 2) {
    implication_graph_->AddBinaryClause(new_clause[0], new_clause[1]);
    return nullptr;
  }

  SatClause* clause = SatClause::Create(new_clause);
  clauses_.push_back(clause);
  return clause;
}
630
// NOTE(review): the signature line (doc line 631) is missing from this
// extract; presumably `void ClauseManager::CleanUpWatchers() {` — confirm.
// Removes from each flagged watcher list the watchers whose clause has been
// lazily detached (cleared).
  SCOPED_TIME_STAT(&stats_);
  for (const LiteralIndex index : needs_cleaning_.PositionsSetAtLeastOnce()) {
    // A position can appear more than once in PositionsSetAtLeastOnce().
    if (!needs_cleaning_[index]) continue;
    OpenSourceEraseIf(watchers_on_false_[index], [](const Watcher& watcher) {
      return watcher.clause->IsRemoved();
    });
    needs_cleaning_.Clear(index);
  }
  needs_cleaning_.NotifyAllClear();
  is_clean_ = true;
}
643
// We also update to_minimize_index_/to_probe_index_ correctly.
//
// TODO(user): If more indices are needed, generalize the code to a vector of
// indices.
// NOTE(review): the signature line (doc line 648) is missing from this
// extract; presumably `void ClauseManager::DeleteRemovedClauses() {` —
// confirm against clause.h.
  if (!is_clean_) CleanUpWatchers();

  if (DEBUG_MODE) {
    // This help debug issues, as it is easier to check for nullptr rather than
    // detect a pointer that has been deleted.
    for (int i = 0; i < reasons_.size(); ++i) {
      if (reasons_[i] != nullptr && reasons_[i]->empty()) {
        reasons_[i] = nullptr;
      }
    }
  }

  // Compact clauses_ in place, deleting removed clauses and remapping the
  // round-robin indices onto the compacted positions.
  int new_size = 0;
  const int old_size = clauses_.size();
  for (int i = 0; i < old_size; ++i) {
    if (i == to_minimize_index_) to_minimize_index_ = new_size;
    if (i == to_first_minimize_index_) to_first_minimize_index_ = new_size;
    if (i == to_probe_index_) to_probe_index_ = new_size;
    if (clauses_[i]->IsRemoved()) {
      DCHECK(!clauses_info_.contains(clauses_[i]));
      delete clauses_[i];
    } else {
      clauses_[new_size++] = clauses_[i];
    }
  }
  clauses_.resize(new_size);

  // Clamp the indices in case they pointed past the last kept clause.
  if (to_minimize_index_ > new_size) to_minimize_index_ = new_size;
  if (to_first_minimize_index_ > new_size) to_first_minimize_index_ = new_size;
  if (to_probe_index_ > new_size) to_probe_index_ = new_size;
}
680
// NOTE(review): the signature line (doc line 681) is missing from this
// extract; from the indices used below this is presumably
// `SatClause* ClauseManager::NextNewClauseToMinimize() {` — confirm.
  // We want to return a clause that has never been minimized before, so we can
  // safely skip anything that has been returned or skipped by the round-robin
  // iterator.
  to_first_minimize_index_ =
      std::max(to_first_minimize_index_, to_minimize_index_);
  for (; to_first_minimize_index_ < clauses_.size();
       ++to_first_minimize_index_) {
    // If the round-robin is in-sync with the new clauses, we may as well
    // count this minimization as part of the round-robin and advance both
    // indexes.
    if (to_minimize_index_ == to_first_minimize_index_) ++to_minimize_index_;

    if (clauses_[to_first_minimize_index_]->IsRemoved()) continue;
    // Only non-removable (kept) clauses are candidates for minimization.
    if (!IsRemovable(clauses_[to_first_minimize_index_])) {
      return clauses_[to_first_minimize_index_++];
    }
  }
  return nullptr;
}
701
// NOTE(review): the signature line (doc line 702) is missing from this
// extract; presumably `SatClause* ClauseManager::NextClauseToMinimize() {` —
// confirm against clause.h. Round-robin over kept clauses, wrapping around
// once before giving up.
  const int old = to_first_minimize_index_;
  for (; to_minimize_index_ < clauses_.size(); ++to_minimize_index_) {
    if (clauses_[to_minimize_index_]->IsRemoved()) continue;
    if (!IsRemovable(clauses_[to_minimize_index_])) {
      return clauses_[to_minimize_index_++];
    }
  }

  // Lets reset and try once more to find one.
  to_minimize_index_ = 0;
  ++num_to_minimize_index_resets_;
  for (; to_minimize_index_ < old; ++to_minimize_index_) {
    if (clauses_[to_minimize_index_]->IsRemoved()) continue;
    if (!IsRemovable(clauses_[to_minimize_index_])) {
      return clauses_[to_minimize_index_++];
    }
  }

  return nullptr;
}
723
// NOTE(review): the signature line (doc line 724) is missing from this
// extract; presumably `SatClause* ClauseManager::NextClauseToProbe() {` —
// confirm. Returns the next live clause in probing order, or nullptr when
// the list is exhausted.
  for (; to_probe_index_ < clauses_.size(); ++to_probe_index_) {
    if (clauses_[to_probe_index_]->IsRemoved()) continue;
    return clauses_[to_probe_index_++];
  }
  return nullptr;
}
731
732ClauseId ClauseManager::ReasonClauseId(Literal literal) const {
733 const BooleanVariable var = literal.Variable();
734 DCHECK(trail_->Assignment().VariableIsAssigned(var));
735 const int assignment_type = trail_->AssignmentType(var);
736 const int trail_index = trail_->Info(var).trail_index;
737 if (assignment_type == AssignmentType::kCachedReason) {
738 return trail_->GetStoredReasonClauseId(var);
739 } else if (assignment_type == AssignmentType::kUnitReason) {
740 return trail_->GetUnitClauseId(var);
741 } else if (assignment_type == implication_graph_->PropagatorId()) {
742 absl::Span<const Literal> reason =
743 implication_graph_->Reason(*trail_, trail_index,
744 /*conflict_id=*/-1);
745 CHECK_EQ(reason.size(), 1);
746 return implication_graph_->GetClauseId(literal, reason[0]);
747 } else if (assignment_type == propagator_id_) {
748 const SatClause* reason = ReasonClause(trail_index);
749 if (reason != nullptr) {
750 return GetClauseId(reason);
751 }
752 }
753 return kNoClauseId;
754}
755
// NOTE(review): the signature line (doc line 756) is missing from this
// extract; presumably `void ClauseManager::AppendClausesFixing(` (or a
// similarly named method) — confirm against clause.h. Expands the reasons of
// `literals` back to decisions / root units, collecting the LRAT clause ids
// that justify them into `clause_ids`.
    absl::Span<const Literal> literals, std::vector<ClauseId>* clause_ids,
    LiteralIndex decision,
    std::optional<absl::FunctionRef<ClauseId(int, int)>> root_literals) {
  SCOPED_TIME_STAT(&stats_);
  const auto& assignment = trail_->Assignment();

  // Mark the literals whose reason must be expanded, and put them in a heap.
  tmp_mark_.ClearAndResize(BooleanVariable(trail_->NumVariables()));
  marked_trail_indices_heap_.clear();
  for (const Literal lit : literals) {
    CHECK(assignment.LiteralIsAssigned(lit));
    tmp_mark_.Set(lit.Variable());
    marked_trail_indices_heap_.push_back(
        trail_->Info(lit.Variable()).trail_index);
  }
  absl::c_make_heap(marked_trail_indices_heap_);
  const int current_level = trail_->CurrentDecisionLevel();

  // The min level of the expanded literals.
  int min_level = current_level;

  // Unit clauses must come first. We put them in clause_ids directly. We put
  // the others in non_unit_clause_ids and append them to clause_ids at the end.
  std::vector<ClauseId>& non_unit_clause_ids =
      tmp_clause_ids_for_append_clauses_fixing_;
  non_unit_clause_ids.clear();

  const auto& decisions = trail_->Decisions();
  // Process marked trail entries from most recent to oldest (max-heap).
  while (!marked_trail_indices_heap_.empty()) {
    absl::c_pop_heap(marked_trail_indices_heap_);
    const int trail_index = marked_trail_indices_heap_.back();
    marked_trail_indices_heap_.pop_back();
    const Literal marked_literal = (*trail_)[trail_index];

    // Stop at decisions, at literals fixed at root, and at literals implied by
    // the decision at their level.
    const int level = trail_->Info(marked_literal.Variable()).level;
    if (level > 0) min_level = std::min(min_level, level);
    if (trail_->AssignmentType(marked_literal.Variable()) ==
        // NOTE(review): doc line 796 is missing here (the right-hand side of
        // this comparison plus the opening brace; presumably the search
        // decision assignment type) — confirm against the original file.
      continue;
    }
    if (level == 0) {
      clause_ids->push_back(trail_->GetUnitClauseId(marked_literal.Variable()));
      continue;
    }
    const Literal level_decision = decisions[level - 1].literal;
    ClauseId clause_id = implication_graph_->GetClauseId(
        level_decision.Negated(), marked_literal);
    if (clause_id == kNoClauseId && root_literals.has_value()) {
      clause_id = (*root_literals)(level, trail_index);
    }
    if (clause_id != kNoClauseId) {
      non_unit_clause_ids.push_back(clause_id);
      continue;
    }

    // Mark all the literals of its reason.
    for (const Literal literal : trail_->Reason(marked_literal.Variable())) {
      const BooleanVariable var = literal.Variable();
      if (!tmp_mark_[var]) {
        const AssignmentInfo& info = trail_->Info(var);
        tmp_mark_.Set(var);
        if (info.level > 0) {
          marked_trail_indices_heap_.push_back(info.trail_index);
          absl::c_push_heap(marked_trail_indices_heap_);
        } else {
          clause_ids->push_back(trail_->GetUnitClauseId(var));
        }
      }
    }
    non_unit_clause_ids.push_back(ReasonClauseId(marked_literal));
  }

  if (decision != kNoLiteralIndex) {
    // Add the implication chain from `decision` to all the decisions found
    // during the expansion.
    if (Literal(decision) != decisions[current_level - 1].literal) {
      // If `decision` is not the last decision, it must directly imply it.
      clause_ids->push_back(implication_graph_->GetClauseId(
          Literal(decision).Negated(), decisions[current_level - 1].literal));
    }
    for (int level = current_level - 1; level >= min_level; --level) {
      clause_ids->push_back(implication_graph_->GetClauseId(
          decisions[level].literal.Negated(), decisions[level - 1].literal));
    }
  }

  // Non-unit ids were collected deepest-first; append them reversed so the
  // final order goes from oldest to newest.
  clause_ids->insert(clause_ids->end(), non_unit_clause_ids.rbegin(),
                     non_unit_clause_ids.rend());
}
848
849// ----- BinaryImplicationGraph -----
850
// Grows every per-literal structure to 2 * num_variables entries (one per
// literal polarity) and the per-variable reasons_ array.
void BinaryImplicationGraph::Resize(int num_variables) {
  SCOPED_TIME_STAT(&stats_);
  const int num_literals = 2 * num_variables;
  bfs_stack_.resize(num_literals);
  implications_and_amos_.resize(num_literals);
  implies_something_.resize(num_literals);
  might_have_dups_.resize(num_literals);
  is_redundant_.resize(num_literals);
  is_removed_.resize(num_literals, false);
  estimated_sizes_.resize(num_literals, 0);
  implied_by_.resize(num_literals);
  in_direct_implications_.resize(num_literals, false);
  // Indexed by variable, not by literal.
  reasons_.resize(num_variables);
}
865
866void BinaryImplicationGraph::NotifyPossibleDuplicate(Literal a) {
867 if (might_have_dups_[a.Index()]) return;
868 might_have_dups_[a.Index()] = true;
869 to_clean_.push_back(a);
870}
871
// Sorts the implication list of `a` and removes duplicates in place. When the
// list contains both x and not(x), `a` must be false, so not(a) is fixed at
// the root (with an LRAT derivation when proof logging is on). Returns false
// iff such a fixing proves the problem UNSAT.
bool BinaryImplicationGraph::CleanUpImplicationList(Literal a) {
  const absl::Span<Literal> range = implications_and_amos_[a].literals();
  if (range.empty()) return true;

  // Sorting by LiteralIndex makes x and not(x) adjacent.
  std::sort(range.begin(), range.end());

  // We detect and delete duplicate literals by hand to also detect l and not(l)
  // which should appear consecutively because of our LiteralIndex encoding.
  int new_size = 1;
  for (int i = 1; i < range.size(); ++i) {
    if (range[i] == range[i - 1]) continue;

    // We have a => x and not(x) so a must be false. Lets fix it if not already
    // done.
    //
    // Note that we leave both in the list to satisfy some invariant checks that
    // we always have both a => b and not(b) => not(a). This will be cleaned by
    // RemoveFixedVariables().
    if (range[i] == range[i - 1].Negated() &&
        !trail_->Assignment().LiteralIsFalse(a)) {
      if (lrat_proof_handler_ != nullptr) {
        // Justify not(a) by the two binary clauses (not(a) v x) and
        // (not(a) v not(x)).
        if (!FixLiteral(a.Negated(),
                        {GetClauseId(a.Negated(), range[i]),
                         GetClauseId(a.Negated(), range[i - 1])})) {
          return false;
        }
      } else {
        if (!FixLiteral(a.Negated())) return false;
      }
    }
    range[new_size++] = range[i];
  }
  implications_and_amos_[a].TruncateLiterals(new_size);
  return true;
}
907
// NOTE(review): the signature line (doc line 908) is missing from this
// extract (presumably `bool BinaryImplicationGraph::RemoveDuplicates() {` or
// similar), as are doc line 912 (the body of the early-exit `if`, presumably
// `return true;`) and doc line 925 (presumably a re-propagation call) —
// confirm all three against the original file.
  if (!Propagate(trail_)) return false;

  if (to_clean_.empty()) {
  }

  // This is a bit tricky: if we fix a variable, we can rewrite an at most one
  // that might become implications and require more cleanup...
  while (!to_clean_.empty()) {
    for (const Literal l : to_clean_) {
      might_have_dups_[l.Index()] = false;
      if (!CleanUpImplicationList(l)) return false;
    }
    to_clean_.clear();

    // This should be fast if nothing is fixed.
  }

  DCHECK(HasNoDuplicates());
  return true;
}
931
// To be used in DCHECK().
//
// The only potential source of duplicates should be AddBinaryClause() which
// does not check because of speed. This could happen if a clause gets
// simplified to a binary that actually already exists.
//
// TODO(user): Even that could probably be avoided.
// NOTE(review): the signature line (doc line 939) is missing from this
// extract; presumably `bool BinaryImplicationGraph::HasNoDuplicates() {` —
// confirm against the original file.
  tmp_bitset_.ClearAndResize(implications_and_amos_.end_index());
  for (const LiteralIndex l : implications_and_amos_.index_range()) {
    for (const Literal b : implications_and_amos_[l].literals()) {
      // A literal must not imply its own variable (either polarity).
      if (b.Variable() == Literal(l).Variable()) {
        return false;
      }
      // Seeing the same variable twice in one list means b appears twice, or
      // both b and not(b) appear: a duplicate either way.
      if (tmp_bitset_[Literal(b.Variable(), true)]) {
        return false;
      }
      tmp_bitset_.Set(Literal(b.Variable(), true));
    }
    tmp_bitset_.ResetAllToFalse();
  }
  return true;
}
955
// TODO(user): Not all of the solver knows about representative literals;
// should we use them here to maintain invariants? Explore this when we start
// cleaning our clauses using equivalence during search. We can easily do it
// for every conflict we learn instead of here.
//
// Adds the binary clause (a or b), i.e. not(a) => b and not(b) => a, after
// rewriting both literals to their equivalence-class representatives. Handles
// the degenerate cases (tautology, duplicated literal, one side already false
// at level zero) and emits the corresponding LRAT clauses when a proof
// handler is attached. Returns false on conflict.
bool BinaryImplicationGraph::AddBinaryClauseInternal(
    ClauseId id, Literal a, Literal b, bool change_reason,
    bool delete_non_representative_id) {
  SCOPED_TIME_STAT(&stats_);
  DCHECK_GE(a.Index(), 0);
  DCHECK_GE(b.Index(), 0);

  // Tricky: If this is the first clause, the propagator will be added and
  // assumed to be in a "propagated" state. This makes sure this is the case.
  if (no_constraint_ever_added_) propagation_trail_index_ = trail_->Index();
  no_constraint_ever_added_ = false;

  ClauseId rep_id = kNoClauseId;
  const Literal rep_a = RepresentativeOf(a);
  const Literal rep_b = RepresentativeOf(b);
  // (rep_a or not(rep_a)) is a tautology: nothing to add.
  if (rep_a == rep_b.Negated()) return true;

  if (lrat_proof_handler_ != nullptr) {
    // TODO(user): we could use a single LRAT clause instead of two if this
    // method was responsible for adding it to the LRAT proof handler (currently
    // is this done before calling this method).
    rep_id = id;
    if (rep_a != a || rep_b != b) {
      // Derive the canonical clause (rep_a or rep_b) from the input clause and
      // the equivalence clauses linking each literal to its representative.
      absl::InlinedVector<ClauseId, 3> clause_ids;
      if (rep_a != a) {
        clause_ids.push_back(GetClauseId(rep_a, a.Negated()));
      }
      if (rep_b != b) {
        clause_ids.push_back(GetClauseId(rep_b, b.Negated()));
      }
      clause_ids.push_back(id);
      rep_id = clause_id_generator_->GetNextId();
      lrat_proof_handler_->AddInferredClause(rep_id, {rep_a, rep_b},
                                             clause_ids);
      if (change_reason) {
        // Remember the non-canonical clause so we can delete it on restart.
        changed_reasons_on_trail_.emplace_back(std::minmax(a, b));
        AddClauseId(id, a, b);
      } else if (delete_non_representative_id) {
        lrat_proof_handler_->DeleteClause(id, {a, b});
      }
    }
    AddClauseId(rep_id, rep_a, rep_b);
  }
  if (change_reason) {
    // The caller asks us to become the reason for the already-assigned `a`
    // (with reason clause (a or b), i.e. not(b) => a).
    CHECK(trail_->Assignment().LiteralIsFalse(b));
    CHECK(trail_->Assignment().LiteralIsTrue(a));
    const int trail_index = trail_->Info(a.Variable()).trail_index;
    reasons_[trail_index] = b;
    trail_->ChangeReason(trail_index, propagator_id_);
  }

  // Deal with literal fixing and do not even add a binary clause in that case.
  if (rep_a == rep_b) {
    // The clause degenerates to the unit clause (rep_a).
    return FixLiteral(rep_a, {rep_id});
  } else if (trail_->CurrentDecisionLevel() == 0) {
    const auto& assignment = trail_->Assignment();

    // TODO(user): just make GetUnitClauseId() work all the time? for that
    // we need to make sure all level zero are pushed with kUnitReason.
    if (lrat_proof_handler_ != nullptr) {
      if (assignment.LiteralIsFalse(rep_a)) {
        return FixLiteral(rep_b,
                          {rep_id, trail_->GetUnitClauseId(rep_a.Variable())});
      } else if (assignment.LiteralIsFalse(rep_b)) {
        return FixLiteral(rep_a,
                          {rep_id, trail_->GetUnitClauseId(rep_b.Variable())});
      }
    } else {
      if (assignment.LiteralIsFalse(rep_a)) {
        return FixLiteral(rep_b, {});
      } else if (assignment.LiteralIsFalse(rep_b)) {
        return FixLiteral(rep_a, {});
      }
    }
  }

  // From here on we work with the canonical literals only.
  a = rep_a;
  b = rep_b;
  DCHECK(!is_removed_[a]);
  DCHECK(!is_removed_[b]);
  estimated_sizes_[a.NegatedIndex()]++;
  estimated_sizes_[b.NegatedIndex()]++;
  // Record both directions: not(a) => b and not(b) => a.
  implications_and_amos_[a.NegatedIndex()].PushBackLiteral(b);
  implications_and_amos_[b.NegatedIndex()].PushBackLiteral(a);
  implies_something_.Set(a.NegatedIndex());
  implies_something_.Set(b.NegatedIndex());
  NotifyPossibleDuplicate(a.Negated());
  NotifyPossibleDuplicate(b.Negated());
  // The graph changed; the SCC/DAG structure must be recomputed.
  is_dag_ = false;

  if (enable_sharing_ && add_binary_callback_ != nullptr) {
    add_binary_callback_(a, b);
  }

  // If one side is already false, either report a conflict (other side false
  // too) or propagate the other side at the falsifying level.
  const auto& assignment = trail_->Assignment();
  if (assignment.LiteralIsFalse(a)) {
    if (assignment.LiteralIsAssigned(b)) {
      if (assignment.LiteralIsFalse(b)) return false;
    } else {
      reasons_[trail_->Index()] = a;
      trail_->EnqueueAtLevel(b, propagator_id_, trail_->AssignmentLevel(a));
    }
  } else if (assignment.LiteralIsFalse(b)) {
    if (assignment.LiteralIsAssigned(a)) {
      if (assignment.LiteralIsFalse(a)) return false;
    } else {
      reasons_[trail_->Index()] = b;
      trail_->EnqueueAtLevel(a, propagator_id_, trail_->AssignmentLevel(b));
    }
  }

  return true;
}
1074
    absl::Span<const Literal> at_most_one) {
  DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
  // An at-most-one over zero or one literal is trivially satisfied.
  if (at_most_one.size() <= 1) return true;

  // Same as for AddBinaryClause().
  if (no_constraint_ever_added_) propagation_trail_index_ = trail_->Index();
  no_constraint_ever_added_ = false;

  // Temporarily copy the at_most_one constraint at the end of
  // at_most_one_buffer_. It will be cleaned up and added by
  // CleanUpAndAddAtMostOnes().
  //
  // Buffer encoding: a size header (the size packed into a Literal) followed
  // by the literals themselves.
  const int base_index = at_most_one_buffer_.size();
  at_most_one_buffer_.push_back(Literal(LiteralIndex(at_most_one.size())));
  at_most_one_buffer_.insert(at_most_one_buffer_.end(), at_most_one.begin(),
                             at_most_one.end());

  is_dag_ = false;
  return CleanUpAndAddAtMostOnes(base_index);
}
1095
// TODO(user): remove dupl with ClauseManager::InprocessingFixLiteral().
//
// Note that we currently do not support calling this at a positive level since
// we might lose the fixing on backtrack otherwise.
//
// Fixes `true_literal` at level zero, logging the unit clause (with the given
// LRAT antecedents) when a proof handler is attached, and propagates. Returns
// false on conflict.
    absl::Span<const ClauseId> clause_ids) {
  CHECK_EQ(trail_->CurrentDecisionLevel(), 0);
  // Already fixed to the right value: nothing to do.
  if (trail_->Assignment().LiteralIsTrue(true_literal)) return true;
  if (trail_->Assignment().LiteralIsFalse(true_literal)) {
    // Conflict with an existing level-zero assignment: emit the empty clause,
    // justified by the given antecedents plus the unit fixing the variable.
    if (lrat_proof_handler_ != nullptr) {
      std::vector<ClauseId> proof = {clause_ids.begin(), clause_ids.end()};
      proof.push_back(trail_->GetUnitClauseId(true_literal.Variable()));
      lrat_proof_handler_->AddInferredClause(clause_id_generator_->GetNextId(),
                                             {}, proof);
    }
    return false;
  }

  // Log the new unit clause {true_literal} before enqueueing it.
  ClauseId new_clause_id = kNoClauseId;
  if (lrat_proof_handler_ != nullptr) {
    new_clause_id = clause_id_generator_->GetNextId();
    lrat_proof_handler_->AddInferredClause(new_clause_id, {true_literal},
                                           clause_ids);
  }

  trail_->EnqueueWithUnitReason(new_clause_id, true_literal);
  return Propagate(trail_);
}
1124
// This works by doing a linear scan on the at_most_one_buffer_ and
// cleaning/copying the at most ones on the fly to the beginning of the same
// buffer.
//
// Returns false if a conflict is detected while fixing literals. Small
// constraints are expanded into pairwise implications instead of being
// indexed as at-most-ones.
bool BinaryImplicationGraph::CleanUpAndAddAtMostOnes(int base_index) {
  const VariablesAssignment& assignment = trail_->Assignment();
  int local_end = base_index;
  const int buffer_size = at_most_one_buffer_.size();
  for (int i = base_index; i < buffer_size;) {
    // Process a new at most one.
    // It will be copied into buffer[local_start + 1, local_end].
    // With its size at buffer[local_start].
    const int local_start = local_end;

    // Update the iterator now. Even if the current at_most_one is reduced away,
    // local_start will still point to the next one, or to the end of the
    // buffer.
    if (i == at_most_one_iterator_) {
      at_most_one_iterator_ = local_start;
    }

    // We have an at_most_one starting at i, and we increment i to the next one.
    const absl::Span<const Literal> initial_amo = AtMostOne(i);
    i += initial_amo.size() + 1;

    // Reserve space for size.
    local_end++;
    // First pass: drop false/removed literals, rewrite the rest to their
    // representatives, and detect a true literal (which forces every other
    // literal of the constraint to false).
    bool set_all_left_to_false = false;
    for (const Literal l : initial_amo) {
      if (assignment.LiteralIsFalse(l)) continue;
      if (is_removed_[l]) continue;
      if (!set_all_left_to_false && assignment.LiteralIsTrue(l)) {
        set_all_left_to_false = true;
        continue;
      }
      at_most_one_buffer_[local_end++] = RepresentativeOf(l);
    }
    at_most_one_buffer_[local_start] =
        Literal(LiteralIndex(local_end - local_start - 1));

    // Deal with duplicates.
    // Any duplicate in an "at most one" must be false.
    bool some_duplicates = false;
    BooleanVariable trivializer = kNoBooleanVariable;
    if (!set_all_left_to_false) {
      // Sort the copied amo.
      // We only want to expose a const AtMostOne() so we sort directly here.
      Literal* pt = &at_most_one_buffer_[local_start + 1];
      std::sort(pt, pt + AtMostOne(local_start).size());

      LiteralIndex previous = kNoLiteralIndex;
      bool remove_previous = false;
      int new_local_end = local_start + 1;
      for (const Literal l : AtMostOne(local_start)) {
        if (l.Index() == previous) {
          // l appears twice: at most one copy can be true, so l must be false.
          if (assignment.LiteralIsTrue(l)) return false;
          if (!assignment.LiteralIsFalse(l)) {
            if (!FixLiteral(l.Negated())) return false;
          }
          remove_previous = true;
          some_duplicates = true;
          continue;
        }

        if (trivializer == kNoBooleanVariable && l.NegatedIndex() == previous) {
          // We have (x, not(x), ...) in an at most one.
          // We will deal with that below because we want to deal with the
          // corner case of having many x and/or not(x).
          trivializer = l.Variable();
        }

        // We need to pay attention to triplet or more of equal elements, so
        // it is why we need this boolean and can't just remove it right away.
        if (remove_previous) {
          --new_local_end;
          remove_previous = false;
        }
        previous = l.Index();
        at_most_one_buffer_[new_local_end++] = l;
      }
      if (remove_previous) --new_local_end;

      // Update local end and the amo size.
      local_end = new_local_end;
      at_most_one_buffer_[local_start] =
          Literal(LiteralIndex(local_end - local_start - 1));
    }

    // If there was some duplicates, we need to rescan to see if a literal
    // didn't become true because its negation was appearing twice!
    if (some_duplicates) {
      int new_local_end = local_start + 1;
      for (const Literal l : AtMostOne(local_start)) {
        if (assignment.LiteralIsFalse(l)) continue;
        if (!set_all_left_to_false && assignment.LiteralIsTrue(l)) {
          set_all_left_to_false = true;
          continue;
        }
        at_most_one_buffer_[new_local_end++] = l;
      }

      // Update local end and the amo size.
      local_end = new_local_end;
      at_most_one_buffer_[local_start] =
          Literal(LiteralIndex(local_end - local_start - 1));
    }

    // Deal with all false.
    if (set_all_left_to_false || trivializer != kNoBooleanVariable) {
      for (const Literal l : AtMostOne(local_start)) {
        if (l.Variable() == trivializer) continue;
        if (assignment.LiteralIsFalse(l)) continue;
        if (assignment.LiteralIsTrue(l)) return false;
        if (!FixLiteral(l.Negated())) return false;
      }

      // Erase this at_most_one.
      local_end = local_start;
      continue;
    }

    // Create a Span<> to simplify the code below.
    const absl::Span<const Literal> at_most_one = AtMostOne(local_start);

    // We expand small sizes into implications.
    // TODO(user): Investigate what the best threshold is.
    if (at_most_one.size() <= std::max(2, at_most_one_max_expansion_size_)) {
      if (at_most_one.size() > 1) {
        // Pairwise expansion: for all a != b in the amo, a => not(b).
        for (const Literal a : at_most_one) {
          implies_something_.Set(a);
          NotifyPossibleDuplicate(a);
          for (const Literal b : at_most_one) {
            if (a == b) continue;
            implications_and_amos_[a].PushBackLiteral(b.Negated());
          }
        }
      }

      // This will erase the at_most_one from the buffer.
      local_end = local_start;
      continue;
    }

    // Index the new at most one.
    for (const Literal l : at_most_one) {
      DCHECK_LT(l.Index(), implications_and_amos_.size());
      DCHECK(!is_redundant_[l]);
      implications_and_amos_[l].InsertOffset(local_start);
      implies_something_.Set(l);
    }
  }

  // Shrink the buffer to the cleaned-up content.
  at_most_one_buffer_.resize(local_end);
  return true;
}
1279
  SCOPED_TIME_STAT(&stats_);

  if (IsEmpty()) {
    // NOTE(review): a statement was elided from this extract here (likely an
    // update of propagation_trail_index_) — confirm against the original file.
    return true;
  }
  // NOTE(review): the declaration of `helper` used below was elided from this
  // extract — confirm against the original file.

  // Cache views to avoid repeated indirection in the hot loop.
  const auto implies_something = implies_something_.view();
  auto* implications = implications_and_amos_.data();

  // Process every trail entry that has not been propagated yet.
  while (propagation_trail_index_ < trail->Index()) {
    const Literal true_literal = (*trail)[propagation_trail_index_++];
    DCHECK(helper.LiteralIsTrue(true_literal));
    if (!implies_something[true_literal]) continue;

    const int level = trail->AssignmentLevel(true_literal);

    // Note(user): This update is not exactly correct because in case of
    // conflict we don't inspect that much clauses. But doing ++num_inspections_
    // inside the loop does slow down the code by a few percent.
    const auto implied = implications[true_literal.Index().value()].literals();
    num_inspections_ += implied.size();
    for (const Literal literal : implied) {
      if (helper.LiteralIsTrue(literal)) {
        // Note(user): I tried to update the reason here if the literal was
        // enqueued after the true_literal on the trail. This property is
        // important for ComputeFirstUIPConflict() to work since it needs the
        // trail order to be a topological order for the deduction graph.
        // But the performance was not too good...
        continue;
      }

      ++num_propagations_;
      if (helper.LiteralIsFalse(literal)) {
        // Conflict.
        *(trail->MutableConflict()) = {true_literal.Negated(), literal};
        // At level zero, record the empty clause derivation for the proof.
        if (lrat_proof_handler_ != nullptr && level == 0 &&
            trail->AssignmentLevel(literal) == 0) {
          lrat_proof_handler_->AddInferredClause(
              clause_id_generator_->GetNextId(), {},
              {trail->GetUnitClauseId(true_literal.Variable()),
               GetClauseId(true_literal.Negated(), literal),
               trail->GetUnitClauseId(literal.Variable())});
        }
        return false;
      } else {
        // Propagation.
        reasons_[trail->Index()] = true_literal.Negated();
        if (level == 0 && lrat_proof_handler_ != nullptr) {
          // A level-zero propagation becomes a logged unit clause.
          const ClauseId new_clause_id = clause_id_generator_->GetNextId();
          lrat_proof_handler_->AddInferredClause(
              new_clause_id, {literal},
              {trail->GetUnitClauseId(true_literal.Variable()),
               GetClauseId(true_literal.Negated(), literal)});
          helper.EnqueueWithUnitReason(literal, new_clause_id);
        } else {
          helper.EnqueueAtLevel(literal, level);
        }
      }
    }

    // Propagate the at_most_one constraints.
    for (const int start :
         implications[true_literal.Index().value()].offsets()) {
      bool seen = false;
      for (const Literal literal : AtMostOne(start)) {
        ++num_inspections_;
        if (literal == true_literal) {
          if (DEBUG_MODE) {
            DCHECK(!seen);
            seen = true;
          }
          continue;
        }
        if (helper.LiteralIsFalse(literal)) continue;

        ++num_propagations_;
        if (helper.LiteralIsTrue(literal)) {
          // Conflict: two literals of the at-most-one are true.
          *(trail->MutableConflict()) = {true_literal.Negated(),
                                         literal.Negated()};
          // TODO(user): add LRAT unsat proof if level == 0.
          return false;
        } else {
          // Propagation: every other literal of the at-most-one is false.
          reasons_[trail->Index()] = true_literal.Negated();
          helper.EnqueueAtLevel(literal.Negated(), level);
        }
      }
    }
  }

  return true;
}
1376
1377absl::Span<const Literal> BinaryImplicationGraph::Reason(
1378 const Trail& /*trail*/, int trail_index, int64_t /*conflict_id*/) const {
1379 return {&reasons_[trail_index], 1};
1380}
1381
1382void BinaryImplicationGraph::Reimply(Trail* trail, int old_trail_index) {
1383 const Literal literal = (*trail)[old_trail_index];
1384 const int level = trail->AssignmentLevel(literal);
1385 reasons_[trail->Index()] = reasons_[old_trail_index];
1386 trail->EnqueueAtLevel(literal, propagator_id_, level);
1387}
1388
// Here, we remove all the literals whose negation are implied by the negation
// of the 1-UIP literal (which always appear first in the given conflict). Note
// that this algorithm is "optimal" in the sense that it leads to a minimized
// conflict with a backjump level as low as possible. However, not all possible
// literals are removed. We also mark (in the given SparseBitset) the reachable
// literals already assigned to false. These literals will be implied if the
// 1-UIP literal is assigned to false, and the classic minimization algorithm
// can take advantage of that.
    const Trail& trail, std::vector<Literal>* conflict,
    SparseBitset<BooleanVariable>* marked, std::vector<ClauseId>* clause_ids,
    bool also_use_decisions) {
  SCOPED_TIME_STAT(&stats_);
  DCHECK(!conflict->empty());
  is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));

  // Mark everything implied by the negation of the 1-UIP literal. The
  // fill_implied_by variant additionally records parent pointers needed to
  // later reconstruct LRAT implication chains.
  tmp_to_keep_.clear();
  tmp_to_keep_.push_back(conflict->front().Negated());
  if (lrat_proof_handler_ != nullptr) {
    MarkDescendants</*fill_implied_by=*/true>(conflict->front().Negated());
  } else {
    MarkDescendants(conflict->front().Negated());
  }

  // Because the decision cannot be removed from the conflict, we know they will
  // stay, so it is okay to see what they propagate in the binary implication
  // graph. Technically we could do that for any first literal of a decision
  // level. Improve?
  std::vector<std::pair<Literal, int>> decisions;
  if (also_use_decisions) {
    for (int i = 1; i < conflict->size(); ++i) {
      const auto& info = trail_->Info((*conflict)[i].Variable());
      if (info.type == AssignmentType::kSearchDecision) {
        decisions.push_back({(*conflict)[i].Negated(), info.level});
      }
    }
    // Process deeper decisions first.
    // NOTE(review): the lambda parameter type (pair<LiteralIndex, int>) does
    // not match the element type (pair<Literal, int>), so each comparison
    // converts through a temporary pair — consider matching the types.
    absl::c_stable_sort(decisions, [](const std::pair<LiteralIndex, int>& a,
                                      const std::pair<LiteralIndex, int>& b) {
      return a.second > b.second;
    });
  }

  // Keep marking everything propagated by the decisions, and make sure we
  // don't remove the one from which we called MarkDescendants().
  for (const auto [literal, unused_level] : decisions) {
    if (is_marked_[literal]) continue;
    tmp_to_keep_.push_back(literal);
    if (lrat_proof_handler_ != nullptr) {
      MarkDescendants</*fill_implied_by=*/true>(literal);
    } else {
      MarkDescendants(literal);
    }
  }

  for (const LiteralIndex i : is_marked_.PositionsSetAtLeastOnce()) {
    // TODO(user): if this is false, then we actually have a conflict of size 2.
    // This can only happen if the binary clause was not propagated properly
    // if for instance we do chronological backtracking without re-enqueuing the
    // consequence of a binary clause.
    if (trail.Assignment().LiteralIsTrue(Literal(i))) {
      marked->Set(Literal(i).Variable());
    }
  }

  // Remove all marked literals from the conflict.
  for (const Literal l : tmp_to_keep_) is_marked_.Clear(l);
  if (lrat_proof_handler_ != nullptr) {
    RemoveRedundantLiterals</*fill_clause_ids=*/true>(conflict, clause_ids);
  } else {
    RemoveRedundantLiterals(conflict);
  }
}
1461
1462template <bool fill_clause_ids>
1463void BinaryImplicationGraph::RemoveRedundantLiterals(
1464 std::vector<Literal>* conflict, std::vector<ClauseId>* clause_ids) {
1465 SCOPED_TIME_STAT(&stats_);
1466 int new_index = 1;
1467 if constexpr (fill_clause_ids) {
1468 clause_ids->clear();
1469 }
1470 for (int i = 1; i < conflict->size(); ++i) {
1471 const Literal literal = (*conflict)[i];
1472 LiteralIndex negated_index = literal.NegatedIndex();
1473 if (!is_marked_[negated_index]) {
1474 (*conflict)[new_index] = (*conflict)[i];
1475 ++new_index;
1476 } else if constexpr (fill_clause_ids) {
1477 AppendImplicationChain(literal, clause_ids);
1478 }
1479 }
1480 if (new_index < conflict->size()) {
1481 ++num_minimization_;
1482 num_literals_removed_ += conflict->size() - new_index;
1483 conflict->resize(new_index);
1484 }
1485}
1486
    absl::Span<const Literal> literals, std::vector<ClauseId>* clause_ids) {
  for (const Literal literal : literals) {
    if (trail_->Info(literal.Variable()).level == 0) {
      // Level-zero literal: its justification is the unit clause that fixed
      // its variable. Deduplicate with processed_unit_clauses_ so each unit
      // id is pushed at most once.
      const ClauseId clause_id = trail_->GetUnitClauseId(literal.Variable());
      DCHECK_NE(clause_id, kNoClauseId);
      if (!processed_unit_clauses_[literal]) {
        processed_unit_clauses_.Set(literal);
        clause_ids->push_back(clause_id);
      }
      continue;
    } else if (is_marked_[literal.NegatedIndex()]) {
      // Otherwise, if the negation was marked as implied, append the binary
      // implication chain justifying it.
      AppendImplicationChain(literal, clause_ids);
    }
  }
}
1503
// Appends to `clause_ids` the ids of the binary clauses forming the
// implication chain recorded in implied_by_ that leads to not(literal),
// ordered from the start of the chain to not(literal). Each visited entry of
// implied_by_ is reset to a self-loop so the same segment is never emitted
// twice across calls.
void BinaryImplicationGraph::AppendImplicationChain(
    Literal literal, std::vector<ClauseId>* clause_ids) {
  LiteralIndex negated_index = literal.NegatedIndex();
  const int initial_size = clause_ids->size();
  // A self-loop (implied_by_[x] == x) marks the root of a chain, or an
  // already-processed entry.
  while (implied_by_[negated_index] != Literal(negated_index)) {
    const ClauseId clause_id = GetClauseId(
        Literal(negated_index), implied_by_[negated_index].Negated());
    DCHECK_NE(clause_id, kNoClauseId);
    clause_ids->push_back(clause_id);
    const LiteralIndex next_negated_index = implied_by_[negated_index];
    // Make sure we don't process the same literal twice.
    implied_by_[negated_index] = Literal(negated_index);
    negated_index = next_negated_index;
  }
  // The chain was collected backwards; put it in forward order.
  std::reverse(clause_ids->begin() + initial_size, clause_ids->end());
}
1520
  SCOPED_TIME_STAT(&stats_);
  DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
  if (IsEmpty()) return;

  // Nothing to do if nothing changed since last call.
  const int new_num_fixed = trail_->Index();
  DCHECK_EQ(propagation_trail_index_, new_num_fixed);
  if (num_processed_fixed_variables_ == new_num_fixed) return;

  const VariablesAssignment& assignment = trail_->Assignment();
  is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
  // Process only the trail entries fixed since the last call.
  for (; num_processed_fixed_variables_ < new_num_fixed;
       ++num_processed_fixed_variables_) {
    const Literal true_literal = (*trail_)[num_processed_fixed_variables_];
    if (DEBUG_MODE) {
      // The code assumes that everything is already propagated.
      // Otherwise we will remove implications that didn't propagate yet!
      for (const Literal lit :
           implications_and_amos_[true_literal].literals()) {
        DCHECK(trail_->Assignment().LiteralIsTrue(lit));
      }
    }

    // If b is true and a -> b then because not b -> not a, all the
    // implications list that contains b will be marked by this process.
    // And the ones that contains not(b) should correspond to a false literal!
    //
    // TODO(user): This might not be true if we remove implication by
    // transitive reduction and the process was aborted due to the computation
    // limit. I think it will be good to maintain that invariant though,
    // otherwise fixed literals might never be removed from these lists...
    for (const Literal lit :
         implications_and_amos_[true_literal.NegatedIndex()].literals()) {
      if (lit.NegatedIndex() < representative_of_.size() &&
          representative_of_[lit.Negated()] != kNoLiteralIndex) {
        // We mark its representative instead.
        is_marked_.Set(representative_of_[lit.Negated()]);
      } else {
        is_marked_.Set(lit.NegatedIndex());
      }
    }
    // Both lists of a fixed variable are now useless; reclaim their memory.
    implications_and_amos_[true_literal].ClearAndReleaseMemory();
    implications_and_amos_[true_literal.NegatedIndex()].ClearAndReleaseMemory();
  }
  // Strip the now-true literals from every list marked above.
  for (const LiteralIndex i : is_marked_.PositionsSetAtLeastOnce()) {
    implications_and_amos_[i].RemoveLiteralsIf(
        [&assignment](const Literal lit) {
          return assignment.LiteralIsTrue(lit);
        });
  }

  // TODO(user): This might be a bit slow. Do not call all the time if needed,
  // this shouldn't change the correctness of the code.
  // Rebuild the at-most-one indexing from scratch.
  for (auto& v : implications_and_amos_) {
    v.ClearOffsets();
  }
  CHECK(CleanUpAndAddAtMostOnes(/*base_index=*/0));
  DCHECK(InvariantsAreOk());
}
1581
 public:

  // Adaptor exposing the implication graph (implications plus expanded
  // at-most-one constraints) as an adjacency-list graph for the SCC finder.
  // Also records, as a side effect of exploration, literals that must be
  // fixed and (optionally) a spanning forest for LRAT chain recovery.
  explicit SccGraph(
      SccFinder* finder,
          implications_and_offsets,
      std::vector<Literal>* at_most_one_buffer,
      std::vector<Literal>* to_fix, std::vector<Literal>* parent_of_to_fix)
      : finder_(*finder),
        implications_and_offsets_(*implications_and_offsets),
        at_most_one_buffer_(*at_most_one_buffer),
        parents_(parents),
        to_fix_(*to_fix),
        parent_of_to_fix_(parent_of_to_fix) {
    if (parents_ != nullptr) {
      // NOTE(review): resize() followed by push_back() in the loop below
      // appends past the resized elements, leaving the first half
      // default-initialized — verify this isn't meant to be reserve() or
      // direct assignment. Confirm against the original file (lines may have
      // been elided from this extract).
      parents_->resize(implications_and_offsets->size());
      for (int i = 0; i < implications_and_offsets->size(); ++i) {
        parents_->push_back(LiteralIndex(i));
      }
    }
  }

  // Returns the successors of `node` (a literal index): the literals it
  // directly implies, plus the negations of the other literals of each
  // at-most-one it belongs to. The result aliases tmp_ and is only valid
  // until the next call.
  const std::vector<int32_t>& operator[](int32_t node) const {
    tmp_.clear();
    for (const Literal l :
         implications_and_offsets_[LiteralIndex(node)].literals()) {
      if (parents_ != nullptr &&
          finder_.NodeIsNotYetExplored(l.Index().value())) {
        (*parents_)[l.Index()] = LiteralIndex(node);
      }
      tmp_.push_back(l.Index().value());
      if (finder_.NodeIsInCurrentDfsPath(l.NegatedIndex().value())) {
        to_fix_.push_back(l);
        // Note that if 'l' is already explored its parent chain might not
        // contain a node in the same SCC as not(l). But the parent chain of
        // 'node' does. Storing it in parent_of_to_fix_ enables recovering a
        // full implication chain from not(l) to l, for the LRAT proof that l
        // can be fixed.
        if (parent_of_to_fix_ != nullptr) {
          parent_of_to_fix_->push_back(Literal(LiteralIndex(node)));
        }
      }
    }
    for (const int start :
         implications_and_offsets_[LiteralIndex(node)].offsets()) {
      if (start >= at_most_one_already_explored_.size()) {
        at_most_one_already_explored_.resize(start + 1, false);
        previous_node_to_explore_at_most_one_.resize(start + 1);
      }

      // In the presence of at_most_ones_ constraints, expanding them
      // implicitly to implications in the SCC computation can result in a
      // quadratic complexity rather than a linear one in terms of the input
      // data structure size. So this test here is critical on problem with
      // large at_most ones like the "ivu06-big.mps.gz" where without it, the
      // full FindStronglyConnectedComponents() take more than one hour instead
      // of less than a second!
      if (at_most_one_already_explored_[start]) {
        // We never expand a node twice.
        const int first_node = previous_node_to_explore_at_most_one_[start];
        DCHECK_NE(node, first_node);

        if (finder_.NodeIsInCurrentDfsPath(first_node)) {
          // If the first node is not settled, then we do explore the
          // at_most_one constraint again. In "Mixed-Integer-Programming:
          // Analyzing 12 years of progress", Tobias Achterberg and Roland
          // Wunderling explains that an at most one need to be looped over at
          // most twice. I am not sure exactly how that works, so for now we
          // are not fully linear, but on actual instances, we only rarely
          // run into this case.
          //
          // Note that we change the previous node to explore at most one
          // since the current node will be settled before the old ones.
          //
          // TODO(user): avoid looping more than twice on the same at most one
          // constraints? Note that the second time we loop we have x => y =>
          // not(x), so we can already detect that x must be false which we
          // detect below.
          previous_node_to_explore_at_most_one_[start] = node;
        } else {
          // The first node is already settled and so are all its child. Only
          // not(first_node) might still need exploring.
          tmp_.push_back(
              Literal(LiteralIndex(first_node)).NegatedIndex().value());
          continue;
        }
      } else {
        at_most_one_already_explored_[start] = true;
        previous_node_to_explore_at_most_one_[start] = node;
      }

      // Expand the at-most-one: node being true forces all other literals of
      // the constraint to false.
      const absl::Span<const Literal> amo =
          absl::MakeSpan(&at_most_one_buffer_[start + 1],
                         at_most_one_buffer_[start].Index().value());
      for (const Literal l : amo) {
        if (l.Index() == node) continue;
        tmp_.push_back(l.NegatedIndex().value());
        if (finder_.NodeIsInCurrentDfsPath(l.Index().value())) {
          to_fix_.push_back(l.Negated());
        }
      }
    }
    work_done_ += tmp_.size();
    return tmp_;
  }

  // For the deterministic time.
  mutable int64_t work_done_ = 0;

 private:
  const SccFinder& finder_;
      implications_and_offsets_;
  const std::vector<Literal>& at_most_one_buffer_;
  // The spanning forest built during the SCC computation. The parent of a tree
  // root is the node itself.
  // All these literals were detected to be true during the SCC computation.
  std::vector<Literal>& to_fix_;
  std::vector<Literal>* parent_of_to_fix_;

  // Scratch adjacency list returned by operator[].
  mutable std::vector<int32_t> tmp_;

  // Used to get a non-quadratic complexity in the presence of at most ones.
  mutable std::vector<bool> at_most_one_already_explored_;
  mutable std::vector<int> previous_node_to_explore_at_most_one_;
};
1714
1715// Helper class to add LRAT inferred clauses proving that the implications added
1716// in DetectEquivalences() are correct.
1717// TODO(user): extend this to support "at most one" constraints.
1719 public:
1721 : graph_(graph),
1722 trail_(graph->trail_),
1723 implications_and_amos_(graph->implications_and_amos_),
1724 clause_id_generator_(graph->clause_id_generator_),
1725 lrat_proof_handler_(graph->lrat_proof_handler_) {}
1726
1727 // Initializes the internal data structures to process the given component
1728 // with the methods below. The components must be processed in reverse
1729 // topological order. After processing, all LRAT clauses "l => rep" and "rep
1730 // => l" are added for all literals l in the component, where rep is the
1731 // representative.
1732 void NewComponent(absl::Span<const int32_t> component) {
1733 component_ = component;
1734 // TODO(user): we could use representative_of_ instead to check if a
1735 // literal is in the component. This requires populating it before calling
1736 // the methods below, which is currently not the case.
1737 in_component_.ClearAndResize(LiteralIndex(graph_->literal_size()));
1738 for (int i = 0; i < component.size(); ++i) {
1739 in_component_.Set(LiteralIndex(component[i]));
1740 }
1741 }
1742
1743 // Adds inferred LRAT clauses proving that all the literals in the component
1744 // can be fixed, knowing that `fixed_literal` (in the component) is already
1745 // fixed. This is done with a DFS starting from `fixed_literal` (each fixed
1746 // literal is proved using the proof of its parent).
  bool FixAllLiteralsInComponent(LiteralIndex fixed_literal, bool all_true) {
    DCHECK(stack_.empty());
    // Start the DFS from the already-fixed literal (or its negation when the
    // whole component must be false).
    stack_.push(all_true ? fixed_literal : Literal(fixed_literal).Negated());
    is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
    is_marked_.Set(stack_.top());
    while (!stack_.empty()) {
      const LiteralIndex node = stack_.top();
      stack_.pop();
      for (const Literal l : implications_and_amos_[node].literals()) {
        if (!in_component_[l.Index()] || is_marked_[l.Index()]) continue;
        stack_.push(l.Index());
        is_marked_.Set(l.Index());
        const Literal to_fix = all_true ? l : l.Negated();
        if (trail_->Assignment().LiteralIsTrue(to_fix)) continue;
        // Justify fixing with the parent's unit clause plus the binary
        // implication node => l.
        // NOTE(review): when all_true is false the skip test above uses
        // to_fix = l.Negated() but FixLiteral() is called on l — verify this
        // asymmetry is intended.
        clause_ids_.clear();
        clause_ids_.push_back(
            trail_->GetUnitClauseId(Literal(node).Variable()));
        clause_ids_.push_back(graph_->GetClauseId(Literal(node).Negated(), l));
        if (!graph_->FixLiteral(l, clause_ids_)) return false;
      }
    }
    return true;
  }
1770
  // Adds inferred LRAT clauses proving "l => RepresentativeOf(l')" for all
  // literals l' directly implied by l, if the current component is the
  // singleton {l}.
    LiteralIndex representative = LiteralIndex(component_[0]);
    for (Literal& ref : implications_and_amos_[representative].literals()) {
      const LiteralIndex rep = graph_->RepresentativeOf(ref);
      // Skip implications that already point at the representative itself, or
      // whose target has no representative.
      if (rep == representative) continue;
      if (rep == kNoLiteralIndex) continue;
      // Chain: representative => ref => rep.
      MaybeAddLratImplicationChain(
          {Literal(representative), Literal(ref), Literal(rep)});
    }
  }
1784
  // Adds inferred LRAT clauses proving the following:
  // a) "representative => RepresentativeOf(l)" for all literals l outside of
  // the component. This assumes that the component of l is already processed.
  // b) "representative => l" for all literals l in the component. This is done
  // with a DFS starting from `representative`, restricted to literals in the
  // component (each literal is proved using the proof of its parent).
  // c) "representative => RepresentativeOf(l')" for all literals l' outside of
  // the component which are directly implied by a literal in the component.
  // d) "l => representative" for all literals l in the component. This is
  // equivalent to "not(representative) => not(l)", which is done with a DFS
  // starting from not(representative), restricted to literals whose negation is
  // in the component (each literal is proved using the proof of its parent).
  void ProcessComponent(LiteralIndex representative) {
    for (const Literal l : implications_and_amos_[representative].literals()) {
      const Literal rep = graph_->RepresentativeOf(l);
      if (rep.Index() == representative) continue;
      // case a)
      MaybeAddLratImplicationChain({Literal(representative), l, rep});
    }

    // DFS from `representative` inside the component, proving b) and c).
    DCHECK(stack_.empty());
    stack_.push(representative);
    is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
    is_marked_.Set(representative);
    while (!stack_.empty()) {
      const LiteralIndex node = stack_.top();
      stack_.pop();
      for (const Literal l : implications_and_amos_[node].literals()) {
        if (is_marked_[l] || !in_component_[l.Index()]) continue;
        stack_.push(l.Index());
        is_marked_.Set(l.Index());
        // case b)
        MaybeAddLratImplicationChain(
            {Literal(representative), Literal(node), l});
        for (const Literal m : implications_and_amos_[l].literals()) {
          const Literal rep = graph_->RepresentativeOf(m);
          if (rep.Index() != representative) {
            // case c)
            MaybeAddLratImplicationChain({Literal(representative), l, m, rep});
          }
        }
      }
    }

    // Perform a DFS starting from not(representative) to prove cases d).
    const LiteralIndex negated_representative =
        Literal(representative).NegatedIndex();
    DCHECK(stack_.empty());
    stack_.push(negated_representative);
    is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
    is_marked_.Set(negated_representative);
    while (!stack_.empty()) {
      const LiteralIndex node = stack_.top();
      stack_.pop();
      for (const Literal l : implications_and_amos_[node].literals()) {
        // Only follow implications whose target's negation is in the
        // component (this DFS walks the negated side of the component).
        if (is_marked_[l] || !in_component_[l.NegatedIndex()]) continue;
        stack_.push(l.Index());
        is_marked_.Set(l.Index());
        // case d)
        MaybeAddLratImplicationChain(
            {Literal(negated_representative), Literal(node), l});
      }
    }
  }
1849
1850 // Shows UNSAT when literal and not(literal) are both equivalent to
1851 // representative. This assumes that both literal and not(literal) have
1852 // already been shown to be equivalent to representative.
1853 void ProcessUnsatComponent(Literal literal, Literal representative) {
1854 // "literal => representative => not(literal)" proves "not(literal)"
1855 const ClauseId clause_id = clause_id_generator_->GetNextId();
1856 clause_ids_.clear();
1857 AppendLratImplicationChain({literal, representative, literal.Negated()},
1858 clause_ids_);
1859 AddInferredClause(clause_id, {literal.Negated()}, clause_ids_);
1860 // "not(literal) => representative => literal" proves "literal". Combined
1861 // with the previous clause, this gives the empty clause.
1862 clause_ids_.clear();
1863 clause_ids_.push_back(clause_id);
1864 AppendLratImplicationChain({literal.Negated(), representative, literal},
1865 clause_ids_);
1866 AddInferredClause(clause_id_generator_->GetNextId(), {}, clause_ids_);
1867 }
1868
  // Sets `clause_ids` to a list of implications proving that "a => b". This
  // is done by iterating over the parents of 'b' until a parent p in the same
  // SCC as 'a' is found. We can then use the implication chain a => rep(a) => p
  // => ... parent(b) => b.
      Literal a, Literal b,
      std::vector<ClauseId>& clause_ids) {
    // Trivial chain: nothing to prove.
    if (a == b) return;
    // Build the steps of the implication chain in reverse order.
    std::vector<Literal>& steps = tmp_literals_;
    steps.clear();
    steps.push_back(b);
    const Literal a_rep = graph_->RepresentativeOf(a);
    // Walk up b's parent chain until reaching a literal in a's SCC.
    while (graph_->RepresentativeOf(steps.back()) != a_rep) {
      // A literal must never be its own parent here, or we would loop forever.
      DCHECK_NE(parents[steps.back().Index()], steps.back().Index());
      steps.push_back(Literal(parents[steps.back().Index()]));
    }
    steps.push_back(a_rep);
    steps.push_back(a);
    // Add the corresponding clause IDs to clause_ids.
    std::reverse(steps.begin(), steps.end());
    clause_ids.clear();
    AppendLratImplicationChain(steps, clause_ids);
  }
1894
1895 private:
1896 // Adds the LRAT inferred clause "steps.front() => steps.back()" if it doesn't
1897 // exist already. There must be an existing clause for each pair of
1898 // consecutive step literals, unless they are equal.
1899 void MaybeAddLratImplicationChain(absl::Span<const Literal> steps) {
1900 const Literal a = steps.front().Negated();
1901 const Literal b = steps.back();
1902 if (graph_->GetClauseId(a, b) != kNoClauseId) return;
1903 clause_ids_.clear();
1904 AppendLratImplicationChain(steps, clause_ids_);
1905 if (clause_ids_.empty()) return;
1906 const ClauseId new_clause_id = clause_id_generator_->GetNextId();
1907 AddInferredClause(new_clause_id, {a, b}, clause_ids_);
1908 graph_->AddClauseId(new_clause_id, a, b);
1909 }
1910
1911 void AppendLratImplicationChain(absl::Span<const Literal> steps,
1912 std::vector<ClauseId>& clause_ids) {
1913 for (int i = 1; i < steps.size(); ++i) {
1914 if (steps[i] == steps[i - 1]) continue;
1915 clause_ids.push_back(
1916 graph_->GetClauseId(steps[i - 1].Negated(), steps[i]));
1917 }
1918 }
1919
1920 void AddInferredClause(ClauseId new_clause_id,
1921 absl::Span<const Literal> literals,
1922 absl::Span<const ClauseId> clause_ids) {
1923 if (lrat_proof_handler_ != nullptr) {
1924 lrat_proof_handler_->AddInferredClause(new_clause_id, literals,
1925 clause_ids);
1926 }
1927 }
1928
  // External collaborators (raw pointers/reference; not deleted here).
  BinaryImplicationGraph* graph_;
  Trail* trail_;
  util_intops::StrongVector<LiteralIndex, LiteralsOrOffsets>&
      implications_and_amos_;
  ClauseIdGenerator* clause_id_generator_;
  LratProofHandler* lrat_proof_handler_;  // May be nullptr (proof disabled).

  // Temporary data structures used by the above methods:
  // - component_: the component being processed.
  // - in_component_: for each literal, whether it is in component_ or not.
  // - stack_, is_marked_: used to do DFS in the component.
  // - clause_ids_: used to add inferred clauses.
  // - tmp_literals_: used in ComputeImplicationChain().
  absl::Span<const int32_t> component_;
  SparseBitset<LiteralIndex> in_component_;
  std::stack<LiteralIndex> stack_;
  SparseBitset<LiteralIndex> is_marked_;
  std::vector<ClauseId> clause_ids_;
  std::vector<Literal> tmp_literals_;
};
1949
  // This was already called, and no new constraints were added. Note that new
  // fixed variables cannot create new equivalences, only new binary clauses do.
  if (is_dag_) return true;
  WallTimer wall_timer;
  wall_timer.Start();
  log_info |= VLOG_IS_ON(1);

  // At level zero, delete the LRAT clauses of binary reasons that were changed
  // on the trail and forget about them.
  // NOTE(review): this dereferences lrat_proof_handler_ unconditionally —
  // presumably changed_reasons_on_trail_ is only populated when it is non-null;
  // confirm.
  if (trail_->CurrentDecisionLevel() == 0) {
    for (std::pair<Literal, Literal> clause : changed_reasons_on_trail_) {
      auto it = clause_id_.find(clause);
      lrat_proof_handler_->DeleteClause(it->second,
                                        {clause.first, clause.second});
      clause_id_.erase(it);
    }
    changed_reasons_on_trail_.clear();
  }

  // Lets remove all fixed/duplicate variables first.
  if (!RemoveDuplicatesAndFixedVariables()) return false;
  const VariablesAssignment& assignment = trail_->Assignment();
  DCHECK(InvariantsAreOk());

  // TODO(user): We could just do it directly though.
  const int32_t size(implications_and_amos_.size());
  scc.reserve(size);
  std::vector<Literal> to_fix;
  std::vector<Literal> parent_of_to_fix;
  double dtime = 0.0;
  {
    SccGraph::SccFinder finder;
    SccGraph graph(&finder, &implications_and_amos_, &at_most_one_buffer_,
                   lrat_helper_ != nullptr ? &parents : nullptr, &to_fix,
                   lrat_helper_ != nullptr ? &parent_of_to_fix : nullptr);
    finder.FindStronglyConnectedComponents(size, graph, &scc);
    dtime += 4e-8 * graph.work_done_;
  }

  // The old values will still be valid.
  representative_of_.resize(size, kNoLiteralIndex);
  is_redundant_.resize(size);

  int num_equivalences = 0;
  int num_new_redundant_literals = 0;

  // We increment num_redundant_literals_ only at the end, to avoid breaking the
  // invariant "num_redundant_literals_ % 2 == 0" in case of early return.
  reverse_topological_order_.clear();
  for (int index = 0; index < scc.size(); ++index) {
    const absl::Span<int32_t> component = scc[index];
    // We ignore variables that appear in no constraints.
    if (component.size() == 1 && is_removed_[LiteralIndex(component[0])]) {
      continue;
    }

    // We always take the smallest literal index (which also corresponds to the
    // smallest BooleanVariable index) as a representative. This make sure that
    // the representative of a literal l and the one of not(l) will be the
    // negation of each other. There is also reason to think that it is
    // heuristically better to use a BooleanVariable that was created first.
    std::sort(component.begin(), component.end());
    const LiteralIndex representative(component[0]);
    reverse_topological_order_.push_back(representative);

    if (lrat_helper_ != nullptr) {
      lrat_helper_->NewComponent(component);
    }

    if (component.size() == 1) {
      // Note that because we process list in reverse topological order, this
      // is only needed if there is any equivalence before this point.
      if (num_equivalences > 0) {
        if (lrat_helper_ != nullptr) {
          lrat_helper_->ProcessSingletonComponent();
        }
        // Remap this singleton's implication list to representatives.
        auto& representative_list = implications_and_amos_[representative];
        for (Literal& ref : representative_list.literals()) {
          const LiteralIndex rep = representative_of_[ref];
          if (rep == representative) continue;
          if (rep == kNoLiteralIndex) continue;
          ref = Literal(rep);
        }

        // We should already have detected fixing because of l => x and not(x).
        CHECK(CleanUpImplicationList(Literal(representative)));
      }
      continue;
    }

    if (lrat_helper_ != nullptr) {
      lrat_helper_->ProcessComponent(representative);
    }

    // Sets the representative.
    for (int i = 1; i < component.size(); ++i) {
      const Literal literal = Literal(LiteralIndex(component[i]));
      if (!is_redundant_[literal]) {
        ++num_new_redundant_literals;
        is_redundant_.Set(literal);
      }
      representative_of_[literal] = representative;

      // Detect if x <=> not(x) which means unsat. Note that we rely on the
      // fact that when sorted, they will both be consecutive in the list.
      if (Literal(LiteralIndex(component[i - 1])).Negated() == literal) {
        LOG_IF(INFO, log_info) << "Trivially UNSAT in DetectEquivalences()";
        if (lrat_helper_ != nullptr) {
          lrat_helper_->ProcessUnsatComponent(literal, Literal(representative));
        }
        return false;
      }
    }
    // Merge all the lists in implications_and_offsets_[representative].
    // Note that we do not want representative in its own list.
    auto& representative_list = implications_and_amos_[representative];
    int new_size = 0;
    for (const Literal l : representative_list.literals()) {
      const Literal rep = RepresentativeOf(l);
      if (rep.Index() == representative) continue;
      representative_list.literals()[new_size++] = rep;
    }
    representative_list.TruncateLiterals(new_size);
    for (int i = 1; i < component.size(); ++i) {
      const Literal literal = Literal(LiteralIndex(component[i]));
      auto& ref = implications_and_amos_[literal];
      for (const Literal l : ref.literals()) {
        const Literal rep = RepresentativeOf(l);
        if (rep.Index() != representative) {
          representative_list.PushBackLiteral(rep);
        }
      }
      dtime += 1e-8 * static_cast<double>(ref.num_literals());

      // Add representative <=> literal.
      //
      // Remark: this relation does not need to be added to a DRAT proof since
      // the redundant variables should never be used again for a pure SAT
      // problem.
      representative_list.PushBackLiteral(literal);
      ref.ClearLiterals();
      ref.PushBackLiteral(Literal(representative));
    }

    // We should already have detected fixing because of l => x and not(x).
    CHECK(CleanUpImplicationList(Literal(representative)));
    num_equivalences += component.size() - 1;
  }

  std::vector<ClauseId> clause_ids;
  // Fix all literals in `to_fix`.
  for (int i = 0; i < to_fix.size(); ++i) {
    const Literal l = to_fix[i];
    if (assignment.LiteralIsTrue(l)) continue;
    if (lrat_helper_ != nullptr) {
      // Prove "l" via the chain not(l) => ... => parent_of_l => l.
      const Literal parent_of_l = parent_of_to_fix[i];
      lrat_helper_->ComputeImplicationChain(l.Negated(), parent_of_l, parents,
                                            clause_ids);
      clause_ids.push_back(GetClauseId(parent_of_l.Negated(), l));
    }
    if (!FixLiteral(l, clause_ids)) return false;
  }
  // Look for fixed variables in each component, and make sure if one is fixed,
  // all variables in the same component are fixed. Note that the reason why the
  // propagation didn't already do that and we don't always get fixed component
  // of size 1 is because of the potential newly fixed literals above.
  //
  // In any case, all fixed literals are marked as redundant.
  for (int index = 0; index < scc.size(); ++index) {
    const absl::Span<int32_t> component = scc[index];
    if (component.size() == 1 || is_removed_[LiteralIndex(component[0])]) {
      continue;
    }
    bool all_fixed = false;
    bool all_true = false;
    LiteralIndex fixed_literal = kNoLiteralIndex;
    for (const int32_t i : component) {
      const Literal l = Literal(LiteralIndex(i));
      if (assignment.LiteralIsAssigned(l)) {
        all_fixed = true;
        all_true = assignment.LiteralIsTrue(l);
        fixed_literal = l.Index();
        break;
      }
    }
    if (!all_fixed) continue;
    // Mark every literal of the (now fixed) component as redundant.
    for (const int32_t i : component) {
      const Literal l = Literal(LiteralIndex(i));
      if (!is_redundant_[l]) {
        ++num_new_redundant_literals;
        is_redundant_.Set(l);
        representative_of_[l] = l.Index();
      }
    }
    if (lrat_helper_ != nullptr) {
      lrat_helper_->NewComponent(component);
      if (!lrat_helper_->FixAllLiteralsInComponent(fixed_literal, all_true)) {
        return false;
      }
    } else {
      for (const int32_t i : component) {
        const Literal l = Literal(LiteralIndex(i));
        const Literal to_fix = all_true ? l : l.Negated();
        if (assignment.LiteralIsFalse(to_fix)) return false;
        if (assignment.LiteralIsTrue(to_fix)) continue;
        // NOTE(review): this fixes `l`, not `to_fix` — when all_true is
        // false this looks like it should fix `to_fix` (= not(l)); confirm
        // against upstream.
        if (!FixLiteral(l)) return false;
      }
    }
  }

  is_dag_ = true;
  if (num_equivalences != 0) {
    // Remap all at most ones. Remove fixed variables, process duplicates. Note
    // that this might result in more implications when we expand small at most
    // one.
    for (auto& v : implications_and_amos_) {
      v.ClearOffsets();
    }
    int saved_trail_index = propagation_trail_index_;
    if (!CleanUpAndAddAtMostOnes(/*base_index=*/0)) return false;

    // This might have run the propagation on a few variables without taking
    // into account the AMOs. Propagate again.
    //
    // TODO(user): Maybe a better alternative is to not propagate when we fix
    // variables inside CleanUpAndAddAtMostOnes().
    if (propagation_trail_index_ != saved_trail_index) {
      propagation_trail_index_ = saved_trail_index;
      Propagate(trail_);
    }
  }

  // Note that all fixed variables should be excluded from here after a
  // call to RemoveFixedVariables().
  num_redundant_literals_ += num_new_redundant_literals;
  num_current_equivalences_ = num_equivalences;
  time_limit_->AdvanceDeterministicTime(dtime);
  const int num_fixed_during_scc =
      trail_->Index() - num_processed_fixed_variables_;

  // If we fixed things, they can always be at_most_one that become simple
  // implications, so we need to redo a round of cleaning.
  if (!RemoveDuplicatesAndFixedVariables()) return false;

  DCHECK(InvariantsAreOk());
  LOG_IF(INFO, log_info) << "SCC. " << num_equivalences
                         << " redundant equivalent literals. "
                         << num_fixed_during_scc << " fixed. "
                         << " implications left. "
                         << implications_and_amos_.size() << " literals."
                         << " size of at_most_one buffer = "
                         << at_most_one_buffer_.size() << "."
                         << " dtime: " << dtime
                         << " wtime: " << wall_timer.Get();
  return true;
}
2208
// Note that as a side effect this also do a full "failed literal probing"
// using the binary implication graph only.
//
// TODO(user): Track which literal have new implications, and only process
// the antecedents of these.
  DCHECK_EQ(trail_->CurrentDecisionLevel(), 0);
  if (time_limit_->LimitReached()) return true;
  // The code below needs the DAG property established by DetectEquivalences().
  if (!DetectEquivalences()) return false;

  // TODO(user): the situation with fixed variable is not really "clean".
  // Simplify the code so we are sure we don't run into issue or have to deal
  // with any of that here.
  if (time_limit_->LimitReached()) return true;
  if (!Propagate(trail_)) return false;
  DCHECK(InvariantsAreOk());
  if (time_limit_->LimitReached()) return true;

  log_info |= VLOG_IS_ON(1);
  WallTimer wall_timer;
  wall_timer.Start();

  int64_t num_fixed = 0;
  int64_t num_new_redundant_implications = 0;
  bool aborted = false;
  tmp_removed_.clear();
  work_done_in_mark_descendants_ = 0;
  int marked_index = 0;
  std::vector<ClauseId> clause_ids;

  // For each node we do a graph traversal and only keep the literals
  // at maximum distance 1. This only works because we have a DAG when ignoring
  // the "redundant" literal marked by DetectEquivalences(). Note that we also
  // need no duplicates in the implications list for correctness which is also
  // guaranteed by DetectEquivalences().
  //
  // TODO(user): We should be able to reuse some propagation like it is done for
  // tree-look. Once a node is processed, we just need to process a node that
  // implies it. Test if we can make this faster. Alternatively, only clear
  // a part of is_marked_ (after the first child of root in reverse topo order).
  //
  // TODO(user): Can we exploit the fact that the implication graph is a
  // skew-symmetric graph (isomorphic to its transposed) so that we do less
  // work?
  const LiteralIndex size(implications_and_amos_.size());
  LiteralIndex previous = kNoLiteralIndex;
  for (const LiteralIndex root : reverse_topological_order_) {
    // In most situation reverse_topological_order_ contains no redundant, fixed
    // or removed variables. But the reverse_topological_order_ is only
    // recomputed when new binary are added to the graph, not when new variable
    // are fixed.
    if (is_redundant_[root]) continue;
    if (trail_->Assignment().LiteralIsAssigned(Literal(root))) continue;

    auto& direct_implications = implications_and_amos_[root];
    if (direct_implications.literals().empty()) continue;

    // This is a "poor" version of the tree look stuff, but it does show good
    // improvement. If we just processed one of the child of root, we don't
    // need to re-explore it.
    //
    // TODO(user): Another optim we can do is that we never need to expand
    // any node with a reverse topo order smaller or equal to the min of the
    // ones in this list.
    bool clear_previous_reachability = true;
    for (const Literal direct_child : direct_implications.literals()) {
      if (direct_child.Index() == previous) {
        clear_previous_reachability = false;
        is_marked_.Clear(previous);
        break;
      }
    }
    if (clear_previous_reachability) {
      is_marked_.ClearAndResize(size);
      marked_index = 0;
    }
    previous = root;

    // Mark everything reachable from root at distance >= 2.
    for (const Literal direct_child : direct_implications.literals()) {
      if (is_redundant_[direct_child]) continue;
      if (is_marked_[direct_child]) continue;

      // This is a corner case where because of equivalent literal, root
      // appear in implications_[root], we will remove it below.
      if (direct_child.Index() == root) continue;

      // When this happens, then root must be false, we handle this just after
      // the loop.
      if (direct_child.NegatedIndex() == root) {
        is_marked_.Set(direct_child);
        break;
      }

      if (lrat_proof_handler_ != nullptr) {
        MarkDescendants</*fill_implied_by=*/true>(direct_child);
      } else {
        MarkDescendants(direct_child);
      }

      // We have a DAG, so direct_child could only be marked first.
      is_marked_.Clear(direct_child);
      implied_by_[direct_child] = Literal(root);
    }
    DCHECK(!is_marked_[root])
        << "DetectEquivalences() should have removed cycles!";
    is_marked_.Set(root);
    implied_by_[root] = Literal(root);

    // Also mark all the ones reachable through the root AMOs.
    {
      auto is_marked = is_marked_.BitsetView();
      for (const int start : implications_and_amos_[root].offsets()) {
        for (const Literal l : AtMostOne(start)) {
          if (l.Index() == root) continue;
          if (!is_marked[l.Negated()] && !is_redundant_[l.Negated()]) {
            is_marked_.SetUnsafe(is_marked, l.Negated());
            if (lrat_proof_handler_ != nullptr) {
              MarkDescendants</*fill_implied_by=*/true>(l.Negated());
            } else {
              MarkDescendants(l.Negated());
            }
          }
        }
      }
    }

    // Failed literal probing. If both x and not(x) are marked then root must be
    // false. Note that because we process "roots" in reverse topological order,
    // we will fix the LCA of x and not(x) first.
    const auto& marked_positions = is_marked_.PositionsSetAtLeastOnce();
    for (; marked_index < marked_positions.size(); ++marked_index) {
      const LiteralIndex i = marked_positions[marked_index];
      if (is_marked_[Literal(i).NegatedIndex()]) {
        // We tested that at the beginning.
        DCHECK(!trail_->Assignment().LiteralIsAssigned(Literal(root)));

        // We propagate right away the binary implications so that we do not
        // need to consider all antecedents of root in the transitive
        // reduction.
        ++num_fixed;
        if (lrat_proof_handler_ != nullptr) {
          clause_ids.clear();
          AppendImplicationChain(Literal(i), &clause_ids);
          AppendImplicationChain(Literal(i).Negated(), &clause_ids);
        }
        if (!FixLiteral(Literal(root).Negated(), clause_ids)) return false;
        break;
      }
    }

    // Note that direct_implications will be cleared by
    // RemoveFixedVariables() that will need to inspect it to completely
    // remove Literal(root) from all lists.
    if (trail_->Assignment().LiteralIsAssigned(Literal(root))) continue;

    // Only keep the non-marked literal (and the redundant one which are never
    // marked). We mark root to remove it in the corner case where it was
    // there.
    int new_size = 0;
    for (const Literal l : direct_implications.literals()) {
      if (!is_marked_[l]) {
        direct_implications.literals()[new_size++] = l;
      } else {
        tmp_removed_.push_back({Literal(root), l});
        DCHECK(!is_redundant_[l]);
      }
    }
    const int diff = direct_implications.num_literals() - new_size;
    direct_implications.TruncateLiterals(new_size);
    direct_implications.ShrinkToFit();
    num_new_redundant_implications += diff;

    // Abort if the computation involved is too big.
    if (work_done_in_mark_descendants_ > 1e8) {
      aborted = true;
      break;
    }
  }

  is_marked_.ClearAndResize(size);

  // If we aborted early, we might no longer have both a=>b and not(b)=>not(a).
  // This is not desirable as some algorithms rely on this invariant. We fix it
  // here.
  if (aborted) {
    absl::flat_hash_set<std::pair<LiteralIndex, LiteralIndex>> removed;
    for (const auto [a, b] : tmp_removed_) {
      removed.insert({a.Index(), b.Index()});
    }
    for (LiteralIndex i(0); i < implications_and_amos_.size(); ++i) {
      int new_size = 0;
      const LiteralIndex negated_i = Literal(i).NegatedIndex();
      auto& implication = implications_and_amos_[i];
      for (const Literal l : implication.literals()) {
        // Drop l from i's list iff "not(l) => not(i)" was dropped above.
        if (removed.contains({l.NegatedIndex(), negated_i})) continue;
        implication.literals()[new_size++] = l;
      }
      implication.TruncateLiterals(new_size);
    }
  }
  // NOTE(review): this branch is empty in this listing — looks truncated;
  // confirm what should run here when literals were fixed.
  if (num_fixed > 0) {
  }
  DCHECK(InvariantsAreOk());

  gtl::STLClearObject(&tmp_removed_);
  const double dtime = 1e-8 * work_done_in_mark_descendants_;
  time_limit_->AdvanceDeterministicTime(dtime);
  num_redundant_implications_ += num_new_redundant_implications;
  LOG_IF(INFO, log_info) << "Transitive reduction removed "
                         << num_new_redundant_implications << " literals. "
                         << num_fixed << " fixed. "
                         << " implications left. "
                         << implications_and_amos_.size() << " literals."
                         << " dtime: " << dtime
                         << " wtime: " << wall_timer.Get()
                         << (aborted ? " Aborted." : "");
  return true;
}
2430
2431namespace {
2432
2433int ElementInIntersectionOrMinusOne(absl::Span<const int> a,
2434 absl::Span<const int> b) {
2435 DCHECK(std::is_sorted(a.begin(), a.end()));
2436 DCHECK(std::is_sorted(b.begin(), b.end()));
2437 if (a.empty() || b.empty()) return -1;
2438 int i = 0;
2439 int j = 0;
2440 while (true) {
2441 if (a[i] == b[j]) return a[i];
2442 if (a[i] < b[j]) {
2443 if (++i == a.size()) return -1;
2444 } else {
2445 if (++j == b.size()) return -1;
2446 }
2447 }
2448}
2449
2450} // namespace
2451
2452std::vector<std::pair<int, int>>
2453BinaryImplicationGraph::FilterAndSortAtMostOnes(
2454 absl::Span<std::vector<Literal>> at_most_ones) {
2455 // We starts by processing larger constraints first.
2456 // But we want the output order to be stable.
2457 std::vector<std::pair<int, int>> index_size_vector;
2458 const int num_amos = at_most_ones.size();
2459 index_size_vector.reserve(num_amos);
2460 for (int index = 0; index < num_amos; ++index) {
2461 std::vector<Literal>& clique = at_most_ones[index];
2462 if (clique.size() <= 1) continue;
2463
2464 // Note(user): Because we always use literal with the smallest variable
2465 // indices as representative, this make sure that if possible, we express
2466 // the clique in term of user provided variable (that are always created
2467 // first).
2468 //
2469 // Remap the clique to only use representative.
2470 for (Literal& ref : clique) {
2471 DCHECK_LT(ref.Index(), representative_of_.size());
2472 const LiteralIndex rep = representative_of_[ref];
2473 if (rep == kNoLiteralIndex) continue;
2474 ref = Literal(rep);
2475 }
2476
2477 // We skip anything that can be presolved further as the code below do
2478 // not handle duplicate well.
2479 //
2480 // TODO(user): Shall we presolve it here?
2481 bool skip = false;
2482 std::sort(clique.begin(), clique.end());
2483 for (int i = 1; i < clique.size(); ++i) {
2484 if (clique[i] == clique[i - 1] || clique[i] == clique[i - i].Negated()) {
2485 skip = true;
2486 break;
2487 }
2488 }
2489 if (skip) continue;
2490
2491 index_size_vector.push_back({index, clique.size()});
2492 }
2493 absl::c_stable_sort(index_size_vector, [](const std::pair<int, int> a,
2494 const std::pair<int, int> b) {
2495 return a.second > b.second;
2496 });
2497 return index_size_vector;
2498}
2499
    std::vector<std::vector<Literal>>* at_most_ones,
    int64_t max_num_explored_nodes) {
  // The code below assumes a DAG.
  if (!DetectEquivalences()) return false;
  work_done_in_mark_descendants_ = 0;

  int num_extended = 0;
  int num_removed = 0;
  int num_added = 0;

  // Data to detect inclusion of base amo into extend amo.
  std::vector<int> detector_clique_index;
  InclusionDetector detector(storage, time_limit_);
  detector.SetWorkLimit(1e9);

  // Maps a dense clique index back to its position in at_most_ones, and for
  // each literal, the dense indices of the cliques containing it.
  std::vector<int> dense_index_to_index;
      max_cliques_containing(implications_and_amos_.size());

  const std::vector<std::pair<int, int>> index_size_vector =
      FilterAndSortAtMostOnes(absl::MakeSpan(*at_most_ones));

  absl::flat_hash_set<int> cannot_be_removed;
  std::vector<bool> was_extended(at_most_ones->size(), false);
  for (const auto& [index, old_size] : index_size_vector) {
    std::vector<Literal>& clique = (*at_most_ones)[index];
    if (time_limit_->LimitReached()) break;

    // Special case for clique of size 2, we don't expand them if they
    // are included in an already added clique.
    if (clique.size() == 2) {
      DCHECK_NE(clique[0], clique[1]);
      const int dense_index = ElementInIntersectionOrMinusOne(
          max_cliques_containing[clique[0]], max_cliques_containing[clique[1]]);
      if (dense_index >= 0) {
        const int superset_index = dense_index_to_index[dense_index];
        if (was_extended[superset_index]) {
          cannot_be_removed.insert(superset_index);
        }
        ++num_removed;
        clique.clear();
        continue;
      }
    }

    // Save the non-extended version as possible subset.
    // TODO(user): Detect on the fly is superset already exist.
    detector_clique_index.push_back(index);
    detector.AddPotentialSubset(storage.AddLiterals(clique));

    // We only expand the clique as long as we didn't spend too much time.
    if (work_done_in_mark_descendants_ < max_num_explored_nodes) {
      clique = ExpandAtMostOne(clique, max_num_explored_nodes);
    }

    // Save the extended version as possible superset.
    detector_clique_index.push_back(index);
    detector.AddPotentialSuperset(storage.AddLiterals(clique));

    // Also index clique for size 2 quick lookup.
    const int dense_index = dense_index_to_index.size();
    dense_index_to_index.push_back(index);
    for (const Literal l : clique) {
      max_cliques_containing[l].push_back(dense_index);
    }

    if (clique.size() > old_size) {
      was_extended[index] = true;
      ++num_extended;
    }
    ++num_added;
  }

  // Remove clique (before extension) that are included in an extended one.
  detector.DetectInclusions([&](int subset, int superset) {
    const int subset_index = detector_clique_index[subset];
    const int superset_index = detector_clique_index[superset];
    if (subset_index == superset_index) return;

    // Abort if one was already deleted.
    if ((*at_most_ones)[subset_index].empty()) return;
    if ((*at_most_ones)[superset_index].empty()) return;

    // If an extended clique already cover a deleted one, we cannot try to
    // remove it by looking at its non-extended version.
    if (cannot_be_removed.contains(subset_index)) return;

    ++num_removed;
    (*at_most_ones)[subset_index].clear();
    if (was_extended[superset_index]) cannot_be_removed.insert(superset_index);
  });

  if (num_extended > 0 || num_removed > 0 || num_added > 0) {
    VLOG(1) << "Clique Extended: " << num_extended
            << " Removed: " << num_removed << " Added: " << num_added
            << (work_done_in_mark_descendants_ > max_num_explored_nodes
                    ? " (Aborted)"
                    : "");
  }
  return true;
}
2603
    absl::Span<std::vector<Literal>> at_most_ones,
    int64_t max_num_explored_nodes, double* dtime) {
  // The code below assumes a DAG.
  if (!DetectEquivalences()) return false;
  work_done_in_mark_descendants_ = 0;

  const std::vector<std::pair<int, int>> index_size_vector =
      FilterAndSortAtMostOnes(at_most_ones);

  // Data to detect inclusion of a base amo into an extended amo.
  std::vector<int> detector_clique_index;
  for (const auto& [index, old_size] : index_size_vector) {
    if (time_limit_->LimitReached()) break;
    detector_clique_index.push_back(index);
    storage.AddLiterals(at_most_ones[index]);
  }

  // We use a higher limit here as the code is faster.
  SubsetsDetector detector(storage, time_limit_);
  detector.SetWorkLimit(10 * max_num_explored_nodes);
  detector.IndexAllStorageAsSubsets();

  // Now try to expand one by one.
  //
  // TODO(user): We should process clique with elements in common together so
  // that we can reuse MarkDescendants() which is slow. We should be able to
  // "cache" a few of the last calls.
  std::vector<int> intersection;
  const int num_to_consider = index_size_vector.size();
  for (int subset_index = 0; subset_index < num_to_consider; ++subset_index) {
    const int index = index_size_vector[subset_index].first;
    std::vector<Literal>& clique = at_most_ones[index];
    if (clique.empty()) continue;  // Was deleted.

    // Respect both the global exploration budget and the detector's limit.
    if (work_done_in_mark_descendants_ > max_num_explored_nodes) break;
    if (detector.Stopped()) break;

    // We start with the clique in the "intersection".
    // This prefix will never change.
    int clique_i = 0;
    int next_index_to_try = 0;
    intersection.clear();
    tmp_bitset_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
    for (const Literal l : clique) {
      intersection.push_back(l.Index().value());
      tmp_bitset_.Set(l);
    }

    while (true) {
      if (work_done_in_mark_descendants_ > max_num_explored_nodes) break;
      if (detector.Stopped()) break;

      // Compute the intersection of all the elements (or the new ones) of
      // this clique.
      //
      // Optimization: if clique_i > 0 && intersection.size() == clique.size()
      // we already know that we performed the max possible extension.
      if (clique_i > 0 && intersection.size() == clique.size()) {
        clique_i = clique.size();
      }
      for (; clique_i < clique.size(); ++clique_i) {
        const Literal l = clique[clique_i];

        is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
        MarkDescendants(l);

        if (clique_i == 0) {
          // Initially we have the clique + the negation of everything
          // propagated by l.
          for (const LiteralIndex index :
               is_marked_.PositionsSetAtLeastOnce()) {
            const Literal lit = Literal(index).Negated();
            if (!tmp_bitset_[lit]) {
              intersection.push_back(lit.Index().value());
            }
          }
        } else {
          // We intersect with the negation of everything propagated by
          // not(l). Note that we always keep the clique in case some
          // implications were not added to the graph.
          int new_size = 0;
          const int old_size = intersection.size();
          for (int i = 0; i < old_size; ++i) {
            if (i == next_index_to_try) {
              next_index_to_try = new_size;
            }
            const int index = intersection[i];
            const Literal lit = Literal(LiteralIndex(index));
            if (tmp_bitset_[lit] || is_marked_[lit.Negated()]) {
              intersection[new_size++] = index;
            }
          }
          intersection.resize(new_size);
        }

        // We can abort early as soon as there is no extra literal than the
        // initial clique.
        if (intersection.size() <= clique.size()) break;
      }

      // Should contain the original clique. If there are no more entries,
      // then we will not extend this clique. However, we still call
      // FindSubsets() in order to remove fully included ones.
      CHECK_GE(intersection.size(), clique.size());

      // Look for elements included in the intersection.
      // Note that we clear elements fully included at the same time.
      //
      // TODO(user): next_index_to_try help, but we might still rescan most of
      // the one-watcher list of intersection[next_index_to_try], we could be
      // a bit faster here.
      int num_extra = 0;
      detector.FindSubsets(intersection, &next_index_to_try, [&](int subset) {
        if (subset == subset_index) {
          detector.StopProcessingCurrentSubset();
          return;
        }

        num_extra = 0;
        for (const int index : storage[subset]) {
          const LiteralIndex lit_index = LiteralIndex(index);
          if (tmp_bitset_[lit_index]) continue;  // In clique.
          tmp_bitset_.Set(lit_index);
          clique.push_back(Literal(lit_index));  // extend.
          ++num_extra;
        }
        if (num_extra == 0) {
          // Fully included -- remove.
          at_most_ones[detector_clique_index[subset]].clear();
          detector.StopProcessingCurrentSubset();
          return;
        }

        detector.StopProcessingCurrentSuperset();  // Finish.
      });

      // No extension: end loop.
      if (num_extra == 0) break;
    }
  }
  // Export an estimate of the deterministic time spent here.
  if (dtime != nullptr) {
    *dtime +=
        1e-8 * work_done_in_mark_descendants_ + 1e-9 * detector.work_done();
  }
  return true;
}
2752
template <bool use_weight>
    const absl::Span<const Literal> at_most_one,
    const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
    const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values) {
  // Greedily extends `at_most_one` into a larger clique of the implication
  // graph, restricted to literals allowed by `can_be_included`.
  std::vector<Literal> clique(at_most_one.begin(), at_most_one.end());
  std::vector<LiteralIndex> intersection;
  const int64_t old_work = work_done_in_mark_descendants_;
  for (int i = 0; i < clique.size(); ++i) {
    // Do not spend too much time here.
    if (work_done_in_mark_descendants_ - old_work > 1e8) break;

    is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
    MarkDescendants(clique[i]);
    if (i == 0) {
      // Seed the intersection with everything implied by the first member
      // whose negation is allowed to enter the clique.
      for (const LiteralIndex index : is_marked_.PositionsSetAtLeastOnce()) {
        if (can_be_included[Literal(index).NegatedIndex()]) {
          intersection.push_back(index);
        }
      }
      for (const Literal l : clique) is_marked_.Clear(l.NegatedIndex());
    }

    // Keep only candidates implied by every clique member processed so far.
    int new_size = 0;
    is_marked_.Clear(clique[i]);
    is_marked_.Clear(clique[i].NegatedIndex());
    for (const LiteralIndex index : intersection) {
      if (!is_marked_[index]) continue;
      intersection[new_size++] = index;
    }
    intersection.resize(new_size);
    if (intersection.empty()) break;

    // Expand? The negation of any literal in the intersection is a valid way
    // to extend the clique.
    if (i + 1 == clique.size()) {
      // Heuristic: use literal with largest lp value. We randomize slightly.
      int index = -1;
      double max_lp = 0.0;
      for (int j = 0; j < intersection.size(); ++j) {
        // If we don't use weight, we prefer variable that comes first.
        const double lp =
            use_weight
                ? expanded_lp_values[Literal(intersection[j]).NegatedIndex()] +
                      absl::Uniform<double>(*random_, 0.0, 1e-4)
                : can_be_included.size() - intersection[j].value();
        if (index == -1 || lp > max_lp) {
          index = j;
          max_lp = lp;
        }
      }
      if (index != -1) {
        clique.push_back(Literal(intersection[index]).Negated());
        std::swap(intersection.back(), intersection[index]);
        intersection.pop_back();
      }
    }
  }
  return clique;
}
2813
// Make sure both versions are compiled.
2815template std::vector<Literal>
2817 const absl::Span<const Literal> at_most_one,
2818 const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
2819 const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values);
2820template std::vector<Literal>
2822 const absl::Span<const Literal> at_most_one,
2823 const util_intops::StrongVector<LiteralIndex, bool>& can_be_included,
2824 const util_intops::StrongVector<LiteralIndex, double>& expanded_lp_values);
2825
// This function and the generated cut serve two purposes:
// 1/ If a new clause of size 2 was discovered and not included in the LP, we
// will add it.
// 2/ The more classical clique cut separation algorithm.
//
// Note that once 1/ is performed, any literal close to 1.0 in the lp shouldn't
// be in the same clique as a literal with positive weight. So for step 2, we
// only really need to consider fractional variables.
const std::vector<std::vector<Literal>>&
    absl::Span<const Literal> literals, absl::Span<const double> lp_values,
    absl::Span<const double> reduced_costs) {
  // We only want to generate a cut with literals from the LP, not extra ones.
  const int num_literals = implications_and_amos_.size();
  util_intops::StrongVector<LiteralIndex, bool> can_be_included(num_literals,
                                                                false);
      num_literals, 0.0);
      num_literals, 0.0);
  const int size = literals.size();
  for (int i = 0; i < size; ++i) {
    const Literal l = literals[i];
    can_be_included[l] = true;
    can_be_included[l.NegatedIndex()] = true;

    // Store the LP value for both phases of the literal.
    const double value = lp_values[i];
    expanded_lp_values[l] = value;
    expanded_lp_values[l.NegatedIndex()] = 1.0 - value;

    // This is used for extending clique-cuts.
    // In most situation, we will only extend the cuts with literal at zero,
    // and we prefer "low" reduced cost first, so we negate it. Variable with
    // high reduced costs will likely stay that way and are of less interest in
    // a clique cut. At least that is my interpretation.
    //
    // TODO(user): For large problems or when we base the clique from a newly
    // added and violated 2-clique, we might consider only a subset of
    // fractional variables, so we might need to include fractional variable
    // first, but then their rc should be zero, so it should be already kind of
    // doing that.
    //
    // Remark: This seems to have a huge impact to the cut performance!
    const double rc = reduced_costs[i];
    heuristic_weights[l] = -rc;
    heuristic_weights[l.NegatedIndex()] = rc;
  }

  // We want highest sum first.
  struct Candidate {
    Literal a;
    Literal b;
    double sum;
    bool operator<(const Candidate& other) const { return sum > other.sum; }
  };
  std::vector<Candidate> candidates;

  // First heuristic. Currently we only consider violated at most one of size 2,
  // and extend them. Right now, the code is a bit slow to try too many at every
  // LP node so it is why we are defensive like this. Note also that because we
  // currently still statically add the initial implications, this will only add
  // cut based on newly learned binary clause. Or the one that were not added
  // to the relaxation in the first place.
  std::vector<Literal> fractional_literals;
  for (int i = 0; i < size; ++i) {
    Literal current_literal = literals[i];
    double current_value = lp_values[i];
    if (trail_->Assignment().LiteralIsAssigned(current_literal)) continue;
    if (is_redundant_[current_literal]) continue;

    // Normalize so that current_literal is the phase with LP value >= 0.5.
    if (current_value < 0.5) {
      current_literal = current_literal.Negated();
      current_value = 1.0 - current_value;
    }

    if (current_value < 0.99) {
      fractional_literals.push_back(current_literal);
    }

    // We consider only one candidate for each current_literal.
    LiteralIndex best = kNoLiteralIndex;
    double best_value = 0.0;
    for (const Literal l : implications_and_amos_[current_literal].literals()) {
      if (!can_be_included[l]) continue;
      // Only keep pairs whose at-most-one is sufficiently violated by the LP.
      const double activity =
          current_value + expanded_lp_values[l.NegatedIndex()];
      if (activity <= 1.01) continue;
      const double v = activity + absl::Uniform<double>(*random_, 0.0, 1e-4);
      if (best == kNoLiteralIndex || v > best_value) {
        best_value = v;
        best = l.NegatedIndex();
      }
    }
    if (best != kNoLiteralIndex) {
      const double activity = current_value + expanded_lp_values[best];
      candidates.push_back({current_literal, Literal(best), activity});
    }
  }

  // Do not generate too many cuts at once.
  const int kMaxNumberOfCutPerCall = 10;
  std::sort(candidates.begin(), candidates.end());
  if (candidates.size() > kMaxNumberOfCutPerCall) {
    candidates.resize(kMaxNumberOfCutPerCall);
  }

  // Expand to a maximal at most one each candidates before returning them.
  // Note that we only expand using literal from the LP.
  tmp_cuts_.clear();
  for (const Candidate& candidate : candidates) {
    tmp_cuts_.push_back(ExpandAtMostOneWithWeight(
        {candidate.a, candidate.b}, can_be_included, heuristic_weights));
  }

  // Once we processed new implications, also add "proper" clique cuts.
  // We can generate a small graph and separate cut efficiently there.
  if (fractional_literals.size() > 1) {
    // Lets permute this randomly and truncate if we have too many variables.
    // Since we use bitset it is good to have a multiple of 64 there.
    //
    // TODO(user): Prefer more fractional variables.
    const int max_graph_size = 1024;
    if (fractional_literals.size() > max_graph_size) {
      std::shuffle(fractional_literals.begin(), fractional_literals.end(),
                   *random_);
      fractional_literals.resize(max_graph_size);
    }

    bron_kerbosch_.Initialize(fractional_literals.size() * 2);

    // Prepare a dense mapping (two dense nodes per literal, one per phase).
    int i = 0;
    tmp_mapping_.resize(implications_and_amos_.size(), -1);
    for (const Literal l : fractional_literals) {
      bron_kerbosch_.SetWeight(i, expanded_lp_values[l]);
      tmp_mapping_[l] = i++;
      bron_kerbosch_.SetWeight(i, expanded_lp_values[l.Negated()]);
      tmp_mapping_[l.Negated()] = i++;
    }

    // Copy the implication subgraph and remap it to a dense indexing.
    //
    // TODO(user): Treat at_most_one more efficiently. We can collect them
    // and scan each of them just once.
    for (const Literal base : fractional_literals) {
      for (const Literal l : {base, base.Negated()}) {
        const int from = tmp_mapping_[l];
        for (const Literal next : DirectImplications(l)) {
          // l => next so (l + not(next) <= 1).
          const int to = tmp_mapping_[next.Negated()];
          if (to != -1) {
            bron_kerbosch_.AddEdge(from, to);
          }
        }
      }
    }

    // Before running the algo, compute the transitive closure.
    // The graph shouldn't be too large, so this should be fast enough.
    bron_kerbosch_.TakeTransitiveClosureOfImplicationGraph();

    bron_kerbosch_.SetWorkLimit(1e8);
    bron_kerbosch_.SetMinimumWeight(1.001);
    std::vector<std::vector<int>> cliques = bron_kerbosch_.Run();

    // If we have many candidates, we will only expand the first few with
    // maximum weights.
    const int max_num_per_batch = 5;
    std::vector<std::pair<int, double>> with_weight =
        bron_kerbosch_.GetMutableIndexAndWeight();
    if (with_weight.size() > max_num_per_batch) {
      std::sort(
          with_weight.begin(), with_weight.end(),
          [](const std::pair<int, double>& a, const std::pair<int, double>& b) {
            return a.second > b.second;
          });
      with_weight.resize(max_num_per_batch);
    }

    std::vector<Literal> at_most_one;
    for (const auto [index, weight] : with_weight) {
      // Convert back from dense indices (even = positive phase, odd = negated).
      at_most_one.clear();
      for (const int i : cliques[index]) {
        const Literal l = fractional_literals[i / 2];
        at_most_one.push_back(i % 2 == 1 ? l.Negated() : l);
      }

      // Expand and add clique.
      //
      // TODO(user): Expansion is pretty slow. Given that the base clique can
      // share literal being part of the same amo, we should be able to speed
      // that up, we don't want to scan an amo twice basically.
      tmp_cuts_.push_back(ExpandAtMostOneWithWeight(
          at_most_one, can_be_included, heuristic_weights));
    }

    // Clear the dense mapping.
    for (const Literal l : fractional_literals) {
      tmp_mapping_[l] = -1;
      tmp_mapping_[l.Negated()] = -1;
    }
  }

  return tmp_cuts_;
}
3032
// TODO(user): Use deterministic limit.
// TODO(user): Explore the graph instead of just looking at full amo, especially
// since we expand small ones.
std::vector<absl::Span<const Literal>>
BinaryImplicationGraph::HeuristicAmoPartition(std::vector<Literal>* literals) {
  std::vector<absl::Span<const Literal>> result;

      implications_and_amos_.size(), false);
  for (const Literal l : *literals) to_consider[l] = true;

  // Priority queue of (intersection_size, start_of_amo).
  std::priority_queue<std::pair<int, int>> pq;

  // Compute for each at most one that might overlap, its overlapping size and
  // stores its start in the at_most_one_buffer_.
  //
  // This is in O(num_literal in amo).
  absl::flat_hash_set<int> explored_amo;
  for (const Literal l : *literals) {
    for (const int start : implications_and_amos_[l].offsets()) {
      // Each amo is scanned at most once, even if several literals share it.
      const auto [_, inserted] = explored_amo.insert(start);
      if (!inserted) continue;

      int intersection_size = 0;
      for (const Literal l : AtMostOne(start)) {
        if (to_consider[l]) ++intersection_size;
      }
      if (intersection_size > 1) {
        pq.push({intersection_size, start});
      }

      // Abort early if we are done.
      if (intersection_size == literals->size()) break;
    }
  }

  // Consume AMOs, update size.
  int num_processed = 0;
  while (!pq.empty()) {
    const auto [old_size, start] = pq.top();
    pq.pop();

    // Recompute size: queue entries can be stale after earlier consumptions.
    int intersection_size = 0;
    for (const Literal l : AtMostOne(start)) {
      if (to_consider[l]) ++intersection_size;
    }
    if (intersection_size != old_size) {
      // re-add with new size.
      if (intersection_size > 1) {
        pq.push({intersection_size, start});
      }
      continue;
    }

    // Mark the literals of that at most one as consumed.
    for (const Literal l : AtMostOne(start)) {
      to_consider[l] = false;
    }

    // Extract the intersection by moving it in
    // [num_processed, num_processed + intersection_size).
    const int span_start = num_processed;
    for (int i = num_processed; i < literals->size(); ++i) {
      if (to_consider[(*literals)[i]]) continue;
      std::swap((*literals)[num_processed], (*literals)[i]);
      ++num_processed;
    }
    result.push_back(absl::MakeSpan(literals->data() + span_start,
                                    num_processed - span_start));
  }
  return result;
}
3107
    Literal root) {
  // Resets the marking bitset to the current graph size, then delegates to the
  // templated BFS below and returns its span of reached literals.
  is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
  return MarkDescendants(root);
}
3113
  const LiteralIndex size(implications_and_amos_.size());
  // Resize/clear both scratch bitsets so they cover every literal currently
  // present in the graph.
  is_marked_.ClearAndResize(size);
  processed_unit_clauses_.ClearAndResize(size);
}
3119
// BFS from `root` that marks in is_marked_ every literal implied by it,
// following both direct implications and at-most-one constraints (an amo
// containing the current literal implies the negation of its other members).
// The caller must have cleared/resized is_marked_ beforehand, and bfs_stack_
// must be large enough for all literals: elements are written without bounds
// checks. If fill_implied_by is true, implied_by_ records a BFS predecessor
// for each reached literal. Returns the span of reached literals.
template <bool fill_implied_by>
absl::Span<const Literal> BinaryImplicationGraph::MarkDescendants(
    Literal root) {
  auto* const stack = bfs_stack_.data();
  auto is_marked = is_marked_.BitsetView();
  auto is_redundant = is_redundant_.const_view();

  int stack_size = 1;
  stack[0] = root;
  // A redundant root is not explored: return the root alone.
  if (is_redundant[root]) return absl::MakeSpan(stack, 1);
  is_marked_.Set(root);
  if constexpr (fill_implied_by) {
    implied_by_[root] = root;
  }
  auto implies_something = implies_something_.const_view();
  for (int j = 0; j < stack_size; ++j) {
    const Literal current = stack[j];
    if (!implies_something[current]) continue;

    // Account for the work so callers can enforce a deterministic budget.
    work_done_in_mark_descendants_ +=
        implications_and_amos_[current].num_literals();
    for (const Literal l : implications_and_amos_[current].literals()) {
      if (!is_marked[l] && !is_redundant[l]) {
        is_marked_.SetUnsafe(is_marked, l);
        if constexpr (fill_implied_by) {
          implied_by_[l] = current;
        }
        stack[stack_size++] = l;
      }
    }

    // current belongs to these at-most-ones, so it implies the negation of
    // each of their other members.
    for (const int start : implications_and_amos_[current].offsets()) {
      work_done_in_mark_descendants_ += AtMostOne(start).size();
      for (const Literal l : AtMostOne(start)) {
        if (l == current) continue;
        if (!is_marked[l.NegatedIndex()] && !is_redundant[l.NegatedIndex()]) {
          is_marked_.SetUnsafe(is_marked, l.NegatedIndex());
          if constexpr (fill_implied_by) {
            implied_by_[l.Negated()] = current;
          }
          stack[stack_size++] = l.Negated();
        }
      }
    }
  }
  work_done_in_mark_descendants_ += stack_size;
  return absl::MakeSpan(stack, stack_size);
}
3168
3169std::vector<Literal> BinaryImplicationGraph::ExpandAtMostOne(
3170 const absl::Span<const Literal> at_most_one,
3171 int64_t max_num_explored_nodes) {
3172 std::vector<Literal> clique(at_most_one.begin(), at_most_one.end());
3173
3174 // Optim.
3175 for (const Literal l : clique) {
3176 if (!implies_something_[l]) {
3177 return clique;
3178 }
3179 }
3180
3181 // TODO(user): Improve algorithm. If we do a dfs, we can know if a literal
3182 // has no descendant in the current intersection. We can keep such literal
3183 // marked.
3184 std::vector<LiteralIndex> intersection;
3185 for (int i = 0; i < clique.size(); ++i) {
3186 if (work_done_in_mark_descendants_ > max_num_explored_nodes) break;
3187 is_marked_.ClearAndResize(LiteralIndex(implications_and_amos_.size()));
3188 MarkDescendants(clique[i]);
3189
3190 if (i == 0) {
3191 intersection = is_marked_.PositionsSetAtLeastOnce();
3192 for (const Literal l : clique) is_marked_.Clear(l.NegatedIndex());
3193 }
3194
3195 int new_size = 0;
3196 is_marked_.Clear(clique[i].NegatedIndex()); // TODO(user): explain.
3197 for (const LiteralIndex index : intersection) {
3198 if (is_marked_[index]) intersection[new_size++] = index;
3199 }
3200 intersection.resize(new_size);
3201 if (intersection.empty()) break;
3202
3203 // TODO(user): If the intersection is small compared to the members of the
3204 // clique left to explore, we could look at the descendants of the negated
3205 // intersection instead.
3206
3207 // Expand?
3208 if (i + 1 == clique.size()) {
3209 clique.push_back(Literal(intersection.back()).Negated());
3210 intersection.pop_back();
3211 }
3212 }
3213 return clique;
3214}
3215
// TODO(user): lazy cleanup the lists on is_removed_?
// TODO(user): Mark fixed variable as is_removed_ for faster iteration?
    Literal literal) {
  DCHECK(!is_removed_[literal]);

  // Clear old state.
  for (const Literal l : direct_implications_) {
    in_direct_implications_[l] = false;
  }
  direct_implications_.clear();

  // Fill new state: collect the unassigned, non-removed literals directly
  // implied by `literal`, deduplicated via in_direct_implications_.
  const VariablesAssignment& assignment = trail_->Assignment();
  DCHECK(!assignment.LiteralIsAssigned(literal));
  for (const Literal l : implications_and_amos_[literal].literals()) {
    if (l == literal) continue;
    if (assignment.LiteralIsAssigned(l)) continue;
    if (!is_removed_[l] && !in_direct_implications_[l]) {
      in_direct_implications_[l] = true;
      direct_implications_.push_back(l);
    }
  }
  if (is_redundant_[literal]) {
    DCHECK(implications_and_amos_[literal].offsets().empty());
  }
  // An at-most-one containing `literal` gives literal => not(l) for each of
  // its other members l.
  for (const int start : implications_and_amos_[literal].offsets()) {
    for (const Literal l : AtMostOne(start)) {
      if (l == literal) continue;
      if (assignment.LiteralIsAssigned(l)) continue;
      if (!is_removed_[l] && !in_direct_implications_[l.NegatedIndex()]) {
        in_direct_implications_[l.NegatedIndex()] = true;
        direct_implications_.push_back(l.Negated());
      }
    }
  }
  estimated_sizes_[literal] = direct_implications_.size();
  return direct_implications_;
}
3255
3256absl::Span<const Literal> BinaryImplicationGraph::AtMostOne(int start) const {
3257 const int size = at_most_one_buffer_[start].Index().value();
3258 return absl::MakeSpan(&at_most_one_buffer_[start + 1], size);
3259}
3260
  const int size1 = implications_and_amos_[lhs].num_literals();
  const int size2 = implications_and_amos_[lhs].num_offsets();
  if (size1 + size2 == 0) return kNoLiteralIndex;

  // Pick uniformly among direct implications and at-most-one constraints.
  const int choice = absl::Uniform<int>(*random_, 0, size1 + size2);
  if (choice < size1) {
    return implications_and_amos_[lhs].literals()[choice].Index();
  }

  // An at-most-one containing lhs implies the negation of its other members.
  const absl::Span<const Literal> amo =
      AtMostOne(implications_and_amos_[lhs].offsets()[choice - size1]);
  CHECK_GE(amo.size(), 2);
  const int first_choice = absl::Uniform<int>(*random_, 0, amo.size());
  const Literal lit = amo[first_choice];
  if (lit != lhs) return lit.NegatedIndex();

  // We are unlucky and just picked the wrong literal: take a different one.
  // Drawing in [0, size - 1) and shifting keeps the choice uniform over the
  // remaining entries.
  int next_choice = absl::Uniform<int>(*random_, 0, amo.size() - 1);
  if (next_choice >= first_choice) {
    next_choice += 1;
  }
  CHECK_NE(amo[next_choice], lhs);
  return amo[next_choice].NegatedIndex();
}
3286
    bool* is_unsat) {
  const int saved_index = propagation_trail_index_;
  DCHECK_EQ(propagation_trail_index_, trail_->Index());  // Propagation done.

  const VariablesAssignment& assignment = trail_->Assignment();
  if (assignment.VariableIsAssigned(var)) return false;

  const Literal literal(var, true);
  direct_implications_of_negated_literal_ =
      DirectImplications(literal.Negated());
  DirectImplications(literal);  // Fill in_direct_implications_.
  for (const Literal l : direct_implications_of_negated_literal_) {
    if (in_direct_implications_[l]) {
      // not(l) => literal => l, so l must be true in any model: fix it.
      if (!FixLiteral(l)) {
        *is_unsat = true;
        return false;
      }
    }
  }

  // True iff at least one literal was fixed by this call.
  return propagation_trail_index_ > saved_index;
}
3311
    BooleanVariable var) {
  const Literal literal(var, true);
  int64_t result = 0;
  direct_implications_of_negated_literal_ =
      DirectImplications(literal.Negated());
  const int64_t s1 = DirectImplications(literal).size();
  // Eliminating var by resolution creates one implication per pair
  // (a => var, var => b), i.e. |implications(not(var))| * |implications(var)|.
  for (const Literal l : direct_implications_of_negated_literal_) {
    result += s1;

    // We should have dealt with that in FindFailedLiteralAroundVar().
    DCHECK(!in_direct_implications_[l]);

    // l => literal => l: equivalent variable!
    if (in_direct_implications_[l.NegatedIndex()]) result--;
  }
  return result;
}
3330
// For all possible a => var => b, add a => b.
    BooleanVariable var, std::deque<std::vector<Literal>>* postsolve_clauses) {
  const Literal literal(var, true);
  DCHECK(!is_removed_[literal.Index()]);
  DCHECK(!is_redundant_[literal.Index()]);

  // Resolution step: for each a => var and var => b, add a => b directly.
  direct_implications_of_negated_literal_ =
      DirectImplications(literal.Negated());
  for (const Literal b : DirectImplications(literal)) {
    if (is_removed_[b]) continue;
    DCHECK(!is_redundant_[b]);
    // not(b) loses one direct implication (the one towards not(var)).
    estimated_sizes_[b.NegatedIndex()]--;
    for (const Literal a_negated : direct_implications_of_negated_literal_) {
      if (a_negated.Negated() == b) continue;
      if (is_removed_[a_negated]) continue;
      AddImplication(a_negated.Negated(), b);
    }
  }
  for (const Literal a_negated : direct_implications_of_negated_literal_) {
    if (is_removed_[a_negated]) continue;
    DCHECK(!is_redundant_[a_negated]);
    estimated_sizes_[a_negated.NegatedIndex()]--;
  }

  // Notify the deletion to the proof checker and the postsolve.
  // Note that we want var first in these clauses for the postsolve.
  for (const Literal b : direct_implications_) {
    postsolve_clauses->push_back({Literal(var, false), b});
  }
  for (const Literal a_negated : direct_implications_of_negated_literal_) {
    postsolve_clauses->push_back({Literal(var, true), a_negated});
  }

  // We need to remove any occurrence of var in our implication lists, this will
  // be delayed to the CleanupAllRemovedVariables() call.
  for (const LiteralIndex index : {literal.Index(), literal.NegatedIndex()}) {
    is_removed_[index] = true;
    implications_and_amos_[index].ClearLiterals();
    implications_and_amos_[index].ShrinkToFit();
    if (!is_redundant_[index]) {
      ++num_redundant_literals_;
      is_redundant_.Set(index);
    }
  }
}
3377
    std::deque<std::vector<Literal>>* postsolve_clauses) {
  for (LiteralIndex a(0); a < implications_and_amos_.size(); ++a) {
    // A redundant (equivalent) literal not yet removed: record a postsolve
    // clause, then drop all its implications.
    if (is_redundant_[a] && !is_removed_[a]) {
      postsolve_clauses->push_back(
      is_removed_[a] = true;
      implications_and_amos_[a].ClearLiterals();
      implications_and_amos_[a].ShrinkToFit();
      continue;
    }

    // Otherwise, compact the implication list by filtering out redundant
    // targets in place.
    int new_size = 0;
    auto& implication = implications_and_amos_[a];
    for (const Literal l : implication.literals()) {
      if (!is_redundant_[l]) {
        implication.literals()[new_size++] = l;
      }
    }
    implication.TruncateLiterals(new_size);
  }
}
3400
  const VariablesAssignment& assignment = trail_->Assignment();
  for (LiteralIndex a(0); a < implications_and_amos_.size(); ++a) {
    if (is_removed_[a] || assignment.LiteralIsAssigned(Literal(a))) {
      if (DEBUG_MODE && assignment.LiteralIsTrue(Literal(a))) {
        // The code assumes that everything is already propagated.
        // Otherwise we will remove implications that didn't propagate yet!
        for (const Literal lit : implications_and_amos_[a].literals()) {
          DCHECK(trail_->Assignment().LiteralIsTrue(lit));
        }
      }

      implications_and_amos_[a].ClearLiterals();
      implications_and_amos_[a].ShrinkToFit();
      continue;
    }

    // Compact the implication list in place, dropping removed and true
    // targets (false ones would have propagated already, see above).
    int new_size = 0;
    auto& implication = implications_and_amos_[a];
    for (const Literal l : implication.literals()) {
      if (!is_removed_[l] && !assignment.LiteralIsTrue(l)) {
        implication.literals()[new_size++] = l;
      }
    }
    implication.TruncateLiterals(new_size);
  }

  // Clean-up at most ones.
  for (auto& v : implications_and_amos_) {
    v.ClearOffsets();
  }
  CHECK(CleanUpAndAddAtMostOnes(/*base_index=*/0));

  // Note that to please the invariant() we also removed fixed literal above.
  DCHECK(InvariantsAreOk());
}
3437
// Debug/validation routine: exhaustively checks the internal consistency of
// the implication graph. Returns true if all invariants hold (or if the time
// limit is reached before a violation is found), false otherwise.
//
// Checked invariants:
//  - Fixed or removed literals have no stored implications.
//  - A redundant literal points only to its representative.
//  - Every implication a => b has its contrapositive not(b) => not(a).
//  - When LRAT proofs are enabled, every implication has a clause id.
//  - When the graph is a DAG, reverse_topological_order_ is consistent.
//  - At-most-one constraints reference no removed literal, and each literal
//    of an amo knows the amo's start offset.
bool BinaryImplicationGraph::InvariantsAreOk() {
  if (time_limit_->LimitReached()) return true;
  // We check that if a => b then not(b) => not(a).
  absl::flat_hash_set<std::pair<LiteralIndex, LiteralIndex>> seen;
  int num_redundant = 0;
  int num_fixed = 0;
  // Expensive scan: bail out early (reporting success) if the time limit
  // trips; this function must never block the solver.
  TimeLimitCheckEveryNCalls time_limit_check(100, time_limit_);
  for (LiteralIndex a_index(0); a_index < implications_and_amos_.size();
       ++a_index) {
    if (time_limit_check.LimitReached()) return true;
    // Fixed literals must have had their implication lists cleared.
    if (trail_->Assignment().LiteralIsAssigned(Literal(a_index))) {
      ++num_fixed;
      if (!implications_and_amos_[a_index].literals().empty()) {
        LOG(ERROR) << "Fixed literal has non-cleared implications";
        return false;
      }
      continue;
    }
    // Same for removed literals.
    if (is_removed_[a_index]) {
      if (!implications_and_amos_[a_index].literals().empty()) {
        LOG(ERROR) << "Removed literal has non-cleared implications";
        return false;
      }
      continue;
    }
    // A redundant literal keeps exactly one implication: its representative.
    if (is_redundant_[a_index]) {
      ++num_redundant;
      if (implications_and_amos_[a_index].num_literals() != 1) {
        LOG(ERROR)
            << "Redundant literal should only point to its representative "
            << Literal(a_index) << " => "
            << implications_and_amos_[a_index].literals();
        return false;
      }
    }
    // Record all implications so the second pass can look up contrapositives,
    // and check that each has an LRAT clause id when proofs are enabled.
    for (const Literal b : implications_and_amos_[a_index].literals()) {
      seen.insert({a_index, b.Index()});
      if (lrat_proof_handler_ != nullptr &&
          GetClauseId(Literal(a_index).Negated(), b) == kNoClauseId) {
        LOG(ERROR) << "No clause id for " << Literal(a_index) << " => " << b;
        return false;
      }
    }
  }

  // Check that reverse topo order is correct.
  util_intops::StrongVector<LiteralIndex, int> lit_to_order;
  if (is_dag_) {
    // Map each literal to its position in the reverse topological order;
    // -1 marks literals absent from the order (e.g. redundant ones).
    lit_to_order.assign(implications_and_amos_.size(), -1);
    for (int i = 0; i < reverse_topological_order_.size(); ++i) {
      lit_to_order[reverse_topological_order_[i]] = i;
    }
  }

  VLOG(2) << "num_redundant " << num_redundant;
  VLOG(2) << "num_fixed " << num_fixed;
  // Second pass: verify contrapositives and the topological order.
  for (LiteralIndex a_index(0); a_index < implications_and_amos_.size();
       ++a_index) {
    if (time_limit_check.LimitReached()) return true;
    const LiteralIndex not_a_index = Literal(a_index).NegatedIndex();
    for (const Literal b : implications_and_amos_[a_index].literals()) {
      if (is_removed_[b]) {
        LOG(ERROR) << "A removed literal still appear! " << Literal(a_index)
                   << " => " << b;
        return false;
      }

      // a => b requires not(b) => not(a) to be stored as well.
      if (!seen.contains({b.NegatedIndex(), not_a_index})) {
        LOG(ERROR) << "We have " << Literal(a_index) << " => " << b
                   << " but not the reverse implication!";
        LOG(ERROR) << "redundant[a]: " << is_redundant_[a_index]
                   << " assigned[a]: "
                   << trail_->Assignment().LiteralIsAssigned(Literal(a_index))
                   << " removed[a]: " << is_removed_[a_index]
                   << " redundant[b]: " << is_redundant_[b] << " assigned[b]: "
                   << trail_->Assignment().LiteralIsAssigned(b)
                   << " removed[b]: " << is_removed_[b];

        return false;
      }

      // Test that if we have a dag, our topo order is correct.
      if (is_dag_ && !is_redundant_[b] && !is_redundant_[a_index]) {
        DCHECK_NE(lit_to_order[b], -1);
        DCHECK_LE(lit_to_order[b], lit_to_order[a_index]);
      }
    }
  }

  // Check the at-most ones.
  // First collect, for each literal, the buffer offsets of the amos that are
  // supposed to contain it.
  absl::flat_hash_set<std::pair<LiteralIndex, int>> lit_to_start;
  for (LiteralIndex i(0); i < implications_and_amos_.size(); ++i) {
    for (const int start : implications_and_amos_[i].offsets()) {
      lit_to_start.insert({i, start});
    }
  }

  // Then walk the amo buffer (each amo is followed by one extra slot, hence
  // the "+ 1" stride) and cross-check against the collected offsets.
  for (int start = 0; start < at_most_one_buffer_.size();) {
    const absl::Span<const Literal> amo = AtMostOne(start);
    for (const Literal l : amo) {
      if (is_removed_[l]) {
        LOG(ERROR) << "A removed literal still appear in an amo " << l;
        return false;
      }
      if (!lit_to_start.contains({l, start})) {
        return false;
      }
    }
    start += amo.size() + 1;
  }

  return true;
}
3551
3552absl::Span<const Literal> BinaryImplicationGraph::NextAtMostOne() {
3553 if (at_most_one_iterator_ >= at_most_one_buffer_.size()) {
3554 return absl::Span<const Literal>();
3555 }
3556
3557 const absl::Span<const Literal> result = AtMostOne(at_most_one_iterator_);
3558 DCHECK(!result.empty());
3559 at_most_one_iterator_ += result.size() + 1;
3560 return result;
3561}
3562
3564 : SatPropagator("BinaryImplicationGraph"),
3565 stats_("BinaryImplicationGraph"),
3566 time_limit_(model->GetOrCreate<TimeLimit>()),
3567 random_(model->GetOrCreate<ModelRandomGenerator>()),
3568 trail_(model->GetOrCreate<Trail>()),
3569 clause_id_generator_(model->GetOrCreate<ClauseIdGenerator>()),
3570 lrat_proof_handler_(model->Mutable<LratProofHandler>()),
3571 at_most_one_max_expansion_size_(model->GetOrCreate<SatParameters>()
3572 ->at_most_one_max_expansion_size()) {
3573 trail_->RegisterPropagator(this);
3574 if (lrat_proof_handler_ != nullptr) {
3575 lrat_helper_ = new LratEquivalenceHelper(this);
3576 }
3577}
3578
3581 LOG(INFO) << stats_.StatString();
3582 LOG(INFO) << "num_redundant_implications " << num_redundant_implications_;
3583 });
3584 if (lrat_helper_ != nullptr) delete lrat_helper_;
3585}
3586
3587// ----- SatClause -----
3588
3589// static
3590SatClause* SatClause::Create(absl::Span<const Literal> literals) {
3591 DCHECK_GE(literals.size(), 2);
3592 SatClause* clause = reinterpret_cast<SatClause*>(
3593 ::operator new(sizeof(SatClause) + literals.size() * sizeof(Literal)));
3594 clause->size_ = literals.size();
3595 for (int i = 0; i < literals.size(); ++i) {
3596 clause->literals_[i] = literals[i];
3597 }
3598 return clause;
3599}
3600
// Note that for an attached clause, removing fixed literal is okay because if
// any of the watched literal is assigned, then the clause is necessarily true.
//
// Removes all literals whose variable is assigned from the clause, in place.
// Returns true if the clause is satisfied by the assignment (in which case
// the clause content is left in an unspecified but valid state), false
// otherwise (in which case size_ is shrunk to the remaining literals).
bool SatClause::RemoveFixedLiteralsAndTestIfTrue(
    const VariablesAssignment& assignment) {
  DCHECK(!IsRemoved());
  // literals_[0] and literals_[1] are the watched literals: if either one is
  // assigned, the watcher invariant guarantees the clause is already true.
  if (assignment.VariableIsAssigned(literals_[0].Variable()) ||
      assignment.VariableIsAssigned(literals_[1].Variable())) {
    DCHECK(IsSatisfied(assignment));
    return true;
  }
  // Fast-forward j to the first assigned literal (if any); everything before
  // it is unassigned and already in its final position.
  int j = 2;
  while (j < size_ && !assignment.VariableIsAssigned(literals_[j].Variable())) {
    ++j;
  }
  // Compact the remaining literals: keep unassigned ones (swapped down to
  // position j), and return true as soon as a true literal is found.
  for (int i = j; i < size_; ++i) {
    if (assignment.VariableIsAssigned(literals_[i].Variable())) {
      if (assignment.LiteralIsTrue(literals_[i])) return true;
    } else {
      std::swap(literals_[j], literals_[i]);
      ++j;
    }
  }
  // Only false literals were dropped; shrink the clause accordingly.
  size_ = j;
  return false;
}
3626
3627bool SatClause::IsSatisfied(const VariablesAssignment& assignment) const {
3628 for (const Literal literal : *this) {
3629 if (assignment.LiteralIsTrue(literal)) return true;
3630 }
3631 return false;
3632}
3633
3634std::string SatClause::DebugString() const {
3635 std::string result;
3636 for (const Literal literal : *this) {
3637 if (!result.empty()) result.append(" ");
3638 result.append(literal.DebugString());
3639 }
3640 return result;
3641}
3642
3643} // namespace sat
3644} // namespace operations_research
void FindStronglyConnectedComponents(const NodeIndex num_nodes, const Graph &graph, SccOutput *components)
double Get() const
Definition timer.h:44
void Start()
Definition timer.h:30
ConstView const_view() const
Definition bitset.h:459
void Set(IntegerType index)
Definition bitset.h:878
Bitset64< IntegerType >::View BitsetView()
Definition bitset.h:892
void SetUnsafe(typename Bitset64< IntegerType >::View view, IntegerType index)
Definition bitset.h:896
void AppendImplicationChains(absl::Span< const Literal > literals, std::vector< ClauseId > *clause_ids)
Definition clause.cc:1487
bool ComputeTransitiveReduction(bool log_info=false)
Definition clause.cc:2214
Literal RepresentativeOf(Literal l) const
Definition clause.h:765
const std::vector< std::vector< Literal > > & GenerateAtMostOnesWithLargeWeight(absl::Span< const Literal > literals, absl::Span< const double > lp_values, absl::Span< const double > reduced_costs)
Definition clause.cc:2835
void Reimply(Trail *trail, int old_trail_index) final
Definition clause.cc:1382
std::vector< Literal > ExpandAtMostOneWithWeight(absl::Span< const Literal > at_most_one, const util_intops::StrongVector< LiteralIndex, bool > &can_be_included, const util_intops::StrongVector< LiteralIndex, double > &expanded_lp_values)
Definition clause.cc:2754
bool TransformIntoMaxCliques(std::vector< std::vector< Literal > > *at_most_ones, int64_t max_num_explored_nodes=1e8)
Definition clause.cc:2500
absl::Span< const Literal > GetAllImpliedLiterals(Literal root)
Definition clause.cc:3108
int64_t NumImplicationOnVariableRemoval(BooleanVariable var)
Definition clause.cc:3312
absl::Span< const Literal > NextAtMostOne()
Definition clause.cc:3552
bool FixLiteral(Literal true_literal, absl::Span< const ClauseId > clause_ids={})
Definition clause.cc:1100
bool AddImplication(Literal a, Literal b)
Definition clause.h:668
void RemoveBooleanVariable(BooleanVariable var, std::deque< std::vector< Literal > > *postsolve_clauses)
Definition clause.cc:3332
void MinimizeConflictFirst(const Trail &trail, std::vector< Literal > *c, SparseBitset< BooleanVariable > *marked, std::vector< ClauseId > *clause_ids, bool also_use_decisions)
Definition clause.cc:1397
bool DetectEquivalences(bool log_info=false)
Definition clause.cc:1950
const std::vector< Literal > & DirectImplications(Literal literal)
Definition clause.cc:3218
absl::Span< const Literal > Reason(const Trail &trail, int trail_index, int64_t conflict_id) const final
Definition clause.cc:1377
bool MergeAtMostOnes(absl::Span< std::vector< Literal > > at_most_ones, int64_t max_num_explored_nodes=1e8, double *dtime=nullptr)
Definition clause.cc:2604
void RemoveAllRedundantVariables(std::deque< std::vector< Literal > > *postsolve_clauses)
Definition clause.cc:3378
bool FindFailedLiteralAroundVar(BooleanVariable var, bool *is_unsat)
Definition clause.cc:3287
LiteralIndex RandomImpliedLiteral(Literal lhs)
Definition clause.cc:3261
ClauseId GetClauseId(Literal a, Literal b) const
Definition clause.h:672
ABSL_MUST_USE_RESULT bool AddAtMostOne(absl::Span< const Literal > at_most_one)
Definition clause.cc:1075
std::vector< absl::Span< const Literal > > HeuristicAmoPartition(std::vector< Literal > *literals)
Definition clause.cc:3037
SatClause * InprocessingAddClause(absl::Span< const Literal > new_clause)
Definition clause.cc:605
bool AddClause(ClauseId id, absl::Span< const Literal > literals, Trail *trail, int lbd)
Definition clause.cc:322
ABSL_MUST_USE_RESULT bool InprocessingRewriteClause(SatClause *clause, absl::Span< const Literal > new_clause, absl::Span< const ClauseId > clause_ids={})
Definition clause.cc:527
void LazyDelete(SatClause *clause, DeletionSourceForStat source)
Definition clause.cc:436
bool IsRemovable(SatClause *const clause) const
Definition clause.h:254
bool RemoveFixedLiteralsAndTestIfTrue(SatClause *clause)
Definition clause.cc:514
ABSL_MUST_USE_RESULT bool InprocessingFixLiteral(Literal true_literal, absl::Span< const ClauseId > clause_ids={})
Definition clause.cc:493
absl::Span< const Literal > Reason(const Trail &trail, int trail_index, int64_t conflict_id) const final
Definition clause.cc:277
ABSL_MUST_USE_RESULT bool InprocessingAddUnitClause(ClauseId unit_clause_id, Literal true_literal)
Definition clause.cc:473
SatClause * AddRemovableClause(ClauseId id, absl::Span< const Literal > literals, Trail *trail, int lbd)
Definition clause.cc:333
void ChangeLbdIfBetter(SatClause *clause, int new_lbd)
Definition clause.cc:498
ClauseId ReasonClauseId(Literal literal) const
Definition clause.cc:732
void Reimply(Trail *trail, int old_trail_index) final
Definition clause.cc:283
SatClause * ReasonClauseOrNull(BooleanVariable var) const
Definition clause.cc:299
SatClause * ReasonClause(int trail_index) const
Definition clause.cc:295
void Resize(int num_variables)
Definition clause.cc:127
void Attach(SatClause *clause, Trail *trail)
Definition clause.cc:407
void SetClauseId(const SatClause *clause, ClauseId id)
Definition clause.h:300
void AppendClauseIdsFixing(absl::Span< const Literal > literals, std::vector< ClauseId > *clause_ids, LiteralIndex decision=kNoLiteralIndex, std::optional< absl::FunctionRef< ClauseId(int, int)> > root_literals={})
Definition clause.cc:756
ClauseId GetClauseId(const SatClause *clause) const
Definition clause.h:295
bool ClauseIsUsedAsReason(SatClause *clause) const
Definition clause.cc:312
bool Propagate(Trail *trail) final
Definition clause.cc:142
int AddLiterals(const std::vector< L > &wrapped_values)
Definition util.h:967
void DetectInclusions(const std::function< void(int subset, int superset)> &process)
Definition inclusion.h:282
void SetWorkLimit(uint64_t work_limit)
Definition inclusion.h:86
std::string DebugString() const
Definition sat_base.h:101
LiteralIndex NegatedIndex() const
Definition sat_base.h:93
LiteralIndex Index() const
Definition sat_base.h:92
BooleanVariable Variable() const
Definition sat_base.h:88
void NewComponent(absl::Span< const int32_t > component)
Definition clause.cc:1732
LratEquivalenceHelper(BinaryImplicationGraph *graph)
Definition clause.cc:1720
void ProcessComponent(LiteralIndex representative)
Definition clause.cc:1797
void ProcessUnsatComponent(Literal literal, Literal representative)
Definition clause.cc:1853
bool FixAllLiteralsInComponent(LiteralIndex fixed_literal, bool all_true)
Definition clause.cc:1747
void ComputeImplicationChain(Literal a, Literal b, util_intops::StrongVector< LiteralIndex, LiteralIndex > &parents, std::vector< ClauseId > &clause_ids)
Definition clause.cc:1873
void DeleteClause(ClauseId id, absl::Span< const Literal > clause)
bool AddInferredClause(ClauseId id, absl::Span< const Literal > clause, absl::Span< const ClauseId > unit_ids, bool exported=false)
static SatClause * Create(absl::Span< const Literal > literals)
Definition clause.cc:3590
absl::Span< const Literal > AsSpan() const
Definition clause.h:100
absl::Span< const Literal > PropagationReason() const
Definition clause.h:95
bool IsSatisfied(const VariablesAssignment &assignment) const
Definition clause.cc:3627
Literal PropagatedLiteral() const
Definition clause.h:90
std::string DebugString() const
Definition clause.cc:3634
SatPropagator(const std::string &name)
Definition sat_base.h:786
SccGraph(SccFinder *finder, util_intops::StrongVector< LiteralIndex, LiteralsOrOffsets > *implications_and_offsets, std::vector< Literal > *at_most_one_buffer, util_intops::StrongVector< LiteralIndex, LiteralIndex > *parents, std::vector< Literal > *to_fix, std::vector< Literal > *parent_of_to_fix)
Definition clause.cc:1588
StronglyConnectedComponentsFinder< int32_t, SccGraph, CompactVectorVector< int32_t, int32_t > > SccFinder
Definition clause.cc:1584
const std::vector< int32_t > & operator[](int32_t node) const
Definition clause.cc:1609
void FindSubsets(absl::Span< const int > superset, int *next_index_to_try, const std::function< void(int subset)> &process)
Definition inclusion.h:481
void SetWorkLimit(uint64_t work_limit)
Definition inclusion.h:176
void EnqueueWithUnitReason(Literal true_literal, ClauseId clause_id)
Definition sat_base.h:388
void EnqueueAtLevel(Literal true_literal, int level)
Definition sat_base.h:380
bool LiteralIsTrue(Literal literal) const
Definition sat_base.h:404
bool LiteralIsFalse(Literal literal) const
Definition sat_base.h:407
void EnqueueAtLevel(Literal true_literal, int propagator_id, int level)
Definition sat_base.h:358
std::vector< Literal > * MutableConflict()
Definition sat_base.h:613
ClauseId GetUnitClauseId(BooleanVariable var) const
Definition sat_base.h:537
EnqueueHelper GetEnqueueHelper(int propagator_id)
Definition sat_base.h:418
int AssignmentLevel(Literal lit) const
Definition sat_base.h:661
void SetFailingSatClause(SatClause *clause)
Definition sat_base.h:634
bool ChronologicalBacktrackingEnabled() const
Definition sat_base.h:678
const VariablesAssignment & Assignment() const
Definition sat_base.h:654
bool VariableIsAssigned(BooleanVariable var) const
Definition sat_base.h:214
bool LiteralIsAssigned(Literal literal) const
Definition sat_base.h:209
bool LiteralIsFalse(Literal literal) const
Definition sat_base.h:203
bool LiteralIsTrue(Literal literal) const
Definition sat_base.h:206
void assign(size_type n, const value_type &val)
void push_back(const value_type &val)
void resize(size_type new_size)
void STLDeleteElements(T *container)
Definition stl_util.h:369
void STLClearObject(T *obj)
Definition stl_util.h:120
void OpenSourceEraseIf(Container &c, Pred pred)
Definition util.h:55
const LiteralIndex kNoLiteralIndex(-1)
constexpr ClauseId kNoClauseId(0)
const BooleanVariable kNoBooleanVariable(-1)
OR-Tools root namespace.
ClosedInterval::Iterator end(ClosedInterval interval)
const bool DEBUG_MODE
Definition radix_sort.h:266
trees with all degrees equal w the current value of w
trees with all degrees equal to
#define IF_STATS_ENABLED(instructions)
Definition stats.h:412
#define SCOPED_TIME_STAT(stats)
Definition stats.h:419