25#include "absl/algorithm/container.h"
26#include "absl/base/log_severity.h"
27#include "absl/container/flat_hash_map.h"
28#include "absl/container/flat_hash_set.h"
29#include "absl/flags/flag.h"
30#include "absl/log/check.h"
31#include "absl/numeric/int128.h"
32#include "absl/status/status.h"
33#include "absl/strings/str_format.h"
34#include "absl/strings/str_join.h"
35#include "absl/time/clock.h"
36#include "absl/time/time.h"
37#include "absl/types/span.h"
// Command-line flag (read below via absl::GetFlag()): when true, the search
// prefers mappings that keep generator supports small, possibly at a runtime
// cost. Defaults to false.
47ABSL_FLAG(
    bool, minimize_permutation_support_size,
    false,
48    "Tweak the algorithm to try and minimize the support size"
49    " of the generators produced. This may negatively impact the"
50    " performance, but works great on the sat_holeXXX benchmarks"
51    " to reduce the support size.");
// NOTE(review): incomplete extraction of CountTriangles(graph, max_degree)
// (signature, some braces and the per-node 'triangles' accumulator init are
// missing). Visible logic: build a hash-set of all (tail, head) arcs for O(1)
// membership tests, then for every node whose out-degree is <= max_degree,
// count neighbor pairs (neigh1, neigh2) joined by an arc.
59  std::vector<int> num_triangles(graph.num_nodes(), 0);
// One entry per arc; reserve to avoid rehashing during insertion.
60  absl::flat_hash_set<std::pair<int, int>> arcs;
61  arcs.reserve(graph.num_arcs());
62  for (
int a = 0; a < graph.num_arcs(); ++a) {
63    arcs.insert({graph.Tail(a), graph.Head(a)});
// High-degree nodes are skipped: the double loop below is O(degree^2).
65  for (
int node = 0; node < graph.num_nodes(); ++node) {
66    if (graph.OutDegree(node) > max_degree)
continue;
68    for (
int neigh1 : graph[node]) {
69      for (
int neigh2 : graph[node]) {
70        if (arcs.contains({neigh1, neigh2})) ++triangles;
73    num_triangles[node] = triangles;
// NOTE(review): incomplete extraction of LocalBfs (several lines, including
// the 'num_settled' declaration and some closing braces, are missing).
// Visible logic: a BFS from 'source' that records visited nodes in 'visited'
// and, in 'num_within_radius', the cumulative number of nodes within each
// successive distance, stopping early once 'stop_after_num_nodes' nodes are
// settled. 'tmp_mask' is a caller-provided scratch mask that must be all
// false on entry and is restored to all false before returning.
78void LocalBfs(const ::util::StaticGraph<int, int>& graph,
int source,
79              int stop_after_num_nodes, std::vector<int>* visited,
80              std::vector<int>* num_within_radius,
84              std::vector<bool>* tmp_mask) {
85  const int n = graph.num_nodes();
87  num_within_radius->clear();
88  num_within_radius->push_back(1);
// Precondition: the scratch mask is sized to the graph and fully cleared.
89  DCHECK_EQ(tmp_mask->size(), n);
90  DCHECK(absl::c_find(*tmp_mask,
true) == tmp_mask->end());
91  visited->push_back(source);
92  (*tmp_mask)[source] =
true;
// 'next_distance_change' marks the index in 'visited' where the current
// BFS layer ends.
94  int next_distance_change = 1;
95  while (num_settled < visited->size()) {
96    const int from = (*visited)[num_settled++];
97    for (
const int child : graph[from]) {
98      if ((*tmp_mask)[child])
continue;
99      (*tmp_mask)[child] =
true;
100      visited->push_back(child);
102    if (num_settled == next_distance_change) {
104      num_within_radius->push_back(visited->size());
105      if (num_settled >= stop_after_num_nodes)
break;
106      next_distance_change = visited->size();
// Restore the scratch mask so the next caller sees it all-false again.
110  for (
const int node : *visited) (*tmp_mask)[node] =
false;
// If the BFS exhausted the component, the last radius entry duplicates the
// previous one; drop it.
113  if (num_settled == visited->size()) {
114    DCHECK_GE(num_within_radius->size(), 2);
115    DCHECK_EQ(num_within_radius->back(),
116              (*num_within_radius)[num_within_radius->size() - 2]);
117    num_within_radius->pop_back();
// Swaps the first and last elements of "v". Safe no-op on an empty vector
// (the visible original dereferenced (*v)[0] unconditionally, which is
// undefined behavior for an empty input); a single-element vector is
// trivially unchanged.
void SwapFrontAndBack(std::vector<int>* v) {
  if (v->empty()) return;  // Guard: front()/back() on empty is UB.
  std::swap((*v)[0], v->back());
}
// NOTE(review): incomplete extraction (signature and trailing returns are
// missing; presumably this is PartitionsAreCompatibleAfterPartIndex, judging
// by the call site further down). Visible logic: two partitions are
// compatible iff they have the same number of parts and, for every part at
// or after 'part_index', matching sizes and matching parent parts.
131  const int num_parts = p1.NumParts();
132  if (p2.NumParts() != num_parts)
return false;
133  for (
int p = part_index; p < num_parts; ++p) {
134    if (p1.SizeOfPart(p) != p2.SizeOfPart(p) ||
135        p1.ParentOfPart(p) != p2.ParentOfPart(p)) {
// NOTE(review): incomplete extraction of ListMapsToList (the 'permutation'
// parameter, the 'match' local and some braces are missing). Visible logic:
// checks that 'permutation' maps the multiset l1 onto l2, using
// 'tmp_node_mask' as scratch: mark every element of l2, then unmark the
// image of every element of l1; a size mismatch or an unmarked image means
// no match. The mask is restored to all-false before returning.
151bool ListMapsToList(
const List& l1,
const List& l2,
153                    std::vector<bool>* tmp_node_mask) {
// Tracks |l2| - |l1| incrementally; must end at zero for a match.
154  int num_elements_delta = 0;
156  for (
const int mapped_x : l2) {
157    ++num_elements_delta;
158    (*tmp_node_mask)[mapped_x] =
true;
160  for (
const int x : l1) {
161    --num_elements_delta;
162    const int mapped_x = permutation.ImageOf(x);
163    if (!(*tmp_node_mask)[mapped_x]) {
167    (*tmp_node_mask)[mapped_x] =
false;
169  if (num_elements_delta != 0) match =
false;
// Cleanup: clear any still-set bits so the scratch mask is reusable.
172  for (
const int x : l2) (*tmp_node_mask)[
x] =
false;
// NOTE(review): tail of the GraphSymmetryFinder constructor's initializer
// list plus the start of its body (the constructor header and earlier
// initializers are missing from this extraction). All scratch members are
// sized to the number of nodes; 'tmp_nodes_with_degree_' needs one bucket
// per possible degree, hence NumNodes() + 1.
180      tmp_dynamic_permutation_(NumNodes()),
181      tmp_node_mask_(NumNodes(), false),
182      tmp_degree_(NumNodes(), 0),
183      tmp_nodes_with_degree_(NumNodes() + 1) {
// Default to an internal dummy time limit until the caller installs one.
185  time_limit_ = &dummy_time_limit_;
186  tmp_partition_.Reset(NumNodes());
// NOTE(review): incomplete extraction of the reverse-adjacency-list
// construction (inner arc loops and some index arithmetic lines are
// missing). Visible logic is the classic counting-sort layout: count
// incoming arcs per head (shifted by 2 so the later pass can use the slot
// at +1 as a running cursor — TODO confirm against the full source), prefix-
// sum the counts, then scatter arc tails into 'flattened_reverse_adj_lists_'.
193  reverse_adj_list_index_.assign(graph.
num_nodes() + 2, 0);
194  for (
const int node : graph.
AllNodes()) {
196      ++reverse_adj_list_index_[graph.
Head(arc) + 2];
// Turn per-head counts into starting offsets.
202  std::partial_sum(reverse_adj_list_index_.begin() + 2,
203                   reverse_adj_list_index_.end(),
204                   reverse_adj_list_index_.begin() + 2);
// -1 sentinels let the DCHECK below verify every slot was filled.
208  flattened_reverse_adj_lists_.assign(graph.
num_arcs(), -1);
209  for (
const int node : graph.
AllNodes()) {
211      flattened_reverse_adj_lists_[reverse_adj_list_index_[graph.
Head(arc) +
220  for (
const int i : flattened_reverse_adj_lists_) DCHECK_NE(i, -1);
// NOTE(review): incomplete extraction of IsGraphAutomorphism (the loop
// headers over the permutation's support and the early-return lines are
// missing). Visible logic: for every displaced node, the outgoing adjacency
// list must map onto the image's outgoing list; if the graph is directed
// (reverse adjacency structure present), incoming arcs are checked too.
229    if (image ==
base)
continue;
230    if (!ListMapsToList(graph_[
base], graph_[image], permutation,
// Non-empty reverse index means the graph is directed: also verify the
// incoming-arc neighborhoods.
235  if (!reverse_adj_list_index_.empty()) {
240      if (image ==
base)
continue;
241      if (!ListMapsToList(TailsOfIncomingArcsTo(
base),
242                          TailsOfIncomingArcsTo(image), permutation,
// NOTE(review): incomplete extraction (the template declaration, a
// 'partition' parameter visible at the call sites, and the singleton-skip
// logic implied by the name are missing). Visible logic: bump
// (*node_count)[node] for each node in 'nodes', recording first-touched
// nodes in 'nodes_seen' so the caller can cheaply reset the counters, and
// charge the work to '*num_operations' for deterministic time accounting.
255inline void IncrementCounterForNonSingletons(
const T& nodes,
257                                             std::vector<int>* node_count,
258                                             std::vector<int>* nodes_seen,
259                                             int64_t* num_operations) {
260  *num_operations += nodes.end() - nodes.begin();
261  for (
const int node : nodes) {
263    const int count = ++(*node_count)[node];
// A count of 1 means this node was untouched before; remember it so the
// caller can zero only the touched entries afterwards.
264    if (count == 1) nodes_seen->push_back(node);
// NOTE(review): incomplete extraction of
// RecursivelyRefinePartitionByAdjacency (the outer loop bounds, the
// per-part node loop, and 'max_degree' initialization are missing). Visible
// logic: for each unrefined part, tally adjacency degrees of affected nodes
// (outgoing, and incoming when the graph is directed), bucket nodes by
// degree, and refine the partition once per non-empty degree bucket.
// Reuse tmp_stack_ as scratch; it must be empty between uses.
272  std::vector<int>& tmp_nodes_with_nonzero_degree = tmp_stack_;
// Accumulates an operation count converted to deterministic time at the end.
281  int64_t num_operations = 0;
// Directions to scan: always outgoing; incoming too iff directed.
294  std::vector<bool> adjacency_directions(1,
true);
295  if (!reverse_adj_list_index_.empty()) {
296    adjacency_directions.push_back(
false);
298  for (
int part_index = first_unrefined_part_index;
301    for (
const bool outgoing_adjacency : adjacency_directions) {
304      if (outgoing_adjacency) {
306          IncrementCounterForNonSingletons(
307              graph_[node], *partition, &tmp_degree_,
308              &tmp_nodes_with_nonzero_degree, &num_operations);
312          IncrementCounterForNonSingletons(
313              TailsOfIncomingArcsTo(node), *partition, &tmp_degree_,
314              &tmp_nodes_with_nonzero_degree, &num_operations);
319      num_operations += 3 + tmp_nodes_with_nonzero_degree.size();
// Bucket touched nodes by degree, resetting tmp_degree_ as we go.
320      for (
const int node : tmp_nodes_with_nonzero_degree) {
321        const int degree = tmp_degree_[node];
322        tmp_degree_[node] = 0;
323        max_degree = std::max(max_degree, degree);
324        tmp_nodes_with_degree_[degree].push_back(node);
326      tmp_nodes_with_nonzero_degree.clear();
// Refine once per degree class; buckets are cleared for reuse.
329      for (
int degree = 1; degree <= max_degree; ++degree) {
332        num_operations += 1 + 3 * tmp_nodes_with_degree_[degree].size();
333        partition->
Refine(tmp_nodes_with_degree_[degree]);
334        tmp_nodes_with_degree_[degree].clear();
// 1e-8 per elementary operation: deterministic-time calibration constant.
342  time_limit_->AdvanceDeterministicTime(1e-8 *
343                                        static_cast<double>(num_operations));
// NOTE(review): incomplete extraction of DistinguishNodeInPartition (the
// signature, the recursive refinement call and the mask-reset loop body are
// missing). Visible logic: split {node} off into its own part via Refine,
// then, if the caller asked for them, collect the new singleton parts
// created by the refinement, using tmp_node_mask_ to de-duplicate parents.
348  const int original_num_parts = partition->
NumParts();
// Refining on a single node isolates it in its own part.
349  partition->
Refine(std::vector<int>(1, node));
353  if (new_singletons !=
nullptr) {
354    new_singletons->clear();
355    for (
int p = original_num_parts; p < partition->
NumParts(); ++p) {
359      if (!tmp_node_mask_[parent] && parent < original_num_parts &&
361        tmp_node_mask_[parent] =
true;
// Second pass presumably clears tmp_node_mask_ for the marked parents —
// TODO confirm against the full source.
369    for (
int p = original_num_parts; p < partition->
NumParts(); ++p) {
// NOTE(review): incomplete extraction (parameter list and the merge call
// producing 'removed_representative' are missing). Visible logic: for every
// cycle of 'perm', merge the equivalence classes of the cycle's elements;
// when a merge demotes a class representative and the caller maintains a
// sorted representative structure, remove the demoted representative from it.
376void MergeNodeEquivalenceClassesAccordingToPermutation(
379  for (
int c = 0; c < perm.
NumCycles(); ++c) {
382    for (
const int e : perm.
Cycle(c)) {
384        const int removed_representative =
// -1 signals that no representative was displaced by this merge.
386        if (sorted_representatives !=
nullptr && removed_representative != -1) {
387          sorted_representatives->
Remove(removed_representative);
// NOTE(review): incomplete extraction (parameter lines and the loop headers
// around the Prev/Next walks are missing). Visible logic: collect, into
// 'pruned_other_nodes', every equivalence-class representative that lies in
// the same partition part as 'representative_node', by walking the
// index-sorted representative list in both directions until leaving the
// part. The tail is a DCHECK-only cross-check against a brute-force
// recomputation.
407void GetAllOtherRepresentativesInSamePartAs(
411    std::vector<int>* pruned_other_nodes) {
412  pruned_other_nodes->clear();
413  const int part_index = partition.PartOf(representative_node);
// Walk backwards from the reference representative.
415    int repr = representative_node;
417      DCHECK_EQ(repr, node_equivalence_classes->GetRoot(repr));
418      repr = representatives_sorted_by_index_in_partition.Prev(repr);
419      if (repr < 0 || partition.PartOf(repr) != part_index)
break;
420      pruned_other_nodes->push_back(repr);
// Walk forwards from the same starting point.
423    repr = representative_node;
425      DCHECK_EQ(repr, node_equivalence_classes->GetRoot(repr));
426      repr = representatives_sorted_by_index_in_partition.Next(repr);
427      if (repr < 0 || partition.PartOf(repr) != part_index)
break;
428      pruned_other_nodes->push_back(repr);
// Debug-only verification: recompute the expected set from scratch and
// compare (order-insensitively) with what the two walks produced.
436    std::vector<int> expected_output;
437    for (
const int e : partition.ElementsInPart(part_index)) {
438      if (node_equivalence_classes->GetRoot(e) != representative_node) {
439        expected_output.push_back(e);
442    node_equivalence_classes->KeepOnlyOneNodePerPart(&expected_output);
443    for (
int& x : expected_output)
x = node_equivalence_classes->GetRoot(x);
444    std::sort(expected_output.begin(), expected_output.end());
445    std::vector<int> sorted_output = *pruned_other_nodes;
446    std::sort(sorted_output.begin(), sorted_output.end());
447    DCHECK_EQ(absl::StrJoin(expected_output,
" "),
448              absl::StrJoin(sorted_output,
" "));
// NOTE(review): incomplete extraction of GraphSymmetryFinder::FindSymmetries
// (the function header, initial refinement, partition declarations, and
// several loop/brace lines are missing). Visible outline: validate inputs,
// do an "invariant dive" that repeatedly distinguishes a node and refines,
// then unwind the dive, and for each level try to map the dive's root node
// onto each pruned candidate image; every found permutation becomes a
// generator and merges node equivalence classes. Deadline checks return
// kDeadlineExceeded at each phase.
454    std::vector<int>* node_equivalence_classes_io,
455    std::vector<std::unique_ptr<SparsePermutation>>* generators,
456    std::vector<int>* factorized_automorphism_group_size,
462  factorized_automorphism_group_size->clear();
// The caller must supply one class id per node.
463  if (node_equivalence_classes_io->size() != NumNodes()) {
464    return absl::Status(absl::StatusCode::kInvalidArgument,
465                        "Invalid 'node_equivalence_classes_io'.");
474  if (time_limit_->LimitReached()) {
475    return absl::Status(absl::StatusCode::kDeadlineExceeded,
476                        "During the initial refinement.");
478  VLOG(4) <<
"Base partition: "
// For each node, the indices (into *generators) of permutations moving it.
482  std::vector<std::vector<int>> permutations_displacing_node(NumNodes());
483  std::vector<int> potential_root_image_nodes;
// One stack entry per invariant-dive level: which node was distinguished
// and how many parts existed before the subsequent refinement.
506  struct InvariantDiveState {
508    int num_parts_before_refinement;
510    InvariantDiveState(
int node,
int num_parts)
511        : invariant_node(node), num_parts_before_refinement(num_parts) {}
513  std::vector<InvariantDiveState> invariant_dive_stack;
520  for (
int invariant_node = 0; invariant_node < NumNodes(); ++invariant_node) {
524    invariant_dive_stack.push_back(
525        InvariantDiveState(invariant_node, base_partition.
NumParts()));
527    VLOG(4) <<
"Invariant dive: invariant node = " << invariant_node
528            <<
"; partition after: "
531    if (time_limit_->LimitReached()) {
532      return absl::Status(absl::StatusCode::kDeadlineExceeded,
533                          "During the invariant dive.");
// Unwind the dive one level at a time, looking for automorphisms that move
// the level's root node.
544  while (!invariant_dive_stack.empty()) {
545    if (time_limit_->LimitReached())
break;
548    const int root_node = invariant_dive_stack.back().invariant_node;
549    const int base_num_parts =
550        invariant_dive_stack.back().num_parts_before_refinement;
551    invariant_dive_stack.pop_back();
554    VLOG(4) <<
"Backtracking invariant dive: root node = " << root_node
// Candidate images of root_node: other representatives in its part.
580    GetAllOtherRepresentativesInSamePartAs(
581        root_node, base_partition, representatives_sorted_by_index_in_partition,
582        &node_equivalence_classes, &potential_root_image_nodes);
583    DCHECK(!potential_root_image_nodes.empty());
584    IF_STATS_ENABLED(stats_.invariant_unroll_time.StopTimerAndAddElapsedTime());
588    while (!potential_root_image_nodes.empty()) {
589      if (time_limit_->LimitReached())
break;
590      VLOG(4) <<
"Potential (pruned) images of root node " << root_node
591              <<
" left: [" << absl::StrJoin(potential_root_image_nodes,
" ")
593      const int root_image_node = potential_root_image_nodes.back();
594      VLOG(4) <<
"Trying image of root node: " << root_image_node;
596      std::unique_ptr<SparsePermutation> permutation =
597          FindOneSuitablePermutation(root_node, root_image_node,
598                                     &base_partition, &image_partition,
599                                     *generators, permutations_displacing_node);
601      if (permutation !=
nullptr) {
// Found an automorphism: fold it into the equivalence classes.
606        MergeNodeEquivalenceClassesAccordingToPermutation(
607            *permutation, &node_equivalence_classes,
608            &representatives_sorted_by_index_in_partition);
// Swap-prune-swap keeps the current candidate at the back while the
// (missing) pruning call filters the remainder.
613        SwapFrontAndBack(&potential_root_image_nodes);
615            &potential_root_image_nodes);
616        SwapFrontAndBack(&potential_root_image_nodes);
// Index the new generator by every node it displaces.
619        const int permutation_index =
static_cast<int>(generators->size());
620        for (
const int node : permutation->Support()) {
621          permutations_displacing_node[node].push_back(permutation_index);
626        generators->push_back(std::move(permutation));
// No automorphism maps root -> this image; discard the candidate.
629        potential_root_image_nodes.pop_back();
635    factorized_automorphism_group_size->push_back(
642  if (time_limit_->LimitReached()) {
643    return absl::Status(absl::StatusCode::kDeadlineExceeded,
644                        "Some automorphisms were found, but probably not all.");
646  return ::absl::OkStatus();
// NOTE(review): incomplete extraction of GetBestMapping (function name
// inferred from call sites; earlier parameters and fallback branches are
// missing). Visible logic: pick a (base_node, image_node) pair inside
// 'part_index'. Under --minimize_permutation_support_size, prefer a node
// that sits in the same part of both partitions so it can map to itself
// (empty support contribution); otherwise map *base_node to itself when its
// image-part matches.
657                                        int part_index,
int* base_node,
int* image_node) {
671  if (absl::GetFlag(FLAGS_minimize_permutation_support_size)) {
673    for (
const int node : base_partition.
ElementsInPart(part_index)) {
674      if (image_partition.
PartOf(node) == part_index) {
// Fixed point candidate: mapping node -> node keeps the support small.
675        *image_node = *base_node = node;
686  if (image_partition.
PartOf(*base_node) == part_index) {
687    *image_node = *base_node;
// NOTE(review): incomplete extraction of
// GraphSymmetryFinder::FindOneSuitablePermutation (leading parameters,
// several scoped-timer declarations, refinement calls between the visible
// lines, and various braces are missing). Visible outline: a DFS over
// SearchState frames. Each iteration maps a base node to a candidate image
// node, checks partition compatibility, extends the candidate permutation
// with new singletons, and either (a) confirms a full match and tests it as
// a graph automorphism — returning it after undoing all refinements — or
// (b) deepens the search with the next mapping decision, or (c) backtracks,
// enumerating the remaining pruned image candidates frame by frame.
697std::unique_ptr<SparsePermutation>
698GraphSymmetryFinder::FindOneSuitablePermutation(
701    absl::Span<
const std::unique_ptr<SparsePermutation>>
702        generators_found_so_far,
703    absl::Span<
const std::vector<int>> permutations_displacing_node) {
// Preconditions: no candidate mapping in flight, and the two partitions
// agree part-by-part on fingerprints.
706  DCHECK_EQ(
"", tmp_dynamic_permutation_.DebugString());
707  DCHECK_EQ(base_partition->NumParts(), image_partition->NumParts());
709  for (
int i = 0;
i < base_partition->NumParts(); ++
i) {
710    DCHECK_EQ(base_partition->FprintOfPart(i),
711              image_partition->FprintOfPart(i))
712        << base_partition->DebugString(
false)
714        << image_partition->DebugString(
718  DCHECK(search_states_.empty());
721  std::vector<int> base_singletons;
722  std::vector<int> image_singletons;
725  int min_potential_mismatching_part_index;
726  std::vector<int> next_potential_image_nodes;
// Root frame: try mapping root_node -> root_image_node.
730  search_states_.emplace_back(
732      base_partition->NumParts(),
733      base_partition->NumParts());
735  search_states_.back().remaining_pruned_image_nodes.assign(1, root_image_node);
740  while (!search_states_.empty()) {
741    if (time_limit_->LimitReached())
return nullptr;
752    const SearchState& ss = search_states_.back();
// first_image_node >= 0 means this frame's first candidate hasn't been
// expanded into a pruned list yet.
753    const int image_node = ss.first_image_node >= 0
754                               ? ss.first_image_node
755                               : ss.remaining_pruned_image_nodes.back();
759    DCHECK_EQ(ss.num_parts_before_trying_to_map_base_node,
760              image_partition->NumParts());
769    VLOG(4) << ss.DebugString();
770    VLOG(4) << base_partition->DebugString(
772    VLOG(4) << image_partition->DebugString(
// Cheap structural screen before the expensive full-match confirmation.
788    bool compatible =
true;
791      compatible = PartitionsAreCompatibleAfterPartIndex(
792          *base_partition, *image_partition,
793          ss.num_parts_before_trying_to_map_base_node);
794      u.AlsoUpdate(compatible ? &stats_.quick_compatibility_success_time
795                              : &stats_.quick_compatibility_fail_time);
797    bool partitions_are_full_match =
false;
801          &stats_.dynamic_permutation_refinement_time);
802      tmp_dynamic_permutation_.AddMappings(base_singletons, image_singletons);
805      min_potential_mismatching_part_index =
806          ss.min_potential_mismatching_part_index;
807      partitions_are_full_match = ConfirmFullMatchOrFindNextMappingDecision(
808          *base_partition, *image_partition, tmp_dynamic_permutation_,
809          &min_potential_mismatching_part_index, &next_base_node,
811      u.AlsoUpdate(partitions_are_full_match
812                       ? &stats_.map_election_std_full_match_time
813                       : &stats_.map_election_std_mapping_time);
815    if (compatible && partitions_are_full_match) {
816      DCHECK_EQ(min_potential_mismatching_part_index,
817                base_partition->NumParts());
823      bool is_automorphism =
true;
827      u.AlsoUpdate(is_automorphism ? &stats_.automorphism_test_success_time
828                                   : &stats_.automorphism_test_fail_time);
830      if (is_automorphism) {
// Success: materialize the permutation, then fully unwind partitions
// and search state before returning.
834        std::unique_ptr<SparsePermutation> sparse_permutation(
835            tmp_dynamic_permutation_.CreateSparsePermutation());
836        VLOG(4) <<
"Automorphism found: " << sparse_permutation->DebugString();
837        const int base_num_parts =
838            search_states_[0].num_parts_before_trying_to_map_base_node;
839        base_partition->UndoRefineUntilNumPartsEqual(base_num_parts);
840        image_partition->UndoRefineUntilNumPartsEqual(base_num_parts);
841        tmp_dynamic_permutation_.Reset();
842        search_states_.clear();
844        search_time_updater.AlsoUpdate(&stats_.search_time_success);
845        return sparse_permutation;
851        VLOG(4) <<
"Permutation candidate isn't a valid automorphism.";
852        if (base_partition->NumParts() == NumNodes()) {
856          tmp_dynamic_permutation_.UndoLastMappings(&base_singletons);
// Otherwise: pick the first non-singleton part to branch on next.
863          int non_singleton_part = 0;
866            while (base_partition->SizeOfPart(non_singleton_part) == 1) {
867              ++non_singleton_part;
868              DCHECK_LT(non_singleton_part, base_partition->NumParts());
871          time_limit_->AdvanceDeterministicTime(
872              1e-9 *
static_cast<double>(non_singleton_part));
876          GetBestMapping(*base_partition, *image_partition, non_singleton_part,
877                         &next_base_node, &next_image_node);
// Backtracking: pop frames until one still has untried image candidates.
892      while (!search_states_.empty()) {
893        SearchState*
const last_ss = &search_states_.back();
894        image_partition->UndoRefineUntilNumPartsEqual(
895            last_ss->num_parts_before_trying_to_map_base_node);
896        if (last_ss->first_image_node >= 0) {
// Lazily expand the candidate list: all elements of the first
// candidate's part, first candidate placed at the front, then pruned
// by the known generators' orbits.
909          const int part = image_partition->PartOf(last_ss->first_image_node);
910          last_ss->remaining_pruned_image_nodes.reserve(
911              image_partition->SizeOfPart(part));
912          last_ss->remaining_pruned_image_nodes.push_back(
913              last_ss->first_image_node);
914          for (
const int e : image_partition->ElementsInPart(part)) {
915            if (e != last_ss->first_image_node) {
916              last_ss->remaining_pruned_image_nodes.push_back(e);
921          PruneOrbitsUnderPermutationsCompatibleWithPartition(
922              *image_partition, generators_found_so_far,
923              permutations_displacing_node[last_ss->first_image_node],
924              &last_ss->remaining_pruned_image_nodes);
926          SwapFrontAndBack(&last_ss->remaining_pruned_image_nodes);
927          DCHECK_EQ(last_ss->remaining_pruned_image_nodes.back(),
928                    last_ss->first_image_node);
929          last_ss->first_image_node = -1;
// Discard the candidate just tried; stop popping if more remain.
931        last_ss->remaining_pruned_image_nodes.pop_back();
932        if (!last_ss->remaining_pruned_image_nodes.empty())
break;
934        VLOG(4) <<
"Backtracking one level up.";
935        base_partition->UndoRefineUntilNumPartsEqual(
936            last_ss->num_parts_before_trying_to_map_base_node);
940        tmp_dynamic_permutation_.UndoLastMappings(&base_singletons);
941        search_states_.pop_back();
950    VLOG(4) <<
"  Deepening the search.";
951    search_states_.emplace_back(
952        next_base_node, next_image_node,
953        base_partition->NumParts(),
954        min_potential_mismatching_part_index);
// Exhausted the whole tree without finding an automorphism.
962  search_time_updater.AlsoUpdate(&stats_.search_time_fail);
// Returns an iterable view over the tails of all arcs whose head is 'node',
// backed by the flattened reverse adjacency list built in the constructor.
// (NOTE(review): the closing brace falls outside this extraction.)
966util::BeginEndWrapper<std::vector<int>::const_iterator>
967GraphSymmetryFinder::TailsOfIncomingArcsTo(
int node)
const {
// reverse_adj_list_index_[node] .. [node + 1] delimit node's slice.
968  return util::BeginEndWrapper<std::vector<int>::const_iterator>(
969      flattened_reverse_adj_lists_.begin() + reverse_adj_list_index_[node],
970      flattened_reverse_adj_lists_.begin() + reverse_adj_list_index_[node + 1]);
// NOTE(review): incomplete extraction (the 'partition' parameter line, the
// scratch-vector initializer, and the bodies marking 'compatible = false'
// are missing). Visible logic: among the permutations indexed by
// 'permutation_indices', keep those whose cycles are each contained in a
// single partition part (two screening passes); merge their cycles into
// tmp_partition_, then shrink '*nodes' to one representative per merged
// class. All scratch state (mask, tmp_partition_) is restored afterwards.
973void GraphSymmetryFinder::PruneOrbitsUnderPermutationsCompatibleWithPartition(
975    absl::Span<
const std::unique_ptr<SparsePermutation>> permutations,
976    absl::Span<const int> permutation_indices, std::vector<int>* nodes) {
977  VLOG(4) <<
"  Pruning [" << absl::StrJoin(*nodes,
", ") <<
"]";
// Nothing to prune with zero or one candidate.
984  if (nodes->size() <= 1)
return;
989  std::vector<int>& tmp_nodes_on_support =
991  DCHECK(tmp_nodes_on_support.empty());
995  for (
const int p : permutation_indices) {
996    const SparsePermutation& permutation = *permutations[p];
// First screen: quick size comparison of each cycle vs. its first
// element's part.
999    bool compatible =
true;
1000    for (
int c = 0;
c < permutation.NumCycles(); ++
c) {
1001      const SparsePermutation::Iterator cycle = permutation.Cycle(c);
1003          partition.SizeOfPart(partition.PartOf(*cycle.begin()))) {
1008    if (!compatible)
continue;
// Second screen: verify every element of each cycle shares one part.
1011    for (
int c = 0;
c < permutation.NumCycles(); ++
c) {
1013      for (
const int node : permutation.Cycle(c)) {
1014        if (partition.PartOf(node) != part) {
1019        part = partition.PartOf(node);
1023    if (!compatible)
continue;
// Compatible: fold its orbits into the scratch partition and remember its
// support for cleanup.
1026    MergeNodeEquivalenceClassesAccordingToPermutation(permutation,
1027                                                      &tmp_partition_,
nullptr);
1028    for (
const int node : permutation.Support()) {
1029      if (!tmp_node_mask_[node]) {
1030        tmp_node_mask_[node] =
true;
1031        tmp_nodes_on_support.push_back(node);
1037  tmp_partition_.KeepOnlyOneNodePerPart(nodes);
// Restore scratch structures for the next caller.
1040  for (
const int node : tmp_nodes_on_support) {
1041    tmp_node_mask_[node] =
false;
1042    tmp_partition_.ResetNode(node);
1044  tmp_nodes_on_support.clear();
1045  VLOG(4) <<
"  Pruned: [" << absl::StrJoin(*nodes,
", ") <<
"]";
// NOTE(review): incomplete extraction (leading parameters, several returns,
// and loop/brace lines are missing). Visible logic: either confirm that
// base and image partitions fully match (debug-checked fingerprint equality
// over all non-singleton parts at the end), or pick the next
// (base, image) node pair to try. Without the support-minimizing flag, it
// first tries to close a "loose end" of the candidate permutation by
// mapping it back to its root; otherwise it scans forward from the cached
// minimum index for a part whose fingerprints diverge (also checking that
// part's parent) and delegates to GetBestMapping.
1048bool GraphSymmetryFinder::ConfirmFullMatchOrFindNextMappingDecision(
1052    int* min_potential_mismatching_part_index_io,
int* next_base_node,
1053    int* next_image_node)
const {
1054  *next_base_node = -1;
1055  *next_image_node = -1;
1059  if (!absl::GetFlag(FLAGS_minimize_permutation_support_size)) {
// Prefer closing a permutation cycle: map a loose end onto its root.
1063    for (
const int loose_node : current_permutation_candidate.LooseEnds()) {
1064      DCHECK_GT(base_partition.ElementsInSamePartAs(loose_node).size(), 1);
1065      *next_base_node = loose_node;
1066      const int root = current_permutation_candidate.RootOf(loose_node);
1067      DCHECK_NE(root, loose_node);
1068      if (image_partition.PartOf(root) == base_partition.PartOf(loose_node)) {
1071        *next_image_node = root;
1075    if (*next_base_node != -1) {
1080              .ElementsInPart(base_partition.PartOf(*next_base_node))
// Resume the mismatch scan from where the previous call left off; the
// index is an in/out cache across calls.
1096  const int initial_min_potential_mismatching_part_index =
1097      *min_potential_mismatching_part_index_io;
1098  for (; *min_potential_mismatching_part_index_io < base_partition.NumParts();
1099       ++*min_potential_mismatching_part_index_io) {
1100    const int p = *min_potential_mismatching_part_index_io;
1101    if (base_partition.SizeOfPart(p) != 1 &&
1102        base_partition.FprintOfPart(p) != image_partition.FprintOfPart(p)) {
1103      GetBestMapping(base_partition, image_partition, p, next_base_node,
// A parent part older than the scan start may also have diverged.
1108    const int parent = base_partition.ParentOfPart(p);
1109    if (parent < initial_min_potential_mismatching_part_index &&
1110        base_partition.SizeOfPart(parent) != 1 &&
1111        base_partition.FprintOfPart(parent) !=
1112            image_partition.FprintOfPart(parent)) {
1113      GetBestMapping(base_partition, image_partition, parent, next_base_node,
// Full match: every non-singleton part must agree on fingerprints.
1122  for (
int p = 0; p < base_partition.NumParts(); ++p) {
1123    if (base_partition.SizeOfPart(p) != 1) {
1124      CHECK_EQ(base_partition.FprintOfPart(p),
1125               image_partition.FprintOfPart(p));
// Human-readable dump of one DFS frame, used by the VLOG(4) traces above.
// (NOTE(review): the closing brace falls outside this extraction.)
1132std::string GraphSymmetryFinder::SearchState::DebugString()
const {
1133  return absl::StrFormat(
1134      "SearchState{ base_node=%d, first_image_node=%d,"
1135      " remaining_pruned_image_nodes=[%s],"
1136      " num_parts_before_trying_to_map_base_node=%d }",
1137      base_node, first_image_node,
1138      absl::StrJoin(remaining_pruned_image_nodes,
" "),
1139      num_parts_before_trying_to_map_base_node);
void Remove(int i)
You must not call Remove() twice with the same element.
int ParentOfPart(int part) const
const std::vector< int > & ElementsInHierarchicalOrder() const
std::string DebugString(bool sort_parts_lexicographically) const
IterablePart ElementsInSamePartAs(int i) const
int SizeOfPart(int part) const
void UndoRefineUntilNumPartsEqual(int original_num_parts)
void Refine(absl::Span< const int > distinguished_subset)
IterablePart ElementsInPart(int i) const
*** Implementation of inline methods of the above classes. ***
int PartOf(int element) const
const std::vector< int > & AllMappingsSrc() const
Returns the union of all "src" ever given to AddMappings().
int ImageOf(int i) const
Forced-inline for the speed.
absl::Status FindSymmetries(std::vector< int > *node_equivalence_classes_io, std::vector< std::unique_ptr< SparsePermutation > > *generators, std::vector< int > *factorized_automorphism_group_size, TimeLimit *time_limit=nullptr)
void RecursivelyRefinePartitionByAdjacency(int first_unrefined_part_index, DynamicPartition *partition)
::util::StaticGraph Graph
void DistinguishNodeInPartition(int node, DynamicPartition *partition, std::vector< int > *new_singletons_or_null)
**** Methods below are public FOR TESTING ONLY. ****
GraphSymmetryFinder(const Graph &graph, bool is_undirected)
bool IsGraphAutomorphism(const DynamicPermutation &permutation) const
void KeepOnlyOneNodePerPart(std::vector< int > *nodes)
int NumNodesInSamePartAs(int node)
int MergePartsOf(int node1, int node2)
int FillEquivalenceClasses(std::vector< int > *node_equivalence_classes)
Iterator Cycle(int i) const
NodeIndexType num_nodes() const
ArcIndexType num_arcs() const
Returns the number of valid arcs in the graph.
IntegerRange< NodeIndex > AllNodes() const
BaseGraph implementation --------------------------------------------------.
BeginEndWrapper< OutgoingArcIterator > OutgoingArcs(NodeIndexType node) const
NodeIndexType Head(ArcIndexType arc) const
ABSL_FLAG(bool, minimize_permutation_support_size, false, "Tweak the algorithm to try and minimize the support size" " of the generators produced. This may negatively impact the" " performance, but works great on the sat_holeXXX benchmarks" " to reduce the support size.")
In SWIG mode, we don't want anything besides these top-level includes.
void LocalBfs(const ::util::StaticGraph< int, int > &graph, int source, int stop_after_num_nodes, std::vector< int > *visited, std::vector< int > *num_within_radius, std::vector< bool > *tmp_mask)
DisabledScopedTimeDistributionUpdater ScopedTimeDistributionUpdater
std::vector< int > CountTriangles(const ::util::StaticGraph< int, int > &graph, int max_degree)
HELPER FUNCTIONS: PUBLIC FOR UNIT TESTING ONLY.
bool GraphIsSymmetric(const Graph &graph)
bool GraphIsSymmetric(const Graph &graph)
#define IF_STATS_ENABLED(instructions)
std::vector< int >::const_iterator begin() const