#include "absl/algorithm/container.h"
#include "absl/container/flat_hash_set.h"
#include "absl/flags/flag.h"
#include "absl/memory/memory.h"
#include "absl/status/status.h"
#include "absl/strings/str_format.h"
#include "absl/strings/str_join.h"
#include "absl/time/clock.h"
#include "absl/time/time.h"
#include "absl/types/span.h"
ABSL_FLAG(bool, minimize_permutation_support_size, false,
          "Tweak the algorithm to try and minimize the support size"
          " of the generators produced. This may negatively impact the"
          " performance, but works great on the sat_holeXXX benchmarks"
          " to reduce the support size.");
// For each node whose out-degree is at most "max_degree", counts the ordered
// pairs of its neighbors that are themselves linked by an arc ("triangles"
// through that node). Nodes with a larger degree get a count of 0.
std::vector<int> CountTriangles(const ::util::StaticGraph<int, int>& graph,
                                int max_degree) {
  std::vector<int> num_triangles(graph.num_nodes(), 0);
  absl::flat_hash_set<std::pair<int, int>> arcs;
  arcs.reserve(graph.num_arcs());
  for (int a = 0; a < graph.num_arcs(); ++a) {
    arcs.insert({graph.Tail(a), graph.Head(a)});
  }
  for (int node = 0; node < graph.num_nodes(); ++node) {
    if (graph.OutDegree(node) > max_degree) continue;
    int triangles = 0;
    for (const int neigh1 : graph[node]) {
      for (const int neigh2 : graph[node]) {
        if (arcs.contains({neigh1, neigh2})) ++triangles;
      }
    }
    num_triangles[node] = triangles;
  }
  return num_triangles;
}
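// Illustrative usage sketch (not from the original file; the toy graph and the
// values below are hypothetical, and assume the usual util::StaticGraph API:
// AddArc()/Build()):
//
//   const std::vector<std::pair<int, int>> toy_arcs = {
//       {0, 1}, {1, 2}, {2, 0}, {1, 0}, {2, 1}, {0, 2}};
//   ::util::StaticGraph<int, int> g(/*num_nodes=*/3, /*arc_capacity=*/6);
//   for (const auto& [tail, head] : toy_arcs) g.AddArc(tail, head);
//   g.Build();
//   const std::vector<int> t = CountTriangles(g, /*max_degree=*/4);
//   // t == {2, 2, 2}: each node sees its two neighbors linked in both
//   // directions, i.e. two ordered neighbor pairs per node.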
// Breadth-first search from "source", stopping (at the next distance change)
// as soon as at least "stop_after_num_nodes" nodes have been settled.
// "visited" is filled with the visited nodes in BFS order, and
// "num_within_radius" with the number of nodes within distance 0, 1, 2, ...
// "tmp_mask" must be an all-false bitmask of size graph.num_nodes(); it is
// restored to that state before returning.
void LocalBfs(const ::util::StaticGraph<int, int>& graph, int source,
              int stop_after_num_nodes, std::vector<int>* visited,
              std::vector<int>* num_within_radius,
              std::vector<bool>* tmp_mask) {
  const int n = graph.num_nodes();
  visited->clear();
  num_within_radius->clear();
  num_within_radius->push_back(1);
  DCHECK_EQ(tmp_mask->size(), n);
  DCHECK(absl::c_find(*tmp_mask, true) == tmp_mask->end());
  visited->push_back(source);
  (*tmp_mask)[source] = true;
  int num_settled = 0;
  int next_distance_change = 1;
  while (num_settled < visited->size()) {
    const int from = (*visited)[num_settled++];
    for (const int child : graph[from]) {
      if ((*tmp_mask)[child]) continue;
      (*tmp_mask)[child] = true;
      visited->push_back(child);
    }
    if (num_settled == next_distance_change) {
      // All the nodes at the current distance have just been settled.
      num_within_radius->push_back(visited->size());
      if (num_settled >= stop_after_num_nodes) break;
      next_distance_change = visited->size();
    }
  }
  // Sparse clean-up of tmp_mask.
  for (const int node : *visited) (*tmp_mask)[node] = false;
  // If the BFS exhausted the whole connected component, the last radius entry
  // duplicates the previous one: drop it.
  if (num_settled == visited->size()) {
    DCHECK_GE(num_within_radius->size(), 2);
    DCHECK_EQ(num_within_radius->back(),
              (*num_within_radius)[num_within_radius->size() - 2]);
    num_within_radius->pop_back();
  }
}
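// Worked example (illustrative, not from the original file): on a
// bidirectional path 0 - 1 - 2 - 3, LocalBfs(graph, /*source=*/0, ...) visits
// 0, 1, 2, 3 in that order. If it is not stopped early, num_within_radius
// ends up as {1, 2, 3, 4}: one node within distance 0, two within distance 1,
// and so on. With stop_after_num_nodes == 2 the loop breaks once the
// distance-1 layer is settled, so visited == {0, 1, 2} and
// num_within_radius == {1, 2, 3}.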
void SwapFrontAndBack(std::vector<int>* v) {
  DCHECK(!v->empty());
  std::swap((*v)[0], v->back());
}
// Whether two dynamic partitions, which are expected to have followed the same
// sequence of refinements up to "part_index", still agree on the size and
// parent of every part from "part_index" onwards.
bool PartitionsAreCompatibleAfterPartIndex(const DynamicPartition& p1,
                                           const DynamicPartition& p2,
                                           int part_index) {
  const int num_parts = p1.NumParts();
  if (p2.NumParts() != num_parts) return false;
  for (int p = part_index; p < num_parts; ++p) {
    if (p1.SizeOfPart(p) != p2.SizeOfPart(p) ||
        p1.ParentOfPart(p) != p2.ParentOfPart(p)) {
      return false;
    }
  }
  return true;
}
// Whether the image of the list "l1" under "permutation" is exactly the list
// "l2" (as sets, assuming no duplicate elements). "tmp_node_mask" must be an
// all-false bitmask over the nodes; it is restored to that state on return.
template <class List>
bool ListMapsToList(const List& l1, const List& l2,
                    const DynamicPermutation& permutation,
                    std::vector<bool>* tmp_node_mask) {
  int num_elements_delta = 0;
  bool match = true;
  for (const int mapped_x : l2) {
    ++num_elements_delta;
    (*tmp_node_mask)[mapped_x] = true;
  }
  for (const int x : l1) {
    --num_elements_delta;
    const int mapped_x = permutation.ImageOf(x);
    if (!(*tmp_node_mask)[mapped_x]) {
      match = false;
      break;
    }
    (*tmp_node_mask)[mapped_x] = false;
  }
  if (num_elements_delta != 0) match = false;
  // Clean up tmp_node_mask (harmless for the entries already cleaned above).
  for (const int x : l2) (*tmp_node_mask)[x] = false;
  return match;
}
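// Illustrative sketch (not from the original file): assuming a
// DynamicPermutation "perm" over 7 elements to which
// AddMappings({0, 1, 2}, {3, 4, 5}) was applied, and an all-false mask of
// size 7:
//
//   std::vector<bool> mask(7, false);
//   const std::vector<int> l1 = {0, 1, 2};
//   ListMapsToList(l1, std::vector<int>({5, 3, 4}), perm, &mask);  // true
//   ListMapsToList(l1, std::vector<int>({3, 4, 6}), perm, &mask);  // false
//
// The order of the elements does not matter, only the set of images.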
GraphSymmetryFinder::GraphSymmetryFinder(const Graph& graph,
                                         bool is_undirected)
    : graph_(graph),
      tmp_dynamic_permutation_(NumNodes()),
      tmp_node_mask_(NumNodes(), false),
      tmp_degree_(NumNodes(), 0),
      tmp_nodes_with_degree_(NumNodes() + 1) {
  // Unless the user later provides one, use the default (infinite) time limit.
  time_limit_ = &dummy_time_limit_;
  tmp_partition_.Reset(NumNodes());
  if (is_undirected) {
    DCHECK(GraphIsSymmetric(graph));
  } else {
    // Compute the reverse adjacency lists in a flat, CSR-like layout.
    // First pass: accumulate the in-degree of node i in
    // reverse_adj_list_index_[i + 2].
    reverse_adj_list_index_.assign(graph.num_nodes() + 2, 0);
    for (const int node : graph.AllNodes()) {
      for (const int arc : graph.OutgoingArcs(node)) {
        ++reverse_adj_list_index_[graph.Head(arc) + 2];
      }
    }
    // Second pass: cumulative sum, so that reverse_adj_list_index_[i + 1] now
    // points at the start of the (yet unfilled) reverse adjacency list of i.
    std::partial_sum(reverse_adj_list_index_.begin() + 2,
                     reverse_adj_list_index_.end(),
                     reverse_adj_list_index_.begin() + 2);
    // Third pass: fill the flattened lists, using
    // reverse_adj_list_index_[i + 1] as a moving write cursor for node i.
    flattened_reverse_adj_lists_.assign(graph.num_arcs(), -1);
    for (const int node : graph.AllNodes()) {
      for (const int arc : graph.OutgoingArcs(node)) {
        flattened_reverse_adj_lists_[reverse_adj_list_index_[graph.Head(arc) +
                                                             1]++] = node;
      }
    }
    // After the third pass, the write cursors have caught up with the static
    // offsets.
    DCHECK_EQ(graph.num_arcs(), reverse_adj_list_index_[graph.num_nodes()]);
    for (const int i : flattened_reverse_adj_lists_) DCHECK_NE(i, -1);
  }
}
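// Worked example of the three passes above (illustrative, not from the
// original file): for a 3-node graph with arcs 0->2, 1->2 and 2->0, the
// in-degrees are {1, 0, 2}. After the first pass the index array (of size 5)
// is {0, 0, 1, 0, 2}; after the cumulative sum it is {0, 0, 1, 1, 3}; after
// the third pass it is {0, 1, 1, 3, 3} and the flattened lists are {2, 0, 1}.
// TailsOfIncomingArcsTo(0) therefore spans offsets [0, 1) of the flattened
// array (i.e. {2}), and TailsOfIncomingArcsTo(2) spans [1, 3) (i.e. {0, 1}).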
bool GraphSymmetryFinder::IsGraphAutomorphism(
    const DynamicPermutation& permutation) const {
  for (const int base : permutation.AllMappingsSrc()) {
    const int image = permutation.ImageOf(base);
    if (image == base) continue;
    if (!ListMapsToList(graph_[base], graph_[image], permutation,
                        &tmp_node_mask_)) {
      return false;
    }
  }
  if (!reverse_adj_list_index_.empty()) {
    // In the directed case, the incoming adjacency must also be preserved.
    for (const int base : permutation.AllMappingsSrc()) {
      const int image = permutation.ImageOf(base);
      if (image == base) continue;
      if (!ListMapsToList(TailsOfIncomingArcsTo(base),
                          TailsOfIncomingArcsTo(image), permutation,
                          &tmp_node_mask_)) {
        return false;
      }
    }
  }
  return true;
}
// Increments the counter of every node of "nodes" that is not alone in its
// part of "partition", appending it to "nodes_seen" the first time its counter
// becomes nonzero, and accounts for the work done in "num_operations".
template <class T>
inline void IncrementCounterForNonSingletons(const T& nodes,
                                             const DynamicPartition& partition,
                                             std::vector<int>* node_count,
                                             std::vector<int>* nodes_seen,
                                             int64_t* num_operations) {
  *num_operations += nodes.end() - nodes.begin();
  for (const int node : nodes) {
    if (partition.ElementsInSamePartAs(node).size() == 1) continue;
    const int count = ++(*node_count)[node];
    if (count == 1) nodes_seen->push_back(node);
  }
}
void GraphSymmetryFinder::RecursivelyRefinePartitionByAdjacency(
    int first_unrefined_part_index, DynamicPartition* partition) {
  // Rename, for readability of the code below.
  std::vector<int>& tmp_nodes_with_nonzero_degree = tmp_stack_;
  int64_t num_operations = 0;
  // Refine on the outgoing adjacency and, if the reverse adjacency lists were
  // built (directed case), also on the incoming adjacency.
  std::vector<bool> adjacency_directions(1, true);
  if (!reverse_adj_list_index_.empty()) {
    adjacency_directions.push_back(false);  // Also refine on incoming arcs.
  }
  for (int part_index = first_unrefined_part_index;
       part_index < partition->NumParts();  // Moving target.
       ++part_index) {
    for (const bool outgoing_adjacency : adjacency_directions) {
      // Count the degree of each (non-singleton) node towards the current
      // part.
      if (outgoing_adjacency) {
        for (const int node : partition->ElementsInPart(part_index)) {
          IncrementCounterForNonSingletons(
              graph_[node], *partition, &tmp_degree_,
              &tmp_nodes_with_nonzero_degree, &num_operations);
        }
      } else {
        for (const int node : partition->ElementsInPart(part_index)) {
          IncrementCounterForNonSingletons(
              TailsOfIncomingArcsTo(node), *partition, &tmp_degree_,
              &tmp_nodes_with_nonzero_degree, &num_operations);
        }
      }
      // Group the seen nodes by degree, clearing tmp_degree_ as we go.
      int max_degree = 0;
      num_operations += 3 + tmp_nodes_with_nonzero_degree.size();
      for (const int node : tmp_nodes_with_nonzero_degree) {
        const int degree = tmp_degree_[node];
        tmp_degree_[node] = 0;
        max_degree = std::max(max_degree, degree);
        tmp_nodes_with_degree_[degree].push_back(node);
      }
      tmp_nodes_with_nonzero_degree.clear();
      // Refine the partition by each degree class, in turn.
      for (int degree = 1; degree <= max_degree; ++degree) {
        num_operations += 1 + 3 * tmp_nodes_with_degree_[degree].size();
        partition->Refine(tmp_nodes_with_degree_[degree]);
        tmp_nodes_with_degree_[degree].clear();
      }
    }
  }
  // Charge the work done to the deterministic time limit; the original applies
  // an empirical scaling factor to num_operations, not reproduced here.
  time_limit_->AdvanceDeterministicTime(static_cast<double>(num_operations));
}
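// Worked example (illustrative, not from the original file): on an undirected
// path 0 - 1 - 2 - 3 whose four nodes start in a single part, the degrees
// counted towards that part are 1 for the endpoints {0, 3} and 2 for the
// interior nodes {1, 2}, so the first Refine() call splits the partition into
// {0, 3} | {1, 2}. A second pass cannot split anything further: every node of
// {0, 3} has exactly one neighbor in {1, 2}, and every node of {1, 2} has one
// neighbor in each part, i.e. the partition is now equitable.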
void GraphSymmetryFinder::DistinguishNodeInPartition(
    int node, DynamicPartition* partition, std::vector<int>* new_singletons) {
  const int original_num_parts = partition->NumParts();
  partition->Refine(std::vector<int>(1, node));
  RecursivelyRefinePartitionByAdjacency(partition->PartOf(node), partition);

  // Explore the newly created parts to gather all the new singletons.
  if (new_singletons != nullptr) {
    new_singletons->clear();
    for (int p = original_num_parts; p < partition->NumParts(); ++p) {
      const int parent = partition->ParentOfPart(p);
      // An old part may have become a singleton after being split;
      // tmp_node_mask_ guards against collecting the same parent twice.
      if (!tmp_node_mask_[parent] && parent < original_num_parts &&
          partition->SizeOfPart(parent) == 1) {
        tmp_node_mask_[parent] = true;
        new_singletons->push_back(*partition->ElementsInPart(parent).begin());
      }
      if (partition->SizeOfPart(p) == 1) {
        new_singletons->push_back(*partition->ElementsInPart(p).begin());
      }
    }
    // Sparse reset of tmp_node_mask_.
    for (int p = original_num_parts; p < partition->NumParts(); ++p) {
      tmp_node_mask_[partition->ParentOfPart(p)] = false;
    }
  }
}
// Merges the classes of "node_equivalence_classes" along every cycle of
// "perm". If "sorted_representatives" is non-null, every representative that
// is absorbed by a merge is removed from it.
void MergeNodeEquivalenceClassesAccordingToPermutation(
    const SparsePermutation& perm, MergingPartition* node_equivalence_classes,
    DenseDoublyLinkedList* sorted_representatives) {
  for (int c = 0; c < perm.NumCycles(); ++c) {
    // Merge all the elements of the cycle into a single class, tracking the
    // representatives that disappear in the process.
    int prev = -1;
    for (const int e : perm.Cycle(c)) {
      if (prev >= 0) {
        const int removed_representative =
            node_equivalence_classes->MergePartsOf(prev, e);
        if (sorted_representatives != nullptr && removed_representative != -1) {
          sorted_representatives->Remove(removed_representative);
        }
      }
      prev = e;
    }
  }
}
// Fills "pruned_other_nodes" with the representatives, other than
// "representative_node" itself, of the equivalence classes that have elements
// in the same part of "partition" as "representative_node".
void GetAllOtherRepresentativesInSamePartAs(
    int representative_node, const DynamicPartition& partition,
    const DenseDoublyLinkedList& representatives_sorted_by_index_in_partition,
    MergingPartition* node_equivalence_classes,
    std::vector<int>* pruned_other_nodes) {
  pruned_other_nodes->clear();
  const int part_index = partition.PartOf(representative_node);
  // Walk the sorted list of representatives in both directions, starting from
  // "representative_node", as long as we stay in the same part.
  int repr = representative_node;
  while (true) {
    DCHECK_EQ(repr, node_equivalence_classes->GetRoot(repr));
    repr = representatives_sorted_by_index_in_partition.Prev(repr);
    if (repr < 0 || partition.PartOf(repr) != part_index) break;
    pruned_other_nodes->push_back(repr);
  }
  repr = representative_node;
  while (true) {
    DCHECK_EQ(repr, node_equivalence_classes->GetRoot(repr));
    repr = representatives_sorted_by_index_in_partition.Next(repr);
    if (repr < 0 || partition.PartOf(repr) != part_index) break;
    pruned_other_nodes->push_back(repr);
  }
  // In debug mode, verify the result against a brute-force recomputation.
  if (DEBUG_MODE) {
    std::vector<int> expected_output;
    for (const int e : partition.ElementsInPart(part_index)) {
      if (node_equivalence_classes->GetRoot(e) != representative_node) {
        expected_output.push_back(e);
      }
    }
    node_equivalence_classes->KeepOnlyOneNodePerPart(&expected_output);
    for (int& x : expected_output) x = node_equivalence_classes->GetRoot(x);
    std::sort(expected_output.begin(), expected_output.end());
    std::vector<int> sorted_output = *pruned_other_nodes;
    std::sort(sorted_output.begin(), sorted_output.end());
    DCHECK_EQ(absl::StrJoin(expected_output, " "),
              absl::StrJoin(sorted_output, " "));
  }
}
absl::Status GraphSymmetryFinder::FindSymmetries(
    std::vector<int>* node_equivalence_classes_io,
    std::vector<std::unique_ptr<SparsePermutation>>* generators,
    std::vector<int>* factorized_automorphism_group_size,
    TimeLimit* time_limit) {
  // ...
  factorized_automorphism_group_size->clear();
  if (node_equivalence_classes_io->size() != NumNodes()) {
    return absl::Status(absl::StatusCode::kInvalidArgument,
                        "Invalid 'node_equivalence_classes_io'.");
  }
  // ... (build "base_partition" from the input equivalence classes and refine
  //      it by adjacency)
  if (time_limit_->LimitReached()) {
    return absl::Status(absl::StatusCode::kDeadlineExceeded,
                        "During the initial refinement.");
  }
  VLOG(4) << "Base partition: "
          << base_partition.DebugString(false);
  // ...
  std::vector<std::vector<int>> permutations_displacing_node(NumNodes());
  std::vector<int> potential_root_image_nodes;
  // ... (set up "image_partition", the MergingPartition
  //      "node_equivalence_classes" and the DenseDoublyLinkedList
  //      "representatives_sorted_by_index_in_partition")
  // Invariant dive: repeatedly distinguish a node and refine the base
  // partition, remembering each intermediate state on a stack so that it can
  // be unwound afterwards, one level at a time.
  struct InvariantDiveState {
    int invariant_node;
    int num_parts_before_refinement;

    InvariantDiveState(int node, int num_parts)
        : invariant_node(node), num_parts_before_refinement(num_parts) {}
  };
  std::vector<InvariantDiveState> invariant_dive_stack;
  // ...
  for (int invariant_node = 0; invariant_node < NumNodes(); ++invariant_node) {
    // ...
    invariant_dive_stack.push_back(
        InvariantDiveState(invariant_node, base_partition.NumParts()));
    // ...
    VLOG(4) << "Invariant dive: invariant node = " << invariant_node
            << "; partition after: "
            << base_partition.DebugString(false);
    if (time_limit_->LimitReached()) {
      return absl::Status(absl::StatusCode::kDeadlineExceeded,
                          "During the invariant dive.");
    }
  }
  // ...
  // Unwind the invariant dive, one level at a time.
  while (!invariant_dive_stack.empty()) {
    // ...
    const int root_node = invariant_dive_stack.back().invariant_node;
    const int base_num_parts =
        invariant_dive_stack.back().num_parts_before_refinement;
    invariant_dive_stack.pop_back();
    // ... (undo the refinements of this dive level on both partitions)
    VLOG(4) << "Backtracking invariant dive: root node = " << root_node;
    // ...
    GetAllOtherRepresentativesInSamePartAs(
        root_node, base_partition, representatives_sorted_by_index_in_partition,
        &node_equivalence_classes, &potential_root_image_nodes);
    DCHECK(!potential_root_image_nodes.empty());
    // ...
    // Try to map "root_node" onto each candidate image node, in turn.
    while (!potential_root_image_nodes.empty()) {
      // ...
      VLOG(4) << "Potential (pruned) images of root node " << root_node
              << " left: [" << absl::StrJoin(potential_root_image_nodes, " ")
              << "]";
      const int root_image_node = potential_root_image_nodes.back();
      VLOG(4) << "Trying image of root node: " << root_image_node;
      // ...
      std::unique_ptr<SparsePermutation> permutation =
          FindOneSuitablePermutation(root_node, root_image_node,
                                     &base_partition, &image_partition,
                                     *generators, permutations_displacing_node);
      // ...
      if (permutation != nullptr) {
        // ...
        MergeNodeEquivalenceClassesAccordingToPermutation(
            *permutation, &node_equivalence_classes,
            &representatives_sorted_by_index_in_partition);
        // ...
        // Prune the remaining candidates against the updated equivalence
        // classes, keeping the current candidate (at the back) in place.
        SwapFrontAndBack(&potential_root_image_nodes);
        node_equivalence_classes.KeepOnlyOneNodePerPart(
            &potential_root_image_nodes);
        SwapFrontAndBack(&potential_root_image_nodes);
        // ...
        const int permutation_index = static_cast<int>(generators->size());
        for (const int node : permutation->Support()) {
          permutations_displacing_node[node].push_back(permutation_index);
        }
        // ...
        generators->push_back(std::move(permutation));
      }
      // ...
      potential_root_image_nodes.pop_back();
    }
    // ...
    factorized_automorphism_group_size->push_back(
        node_equivalence_classes.NumNodesInSamePartAs(root_node));
  }
  // ...
  if (time_limit_->LimitReached()) {
    return absl::Status(absl::StatusCode::kDeadlineExceeded,
                        "Some automorphisms were found, but probably not all.");
  }
  return ::absl::OkStatus();
}
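// Illustrative usage sketch (not from the original file; assumes an undirected
// graph "g" of the appropriate Graph type has already been built):
//
//   GraphSymmetryFinder finder(g, /*is_undirected=*/true);
//   std::vector<int> equivalence_classes(g.num_nodes(), 0);  // One class.
//   std::vector<std::unique_ptr<SparsePermutation>> generators;
//   std::vector<int> orbit_sizes;
//   const absl::Status status = finder.FindSymmetries(
//       &equivalence_classes, &generators, &orbit_sizes);
//   // On OkStatus(), "generators" generate the automorphism group of "g",
//   // and the product of the entries of "orbit_sizes" is the group's order.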
void GraphSymmetryFinder::GetBestMapping(const DynamicPartition& base_partition,
                                         const DynamicPartition& image_partition,
                                         int part_index, int* base_node,
                                         int* image_node) {
  // ...
  if (absl::GetFlag(FLAGS_minimize_permutation_support_size)) {
    // Prefer mapping a node onto itself, when possible.
    for (const int node : base_partition.ElementsInPart(part_index)) {
      if (image_partition.PartOf(node) == part_index) {
        *image_node = *base_node = node;
        return;
      }
    }
    // ...
  }
  // ... (pick *base_node in the base part)
  if (image_partition.PartOf(*base_node) == part_index) {
    *image_node = *base_node;
  }
  // ... (otherwise pick *image_node in the image part)
}
std::unique_ptr<SparsePermutation>
GraphSymmetryFinder::FindOneSuitablePermutation(
    int root_node, int root_image_node, DynamicPartition* base_partition,
    DynamicPartition* image_partition,
    absl::Span<const std::unique_ptr<SparsePermutation>>
        generators_found_so_far,
    absl::Span<const std::vector<int>> permutations_displacing_node) {
  // ...
  DCHECK_EQ("", tmp_dynamic_permutation_.DebugString());
  DCHECK_EQ(base_partition->DebugString(false),
            image_partition->DebugString(false));
  DCHECK(search_states_.empty());
  // ...
  std::vector<int> base_singletons;
  std::vector<int> image_singletons;
  int next_base_node;
  int next_image_node;
  int min_potential_mismatching_part_index;
  std::vector<int> next_potential_image_nodes;
  // ...
  // The initial search state maps "root_node" onto "root_image_node".
  search_states_.emplace_back(
      /*base_node=*/root_node, /*first_image_node=*/-1,
      base_partition->NumParts(),
      base_partition->NumParts());
  search_states_.back().remaining_pruned_image_nodes.assign(1, root_image_node);
  // ...
  while (!search_states_.empty()) {
    // ...
    const SearchState& ss = search_states_.back();
    const int image_node = ss.first_image_node >= 0
                               ? ss.first_image_node
                               : ss.remaining_pruned_image_nodes.back();
    // ...
    DCHECK_EQ(ss.num_parts_before_trying_to_map_base_node,
              image_partition->NumParts());
    // ...
    VLOG(4) << ss.DebugString();
    VLOG(4) << base_partition->DebugString(false);
    VLOG(4) << image_partition->DebugString(false);
    // ... (distinguish ss.base_node in *base_partition and image_node in
    //      *image_partition, collecting base_singletons and image_singletons)
    bool compatible = true;
    // ...
    compatible = PartitionsAreCompatibleAfterPartIndex(
        *base_partition, *image_partition,
        ss.num_parts_before_trying_to_map_base_node);
    u.AlsoUpdate(compatible ? &stats_.quick_compatibility_success_time
                            : &stats_.quick_compatibility_fail_time);
    // ...
    bool partitions_are_full_match = false;
    if (compatible) {
      ScopedTimeDistributionUpdater u(
          &stats_.dynamic_permutation_refinement_time);
      tmp_dynamic_permutation_.AddMappings(base_singletons, image_singletons);
      // ...
      min_potential_mismatching_part_index =
          ss.min_potential_mismatching_part_index;
      partitions_are_full_match = ConfirmFullMatchOrFindNextMappingDecision(
          *base_partition, *image_partition, tmp_dynamic_permutation_,
          &min_potential_mismatching_part_index, &next_base_node,
          &next_image_node);
      u.AlsoUpdate(partitions_are_full_match
                       ? &stats_.map_election_std_full_match_time
                       : &stats_.map_election_std_mapping_time);
    }
    if (compatible && partitions_are_full_match) {
      DCHECK_EQ(min_potential_mismatching_part_index,
                base_partition->NumParts());
      // ...
      bool is_automorphism = true;
      // ... (verify the candidate with IsGraphAutomorphism() and store the
      //      result in "is_automorphism")
      u.AlsoUpdate(is_automorphism ? &stats_.automorphism_test_success_time
                                   : &stats_.automorphism_test_fail_time);
      // ...
      if (is_automorphism) {
        // ...
        std::unique_ptr<SparsePermutation> sparse_permutation(
            tmp_dynamic_permutation_.CreateSparsePermutation());
        VLOG(4) << "Automorphism found: " << sparse_permutation->DebugString();
        const int base_num_parts =
            search_states_[0].num_parts_before_trying_to_map_base_node;
        base_partition->UndoRefineUntilNumPartsEqual(base_num_parts);
        image_partition->UndoRefineUntilNumPartsEqual(base_num_parts);
        tmp_dynamic_permutation_.Reset();
        search_states_.clear();
        // ...
        search_time_updater.AlsoUpdate(&stats_.search_time_success);
        return sparse_permutation;
      }
      // ...
      VLOG(4) << "Permutation candidate isn't a valid automorphism.";
      if (base_partition->NumParts() == NumNodes()) {
        // ... (the permutation is fully specified yet invalid; backtrack below)
      }
      // ...
      // Find the first non-singleton part and elect the next mapping on it.
      int non_singleton_part = 0;
      // ...
      while (base_partition->SizeOfPart(non_singleton_part) == 1) {
        ++non_singleton_part;
        DCHECK_LT(non_singleton_part, base_partition->NumParts());
      }
      time_limit_->AdvanceDeterministicTime(
          1e-9 * static_cast<double>(non_singleton_part));
      // ...
      GetBestMapping(*base_partition, *image_partition, non_singleton_part,
                     &next_base_node, &next_image_node);
    }
    // ... (if the current mapping failed, backtrack as many levels as needed:)
    while (!search_states_.empty()) {
      SearchState* const last_ss = &search_states_.back();
      image_partition->UndoRefineUntilNumPartsEqual(
          last_ss->num_parts_before_trying_to_map_base_node);
      if (last_ss->first_image_node >= 0) {
        // Expand the single "first" image node into the full (pruned) list of
        // candidate image nodes of its part.
        const int part = image_partition->PartOf(last_ss->first_image_node);
        last_ss->remaining_pruned_image_nodes.reserve(
            image_partition->SizeOfPart(part));
        last_ss->remaining_pruned_image_nodes.push_back(
            last_ss->first_image_node);
        for (const int e : image_partition->ElementsInPart(part)) {
          if (e != last_ss->first_image_node) {
            last_ss->remaining_pruned_image_nodes.push_back(e);
          }
        }
        // ...
        PruneOrbitsUnderPermutationsCompatibleWithPartition(
            *image_partition, generators_found_so_far,
            permutations_displacing_node[last_ss->first_image_node],
            &last_ss->remaining_pruned_image_nodes);
        // ...
        SwapFrontAndBack(&last_ss->remaining_pruned_image_nodes);
        DCHECK_EQ(last_ss->remaining_pruned_image_nodes.back(),
                  last_ss->first_image_node);
        last_ss->first_image_node = -1;
      }
      last_ss->remaining_pruned_image_nodes.pop_back();
      if (!last_ss->remaining_pruned_image_nodes.empty()) break;
      // ...
      VLOG(4) << "Backtracking one level up.";
      base_partition->UndoRefineUntilNumPartsEqual(
          last_ss->num_parts_before_trying_to_map_base_node);
      // ... (undo the corresponding mappings of tmp_dynamic_permutation_)
      search_states_.pop_back();
    }
    // ... (otherwise, deepen the search with the elected mapping decision:)
    VLOG(4) << " Deepening the search.";
    search_states_.emplace_back(
        next_base_node, next_image_node,
        base_partition->NumParts(),
        min_potential_mismatching_part_index);
  }
  // ...
  // The whole search tree was exhausted without finding an automorphism.
  search_time_updater.AlsoUpdate(&stats_.search_time_fail);
  return nullptr;
}
util::BeginEndWrapper<std::vector<int>::const_iterator>
GraphSymmetryFinder::TailsOfIncomingArcsTo(int node) const {
  return util::BeginEndWrapper<std::vector<int>::const_iterator>(
      flattened_reverse_adj_lists_.begin() + reverse_adj_list_index_[node],
      flattened_reverse_adj_lists_.begin() + reverse_adj_list_index_[node + 1]);
}
void GraphSymmetryFinder::PruneOrbitsUnderPermutationsCompatibleWithPartition(
    const DynamicPartition& partition,
    absl::Span<const std::unique_ptr<SparsePermutation>> permutations,
    absl::Span<const int> permutation_indices, std::vector<int>* nodes) {
  VLOG(4) << "  Pruning [" << absl::StrJoin(*nodes, ", ") << "]";
  // ...
  if (nodes->size() <= 1) return;
  // ...
  // Find the permutations (among the given indices) whose cycles are each
  // contained in a single part of the partition, and merge the node
  // equivalence classes of tmp_partition_ along their cycles.
  std::vector<int>& tmp_nodes_on_support = tmp_stack_;
  DCHECK(tmp_nodes_on_support.empty());
  // ...
  for (const int p : permutation_indices) {
    const SparsePermutation& permutation = *permutations[p];
    // Quick check: a cycle larger than the part containing its first element
    // cannot be contained in a single part.
    bool compatible = true;
    for (int c = 0; c < permutation.NumCycles(); ++c) {
      const SparsePermutation::Iterator cycle = permutation.Cycle(c);
      if (cycle.size() >
          partition.SizeOfPart(partition.PartOf(*cycle.begin()))) {
        compatible = false;
        break;
      }
    }
    if (!compatible) continue;
    // Full check: all the elements of each cycle must lie in the same part.
    for (int c = 0; c < permutation.NumCycles(); ++c) {
      int part = -1;
      for (const int node : permutation.Cycle(c)) {
        if (partition.PartOf(node) != part) {
          if (part != -1) {
            compatible = false;
            break;
          }
          part = partition.PartOf(node);
        }
      }
      if (!compatible) break;
    }
    if (!compatible) continue;
    // ...
    MergeNodeEquivalenceClassesAccordingToPermutation(permutation,
                                                      &tmp_partition_, nullptr);
    for (const int node : permutation.Support()) {
      if (!tmp_node_mask_[node]) {
        tmp_node_mask_[node] = true;
        tmp_nodes_on_support.push_back(node);
      }
    }
  }
  // Prune "nodes": keep only one node per merged class of tmp_partition_.
  tmp_partition_.KeepOnlyOneNodePerPart(nodes);
  // Reset the temporary data structures for the next call.
  for (const int node : tmp_nodes_on_support) {
    tmp_node_mask_[node] = false;
    // ...
  }
  tmp_nodes_on_support.clear();
  VLOG(4) << "  Pruned: [" << absl::StrJoin(*nodes, ", ") << "]";
}
bool GraphSymmetryFinder::ConfirmFullMatchOrFindNextMappingDecision(
    const DynamicPartition& base_partition,
    const DynamicPartition& image_partition,
    const DynamicPermutation& current_permutation_candidate,
    int* min_potential_mismatching_part_index_io, int* next_base_node,
    int* next_image_node) const {
  *next_base_node = -1;
  *next_image_node = -1;
  // ...
  if (!absl::GetFlag(FLAGS_minimize_permutation_support_size)) {
    // The "loose ends" of the current permutation candidate are the preferred
    // candidates for the next mapping decision.
    for (const int loose_node : current_permutation_candidate.LooseEnds()) {
      DCHECK_GT(base_partition.ElementsInSamePartAs(loose_node).size(), 1);
      *next_base_node = loose_node;
      const int root = current_permutation_candidate.RootOf(loose_node);
      DCHECK_NE(root, loose_node);
      if (image_partition.PartOf(root) == base_partition.PartOf(loose_node)) {
        // Mapping the loose node onto its root would close a cycle: prefer it.
        *next_image_node = root;
        break;
      }
    }
    if (*next_base_node != -1) {
      // ...
      if (*next_image_node == -1) {
        *next_image_node = *image_partition
                                .ElementsInPart(
                                    base_partition.PartOf(*next_base_node))
                                .begin();
      }
      return false;
    }
  }
  // ...
  // Scan the parts, starting from *min_potential_mismatching_part_index_io,
  // looking for a non-singleton part whose fingerprint differs between the
  // base and the image partitions.
  const int initial_min_potential_mismatching_part_index =
      *min_potential_mismatching_part_index_io;
  for (; *min_potential_mismatching_part_index_io < base_partition.NumParts();
       ++*min_potential_mismatching_part_index_io) {
    const int p = *min_potential_mismatching_part_index_io;
    if (base_partition.SizeOfPart(p) != 1 &&
        base_partition.FprintOfPart(p) != image_partition.FprintOfPart(p)) {
      GetBestMapping(base_partition, image_partition, p, next_base_node,
                     next_image_node);
      return false;
    }
    // ...
    const int parent = base_partition.ParentOfPart(p);
    if (parent < initial_min_potential_mismatching_part_index &&
        base_partition.SizeOfPart(parent) != 1 &&
        base_partition.FprintOfPart(parent) !=
            image_partition.FprintOfPart(parent)) {
      GetBestMapping(base_partition, image_partition, parent, next_base_node,
                     next_image_node);
      return false;
    }
  }
  // ...
  // No mismatching part was found: the partitions fully match.
  for (int p = 0; p < base_partition.NumParts(); ++p) {
    if (base_partition.SizeOfPart(p) != 1) {
      CHECK_EQ(base_partition.FprintOfPart(p),
               image_partition.FprintOfPart(p));
    }
  }
  return true;
}
std::string GraphSymmetryFinder::SearchState::DebugString() const {
  return absl::StrFormat(
      "SearchState{ base_node=%d, first_image_node=%d,"
      " remaining_pruned_image_nodes=[%s],"
      " num_parts_before_trying_to_map_base_node=%d }",
      base_node, first_image_node,
      absl::StrJoin(remaining_pruned_image_nodes, " "),
      num_parts_before_trying_to_map_base_node);
}
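// For example (illustrative values), a state whose base node is 3, whose
// first_image_node has already been expanded (hence -1), with candidate
// images {7, 9} left and 5 parts before the mapping, prints as the single
// line (wrapped here for width):
//   SearchState{ base_node=3, first_image_node=-1,
//   remaining_pruned_image_nodes=[7 9],
//   num_parts_before_trying_to_map_base_node=5 }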