Skip to content

Commit

Permalink
refactor: remove legacy two-hop implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
DanielSeemaier committed Nov 14, 2024
1 parent e5efcee commit c9e4d52
Show file tree
Hide file tree
Showing 6 changed files with 6 additions and 77 deletions.
2 changes: 2 additions & 0 deletions kaminpar-dist/coarsening/clustering/lp/global_lp_clusterer.h
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
#include "kaminpar-dist/datastructures/distributed_graph.h"

namespace kaminpar::dist {

class GlobalLPClusterer : public Clusterer {
public:
explicit GlobalLPClusterer(const Context &ctx);
Expand All @@ -31,4 +32,5 @@ class GlobalLPClusterer : public Clusterer {
private:
std::unique_ptr<class GlobalLPClusteringImplWrapper> _impl;
};

} // namespace kaminpar::dist
73 changes: 0 additions & 73 deletions kaminpar-shm/coarsening/clustering/lp_clusterer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -122,9 +122,6 @@ class LPClusteringImpl final
case TwoHopStrategy::CLUSTER_THREADWISE:
Base::cluster_two_hop_nodes_threadwise();
break;
case TwoHopStrategy::LEGACY:
handle_two_hop_clustering_legacy();
break;
case TwoHopStrategy::DISABLE:
break;
}
Expand Down Expand Up @@ -160,76 +157,6 @@ class LPClusteringImpl final
return (1.0 - 1.0 * _current_num_clusters / _graph->n()) <= _lp_ctx.two_hop_threshold;
}

// @todo: old implementation that should no longer be used
//
// Legacy 2-hop clustering: pairs up singleton clusters that share the same
// "favored" cluster (a cluster they would like to join but cannot, e.g. due
// to weight constraints). Works in two parallel phases over _favored_clusters:
//   (1) filter out nodes that are ineligible for 2-hop clustering, and
//   (2) rendezvous eligible nodes pairwise through their common favored
//       leader's _favored_clusters slot using atomic compare-exchange.
// Decrements _current_num_clusters for every successful merge.
void handle_two_hop_clustering_legacy() {
// Reset _favored_clusters entries for nodes that are not considered for
// 2-hop clustering, i.e., nodes that are already clustered with at least one other node or
// nodes that have more weight than max_weight/2.
// Set _favored_clusters to dummy entry _graph->n() for isolated nodes
tbb::parallel_for<NodeID>(0, _graph->n(), [&](const NodeID u) {
// u != cluster(u) means u already joined another node's cluster.
if (u != cluster(u)) {
Base::_favored_clusters[u] = u;
} else {
const auto initial_weight = initial_cluster_weight(u);
const auto current_weight = ClusterWeightBase::cluster_weight(u);
const auto max_weight = max_cluster_weight(u);
// current_weight != initial_weight: some other node joined u's cluster;
// current_weight > max_weight / 2: merging two such clusters could
// exceed the maximum cluster weight.
if (current_weight != initial_weight || current_weight > max_weight / 2) {
Base::_favored_clusters[u] = u;
}
}
});

tbb::parallel_for<NodeID>(0, _graph->n(), [&](const NodeID u) {
// Abort once we have merged enough clusters to achieve the configured minimum shrink factor
if (Base::should_stop()) {
return;
}

// Skip nodes that should not be considered during 2-hop clustering
// (marked with _favored_clusters[u] == u in the first phase).
const NodeID favored_leader = Base::_favored_clusters[u];
if (favored_leader == u) {
return;
}

// Rendezvous protocol: _favored_clusters[favored_leader] serves as a
// single-slot mailbox shared by all nodes favoring the same leader.
do {
// If this works, we set ourself as clustering partners for nodes that have the same favored
// cluster we have
NodeID expected_value = favored_leader;
if (__atomic_compare_exchange_n(
&Base::_favored_clusters[favored_leader],
&expected_value,
u,
false,
__ATOMIC_SEQ_CST,
__ATOMIC_SEQ_CST
)) {
// Slot was empty (held the sentinel favored_leader): we are now
// registered as the waiting partner; another node will pick us up.
break;
}

// If this did not work, there is another node that has the same favored cluster
// Try to join the cluster of that node
// (expected_value was updated by the failed CAS to the current slot
// content, i.e., the waiting partner's ID.)
const NodeID partner = expected_value;
if (__atomic_compare_exchange_n(
&Base::_favored_clusters[favored_leader],
&expected_value,
favored_leader,
false,
__ATOMIC_SEQ_CST,
__ATOMIC_SEQ_CST
)) {
// We claimed the waiting partner and reset the slot to the sentinel
// for the next pair. Now try the actual merge; it may still fail if
// the combined weight would exceed the partner's maximum.
if (ClusterWeightBase::move_cluster_weight(
u, partner, ClusterWeightBase::cluster_weight(u), max_cluster_weight(partner)
)) {
move_node(u, partner);
--_current_num_clusters;
}

break;
}
// Both CAS attempts lost a race: the slot changed concurrently; retry.
} while (true);
});
}

public:
[[nodiscard]] NodeID initial_cluster(const NodeID u) {
return u;
Expand Down
2 changes: 2 additions & 0 deletions kaminpar-shm/coarsening/clustering/noop_clusterer.h
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
#include "kaminpar-common/datastructures/static_array.h"

namespace kaminpar::shm {

class NoopClusterer : public Clusterer {
public:
NoopClusterer() = default;
Expand Down Expand Up @@ -43,4 +44,5 @@ class NoopClusterer : public Clusterer {
tbb::parallel_for<NodeID>(0, graph.n(), [&](const NodeID i) { clustering[i] = i; });
}
};

} // namespace kaminpar::shm
3 changes: 0 additions & 3 deletions kaminpar-shm/context_io.cc
Original file line number Diff line number Diff line change
Expand Up @@ -303,8 +303,6 @@ std::ostream &operator<<(std::ostream &out, const TwoHopStrategy strategy) {
return out << "cluster";
case TwoHopStrategy::CLUSTER_THREADWISE:
return out << "cluster-threadwise";
case TwoHopStrategy::LEGACY:
return out << "legacy";
}

return out << "<invalid>";
Expand All @@ -317,7 +315,6 @@ std::unordered_map<std::string, TwoHopStrategy> get_two_hop_strategies() {
{"match-threadwise", TwoHopStrategy::MATCH_THREADWISE},
{"cluster", TwoHopStrategy::CLUSTER},
{"cluster-threadwise", TwoHopStrategy::CLUSTER_THREADWISE},
{"legacy", TwoHopStrategy::LEGACY},
};
}

Expand Down
1 change: 0 additions & 1 deletion kaminpar-shm/kaminpar.h
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,6 @@ enum class TwoHopStrategy {
MATCH_THREADWISE,
CLUSTER,
CLUSTER_THREADWISE,
LEGACY,
};

enum class IsolatedNodesClusteringStrategy {
Expand Down
2 changes: 2 additions & 0 deletions kaminpar-shm/label_propagation.h
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
#include "kaminpar-common/timer.h"

namespace kaminpar {

struct LabelPropagationConfig {
// Data structures used to accumulate edge weights for gain value calculation
using RatingMap = ::kaminpar::RatingMap<shm::EdgeWeight, shm::NodeID>;
Expand Down Expand Up @@ -2302,4 +2303,5 @@ class OwnedRelaxedTwoLevelClusterWeightVector {
private:
TwoLevelClusterWeightVector _cluster_weights;
};

} // namespace kaminpar

0 comments on commit c9e4d52

Please sign in to comment.