Skip to content

Commit

Permalink
new community handling
Browse files Browse the repository at this point in the history
  • Loading branch information
N-Maas committed Sep 12, 2024
1 parent 0e6d505 commit e888028
Show file tree
Hide file tree
Showing 2 changed files with 39 additions and 20 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -176,26 +176,9 @@ class MultilevelVertexPairRater {
fillRatingMap<ScorePolicy>(hypergraph, u, tmp_ratings, cluster_ids);
}

int cpu_id = THREAD_ID;
const HypernodeWeight weight_u = cluster_weight[u];
const PartitionID community_u_id = hypergraph.communityID(u);
bool ignore_communities = false;
if (may_ignore_communities) {
ignore_communities = true;
// ignore communities if no contraction within the community is possible
for (const auto& entry: tmp_ratings) {
const HypernodeID tmp_target = entry.key;
const HypernodeWeight target_weight = cluster_weight[entry.key];
if ( tmp_target != u && weight_u + target_weight <= max_allowed_node_weight
&& similarity_policy.acceptContraction(hypergraph, _context, u, tmp_target)
&& community_u_id == hypergraph.communityID(tmp_target) ) {
// TODO: fixed vertices?!
ignore_communities = false;
break;
}
}
}

int cpu_id = THREAD_ID;
RatingType max_rating = std::numeric_limits<RatingType>::min();
HypernodeID target = std::numeric_limits<HypernodeID>::max();
HypernodeID target_id = std::numeric_limits<HypernodeID>::max();
Expand All @@ -219,7 +202,7 @@ class MultilevelVertexPairRater {

DBG << "r(" << u << "," << tmp_target << ")=" << tmp_rating;
if ( accept_fixed_vertex_contraction &&
(ignore_communities || community_u_id == hypergraph.communityID(tmp_target)) &&
(may_ignore_communities || community_u_id == hypergraph.communityID(tmp_target)) &&
AcceptancePolicy::acceptRating( tmp_rating, max_rating,
target_id, tmp_target_id, cpu_id, _already_matched) ) {
max_rating = tmp_rating;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ class ThreePhaseCoarsener : public ICoarsener,
_clustering_data(_hg.initialNumNodes(), context),
_num_nodes_tracker(),
_initial_num_nodes(_hg.initialNumNodes()),
_num_communities(kInvalidPartition),
_current_vertices(),
_pass_nr(0),
_progress_bar(_hg.initialNumNodes(), 0, false),
Expand Down Expand Up @@ -153,6 +154,10 @@ class ThreePhaseCoarsener : public ICoarsener,
bool did_second_lp = false;
bool did_second_two_hop = false;

if (shouldIgnoreCommunities(hierarchy_contraction_limit)) {
cc.may_ignore_communities = true;
}

// TODO: degree zero nodes?!
// Phase 1: LP coarsening, but forbid contraction of low degree nodes onto high degree nodes
coarseningRound("first_lp_round", "First LP round",
Expand All @@ -171,10 +176,15 @@ class ThreePhaseCoarsener : public ICoarsener,
}

// Phase 3: LP and two-hop coarsening with all contractions allowed (as well as contracting size 1 communities)
cc.may_ignore_communities = true; // TODO: test/disable this
cc.contract_aggressively = true;
cc.hierarchy_contraction_limit = target_contraction_size;
if (current_num_nodes > target_contraction_size) {
// lazy initialization of community count, since it requires scanning all nodes
initializeCommunityCount(current_hg);
if (shouldIgnoreCommunities(target_contraction_size)) {
cc.may_ignore_communities = true;
}

DBG << "Start Second LP round: " << V(_num_nodes_tracker.currentNumNodes()) << V(target_contraction_size);
coarseningRound("second_lp_round", "Second LP round",
current_hg, _lp_clustering, _always_accept_policy, cc);
Expand Down Expand Up @@ -281,6 +291,31 @@ class ThreePhaseCoarsener : public ICoarsener,
_context.coarsening.contraction_limit );
}

void initializeCommunityCount(const Hypergraph& hypergraph) {
if (_num_communities == kInvalidPartition) {
_num_communities =
tbb::parallel_reduce(
tbb::blocked_range<HypernodeID>(ID(0), hypergraph.initialNumNodes()),
0, [&](const tbb::blocked_range<HypernodeID>& range, PartitionID init) {
PartitionID my_range_num_communities = init;
for (HypernodeID hn = range.begin(); hn < range.end(); ++hn) {
if ( hypergraph.nodeIsEnabled(hn) ) {
my_range_num_communities = std::max(my_range_num_communities, hypergraph.communityID(hn) + 1);
}
}
return my_range_num_communities;
},
[](const PartitionID lhs, const PartitionID rhs) {
return std::max(lhs, rhs);
});
_num_communities = std::max(_num_communities, 1);
}
}

// Returns true if community structure should be ignored for coarsening:
// once there are more communities than the contraction limit, contractions
// restricted to within communities could never reach the limit.
// Returns false while the community count is still uninitialized
// (see initializeCommunityCount).
bool shouldIgnoreCommunities(HypernodeID hierarchy_contraction_limit) {
  if (_num_communities == kInvalidPartition) {
    return false;  // count not yet computed
  }
  return UL(_num_communities) > hierarchy_contraction_limit;
}

LPClustering _lp_clustering;
TwoHopClustering _two_hop_clustering;
SimilarityPolicy _similarity_policy;
Expand All @@ -289,6 +324,7 @@ class ThreePhaseCoarsener : public ICoarsener,
ConcurrentClusteringData _clustering_data;
NumNodesTracker _num_nodes_tracker;
HypernodeID _initial_num_nodes;
PartitionID _num_communities;
parallel::scalable_vector<HypernodeID> _current_vertices;
int _pass_nr;
utils::ProgressBar _progress_bar;
Expand Down

0 comments on commit e888028

Please sign in to comment.