Commit
Showing 5 changed files with 340 additions and 1 deletion.
@@ -1,3 +1,5 @@
from networkx.algorithms import *
from networkx.algorithms import bipartite
from networky.algorithms.bipartite import rank_maximal_matching
from networkx.algorithms import approximation
from networky.algorithms.approximation import coalition_formation
@@ -0,0 +1 @@
from networky.algorithms.approximation.coalition_formation import *
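Since coalition_formation.py defines __all__ = ["match_and_merge"], this star re-export should make the function importable directly from the approximation subpackage. A minimal sketch, assuming the networky package is installed and importable:

# Minimal sketch (assumes the networky package is installed); relies on the star re-export above.
from networky.algorithms.approximation import match_and_merge

print(match_and_merge)  # <function match_and_merge at 0x...>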
networky/algorithms/approximation/coalition_formation.py
168 changes: 168 additions & 0 deletions
@@ -0,0 +1,168 @@
""" | ||
Implementation of the Social Aware Assignment of Passengers in Ridesharing | ||
The social aware assignment problem belongs to the field of coalition formation, which is an important research branch | ||
within multiagent systems. It analyses the outcome that results when a set of agents is partitioned into coalitions. | ||
Actually, Match_And_Merge model is a special case of simple Additively Separable Hedonic Games (ASHGs). | ||
Which was described in the article: | ||
Levinger C., Hazon N., Azaria A. Social Aware Assignment of Passengers in Ridesharing. - 2022, https://github.com/VictoKu1/ResearchAlgorithmsCourse1/raw/main/Article/2022%2C%20Chaya%20Amos%20Noam%2C%20Socially%20aware%20assignment%20of%20passengers%20in%20ride%20sharing.pdf | ||
Implementation of match_and_merge | ||
algorithm is based on the pseudocode from the article | ||
which is written by Victor Kushnir. | ||
Also, an online web page was built for running the algorithm: | ||
https://victoku1.pythonanywhere.com/ | ||
""" | ||
import networkx as nx
from networkx.utils import not_implemented_for

__all__ = ["match_and_merge"]

@not_implemented_for("directed") | ||
def match_and_merge(Graph: nx.Graph, k: int) -> list: | ||
""" | ||
An approximation algorithm for any k ≥ 3, provides a solution for the social aware assignment problem with a ratio of 1/(k-1). | ||
Social aware assignment definition: | ||
Given a number k and an undirected friendship graph G = (V, E) where (v_i , v_j) ∈ E if v_i and v_j are connected. | ||
The goal is to find an assignment P, which is a partition of the set V , such that ∀S ∈ P, |S|≤ k, and the value of P, | ||
V_P = |{(v_i , v_j) ∈ E: ∃S ∈ P where v_i ∈ S and v_j ∈ S}| is maximized. | ||
As described in the article under the section "Algorithm 1: Match and Merge". | ||
The article: | ||
Levinger C., Hazon N., Azaria A. Social Aware Assignment of Passengers in Ridesharing. - 2022, https://github.com/VictoKu1/ResearchAlgorithmsCourse1/raw/main/Article/2022%2C%20Chaya%20Amos%20Noam%2C%20Socially%20aware%20assignment%20of%20passengers%20in%20ride%20sharing.pdf. | ||
Function receives a graph G and a number k, and returns a partition P of G of all matched sets, so for ∀S ∈ P, |S|≤ k, and the value of P, V_P = |{(v_i , v_j) ∈ E: ∃S ∈ P where v_i ∈ S and v_j ∈ S}| is maximized. | ||
The algorithm consists of k - 1 rounds. Each round is composed of a matching phase followed by a merging phase. | ||
Specifically, in round l MnM computes a maximum matching, M_l ⊆ E_l , for G_l (where G_1 = G). In the merging phase, MnM creates a graph | ||
G_(l+1) that includes a unified node for each pair of matched nodes. G_(l+1) also includes all unmatched nodes, along with their | ||
edges to the unified nodes. Clearly, each node in V_l is composed of up-to l nodes | ||
from V_1. Finally, MnM returns the partition, P, of all the matched sets in a way that ∀S ∈ P, |S|≤ k, and the value of P, V_P = |{(v_i , v_j) ∈ E: ∃S ∈ P where v_i ∈ S and v_j ∈ S}| is maximized. | ||
:param G: Graph | ||
:param k: Number of passengers | ||
:return: A partition P of G of all matched sets so ∀S ∈ P, |S|≤ k, and the value of P, V_P = |{(v_i , v_j) ∈ E: ∃S ∈ P where v_i ∈ S and v_j ∈ S}| is maximized. | ||
Examples: | ||
Example where G={(v1,v2),(v2,v3),(v3,v4),(v4,v5),(v4,v6)} and k=1: | ||
>>> G = nx.Graph() | ||
>>> list_of_edges = [(4, 6), (4, 5), (3, 4), (2, 3), (1, 2)] | ||
>>> G.add_edges_from(list_of_edges) | ||
>>> k = 1 | ||
>>> print(match_and_merge(G, k)) | ||
[[1], [2], [3], [4], [5], [6]] | ||
Example where G={(v1,v2),(v2,v3),(v3,v4),(v4,v5),(v4,v6)} and k=2: | ||
>>> G = nx.Graph() | ||
>>> list_of_edges = [(4, 6), (4, 5), (3, 4), (2, 3), (1, 2)] | ||
>>> G.add_edges_from(list_of_edges) | ||
>>> k = 2 | ||
>>> print(match_and_merge(G, k)) | ||
[[1, 2], [3, 4], [5], [6]] | ||
Example where G={(v1,v2),(v2,v3),(v3,v4),(v4,v5),(v4,v6)} and k=3: | ||
>>> G = nx.Graph() | ||
>>> list_of_edges = [(4, 6), (4, 5), (3, 4), (2, 3), (1, 2)] | ||
>>> G.add_edges_from(list_of_edges) | ||
>>> k = 3 | ||
>>> print(match_and_merge(G, k)) | ||
[[1, 2], [3, 4, 6], [5]] | ||
Example where G={(v1,v2),(v2,v3),(v3,v4),(v4,v5),(v4,v6)} and k=4: | ||
>>> G = nx.Graph() | ||
>>> list_of_edges = [(4, 6), (4, 5), (3, 4), (2, 3), (1, 2)] | ||
>>> G.add_edges_from(list_of_edges) | ||
>>> k = 4 | ||
>>> print(match_and_merge(G, k)) | ||
[[1, 2], [3, 4, 5, 6]] | ||
""" | ||
    # Check that k is valid
    if Graph.number_of_nodes() < k:
        raise nx.NetworkXError(
            "k cannot be greater than the number of nodes in the Graph"
        )
    elif k < 0:
        raise nx.NetworkXError("k should satisfy 0 ≤ k ≤ |V(Graph)|")
    elif k == 0:
        return []
    # If k is 1, return a partition of the Graph in which each node is its own list
    elif k == 1:
        return sorted([[node] for node in Graph.nodes()])
    else:
        # The nodes and the edges of G_1 are sorted in descending order so that the maximum matching is as close as possible to the matching in the article
        G_1 = nx.Graph()
        G_1.add_nodes_from(sorted((Graph.nodes()), reverse=True))
        G_1.add_edges_from(sorted((Graph.edges()), reverse=True))
        # Implement G_l = (V_l, E_l) as a dictionary that maps the round number l to the graph G_l
        G: dict[int, nx.Graph] = {1: G_1}
        # M[l] will contain the maximum matching of G_l
        M: dict[int, list] = {}
        # Loop to find the l-th maximum matching and build G_(l+1) from it
        for l in range(1, k):
            # Initialization of the unified nodes list
            unified_nodes: list = []
            # Find the maximum matching of G_l
            M[l] = list(nx.max_weight_matching(G[l], weight=1))
            # Make sure that G_(l+1) is an empty graph (it was one of the steps of the algorithm in the article)
            if l + 1 not in G:
                G[l + 1] = nx.Graph()
            # Put the nodes of G_l in G_(l+1)
            G[l + 1].add_nodes_from(tuple(G[l].nodes()))
            # For every match in M_l, add a unified node to G_(l+1) so it can be found when needed
            for match in M[l]:
                # Add the match to the unified nodes list, so it is easier to find the unified nodes in each round
                unified_nodes.append(match)
                # Add a unified node to G_(l+1), which is a tuple of the nodes in the match
                G[l + 1].add_node(match)
                # Remove the nodes in the match from G_(l+1)
                G[l + 1].remove_nodes_from(list(match))
            # For every unified node in G_(l+1) and every v_q in G_(l+1) that is connected to it in G_l, add an edge between them in G_(l+1)
            for unified_node in unified_nodes:
                for v_q in G[l + 1].nodes():
                    if (
                        G[l].has_edge(unified_node, v_q)
                        or G[l].has_edge(unified_node[0], v_q)
                        or G[l].has_edge(unified_node[1], v_q)
                    ):
                        G[l + 1].add_edge(unified_node, v_q)
        # Initialization of the partition P: every node of G_k (which may be a nested tuple of matched nodes) becomes its own set
        P = [[unified_node] for unified_node in G[k].nodes()]
        P = tuplesflattener(P)
        # Return P
        return P


def tuplesflattener(P: list) -> list:
    """
    This function receives a list of partitions, which may contain nested tuples, and returns a list of lists which doesn't contain any tuples.

    :param P: A list of partitions, which may contain nested tuples
    :return: A list of lists which doesn't contain any tuples

    Examples:
    >>> P = [[(1, (2, (3, (4, 5))))]]
    >>> print(tuplesflattener(P))
    [[1, 2, 3, 4, 5]]
    """
    # Loop through every partition in P
    for partition in P:
        # While there are tuples in the partition, remove them and add their elements to the partition
        while any(isinstance(node, tuple) for node in partition):
            for node in partition:
                # If the node is a tuple, remove it and add its elements to the partition
                if isinstance(node, tuple):
                    partition.remove(node)
                    partition.extend(list(node))
        # Sort each partition
        partition.sort()
    # Sort P
    P.sort()
    return P
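To make the V_P formula from the docstring concrete, here is a minimal usage sketch. It assumes the module above is importable as networky.algorithms.approximation.coalition_formation; the helper value_of_partition is a hypothetical name introduced here for illustration and is not part of the module.

import networkx as nx

# Assumes the networky package (this repository) is installed and importable.
from networky.algorithms.approximation.coalition_formation import match_and_merge


def value_of_partition(G: nx.Graph, P: list) -> int:
    # Hypothetical helper: V_P = |{(v_i, v_j) in E : some S in P contains both v_i and v_j}|
    return sum(1 for u, v in G.edges() if any(u in S and v in S for S in P))


G = nx.Graph()
G.add_edges_from([(1, 2), (2, 3), (3, 4), (4, 5), (4, 6)])
P = match_and_merge(G, k=3)
print(P)  # [[1, 2], [3, 4, 6], [5]], as in the k=3 doctest above
print(value_of_partition(G, P))  # 3: edges (1, 2), (3, 4) and (4, 6) stay inside coalitions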
Empty file.
networky/algorithms/approximation/tests/test_coalition_formation.py
168 changes: 168 additions & 0 deletions
@@ -0,0 +1,168 @@
""" | ||
Testing the :mod:`networkx.algorithms.approximation.coalition_formation` module. | ||
Which is the implementation of the Social Aware Assignment of Passengers in Ridesharing | ||
The social aware assignment problem belongs to the field of coalition formation, which is an important research branch | ||
within multiagent systems. It analyses the outcome that results when a set of agents is partitioned into coalitions. | ||
Actually, Match_And_Merge model is a special case of simple Additively Separable Hedonic Games (ASHGs). | ||
Which was described in the article: | ||
Levinger C., Hazon N., Azaria A. Social Aware Assignment of Passengers in Ridesharing. - 2022, https://github.com/VictoKu1/ResearchAlgorithmsCourse1/raw/main/Article/2022%2C%20Chaya%20Amos%20Noam%2C%20Socially%20aware%20assignment%20of%20passengers%20in%20ride%20sharing.pdf. | ||
The match_and_merge algorithm is based on the pseudocode from the article | ||
which is written (as well as the tests) by Victor Kushnir. | ||
""" | ||
import math
import random

import pytest

import networkx as nx
from networky.algorithms.approximation.coalition_formation import match_and_merge

def small_chain_graph():
    G = nx.Graph()
    list_of_edges = [(1, 2), (2, 3), (3, 4), (4, 5), (4, 6)]
    G.add_edges_from(list_of_edges)
    return G


def clique_graph_of_size_3():
    G = nx.Graph()
    list_of_edges = [(1, 2), (2, 3), (3, 1)]
    G.add_edges_from(list_of_edges)
    return G

class Test_coalition_formation:
    def test_empty_graph_returns_empty_list(self):
        G_empty = nx.Graph()
        assert match_and_merge(G_empty, k=0) == []

    def test_small_chain_graph_with_k_4_returns_correct_partition(self):
        G_1 = small_chain_graph()
        assert match_and_merge(G_1, k=4) == [[1, 2], [3, 4, 5, 6]]

    def test_small_chain_graph_with_k_3_returns_correct_partition(self):
        G_1 = small_chain_graph()
        assert match_and_merge(G_1, k=3) == [[1, 2], [3, 4, 6], [5]]

    def test_small_chain_graph_with_k_2_returns_correct_partition(self):
        G_1 = small_chain_graph()
        assert match_and_merge(G_1, k=2) == [[1, 2], [3, 4], [5], [6]]

    def test_clique_graph_of_size_3_with_k_3_returns_correct_partition(self):
        G_clique_3 = clique_graph_of_size_3()
        assert match_and_merge(G_clique_3, k=3) == [[1, 2, 3]]

    def test_approximation_ratio(self):
        # For each n between 5 and 68, generate a complete graph with n nodes and check the ratio for k=2
        for n in range(5, 69):
            G = nx.complete_graph(n)
            P = match_and_merge(G, 2)
            value_of_P = sum(
                len([(u, v) for u, v in G.edges() if u in S and v in S]) for S in P
            )
            optimal_value = len(nx.algorithms.max_weight_matching(G, weight=1))
            approximation_ratio = value_of_P / optimal_value
            assert approximation_ratio >= 0.99999

    def test_clique_graph_with_k_in_range_every_node_in_exactly_one_partition(self):
        # Check that every node is in exactly one set of the partition
        for n in range(5, 15):
            G = nx.complete_graph(n)
            for k in range(5, 15):
                if k <= n:
                    P = match_and_merge(G, k)
                    assert all(len([p for p in P if node in p]) == 1 for node in G.nodes())

    def test_clique_graph_with_k_in_range_number_of_partitions_at_most_ceil_n_2(self):
        # Check that the number of partitions is at most ceil(n/2)
        for n in range(5, 15):
            G = nx.complete_graph(n)
            for k in range(5, 15):
                if k <= n:
                    P = match_and_merge(G, k)
                    assert len(P) <= math.ceil(G.number_of_nodes() / 2)

    def test_k_greater_than_n_raises_error(self):
        # Check that it raises an error when k > n
        for n in range(5, 15):
            G = nx.complete_graph(n)
            for k in range(5, 15):
                if k > n:
                    with pytest.raises(nx.NetworkXError):
                        match_and_merge(G, k)

    def test_random_graph_with_k_in_range_returns_correct_partition(self):
        # For each n in range(5, 15), generate a random graph with n nodes and check every k in range(5, 15) with k <= n
        for n in range(5, 15):
            p = 0.5
            G = nx.gnp_random_graph(n, p)
            for k in range(5, 15):
                if k <= n:
                    P = match_and_merge(G, k)
                    assert all(len(part) <= k for part in P)

    def test_random_graph_with_k_in_range_every_node_in_exactly_one_partition(self):
        # Check that every node is in exactly one set of the partition
        for n in range(5, 15):
            p = 0.5
            G = nx.gnp_random_graph(n, p)
            for k in range(5, 15):
                if k <= n:
                    P = match_and_merge(G, k)
                    assert all(len([part for part in P if node in part]) == 1 for node in G.nodes())

    def test_maximum_matching(self):
        # Check that for k=2 the number of matched pairs equals the size of a maximum matching
        for n in range(5, 15):
            p = 0.5
            G = nx.gnp_random_graph(n, p)
            P = [tuple(p) for p in match_and_merge(G, 2) if len(p) > 1]
            G_test = nx.Graph()
            G_test.add_nodes_from(sorted((G.nodes()), reverse=True))
            G_test.add_edges_from(sorted((G.edges()), reverse=True))
            assert len(P) == len(
                [
                    tuple(sorted(p))
                    for p in sorted(nx.max_weight_matching(G_test, weight=1))
                ]
            )

    def test_partition_is_a_social_aware_assignment_check_size_of_subset(self):
        for n in range(5, 15):
            p = 0.5
            G = nx.gnp_random_graph(n, p)
            for k in range(5, n):
                P = match_and_merge(G, k)
                # Check that every set in the partition has at most k nodes
                assert all(len(part) <= k for part in P)

    def test_partition_is_a_social_aware_assignment_V_P(self):
        for n in range(5, 15):
            p = 0.5
            G = nx.gnp_random_graph(n, p)
            P = match_and_merge(G, 2)
            # Check that V_P is at least the size of a maximum matching, which is the optimum for k=2
            V_P = 0
            for part in P:
                for i in part:
                    for j in part:
                        # Count each edge once (i < j), matching the definition of V_P
                        if i < j and G.has_edge(i, j):
                            V_P += 1
            assert V_P >= len(
                [tuple(sorted(p)) for p in sorted(nx.max_weight_matching(G, weight=1))]
            )

    def test_disconnected_components_weighted_graph_with_k_5_returns_correct_partition(
        self,
    ):
        # Check that the partition of graph G is correct for a disconnected graph
        G = small_chain_graph()
        G.add_edges_from([(7, 8), (8, 9), (9, 7)])
        # Add random weights to the edges
        for u, v in G.edges():
            G[u][v]["weight"] = random.randint(1, 205)
        assert match_and_merge(G, k=5) == [[1, 2], [3, 4, 5, 6], [7, 8, 9]]
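Beyond the test suite, a short standalone sanity check can exercise the same invariants asserted above: every coalition has at most k members and every node lands in exactly one coalition. This is a sketch only, not part of the tests, and it assumes match_and_merge is importable from the installed networky package.

import networkx as nx

# Assumes the networky package (this repository) is installed and importable.
from networky.algorithms.approximation.coalition_formation import match_and_merge

G = nx.gnp_random_graph(10, 0.5, seed=1)
k = 3
P = match_and_merge(G, k)

# Every coalition respects the size bound |S| <= k.
assert all(len(S) <= k for S in P)
# Every node appears in exactly one coalition.
assert all(sum(1 for S in P if node in S) == 1 for node in G.nodes())
print("invariants hold for", P)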