def _build_clusters_no_overlapping(self, graph):
    """Partition the graph into non-overlapping clusters, one hierarchy
    level per power-of-two radius.

    Level 0 holds one singleton cluster per graph node; each level i+1
    greedily groups every still-unclustered peer within weighted distance
    2**(i+1) of a seed peer, so no peer belongs to two clusters of the
    same level.

    NOTE(review): assumes node labels are the strings '0'..str(peer_count-1);
    confirm against the graph loader.
    """
    # Debug dump only -- guarded so graphs without nodes '0'/'7' don't crash.
    if '0' in graph and '7' in graph:
        print(nx.dijkstra_path(graph, '0', '7', 'weight'))
        print(nx.dijkstra_path_length(graph, '0', '7', 'weight'))

    # All-pairs weighted shortest paths, computed once and reused for both
    # the debug dump and the eccentricity/diameter calculation.
    sp = dict(nx.shortest_path_length(graph, weight='weight'))
    for path in sp.items():
        print(path)
    ecc = nx.eccentricity(graph, sp=sp)
    diameter = nx.diameter(graph, e=ecc)
    print('The graph diameter is ', diameter)

    # Hierarchy height: enough doubling levels to cover the whole diameter.
    height_of_cluster = math.ceil(math.log(diameter, 2)) + 1
    print('height_of_the hierarchy is ', height_of_cluster)

    # Lowest level: one singleton cluster per node.
    print('lowest level clusters')
    # Register the level once instead of once per node (matches the other
    # cluster builders; assumes add_cluster_level is idempotent per level).
    self._network.add_cluster_level(0)
    for n in graph.nodes():
        paths = nx.single_source_dijkstra_path_length(graph, n, 0, weight='weight')
        print(paths)
        cluster_graph = graph.subgraph([n])
        cluster = Cluster('c' + str(n) + '_l' + '0', cluster_graph, 0)
        self._network.add_cluster(0, cluster)
        self._network.draw_cluster(cluster.cluster_id)

    # Upper levels: each seed peer absorbs every not-yet-clustered peer
    # reachable within the level's distance limit.
    for i in range(int(height_of_cluster)):
        self._network.add_cluster_level(i + 1)
        print('AT LEVEL ------- ', i + 1)
        distance = pow(2, i + 1)
        print('THE DISTANCE LIMIT IS ', distance)
        clustered_peers = []
        for n in range(self._peer_count):
            print('clustering peer ', n)
            if n in clustered_peers:
                print('peer ', n, ' already clustered')
                continue
            paths_found = nx.single_source_dijkstra_path_length(
                graph, str(n), distance, weight='weight')
            peers_to_cluster = []
            print('paths found in the level ', paths_found)
            for peer in paths_found:
                if int(peer) in clustered_peers:
                    print('peer already clustered in another cluster')
                else:
                    clustered_peers.append(int(peer))
                    peers_to_cluster.append(int(peer))
            cluster_graph = graph.subgraph([str(p) for p in peers_to_cluster])
            cluster = Cluster('c' + str(n) + '_l' + str(i + 1), cluster_graph, i + 1)
            self._network.add_cluster(i + 1, cluster)
def __init__(self, operationCount, peerCount, graphInput):
    """Load the weighted edge-list graph, report its basic properties, and
    either build and persist a fresh network (then exit) or load a
    previously saved one from `graphInput + '_network'`.
    """
    print("Inside Simulation init")
    self._op_count = int(operationCount)
    self._peer_count = int(peerCount)
    plt.axis('off')

    g = nx.read_weighted_edgelist(graphInput)
    print(g)

    # Per-node degree / clustering-coefficient report.
    print("node degree clustering")
    for v in nx.nodes(g):
        print('%s %d %f' % (v, nx.degree(g, v), nx.clustering(g, v)))

    # Dump the adjacency list to the terminal; fall back to the underlying
    # binary buffer when write_adjlist rejects the text stream.
    try:
        nx.write_adjlist(g, sys.stdout)
    except TypeError:
        nx.write_adjlist(g, sys.stdout.buffer)

    # Weighted diameter via per-node eccentricities.
    all_pairs = nx.shortest_path_length(g, weight='weight')
    ecc = nx.eccentricity(g, sp=dict(all_pairs))
    self._diameter = nx.diameter(g, e=ecc)
    print('The graph diameter is ', self._diameter)
    self._height_of_cluster = math.ceil(math.log(self._diameter, 2)) + 1

    network_file = graphInput + '_network'
    if os.path.exists(network_file):
        print("LOADING NETWORK FROM INPUT FILE")
        self.load_network(network_file)
    else:
        print("CREATING NEW NETWORK")
        self._network = Network(g)
        self._setup_peers(g)
        self._build_clusters_at_least_one_and_at_most_logn(g)
        self._build_tree(g)
        self.save_network(network_file)
        exit(0)
# Earlier export experiments, kept for reference:
#   nx.write_gml(G, "test_gml"); json_graph.node_link_data(G)
#   nx.write_edgelist(G, "test.edgeList")
# Print the adjacency list to the terminal.
print("THE ADJACENCY LIST IS ")
for entry in G.adjacency():
    print(entry)

# Per-node neighbor dump; every node is expected to have at least one edge.
# NOTE(review): `n` (node count) is defined earlier, outside this chunk.
for i in range(0, n):
    print("NODE ", i)
    neighbors = list(G.neighbors(i))
    print(neighbors)
    assert len(neighbors) > 0

# Weighted diameter via eccentricities, compared against the unweighted one.
all_pairs = nx.shortest_path_length(G, weight='weight')
ecc = nx.eccentricity(G, sp=dict(all_pairs))
diameter = nx.diameter(G, e=ecc)
print("OK diameter without weight?", nx.diameter(G))
print('The graph diameter with weight is ', diameter)
# nx.write_weighted_edgelist(G, str(n)+'_'+str(p)+'diamter'+str(diameter)+'_newtest.weighted.edgelist')
# -- Order generation (legacy Python 2 code: uses `print` statements) --------
# Builds up to `num_max_orders` orders between node pairs whose shortest-path
# length is at least `min_len_shortest_path` (defined outside this chunk).
# Italian comments, translated: num_max_orders = maximum number of orders,
# honored only when limited_num_orders is True; limited_num_orders = True
# means at most num_max_orders orders are saved ("some useless loop cycles
# still run, but never mind"); unique_sources = True means every order gets a
# distinct source node (the inner loop breaks after the first order per n1).
# NOTE(review): the trailing `except nx.NetworkXNoPath` clause is truncated in
# this view -- its body (presumably `pass`/`continue`) lies outside this chunk.
# NOTE(review): `supply` is drawn via randint(1, min_capacity); confirm that
# min_capacity >= 1 upstream, otherwise randint raises ValueError.
num_max_orders = orders_configuration_parameters["num_max_orders"] # numero massimo di ordini, solo se limited_num_orders = true limited_num_orders = True # se = True => vengono salvati al max num_max_orders ordini unique_sources = orders_configuration_parameters["unique_sources"] # se = True => i nodi partenza degli ordini saranno tutti diversi nodes = graph.nodes() orders = [] # lista di ordini order_count = 0 for n1 in nodes: tmp = list(nodes) tmp.remove(n1) for n2 in tmp: if limited_num_orders and len(orders) == num_max_orders: # vengono fatti comunque dei cicli inutili, ma pazienza break try: shortest_path_len = nx.shortest_path_length(graph, source=n1, target=n2) # print str(n1) + " -> " + str(n2) + " - shortest path length: " + str(shortest_path_len) if shortest_path_len >= min_len_shortest_path: print str(n1) + " -> " + str(n2) + " - shortest path length: " + str(shortest_path_len) order_name = "order_" + str(order_count) order_count += 1 orders.append({ order_name: { 'from': n1, 'to': n2, 'supply': randint(1, min_capacity) } }) if unique_sources: break except nx.NetworkXNoPath as e:
# -- Train-arc preparation (legacy Python 2 code: uses `print` statements) ---
# Takes the largest weakly-connected component of the truck graph, computes
# its unweighted diameter by brute-force all-pairs shortest-path queries
# (silently skipping unreachable directed pairs), then derives the number of
# train arcs and the minimum endpoint distance from the two percentage
# parameters defined outside this chunk.
# NOTE(review): nx.weakly_connected_component_subgraphs was removed in
# NetworkX 2.4; modern code would use
# graph.subgraph(max(nx.weakly_connected_components(graph), key=len)).
# NOTE(review): nx.diameter(largest_component) would replace the hand-rolled
# double loop only if the component were strongly connected -- this is a
# DiGraph, so the loop's skip-on-NetworkXNoPath behavior is deliberate.
largest_component = max(nx.weakly_connected_component_subgraphs(graph), key=len) print len(largest_component) print len(graph.edges()) # diameter = nx.diameter(largest) nodes = largest_component.nodes() print len(nodes) # print diameter diameter = 0 # create train arcs inside the random network of truck for n1 in nodes: tmp = list(nodes) tmp.remove(n1) for n2 in tmp: try: shortest_path_len = nx.shortest_path_length(largest_component, source=n1, target=n2) if shortest_path_len > diameter: diameter = shortest_path_len print str(n1) + " -> " + str(n2) + " - shortest path length: " + str(shortest_path_len) except nx.NetworkXNoPath as e: pass # print str(n1) + " -> " + str(n2) + " - no shortest path" print diameter # sys.exit() arc_train = [] arc_train_count = 0 distance_for_train = math.ceil(float(diameter*percentage_of_diameter_train_link)/float(100)) num_arc_train = math.ceil(float(len(largest_component.edges())*percentage_train)/float(100)) unique_sources = True train_graph = nx.DiGraph()
import pickle
import random

import networkx as nx  # `from networkx import nx` only worked on old releases
import numpy as np

# Sample shortest-path lengths between 1000 random node pairs of a pickled
# road-network adjacency matrix, printing each reachable pair's distance.
# NOTE(review): the input path is machine-specific; parameterize if reused.
# NOTE(review): pickle.load is unsafe on untrusted input -- this file is
# assumed to be a trusted local artifact.
with open("/data/wuning/map-matching/allGraph", "rb") as f:
    graphData = pickle.load(f)

stats = []  # kept: may be consumed by code outside this chunk
adj = np.matrix(graphData)[:15500, :15500]
# NOTE(review): from_numpy_matrix/np.matrix were removed in NetworkX 3.0 /
# deprecated in NumPy; migrate to nx.from_numpy_array(np.asarray(...)) when
# the environment is upgraded.
G = nx.from_numpy_matrix(adj, create_using=nx.DiGraph())

for _ in range(1000):
    start = random.randint(0, 15000)
    end = random.randint(0, 15000)
    try:
        shortest_path_length = nx.shortest_path_length(G, source=start, target=end)
    except (nx.NetworkXNoPath, nx.NodeNotFound):
        # Unreachable pair (or sampled id missing from the trimmed graph):
        # skip it. The original bare `except:` also swallowed KeyboardInterrupt.
        continue
    print(shortest_path_length)
# Builds the cluster hierarchy so that every peer belongs to at least one and
# at most log2(peer_count) clusters per level.
# Level 0: one singleton cluster per node. Each upper level i+1 gathers the
# peers reachable within weighted distance 2**(i+1) of each seed peer, but a
# candidate cluster is only kept if it neither pushes any peer above the
# log(n) membership bound (self.at_most_logn) nor duplicates an existing
# cluster's peer set. A second pass (the `while missing_cluster_id > -1`
# loop) builds a cluster around any peer left with no membership
# (self.at_least_one), shrinking the candidate peer set one peer at a time
# until the log(n) bound holds again. Cluster ids are disambiguated with a
# per-name occurrence counter. The final level's cluster is marked as the
# hierarchy root.
# NOTE(review): at_least_one/at_most_logn appear to return a negative value
# when the constraint holds and an offending peer id otherwise -- the `< 0`
# checks and asserts rely on that convention; confirm against their
# definitions (outside this chunk).
# NOTE(review): the physical line breaks in this chunk fall mid-statement
# (collapsed formatting); the code below is kept byte-identical.
def _build_clusters_at_least_one_and_at_most_logn(self, graph): paths_for_diameter = nx.shortest_path_length(graph, weight='weight') for path in nx.shortest_path_length(graph, weight='weight'): print(path) ecc = nx.eccentricity(graph, sp=dict(paths_for_diameter)) diameter = nx.diameter(graph, e=ecc) print('The graph diameter is ', diameter) height_of_cluster = math.ceil(math.log(diameter, 2)) + 1 self._network.set_height_of_clusters(height_of_cluster) print('height_of_the hierarchy is ', height_of_cluster) # lowest level clusters print('lowest level clusters') self._network.add_cluster_level(0) # level 0 clusters for n in graph.nodes(): paths = nx.single_source_dijkstra_path_length(graph, n, 0, weight='weight') print(paths) cluster_graph = graph.subgraph([n]) cluster = Cluster('c' + str(n) + '_l' + '0', cluster_graph, 0, str(n)) self._network.add_cluster(0, cluster) # self._network.draw_cluster(cluster.cluster_id) # form upper level clusters for i in range(int(height_of_cluster)): self._network.add_cluster_level(i + 1) clustered_peers_list = [] # for naming the cluster properly cluster_ids_list = [] print ('AT LEVEL ----- ', i + 1) distance = pow(2, i + 1) print('THE DISTANCE LIMIT IS ', distance) clusterize = {} n = 0 # iterate over the peers once while n < self._peer_count: print('clustering peer ', n) paths_found = nx.single_source_dijkstra_path_length(graph, str(n), distance, weight='weight') peers_to_cluster = [] print('paths found in the level ', paths_found) tmp_peers_list_to_cluster = [] for peer in paths_found: peers_to_cluster.append(peer) # clustered_peers_list.append(peer) tmp_peers_list_to_cluster.append(peer) c_id = 'c' + str(n) + '_l' + str(i + 1) # for naming the clusters properly cluster_ids_list.append(c_id) cluster_ids_count = Counter(cluster_ids_list) c_id = c_id + "_" + str(cluster_ids_count[c_id]) temp_clustered_peers_list = copy.deepcopy(clustered_peers_list) temp_clustered_peers_list.extend(tmp_peers_list_to_cluster) duplicate = False if 
self.at_most_logn(Counter(temp_clustered_peers_list)) < 0: # check if duplicate peers or not for inner_key, inner_value in clusterize.items(): if Counter(inner_value) == Counter(tmp_peers_list_to_cluster): print ("FOUND DUPLICATE CLUSTER") duplicate = True break print("DOESN'T VIOLATE AT MOST LOG N") if not duplicate: clustered_peers_list.extend(tmp_peers_list_to_cluster) clusterize[c_id] = peers_to_cluster n += 1 print("CHECKING MEMBERSHIP") print(self.at_least_one(Counter(clustered_peers_list)) < 0) print(self.at_most_logn(Counter(clustered_peers_list)) < 0) assert (self.at_most_logn(Counter(clustered_peers_list)) < 0) # if not built yet build a cluster and remove peers who appear more than logn times missing_cluster_id = self.at_least_one(Counter(clustered_peers_list)) while missing_cluster_id > -1: print("PEER ", missing_cluster_id, " IS MISSING, BUILDING A CLUSTER AROUND IT.") paths_found = nx.single_source_dijkstra_path_length(graph, str(missing_cluster_id), distance, weight='weight') peers_to_cluster = [] print('paths found in the level ', paths_found) tmp_peers_list_to_cluster = [] for peer in paths_found: peers_to_cluster.append(peer) # clustered_peers_list.append(peer) tmp_peers_list_to_cluster.append(peer) # todo creating single node cluster tmp_peers_list_to_cluster = [str(missing_cluster_id)] c_id = 'c' + str(missing_cluster_id) + '_l' + str(i + 1) # for naming the clusters properly cluster_ids_list.append(c_id) cluster_ids_count = Counter(cluster_ids_list) c_id = c_id + "_" + str(cluster_ids_count[c_id]) temp_clustered_peers_list = copy.deepcopy(clustered_peers_list) temp_clustered_peers_list.extend(tmp_peers_list_to_cluster) duplicate = False if self.at_most_logn(Counter(temp_clustered_peers_list)) < 0: # check if duplicate peers or not for inner_key, inner_value in clusterize.items(): if Counter(inner_value) == Counter(tmp_peers_list_to_cluster): print("FOUND DUPLICATE CLUSTER") duplicate = True break print("DOESN'T VIOLATE AT MOST LOG N") if not 
duplicate: print("WHY HERE") clustered_peers_list.extend(tmp_peers_list_to_cluster) clusterize[c_id] = tmp_peers_list_to_cluster else: # find the peer that appears more than logn excess_cluster_id = self.at_most_logn(Counter(temp_clustered_peers_list)) while excess_cluster_id != -1: print(tmp_peers_list_to_cluster) print(excess_cluster_id) tmp_peers_list_to_cluster.remove(str(excess_cluster_id)) tmp_clustered_peers_list = copy.deepcopy(clustered_peers_list) tmp_clustered_peers_list.extend(tmp_peers_list_to_cluster) # removing print(excess_cluster_id, " APPEARS IN MORE THAN LOGN CLUSTER, REMOVING IT") excess_cluster_id = self.at_most_logn(Counter(tmp_clustered_peers_list)) print("PREPARING THE CLUSTER FROM MODIFIED PEERS LIST ", tmp_peers_list_to_cluster) clustered_peers_list.extend(tmp_peers_list_to_cluster) clusterize[c_id] = tmp_peers_list_to_cluster assert (self.at_most_logn(Counter(clustered_peers_list)) < 0) missing_cluster_id = self.at_least_one(Counter(clustered_peers_list)) print("ASSERTING") assert (self.at_least_one(Counter(clustered_peers_list)) < 0) assert (self.at_most_logn(Counter(clustered_peers_list)) < 0) print(clusterize) print(Counter(clustered_peers_list)) print("FINALLY ADDING CLUSTERS") for key in clusterize: print (len(clusterize[key])) print(nx.number_of_nodes(graph.subgraph([str(i) for i in clusterize[key]]))) assert(nx.number_of_nodes(graph.subgraph([str(i) for i in clusterize[key]])) == len(clusterize[key])) cluster_graph = graph.subgraph([str(i) for i in clusterize[key]]) cluster = Cluster(key, cluster_graph, i + 1) if i + 1 == height_of_cluster: cluster.root = True self._network._root_cluster = cluster self._network.add_cluster(i + 1, cluster) # self._network.draw_cluster(cluster.cluster_id) print("CLUSTERIZE ", clusterize) return
# Builds a hierarchy in which every peer is meant to end up in exactly
# log2(peer_count) clusters per level: peers are clustered repeatedly (n wraps
# back to 0 at the end of a pass) until no peer is below log(n) memberships,
# excess memberships are then stripped from the highest-numbered clusters of
# the level, empty clusters are deleted, and the final membership count is
# asserted level by level.
# NOTE(review): `is not 0` is an identity check against an int literal (works
# only via CPython small-int caching and emits a SyntaxWarning on 3.8+); it
# should be `!= 0`.
# NOTE(review): the closing assert compares an int membership count for
# equality with the float math.log(self._peer_count, 2); it can only hold when
# peer_count is an exact power of two.
# NOTE(review): termination of the `while n < self._peer_count` loop depends
# on the wrap-around `n = 0` and on `break` statements whose nesting is
# ambiguous in this collapsed form -- if memberships never converge the loop
# may not terminate; verify against the original formatting.
# NOTE(review): the physical line break in this chunk falls inside a trailing
# `#` comment (collapsed formatting); the code below is kept byte-identical.
def _build_clusters_exactly_logn_membership(self, graph): paths_for_diameter = nx.shortest_path_length(graph, weight='weight') for path in nx.shortest_path_length(graph, weight='weight'): print(path) ecc = nx.eccentricity(graph, sp=dict(paths_for_diameter)) # ecc = nx.eccentricity(graph, sp=shortest_paths_for_diameter) diameter = nx.diameter(graph, e=ecc) # for path in paths_for_diameter: # print(path) print('The graph diameter is ', diameter) height_of_cluster = math.ceil(math.log(diameter, 2)) + 1 print('height_of_the hierarchy is ', height_of_cluster) # lowest level clusters print('lowest level clusters') self._network.add_cluster_level(0) for n in graph.nodes(): paths = nx.single_source_dijkstra_path_length(graph, n, 0, weight='weight') print(paths) cluster_graph = graph.subgraph([n]) cluster = Cluster('c' + str(n) + '_l' + '0', cluster_graph, 0) self._network.add_cluster(0, cluster) # self._network.draw_cluster(cluster.cluster_id) for i in range(int(height_of_cluster)): self._network.add_cluster_level(i + 1) clustered_peers_list = [] cluster_ids_list = [] print('AT LEVEL ------- ', i + 1) distance = pow(2, i + 1) print('THE DISTANCE LIMIT IS ', distance) clustered_peers = [] # for naming the clusters properly n = 0 while n < self._peer_count: incomplete = False # for n in graph.nodes(): print('clustering peer ', n) paths_found = nx.single_source_dijkstra_path_length(graph, str(n), distance, weight='weight') peers_to_cluster = [] print('paths found in the level ', paths_found) for peer in paths_found: clustered_peers.append(int(peer)) peers_to_cluster.append(int(peer)) clustered_peers_list.append(peer) cluster_graph = graph.subgraph([str(i) for i in peers_to_cluster]) c_id = 'c' + str(n) + '_l' + str(i + 1) cluster_ids_list.append(c_id) cluster_ids_count = Counter(cluster_ids_list) c_id = c_id + "_" + str(cluster_ids_count[c_id]) cluster = Cluster(c_id, cluster_graph, i + 1) self._network.add_cluster(i + 1, cluster) # 
self._network.draw_cluster(cluster.cluster_id) clustered_peers_count = Counter(clustered_peers_list) # print(self._network._clusters) for j in range(self._peer_count): if clustered_peers_count[str(j)] < math.log(self._peer_count, 2): incomplete = True if not incomplete: # make peers fall in exactly log(n) clusters for peer in range(self._peer_count): # count of peers in all clusters peer_count = clustered_peers_count[str(peer)] while peer_count > math.log(self._peer_count, 2): for cluster in reversed(self._network.clusters_by_level(i + 1)): if clustered_peers_count[str(peer)] > math.log(self._peer_count, 2) and cluster.graph.has_node( str(peer)): cluster.graph.remove_node(str(peer)) peer_count = peer_count - 1 if str(peer) in clustered_peers_list: clustered_peers_list.remove(str(peer)) clustered_peers_count = Counter(clustered_peers_list) break break n += 1 if n == self._peer_count: n = 0 # delete empty clusters for i in range(int(height_of_cluster)): for cluster in self._network.clusters_by_level(i): if nx.number_of_nodes(cluster.graph) == 0: self._network.remove_cluster_by_id(cluster.cluster_id, cluster.level) for i in range(int(height_of_cluster)): for cluster in self._network.clusters_by_level(i): assert (nx.number_of_nodes(cluster.graph) is not 0) # make sure that there are log(n) peers in every level for i in range(int(height_of_cluster) - 1): print("Verifying in level ", i + 1) clustered_peer_list = [] for cluster in self._network.clusters_by_level(i + 1): for node in cluster.graph.nodes: clustered_peer_list.append(str(node)) clustered_peer_count = Counter(clustered_peer_list) for cl in clustered_peer_count: assert (clustered_peer_count[cl] == math.log(self._peer_count, 2))