def test_all_pairs_dijkstra_path(self):
    cycle = nx.cycle_graph(7)
    p = dict(nx.all_pairs_dijkstra_path(cycle))
    assert_equal(p[0][3], [0, 1, 2, 3])

    cycle[1][2]['weight'] = 10
    p = dict(nx.all_pairs_dijkstra_path(cycle))
    assert_equal(p[0][3], [0, 6, 5, 4, 3])
def test_all_pairs_dijkstra_path(self):
    cycle = nx.cycle_graph(7)
    p = dict(nx.all_pairs_dijkstra_path(cycle))
    assert p[0][3] == [0, 1, 2, 3]

    cycle[1][2]['weight'] = 10
    p = dict(nx.all_pairs_dijkstra_path(cycle))
    assert p[0][3] == [0, 6, 5, 4, 3]
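# A minimal usage sketch of the API these tests exercise: on NetworkX >= 2.0,
# all_pairs_dijkstra_path returns a generator of (source, {target: path})
# pairs, which is why it is materialized with dict() before indexing.
import networkx as nx

G = nx.cycle_graph(7)
paths = dict(nx.all_pairs_dijkstra_path(G))
assert paths[0][3] == [0, 1, 2, 3]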
def get_lambdai(email_layer, hist_layer, email_graph, hist_graph):
    # aggregate network
    df = aggregated_network(email_layer, hist_layer)
    agg_G = nx.DiGraph()
    agg_G = agg_G.to_directed()
    triad = list(zip(*[df[c].values.tolist() for c in df]))
    agg_G.add_weighted_edges_from(triad)
    # agg_G.remove_node('aaaa')

    # total number of shortest paths on multiplex network
    agg_path = nx.all_pairs_dijkstra_path(agg_G)
    email_path = nx.all_pairs_dijkstra_path(email_graph)
    hist_path = nx.all_pairs_dijkstra_path(hist_graph)

    agg_dict, email_dict, hist_dict = {}, {}, {}
    email_edge_path_dict, hist_edge_path_dict, agg_edge_path_dict = {}, {}, {}  # shortest paths of all nodes
    lambdai = {}

    for node, path in email_path:
        for i in path:
            email_edge_path_dict['++'.join(path[i])] = 1
    for node, path in hist_path:
        for i in path:
            hist_edge_path_dict['++'.join(path[i])] = 1
    for node, path in agg_path:
        for i in path:
            agg_edge_path_dict['++'.join(path[i])] = 1

    # count the shortest path of node i in agg_network
    bottom_df = pd.DataFrame(list(agg_edge_path_dict.items()), columns=['edge', 'count'])
    bottom_df['From'] = bottom_df['edge'].map(lambda x: x.split('++')[0])
    bottom_count = Counter(bottom_df['From'])

    for edge in email_edge_path_dict:
        if agg_edge_path_dict.get(edge, 'aaaa') != 'aaaa':
            agg_edge_path_dict.pop(edge)  # count the shortest path of all nodes between two layers
    for edge in hist_edge_path_dict:
        if agg_edge_path_dict.get(edge, 'aaaa') != 'aaaa':
            agg_edge_path_dict.pop(edge)  # count the shortest path of node i between two layers

    top_df = pd.DataFrame(list(agg_edge_path_dict.items()), columns=['edge', 'count'])
    top_df['From'] = top_df['edge'].map(lambda x: x.split('++')[0])
    top_count = Counter(top_df['From'])

    # calculate the lambda i
    lambdai = {}
    for k in bottom_count:
        if top_count.get(k, 'aaaa') != 'aaaa':
            lambdai[k] = round(top_count[k] / float(bottom_count[k]), 2)
    # if lambdai.get(nbr, 0) == nbr:
    #     return lambdai[nbr]
    # else:
    #     return 0
    return lambdai
def get_path_generator(path_generation: str = 'empirical',
                       G: Optional[nx.Graph] = None,
                       full_paths: Optional[List[List[int]]] = None,
                       zone_paths: Optional[List[List[int]]] = None,
                       synthetic_path_generator_args: Optional[list] = None):
    """Create path generator functions.

    Note that a zone path is a sequence of zones that a customer purchased
    items from, so consecutive zones in the sequence may not be adjacent in
    the store graph. We map the zone path to the full shopping path by
    assuming that customers walk shortest paths between purchases."""
    # Decide how paths are generated
    if path_generation == 'empirical':
        path_generator_function = paths_generator_from_actual_paths
        if full_paths is not None:
            path_generator_args = [full_paths]
        else:
            assert zone_paths is not None, \
                "If you use path_generation='empirical', you need to specify either zone_paths or full_paths"
            assert G is not None, \
                "If you use path_generation='empirical' with zone_paths, you need to input the store network G"
            shortest_path_dict = dict(nx.all_pairs_dijkstra_path(G))
            shopping_paths = [zone_path_to_full_path(path, shortest_path_dict) for path in zone_paths]
            full_paths = [zone_path_to_full_path(path, shortest_path_dict) for path in shopping_paths]
            path_generator_args = [full_paths]
    elif path_generation == 'synthetic':
        assert synthetic_path_generator_args is not None, \
            "If you use path_generation='synthetic', " \
            "you need to input synthetic_path_generator_args=" \
            "[mu, sigma, entrance_nodes, till_nodes, exit_nodes, item_nodes, shortest_path_dict]"
        assert type(synthetic_path_generator_args) is list, \
            "If you use path_generation='synthetic', " \
            "you need to input synthetic_path_generator_args=" \
            "[mu, sigma, entrance_nodes, till_nodes, exit_nodes, item_nodes, shortest_path_dict]"
        assert len(synthetic_path_generator_args) == 7, \
            "If you use path_generation='synthetic', " \
            "you need to input synthetic_path_generator_args=" \
            "[mu, sigma, entrance_nodes, till_nodes, exit_nodes, item_nodes, shortest_path_dict]"
        path_generator_function = sythetic_paths_generator
        path_generator_args = synthetic_path_generator_args
        # [mu, sigma, entrance_nodes, till_nodes, exit_nodes, item_nodes, shortest_path_dict]
    elif path_generation == 'tmatrix':
        assert zone_paths is not None, "If you use path_generation='tmatrix', you need to input zone_paths"
        assert G is not None, "If you use path_generation='tmatrix', you need to input the store network G"
        shortest_path_dict = dict(nx.all_pairs_dijkstra_path(G))
        shopping_paths = [zone_path_to_full_path(path, shortest_path_dict) for path in zone_paths]
        tmatrix = get_transition_matrix(shopping_paths, len(G))
        path_generator_function = path_generator_from_transition_matrix
        path_generator_args = [tmatrix, shortest_path_dict]
    else:
        raise ValueError(f'Unknown path_generation scheme == {path_generation}')
    return path_generator_function, path_generator_args
def __init__(self, network, **kwargs):
    assert type(network) is DirectedStreetNetwork
    self.street_network = network
    self.create_states()
    self.shortest_path_dictionary = {
        source: target_dict
        for source, target_dict in all_pairs_dijkstra_path_length(
            network.graph, weight="length"
        )
    }
    self.shortest_paths = {
        source: target_dict
        for source, target_dict in all_pairs_dijkstra_path(
            self.street_network.graph, weight="length"
        )
    }
    self.compute_legal_transitions()
    if "gamma" in kwargs:
        self.gamma = kwargs["gamma"]
    else:
        self.gamma = 0.01
    if "sigma" in kwargs:
        self.sigma = kwargs["sigma"]
    else:
        self.sigma = 1
def context_splitting_graph_many(obj: Union[HybridCloud, CloudEnsemble],
                                 sources: Iterable[int],
                                 max_dist: float) -> List[list]:
    """ Performs Dijkstra shortest-path searches on the obj's weighted graph to retrieve
    the skeleton nodes within `max_dist` for every source node ID in `sources`.

    Args:
        obj: The HybridCloud/CloudEnsemble with the graph, nodes and vertices.
        sources: The source nodes.
        max_dist: The maximum distance to the source node (along the graph).

    Returns:
        The nodes within the requested context for every source node - same ordering as `sources`.
    """
    g = obj.graph()
    if isinstance(sources, list) and len(sources) == 1:
        path = nx.single_source_dijkstra_path(g, sources[0], weight='weight', cutoff=max_dist)
        return [list(path.keys())]
    else:
        paths = dict(nx.all_pairs_dijkstra_path(g, weight='weight', cutoff=max_dist))
        return [list(paths[s].keys()) for s in sources]
def hub_algo(c):
    all_short = networkx.all_pairs_dijkstra_path(c.g)
    clustering_map(c, c.number_of_busses - 1)
    central_nodes = [get_subgraph_center(c, i) for i in range(1, c.number_of_busses)]
    central_route = []
    for i in range(len(central_nodes) - 1):
        central_route += all_short[central_nodes[i]][central_nodes[i + 1]]
    c.set_route(1, central_route)
    for i in range(1, len(central_nodes) + 1):
        Gp = networkx.Graph()
        for v in c.rv:
            for u in c.rv:
                if (v == u) or (c.clusters[u] != i) or (c.clusters[v] != i):
                    continue
                Gp.add_edge(v, u, weight=c.get_route_weight_from_route(all_short[u][v]))
        problem = Problem(Gp)
        best = simple.aStarSearch(problem, unvisited_heuristic)
        new_route = []
        for j in range(len(best) - 1):
            new_route += all_short[best[j]][best[j + 1]]
        c.set_route(i + 1, new_route)
    return c
def all_shortest_paths(self, weights):
    if self.engine == "nx":
        for e in self.edges.keys():
            w = weights[e]
            self.G[self.edges[e][0]][self.edges[e][1]]['weight'] = w
        paths = nx.all_pairs_dijkstra_path(self.G)
        return paths
    elif self.engine == "igraph":
        W = []
        for e in self.edges.keys():
            if weights == "delays":
                W.append(self.get_link_delay(e))
            else:
                W.append(weights[e])
        self.igr.es['weight'] = W
        res = []
        for node in self.nodes.keys():
            pnode = self.igr.get_shortest_paths(node, weights="weight")
            dic_node = {}
            for (i, path) in enumerate(pnode):
                pnames = []
                for p in path:
                    pnames.append(self.igr.vs['name'][p])
                dic_node[self.igr.vs['name'][i]] = pnames
            res.append((node, dic_node))
        return res
def __init__(self, static_map):
    self.lines = {line["idx"]: Line(line) for line in static_map["line"]}
    self.graph = nx.Graph()
    for line in static_map["line"]:
        self.graph.add_edge(line["point"][0], line["point"][1],
                            key=line["idx"], weight=line['length'])
    self.edge_labels = dict([((line["point"][0], line["point"][1]), {
        "id": line["idx"],
        "weight": line["length"]
    }) for line in static_map["line"]])
    self.start_point_to_line_idx = {}
    self.shortest_path = dict(nx.all_pairs_dijkstra_path(self.graph))
    self.length = dict(nx.all_pairs_dijkstra_path_length(self.graph))
    self.post_id_to_point = {
        point["post_id"]: point["idx"]
        for point in static_map["point"] if point["post_id"]
    }
    for line in static_map["line"]:
        start_idx = line["point"][0]
        if start_idx not in self.start_point_to_line_idx:
            self.start_point_to_line_idx[start_idx] = []
        self.start_point_to_line_idx[start_idx].append(line["idx"])
    self.line_idx_to_point = {
        line["idx"]: line["point"] for line in static_map["line"]
    }
def __init__(self, network: UndirectedStreetNetwork, **kwargs):
    """See class documentation."""
    assert type(network) is UndirectedStreetNetwork
    self.street_network = network
    self.states = list(set(map(lambda x: tuple(sorted(x)), network.graph.edges.keys())))
    self.shortest_paths = {
        source: target_dict
        for source, target_dict in all_pairs_dijkstra_path(
            self.street_network.graph, weight="length"
        )
    }
    self.shortest_path_dictionary = {
        source: target_dict
        for source, target_dict in all_pairs_dijkstra_path_length(
            self.street_network.graph, weight="length"
        )
    }
    if "gamma" in kwargs:
        self.gamma = kwargs["gamma"]
    else:
        self.gamma = 0.01
    if "sigma" in kwargs:
        self.sigma = kwargs["sigma"]
    else:
        self.sigma = 1
def route_remaining_edges_simple(G, T, n2c):
    """The original routing function --- not used now"""
    #for u,v in G.edges_iter():
    #    if T.are_adjacent(n2c[u], n2c[v]):
    #        print 'edge (%d,%d) at %d,%d good' % (u,v,n2c[u], n2c[v])

    if G.number_of_edges() == 0:
        return []

    H = construct_routing_graph(T, set(n2c.values()))
    SP = nx.all_pairs_dijkstra_path(H)
    SP_len = nx.all_pairs_dijkstra_path_length(H)
    nx.write_edgelist(H, "hex.graph")

    # for every remaining edge
    Routes = []
    for u, v in G.edges_iter():
        c = n2c[u]
        d = n2c[v]
        # find the combination of sides that gives the shortest path
        best = bestp = None
        for s1, s2 in itertools.product(T.hex_sides(), T.hex_sides()):
            source = T.side_name(c, s1)
            target = T.side_name(d, s2)
            if best is None or SP_len[source][target] < best:
                best = SP_len[source][target]
                bestp = SP[source][target]
        #print >>sys.stderr, "Route %d - %d (%g) %s" % (u, v, best, ",".join(bestp))
        Routes.append(bestp)

    return Routes
def restore_node(self, v, recompute_paths=True):
    """Restore a previously-removed node and update the network model.

    Parameters
    ----------
    v : any hashable type
        Node to restore
    recompute_paths: bool, optional
        If True, recompute all shortest paths
    """
    self.model.topology.add_node(v, **self.model.removed_nodes.pop(v))
    for u in self.model.disconnected_neighbors[v]:
        if (v, u) in self.model.removed_links:
            self.restore_link(v, u, recompute_paths=False)
    self.model.disconnected_neighbors.pop(v)
    if v in self.model.removed_caches:
        self.model.cache[v] = self.model.removed_caches.pop(v)
    if v in self.model.removed_local_caches:
        self.model.local_cache[v] = self.model.removed_local_caches.pop(v)
    if v in self.model.removed_sources:
        self.model.source_node[v] = self.model.removed_sources.pop(v)
        for content in self.model.source_node[v]:
            self.model.countent_source[content] = v
    if recompute_paths:
        shortest_path = nx.all_pairs_dijkstra_path(self.model.topology)
        self.model.shortest_path = symmetrify_paths(shortest_path)
def __init__(self, status):
    self.status = status
    self.INTERNAL_COST = self.status.INTERNAL_COST
    self.EXTERNAL_COST = self.status.EXTERNAL_COST
    self.core = self.status.core
    self.k = self.status.k
    self.h = self.status.h
    self.max_delay = self.h * self.INTERNAL_COST + self.EXTERNAL_COST
    if self.status.topo_type == 'tree':
        self.topology = self._create_topology(self.core, self.k, self.h)
    elif self.status.topo_type == 'rocket':
        self.topology = self._parse_rocketfuel_topology()
    self.clients = {node: self.topology.node[node] for node in self.topology.node
                    if self.topology.node[node]['type'] == 'leaf'}
    self.pops = {node: self.topology.node[node] for node in self.topology.node
                 if self.topology.node[node]['type'] == 'root'}
    self.routers = {node: self.topology.node[node] for node in self.topology.node
                    if self.topology.node[node]['type'] in ['root', 'intermediate']}
    self.shortest_path = self._symmetrify_paths(nx.all_pairs_dijkstra_path(self.topology))
    self.neighbors2 = {node: self._neighbors_of_neighbors(node) for node in self.topology.node}
def ds_spt(G, start_node):
    """
    find a dominating set, from which we construct a subgraph by the union of
    all pairwise shortest paths. then run trimMST on the subgraph.
    """
    subgraph = nx.Graph()
    ds = nx.dominating_set(G, start_node)
    subgraph.add_nodes_from(ds)

    shortest_paths = nx.all_pairs_dijkstra_path(G)
    shortest_paths = dict(shortest_paths)

    # add all edges/nodes in shortest paths to subgraph
    for source in ds:
        for target in ds:
            if source != target:
                # path is a list of nodes along the s-t path
                path = shortest_paths[source][target]
                for i in range(len(path) - 1):
                    node_from = path[i]
                    node_to = path[i + 1]
                    weight = G[node_from][node_to]['weight']
                    if not subgraph.has_node(node_from):
                        subgraph.add_node(node_from)
                    if not subgraph.has_node(node_to):
                        subgraph.add_node(node_to)
                    if not subgraph.has_edge(node_from, node_to):
                        subgraph.add_edge(node_from, node_to, weight=weight)

    #tree = trimMST.solve(subgraph)
    tree = nx.minimum_spanning_tree(subgraph)
    return tree
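# Small illustrative run of ds_spt above on a weighted path graph; the graph
# and start node are made up for this sketch, and any dominating set works.
import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1), (3, 4, 1)])
tree = ds_spt(G, start_node=0)
assert nx.is_tree(tree)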
def upd_env_R(self):
    # NOTE: written against NetworkX 1.x, where set_edge_attributes takes
    # (G, name, values) and all_pairs_dijkstra_path returns a dict.
    weights = {}
    for e, w in zip(self.graph.edges(), self.env_W):
        weights[e] = w
    nx.set_edge_attributes(self.graph, 'weight', weights)
    # print(self.env_W)

    routing_nodes = np.full([self.ACTIVE_NODES] * 2, -1.0, dtype=int)
    routing_ports = np.full([self.ACTIVE_NODES] * 2, -1.0, dtype=int)

    all_shortest = nx.all_pairs_dijkstra_path(self.graph)
    self.env_all_shortest = all_shortest.copy()
    sucess = self.check_if_link_failure()
    if sucess == False:
        return sucess

    for s in range(self.ACTIVE_NODES):
        for d in range(self.ACTIVE_NODES):
            if s != d:
                next = all_shortest[s][d][1]
                port = self.ports[s][next]
                routing_nodes[s][d] = next
                routing_ports[s][d] = port
            else:
                routing_nodes[s][d] = -1
                routing_ports[s][d] = -1

    self.env_R = np.asarray(routing_ports)
    self.env_Rn = np.asarray(routing_nodes)
    return sucess
def setShortestRoutes(self):
    paths = nx.all_pairs_dijkstra_path(self.gg, weight='weight')
    for node1 in paths.keys():
        host1 = self.get(node1)
        debug("Starting node: "+node1+'\n')
        debug("\tpaths: "+str(paths[node1])+'\n')
        for node2 in paths[node1].keys():
            if node2 != node1:
                if len(paths[node1][node2]) > 2:
                    debug("\tDestination node: "+node2+'\n')
                    nextHop = self.get(paths[node1][node2][1])
                    debug("\tNextHop node: "+nextHop.name+'\n')
                    dsts = self.getNodeAddrs(self.get(node2))
                    intfs = host1.connectionsTo(nextHop)
                    nextAddrs = [couple[1].ip for couple in intfs if couple[1].ip]
                    rintf = intfs[0][0]  # WARNING we just consider one link
                    for dst in dsts:
                        for addr in nextAddrs:
                            debug("\tip route add "+str(dst)+" via "+str(addr)+'\n')
                            host1.cmd("ip route add "+dst+" via "+addr+" dev "+rintf.name)
                            debug("\tip route add "+dst+" via "+addr+'\n')
                else:
                    host2 = self.get(node2)
                    intfs = [couple[0] for couple in host1.connectionsTo(host2)]
                    rintf = intfs[0]  # WARNING we just consider one link
                    raddrs = self.getNodeAddrs(host2)
                    for addr in raddrs:
                        host1.setHostRoute(addr, rintf.name)
def parse_ashiip(path):
    """
    Parse a topology from an output file generated by the aShiip topology generator

    Parameters
    ----------
    path : str
        The path to the aShiip output file

    Returns
    -------
    paths : dict of dicts
        For each pair of connected nodes, the number of nodes on the shortest
        path between them (the topology itself is only built internally).
    """
    topology = fnss.Topology(type='ashiip')

    for line in open(path, "r").readlines():
        # There is no documented aShiip format but we assume that if the line
        # does not start with a number it is not part of the topology
        if line[0].isdigit():
            node_ids = re.findall(r"\d+", line)
            if len(node_ids) < 2:
                raise ValueError('Invalid input file. Parsing failed while '
                                 'trying to parse a line')
            node = int(node_ids[0])
            # level = int(node_ids[1])
            topology.add_node(node)
            for i in range(1, len(node_ids)):
                topology.add_edge(node, int(node_ids[i]))
    paths = dict(nx.all_pairs_dijkstra_path(topology))
    for u in paths:
        for v in paths[u]:
            paths[u][v] = len(paths[u][v])
    return paths
def highest_activity_paths(input_network, percentile):
    path_length_thresh = 2

    #print "Calculating shortest paths..."
    shortest_paths = nx.all_pairs_dijkstra_path(input_network)

    #print "Extracting paths with length >=",path_length_thresh,"and corresponding costs into an array..."
    # list of tuples (cost, path)
    # path is a list of nodes
    cost_paths = []
    for src, dest_paths in shortest_paths.items():
        for dest, path in dest_paths.items():
            if (len(path) - 1) >= path_length_thresh:
                cost_paths.append((get_path_cost(input_network, path), path))

    costs = [tup[0] for tup in cost_paths]  # this is necessary for the percentile computation

    #print "Got ", len(cost_paths), " shortest paths"
    #print "Sorting paths based on costs..."
    cost_paths_sorted = sorted(cost_paths)
    network_path_cost_thresh = np.percentile(costs, percentile)

    # Get highest activity paths
    highestActivityPaths_network = set()
    for tup in cost_paths_sorted:
        if tup[0] < network_path_cost_thresh:
            highestActivityPaths_network.add(tuple(tup[1]))
        else:
            break

    #print "Got ", len(highestActivityPaths_network), " highest activity paths in network"
    return highestActivityPaths_network
def compute_distance(root_path, vertice_name, polygon_name, compute_distance=False):
    vertice = function_library.reading_text(root_path + vertice_name)
    polygon_edge = function_library.reading_text(root_path + polygon_name)
    print('Vertice and polygon are loaded!')
    # compute edge
    edge = function_library.convert_to_edge(polygon_edge, vertice)
    if compute_distance:
        G = nx.Graph()
        G.add_nodes_from(list(np.arange(1, len(vertice))))
        for i in range(len(edge)):
            G.add_edge(int(edge[i, 0]), int(edge[i, 1]), weight=edge[i, 2])
        # materialize the paths (NetworkX >= 2.0 returns a generator here)
        path = dict(nx.all_pairs_dijkstra_path(G))
        print('Shortest path computed!')
        # compute the distance
        distance_matrix = np.zeros((len(vertice), len(vertice)))
        for iter1 in range(1, len(vertice) + 1):
            for iter2 in range(1, len(vertice) + 1):
                if iter2 > iter1:
                    node_path = path[iter1][iter2]
                    dis = 0
                    for i in range(len(node_path) - 1):
                        dis = dis + np.linalg.norm(
                            vertice[int(node_path[i] - 1), :] -
                            vertice[int(node_path[i + 1] - 1), :])
                    distance_matrix[iter1 - 1, iter2 - 1] = dis
        distance_matrix = distance_matrix + distance_matrix.T
        print('Distance computed!')
        return distance_matrix
    else:
        return vertice, edge
def remove_link(self, u, v, recompute_paths=True):
    """Remove a link from the topology and update the network model.

    Note well. With great power comes great responsibility. Be careful when
    using this method. In fact as a result of link removal, network partitions
    and other corner cases might occur. Ensure that the implementation of
    strategies using this method deal with all potential corner cases
    appropriately.

    Also, note that, for these changes to be effective, the strategy must use
    fresh data provided by the network view and not storing local copies of
    network state because they won't be updated by this method.

    Parameters
    ----------
    u : any hashable type
        Origin node
    v : any hashable type
        Destination node
    recompute_paths: bool, optional
        If True, recompute all shortest paths
    """
    self.model.removed_links[(u, v)] = self.model.topology.edge[u][v]
    self.model.topology.remove_edge(u, v)
    if recompute_paths:
        shortest_path = nx.all_pairs_dijkstra_path(self.model.topology)
        self.model.shortest_path = symmetrify_paths(shortest_path)
def __init__(self, cityMap):
    VRPProblem.__init__(self, cityMap)
    self.src_nodes = {s for s, d in cityMap.passengers}
    print(self.src_nodes)
    self.relevant_nodes = {d for s, d in cityMap.passengers} | self.src_nodes
    print(self.relevant_nodes - self.src_nodes)
    self.short_path = networkx.all_pairs_dijkstra_path(cityMap.g)
def shortest_path(self):
    source = tk.StringVar.get(self.source)
    destination = tk.StringVar.get(self.destination)
    if source == "":
        source = 1
    else:
        source = int(source)
    if destination == "":
        destination = self.rows
    else:
        destination = int(destination)
    path = nx.shortest_path(self.graph, source=source, target=destination, weight="weight")
    length = nx.shortest_path_length(self.graph, source=source, target=destination, weight="weight")
    print("shortest path: {}, with weight: {}".format(path, length))
    all_paths = dict(nx.all_pairs_dijkstra_path(self.graph))
    for node in all_paths.keys():
        temp = dict(all_paths[node])
        for dest_node in temp.keys():
            print("path from node {} to node {}: {}".format(
                node, dest_node, all_paths[node][dest_node]))
def update_env_r(self):
    weights = {}
    for e, w in zip(self.graph.edges(), self.env_w):
        weights[e] = w
    nx.set_edge_attributes(self.graph, name='weight', values=weights)

    routing_nodes = np.full([self.active_nodes] * 2, -1.0, dtype=int)
    routing_ports = np.full([self.active_nodes] * 2, -1.0, dtype=int)

    # networkx 2.x: all_pairs_dijkstra_path returns a generator,
    # so materialize it before indexing
    all_shortest = dict(nx.all_pairs_dijkstra_path(self.graph))

    for s in range(self.active_nodes):
        for d in range(self.active_nodes):
            if s != d:
                next_ = all_shortest[s][d][1]
                port = self.ports[s][next_]
                routing_nodes[s][d] = next_
                routing_ports[s][d] = port
            else:
                routing_nodes[s][d] = -1
                routing_ports[s][d] = -1

    self.env_r = np.asarray(routing_ports)
    self.env_rn = np.asarray(routing_nodes)
def upd_env_R(self):
    weights = {}
    for e, w in zip(self.graph.edges(), self.env_W):
        weights[e] = w
    nx.set_edge_attributes(self.graph, name='weight', values=weights)

    routing_nodes = np.full([self.ACTIVE_NODES] * 2, -1.0, dtype=int)
    routing_ports = np.full([self.ACTIVE_NODES] * 2, -1.0, dtype=int)

    all_shortest = dict(nx.all_pairs_dijkstra_path(self.graph))
    # print(all_shortest)

    for s in range(self.ACTIVE_NODES):
        for d in range(self.ACTIVE_NODES):
            if s != d:
                next = all_shortest[s][d][1]
                port = self.ports[s][next]
                routing_nodes[s][d] = next
                routing_ports[s][d] = port
            else:
                routing_nodes[s][d] = -1
                routing_ports[s][d] = -1

    self.env_R = np.asarray(routing_ports)
    self.env_Rn = np.asarray(routing_nodes)
def shortest_path(G, source=None, target=None, weight=None):
    if source is None:
        if target is None:
            if weight is None:
                paths = dict(nx.all_pairs_shortest_path(G))
            else:
                paths = dict(nx.all_pairs_dijkstra_path(G, weight=weight))
        else:
            with nx.utils.reversed(G):
                if weight is None:
                    paths = single_source_shortest_path(G, target)
                else:
                    paths = nx.single_source_dijkstra_path(G, target, weight=weight)
                for target in paths:
                    paths[target] = list(reversed(paths[target]))
    else:
        if target is None:
            if weight is None:
                paths = single_source_shortest_path(G, source)
            else:
                paths = nx.single_source_dijkstra_path(G, source, weight=weight)
        else:
            if weight is None:
                paths = nx.bidirectional_shortest_path(G, source, target)
            else:
                paths = nx.dijkstra_path(G, source, target, weight)
    return paths
def rewire_link(self, u, v, up, vp, recompute_paths=True):
    """Rewire an existing link to new endpoints

    This method can be used to model mobility patterns, e.g., changing
    attachment points of sources and/or receivers.

    Note well. With great power comes great responsibility. Be careful when
    using this method. In fact as a result of link rewiring, network
    partitions and other corner cases might occur. Ensure that the
    implementation of strategies using this method deal with all potential
    corner cases appropriately.

    Parameters
    ----------
    u, v : any hashable type
        Endpoints of link before rewiring
    up, vp : any hashable type
        Endpoints of link after rewiring
    """
    link = self.model.topology.edge[u][v]
    self.model.topology.remove_edge(u, v)
    self.model.topology.add_edge(up, vp, **link)
    if recompute_paths:
        shortest_path = nx.all_pairs_dijkstra_path(self.model.topology)
        self.model.shortest_path = symmetrify_paths(shortest_path)
def generate_paths(gg, pos, sample, thres, mode=0):
    if mode == 0:
        N = gg.number_of_nodes()
        paths = []
        for n in range(N):
            print('BFS', n, N)
            for m in range(n):
                patha = list(bfs_paths_angles(gg, pos, thres, n, m))
                for path in patha:
                    if path not in paths and len(path) > 1:
                        paths.append(path)
    else:
        paths = []
        for i in range(sample):
            print('RMST', i, sample)
            gm = random_spanning_tree(gg)
            pathd = nx.all_pairs_dijkstra_path(gm)
            for n in pathd.keys():
                for m in pathd[n].keys():
                    path = pathd[n][m]
                    if path not in paths and len(path) > 1:
                        paths.append(path)
    return paths
def computeGeoDist(G):
    """Compute Geographical distances with waypoints"""
    path = nx.all_pairs_dijkstra_path(G)
    outf = open("african-origin-homelands-distances-1.txt", "w")
    for src, src_coordl in source_dict.iteritems():
        # print src, src_coordl
        src_area = src_coordl[0]
        src_coord = src_coordl[1]
        for hm, hm_coordl in hmcoord_dict.iteritems():
            hm_area = hm_coordl[0]
            hm_coord = hm_coordl[1]
            distance = 0
            if src_area != hm_area:
                path_list = path[src_area][hm_area]
                coord_list = []
                coord_list.append(src_coord)
                # distance += getDistance(src_coord,G[src_area][path_list[1]]["coord"])
                for i in range(len(path_list) - 1):
                    coord_list.append(G[path_list[i]][path_list[i + 1]]["coord"])
                coord_list.append(hm_coord)
                # print coord_list
                for i in range(len(coord_list) - 1):
                    distance += getDistance(coord_list[i], coord_list[i + 1])
            else:
                distance = getDistance(src_coord, hm_coord)
            outf.write(src + "\t" + hm + "\t" + str(distance) + "\n")
            print src, hm, distance
def _all_pairs_shortest_path(self):
    """ Solve an all-pairs shortest-path problem """
    self.all_pairs = dict(nx.all_pairs_dijkstra_path(
        G=self._weighted_graph,
        weight=lambda v1, v2, attributes: self._custom_weight(attributes)))
def expand_road_network(road_network, discritization):
    """Discretize a simple road network.

    Takes a simple road network with nodes at features and intersections and
    edges with weights between nodes, and adds nodes along the edges.
    """
    rn_old = road_network
    df = discritization  # nodes per unit weight

    # Find shortest paths and path lengths
    # (materialized with dict() because NetworkX >= 2.0 returns generators)
    paths = dict(nx.all_pairs_dijkstra_path(rn_old))
    path_lengths = dict(nx.all_pairs_dijkstra_path_length(rn_old))

    # Create new graph
    rn = nx.Graph()
    rn.add_nodes_from(rn_old.nodes(data=True))

    for old_edge in rn_old.edges(data=True):
        beg = old_edge[0]
        end = old_edge[1]
        if int(beg) > int(end):
            beg, end = end, beg
        num_nodes = int(round(old_edge[2]['weight'] * df) - 1)
        old_node_name = beg
        for node in range(num_nodes):
            new_node_name = '{}.{}.{}'.format(beg, end, node)
            if node == num_nodes - 1:
                rn.add_edge(new_node_name, end)
            rn.add_edge(old_node_name, new_node_name)
            old_node_name = new_node_name

    return rn, paths, path_lengths
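# Illustrative use of expand_road_network above on a two-node road network with
# a single weighted edge; the node names and discretization factor are made up.
import networkx as nx

rn = nx.Graph()
rn.add_edge('1', '2', weight=3)
rn_fine, paths, lengths = expand_road_network(rn, 2)
# round(3 * 2) - 1 = 5 intermediate nodes are inserted along the edge
assert rn_fine.number_of_nodes() == 7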
def do_metrics(options, topo, g):
    '''Compute the metrics for a single topology.'''
    print("==========options")
    print(options)
    print("computing metrics for topo: %s" % topo)
    controllers = get_controllers(g, options)
    filename = get_filename(topo, options, controllers)

    data = {}  # See top for data schema details.
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)

    extra_params = get_extra_params(g)
    if options.use_prior:
        data = read_json_file(filename)
    else:
        start = time.time()
        weighted = True
        metrics.run_all_combos(options.metrics, g, controllers, data, apsp,
                               apsp_paths, weighted, options.write_dist,
                               options.write_combos, extra_params, options.processes,
                               options.multiprocess, options.chunksize, options.median)
        total_duration = time.time() - start
        print("%0.6f" % total_duration)

    if not options.dist_only:
        metrics.run_greedy_informed(data, g, apsp, options.weighted)
        metrics.run_greedy_alg_dict(
            data, g, 'greedy-cc', 'latency',
            nx.closeness_centrality(g, weighted_edges=options.weighted),
            apsp, options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-dc', 'latency',
                                    nx.degree_centrality(g), apsp, options.weighted)
        for i in [10, 100, 1000]:
            metrics.run_best_n(data, g, apsp, i, options.weighted)
            metrics.run_worst_n(data, g, apsp, i, options.weighted)

    print("*******************************************************************")

    # Ignore the actual combinations in CSV outputs as well as single points.
    exclude = ["distribution", "metric", "group", "id"]
    if not options.write_combos:
        exclude += ['highest_combo', 'lowest_combo']

    if options.write:
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename + '.json', data)
        if options.write_csv:
            write_csv_file(filename, data["data"], exclude=exclude)
        if options.write_dist:
            write_dist_csv_file(filename + '_dist', data["data"], exclude)

    return data, filename
def load_data_for_sim(store_id, graph_params, data_dir):
    # load data
    suffix = graph_params.get('suffix', '')
    path_suffix = graph_params.get('path_suffix', '')
    graph_suffix = graph_params.get('graph_suffix', '')
    logging.info('Loading data')
    if store_id in [1, 2, 3]:  # synthetic stores
        if store_id == 1:
            G, pos, entrance_nodes, till_nodes, exit_nodes, item_nodes = create_small_store()
            store_size = 'small'
        elif store_id == 2:
            G, pos, entrance_nodes, till_nodes, exit_nodes, item_nodes = create_medium_store()
            store_size = 'medium'
        elif store_id == 3:
            G, pos, entrance_nodes, till_nodes, exit_nodes, item_nodes = create_large_store()
            store_size = 'large'
        else:
            raise RuntimeError('This should not happen.')  # superfluous, but here to help linting
        mu = graph_params.get('mu_basketsize', '')
        sigma = graph_params.get('sigma_basketsize', '')
        pickle_filename = os.path.join(
            data_dir, f'{store_id}_shortest_path_dict_{store_size}_store.pickle')
        if os.path.isfile(pickle_filename):
            with open(pickle_filename, 'rb') as f:
                shortest_path_dict = pickle.load(f)
            logging.info(f'Loaded shortest path dict: {pickle_filename}')
        else:
            G, pos, entrance_nodes, till_nodes, exit_nodes, item_nodes = create_medium_store()
            shortest_path_dict = get_all_shortest_path_dicts(G)
            logging.info('Calculated shortest path dict of graph')
        extra_outputs = [
            mu, sigma, entrance_nodes, till_nodes, exit_nodes, item_nodes,
            shortest_path_dict
        ]
    else:
        paths_path = os.path.join(
            data_dir, f'{store_id}_zone_paths{suffix}{path_suffix}.parquet')
        logging.info('Loaded zone paths')
        df_paths = pd.read_parquet(paths_path)
        G = load_store_graph(store_id, graph_params, data_dir)
        logging.info('Loaded store graph')
        shortest_path_dict = dict(nx.all_pairs_dijkstra_path(G))
        use_TSP_path = graph_params.get('use_TSP_path', False)
        if use_TSP_path:
            path_col = 'TSP_path' + graph_suffix
        else:
            path_col = 'zone_path'
        df_paths['full_path'] = df_paths[path_col].apply(
            lambda x: zone_path_to_full_path(x, shortest_path_dict))
        all_zone_paths = list(df_paths[path_col])
        extra_outputs = all_zone_paths
    return G, extra_outputs
def largearcs_connecting_heuristic(cycles, transport_graph, cost_label='cost'):
    APSP = nx.all_pairs_dijkstra_path(transport_graph, weight=cost_label)
    APSPL = nx.all_pairs_dijkstra_path_length(transport_graph, weight=cost_label)

    # Step 3(b): Form the inter-node distance from the original edge costs
    # d(ni,nj) = min { c'(u,v) | u \in Ri, v \in Rj }.
    # Associate with (ni,nj) the edge (u,v) yielding minimum cost.
    inter_node_distance = nx.Graph()
    for cycle in cycles:
        u = node()
        u.cycle = cycle  # do we need this?... eh, it's cheap
        inter_node_distance.add_node(u)
        # u.enroute, u.balance = cycle_edges( cycle )
        # also want to store all nodes visited while *EMPTY*
        u.nodes = set()
        for x, y in u.balance.edges_iter():
            u.nodes.update(APSP[x][y])
        #for x,y,key, data in u.graph.edges_iter( keys=True, data=True ) :
        #    if not data.get( 'CONNECT_ONLY', False ) : continue
        #    u.nodes.update( APSP[x][y] )

    NODES = inter_node_distance.nodes()
    for u, v in itertools.combinations(NODES, 2):
        options = [(APSPL[x][y] + APSPL[y][x], (x, y))
                   for x, y in itertools.product(u.nodes, v.nodes)]  # round trip cost
        cost, edge = min(options)
        inter_node_distance.add_edge(u, v, cost=cost, edge=edge)

    # Step 4: Find a minimum cost spanning tree on inter-node distance graph
    MST = nx.algorithms.minimum_spanning_tree(inter_node_distance, weight='cost')

    # Step 2: Initialize PRETOUR to be empty. For each edge in the matching,
    # associate a direction (from head to tail); insert into PRETOUR
    # (Also, insert the arcs...)
    eulerian = nx.MultiDiGraph()
    for u in NODES:
        for x, y, key in u.enroute.edges_iter(keys=True):
            eulerian.add_edge(x, y, key, SERVICE=True)
    for u in NODES:
        for x, y in u.balance.edges_iter():
            eulerian.add_edge(x, y)
    for _, __, data in MST.edges_iter(data=True):
        x, y = data.get('edge')
        eulerian.add_edge(x, y)
        eulerian.add_edge(y, x)

    try:
        tour = []
        for edge in eulerian_circuit_verbose(eulerian):
            if not eulerian.get_edge_data(*edge).get('SERVICE', False):
                continue
            tour.append(edge)
        return tour
    except:
        return eulerian
def getPathDict(self, state: FactoryFloorState):
    """
    @return pathdict with all connected paths on the map
    """
    if self._graph == None:
        self._graph = FactoryGraph(state.getMap())
        self._pathDict = dict(networkx.all_pairs_dijkstra_path(self._graph))
    return self._pathDict
def _compute_shortest_paths(self, scan, path=None):
    ''' Load connectivity graph for each scan, useful for reasoning about shortest paths '''
    graph = utils.load_nav_graphs(scan, path=path)
    paths = dict(nx.all_pairs_dijkstra_path(graph))  # paths from graph of scan
    distances = dict(nx.all_pairs_dijkstra_path_length(graph))  # distances for respective paths
    return graph, paths, distances
def __init__(self, core, k, h):
    self.CACHE_BUDGET_FRACTION = .04
    self.N_CONTENTS = 3 * 10 ** 4
    self.N_WARMUP_REQUESTS = 5 * 10 ** 4
    self.N_MEASURED_REQUESTS = 1 * 10 ** 4
    self.GAMMA = 1
    self.ALPHA = .8
    self.INTERNAL_COST = 2
    self.EXTERNAL_COST = 10
    self.on_path_routing = True
    self.on_path_winner = True
    self.relative_popularity = True
    self.cache_placement = 'betweenness'
    self.scenario = 'AUC'
    self.saved_shots = []
    self.shots = []
    self.max_delay = h * self.INTERNAL_COST + self.EXTERNAL_COST

    # Uniform cache assignement
    self.core, self.k, self.h = core, k, h
    self.topology = self._create_topology(core, k, h)
    self.clients = {node: self.topology.node[node] for node in self.topology.node
                    if self.topology.node[node]['type'] == 'leaf'}
    self.pops = {node: self.topology.node[node] for node in self.topology.node
                 if self.topology.node[node]['type'] == 'root'}
    self.routers = {node: self.topology.node[node] for node in self.topology.node
                    if self.topology.node[node]['type'] in ['root', 'intermediate']}
    self.informations = {node: {} for node in self.topology.node}
    self.workload = StationaryWorkload(self.clients.keys(), self.N_CONTENTS, self.ALPHA,
                                       n_warmup=self.N_WARMUP_REQUESTS,
                                       n_measured=self.N_MEASURED_REQUESTS)
    self._cache_budget = None
    self.cache = None
    self.shortest_path = self._symmetrify_paths(nx.all_pairs_dijkstra_path(self.topology))
    self.neighbors2 = {node: self._neighbors_of_neighbors(node) for node in self.topology.node}
    self.hits = 0
    # self.cache_hit = {node:{i:0 for i in range(1, 1+self.N_CONTENTS)} for node in self.topology.node}
    # self.delays = {i:[] for i in range(1, 1+self.N_CONTENTS)}
    self.all_delays = []
    self.cr_hit = []
    self.winners = []
    # probs = []
    self.v_value = lambda p, d, v, pp, dp: p
    self.u_value = lambda p, d, u, path, v: 0
    self.cnt = 0
def _load_nav_graphs(self):
    ''' Load connectivity graph for each scan, useful for reasoning about shortest paths '''
    print 'Loading navigation graphs for %d scans' % len(self.scans)
    self.graphs = load_nav_graphs(self.scans)
    self.paths = {}
    for scan, G in self.graphs.iteritems():  # compute all shortest paths
        self.paths[scan] = dict(nx.all_pairs_dijkstra_path(G))
    self.distances = {}
    for scan, G in self.graphs.iteritems():  # compute all shortest paths
        self.distances[scan] = dict(nx.all_pairs_dijkstra_path_length(G))
def __init__(self, data_path):
    with open(data_path, 'r') as graph_file:
        graph_data = json.load(graph_file)
    self.graph = json_graph.node_link_graph(graph_data, multigraph=False)
    self.name_node_map = {node[1]['name']: node[0] for node in self.graph.nodes(data=True)}
    self.all_paths = networkx.all_pairs_dijkstra_path(self.graph)
    self.all_costs = networkx.all_pairs_dijkstra_path_length(self.graph)

    # add blank houses set to each node
    for node in self.graph.nodes():
        self.graph.node[node]['houses'] = []
def getSortedListOfPaths(self, graph, logicalNodes):
    # materialize the paths (NetworkX >= 2.0 returns a generator here)
    path = dict(nx.all_pairs_dijkstra_path(graph))
    lst = []
    for u in logicalNodes:
        for v in logicalNodes:
            if u >= v:
                continue
            lst.append((u, v, path[u][v]))
    sortedList = sorted(lst, key=lambda x: len(x[2]))
    return sortedList
from collections import defaultdict


def all_pairs_shortest_path_weighted(graph):
    # assumes a modern environment: NetworkX >= 2.0 (hence dict()) and Python 3 (range)
    paths = dict(nx.all_pairs_dijkstra_path(graph, weight='weight'))
    path_weighted_lengths = defaultdict(dict)
    for n1 in paths:
        for n2 in paths[n1]:
            path = paths[n1][n2]
            path_len = sum([graph[path[k - 1]][path[k]]['weight']
                            for k in range(1, len(path))])
            path_weighted_lengths[n1][n2] = path_len
            path_weighted_lengths[n2][n1] = path_len
    return paths, path_weighted_lengths
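# Hypothetical usage of all_pairs_shortest_path_weighted above on a weighted
# triangle where the two-hop route is cheaper than the direct edge.
import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 1.0), (0, 2, 5.0)])
paths, lengths = all_pairs_shortest_path_weighted(G)
assert paths[0][2] == [0, 1, 2]
assert lengths[0][2] == 2.0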
def calcPaths(num):
    # num is passed as the Dijkstra cutoff; dict() materializes the
    # generator returned by NetworkX >= 2.0
    length = dict(nx.all_pairs_dijkstra_path(G, num))
    length_paths = []
    for node in length:
        for target in length[node]:
            if len(length[node][target]) == num:
                length_paths.append(length[node][target])
    labeled_paths = labels[length_paths]
    same_labels = (squareform(pdist(labeled_paths)) < 1e-10).sum(axis=1)
    return length_paths, labeled_paths, same_labels
def _all_paris_shortest_path(self):
    # print one shortest path for all node pairs
    # print nx.shortest_path(self.net)
    with open(OFP_ALL_PAIRS_SHOREST_PATH, 'w') as outp:
        shortest_path = nx.all_pairs_dijkstra_path(self.net)
        for src in shortest_path.keys():
            for dst in shortest_path[src]:
                outp.write("%s -> %s %s\n" % (self._hostname_Check(src),
                                              self._hostname_Check(dst),
                                              [self._hostname_Check(i) for i in shortest_path[src][dst]]))
def test_all_pairs_shortest_path(self):
    p = nx.shortest_path(self.cycle)
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, dict(nx.all_pairs_shortest_path(self.cycle)))
    p = nx.shortest_path(self.grid)
    validate_grid_path(4, 4, 1, 12, p[1][12])
    # now with weights
    p = nx.shortest_path(self.cycle, weight='weight')
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, dict(nx.all_pairs_dijkstra_path(self.cycle)))
    p = nx.shortest_path(self.grid, weight='weight')
    validate_grid_path(4, 4, 1, 12, p[1][12])
    # weights and method specified
    p = nx.shortest_path(self.cycle, weight='weight', method='dijkstra')
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, dict(nx.all_pairs_dijkstra_path(self.cycle)))
    p = nx.shortest_path(self.cycle, weight='weight', method='bellman-ford')
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, dict(nx.all_pairs_bellman_ford_path(self.cycle)))
def test_all_pairs_shortest_path(self):
    p = nx.shortest_path(self.cycle)
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, nx.all_pairs_shortest_path(self.cycle))
    p = nx.shortest_path(self.grid)
    assert_equal(p[1][12], [1, 2, 3, 4, 8, 12])
    # now with weights
    p = nx.shortest_path(self.cycle, weighted=True)
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, nx.all_pairs_dijkstra_path(self.cycle))
    p = nx.shortest_path(self.grid, weighted=True)
    assert_equal(p[1][12], [1, 2, 3, 4, 8, 12])
def test_all_pairs_shortest_path(self):
    p = nx.shortest_path(self.cycle)
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, nx.all_pairs_shortest_path(self.cycle))
    p = nx.shortest_path(self.grid)
    validate_grid_path(4, 4, 1, 12, p[1][12])
    # now with weights
    p = nx.shortest_path(self.cycle, weight='weight')
    assert_equal(p[0][3], [0, 1, 2, 3])
    assert_equal(p, nx.all_pairs_dijkstra_path(self.cycle))
    p = nx.shortest_path(self.grid, weight='weight')
    validate_grid_path(4, 4, 1, 12, p[1][12])
def nodesOmitted(Graph, referenceNode):
    # materialize the paths (NetworkX >= 2.0 returns a generator here)
    shortest_paths = dict(nx.all_pairs_dijkstra_path(Graph))
    front_path = shortest_paths[referenceNode][0]
    back_path = shortest_paths[referenceNode][len(Graph.nodes()) - 1]
    path = concatenate([front_path, back_path])
    omitted = ones(len(Graph.nodes()))
    for node in path:
        omitted[node] = 0
    return omitted


# Fragment of a separate graph-building helper whose def line is missing in
# this listing; the name below is a placeholder, not the original:
def build_weighted_digraph(weights):
    G = nx.DiGraph()
    G.add_weighted_edges_from(weights)
    return G
def create_shortest_path_index(request):
    if 'id' in request.GET:
        start = datetime.now()
        road_network = RoadNetwork.objects.get(id=request.GET["id"])
        graph = json_graph.node_link_graph(json.loads(road_network.graph))
        # materialize the paths so they can be JSON-serialized
        # (NetworkX >= 2.0 returns a generator here)
        index = dict(networkx.all_pairs_dijkstra_path(graph))
        road_network.shortest_path_index = json.dumps(index)
        road_network.save()
        end = datetime.now()
        log("Creating shortest path index task takes " +
            str((end - start).total_seconds()) + " seconds.")
        return Response(status=status.HTTP_201_CREATED)
    else:
        return Response(status=status.HTTP_400_BAD_REQUEST)
def transitive_closure(g, node_weight='r', edge_weight='c'):
    new_g = nx.DiGraph()
    l = nx.all_pairs_dijkstra_path_length(g, weight=edge_weight)

    # add shortest path and weight
    new_g.add_edges_from([(s, tree, {edge_weight: l[s][tree]})
                          for s in l
                          for tree in l[s]])

    # add node weight
    for n in new_g.nodes_iter():
        new_g.node[n][node_weight] = g.node[n][node_weight]

    return new_g, nx.all_pairs_dijkstra_path(g, weight=edge_weight)
def dk(graph):
    '''
    graph: A networkx Graph object to calculate the dk from.

    Calculates the dynamic connectivity (dk) of a network. This is a
    centrality measure that is related to betweenness centrality. The shortest
    path between every pair of nodes in the network is calculated with
    Dijkstra's algorithm. The dk of a node is the number of times it lies
    within one of these paths. This centrality measure was used in the program
    JAMMING, which predicts functional residues of a protein through network
    analysis.

    Reference:
    Efficient identification of critical residues based only on protein
    structure by network analysis
    Cusack M, Thibert B, Bredesen DE and del Rio G. 2007. PLoS ONE 2(5):e421

    returns: Dictionary with nodes as key and dk for each node as value.
    '''
    assert type(graph) is nx.Graph

    # Get a list of the nodes of the graph.
    nodes = graph.nodes()
    # Only for debugging. Sort amino acids by key.
    nodes.sort(key=lambda x: x.get_id()[1])

    # Create list of same length as node list with zero as all entries.
    dk_list = [0] * len(nodes)

    # Calculate the dijkstra shortest path for all node pairs.
    sp = nx.all_pairs_dijkstra_path(graph)

    # For all nodes in the graph
    for i in range(0, len(nodes)):
        node1 = nodes[i]
        # For all nodes later in list than first node
        for j in range(i + 1, len(nodes)):
            node2 = nodes[j]
            # Get shortest path between the first and second node
            path = sp[node1][node2]
            # Add one to the dk_list at an index for every time the node of that index
            # is inside the shortest path between a node pair.
            for n in path[1:-1]:
                dk_list[nodes.index(n)] += 1

    # Store dk values in a dictionary.
    dictionary = dict(zip(nodes, dk_list))
    return dictionary
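# A rough, self-contained adaptation of the dk idea above on a toy graph with
# integer nodes (the function itself expects residue objects with get_id()):
# count how often each node lies strictly inside some pairwise shortest path.
import networkx as nx

toy = nx.path_graph(5)  # 0-1-2-3-4
nodes = sorted(toy.nodes())
dk_count = {n: 0 for n in nodes}
sp = dict(nx.all_pairs_dijkstra_path(toy))
for i, n1 in enumerate(nodes):
    for n2 in nodes[i + 1:]:
        for inner in sp[n1][n2][1:-1]:  # interior nodes of the path only
            dk_count[inner] += 1
assert dk_count[2] == 4  # the middle node lies on the most paths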
def test_dijkstra_vs_networkx_apsp():
    """
    Test Dijkstra: My implementation vs Networkx implementation > All Pairs
    """
    # NX Version
    G = nx.from_edgelist(edgelist_james)
    nx.set_edge_attributes(G, 'weight', edgelist_james)
    # 'weight' is the edge attribute name, so pass it as a keyword
    # (the second positional argument of all_pairs_dijkstra_path is the cutoff)
    nx_all_complete_paths = nx.all_pairs_dijkstra_path(G, weight='weight')
    # My Version
    d = Dijkstra()
    d.from_edgelist(edgelist_james, directed=False)
    dc_all_lenghts, dc_all_paths = d.all_pairs_shortest_paths()
    dc_all_complete_paths = d.shortest_complete_paths
    assert (nx_all_complete_paths == dc_all_complete_paths)
def do_metrics(options, topo, g):
    '''Compute the metrics for a single topology.'''
    print "computing metrics for topo: %s" % topo
    controllers = get_controllers(g, options)
    filename = get_filename(topo, options, controllers)

    data = {}  # See top for data schema details.
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)

    extra_params = get_extra_params(g)
    if options.use_prior:
        data = read_json_file(filename)
    else:
        start = time.time()
        weighted = True
        metrics.run_all_combos(options.metrics, g, controllers, data, apsp,
                               apsp_paths, weighted, options.write_dist,
                               options.write_combos, extra_params, options.processes,
                               options.multiprocess, options.chunksize, options.median)
        total_duration = time.time() - start
        print "%0.6f" % total_duration

    if not options.dist_only:
        metrics.run_greedy_informed(data, g, apsp, options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-cc', 'latency',
                                    nx.closeness_centrality(g, weighted_edges=options.weighted),
                                    apsp, options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-dc', 'latency',
                                    nx.degree_centrality(g), apsp, options.weighted)
        for i in [10, 100, 1000]:
            metrics.run_best_n(data, g, apsp, i, options.weighted)
            metrics.run_worst_n(data, g, apsp, i, options.weighted)

    print "*******************************************************************"

    # Ignore the actual combinations in CSV outputs as well as single points.
    exclude = ["distribution", "metric", "group", "id"]
    if not options.write_combos:
        exclude += ['highest_combo', 'lowest_combo']

    if options.write:
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename + '.json', data)
        if options.write_csv:
            write_csv_file(filename, data["data"], exclude=exclude)
        if options.write_dist:
            write_dist_csv_file(filename + '_dist', data["data"], exclude)

    return data, filename
def _all_paris_shortest_path(self):
    # print one shortest path for all node pairs
    print "_all_paris_shortest_path ", self.net
    with open(OFP_ALL_PAIRS_SHOREST_PATH, 'w') as outp:
        try:
            shortest_path = nx.all_pairs_dijkstra_path(self.net)
        except Exception as e:
            self.logger.info("_all_paris_shortest_path %s", e)
        finally:
            for src in shortest_path.keys():
                for dst in shortest_path[src]:
                    outp.write("%s -> %s %s\n" % (self._hostname_Check(src),
                                                  self._hostname_Check(dst),
                                                  [self._hostname_Check(i) for i in shortest_path[src][dst]]))
def jacobianSpiess(numNodes, numLinks, numODpairs, od_pairs, link_list_js, link_length_list_dict):
    numClasses = len(link_length_list_dict)

    netDict = {}
    pathDict = {}
    od_link_dict_dict = {}

    for k in range(numClasses):
        netDict[k] = nx.DiGraph()
        netDict[k].add_nodes_from(range(numNodes + 1)[1:])
        weighted_edges = [(int(link_list_js[i].split(',')[0]),
                           int(link_list_js[i].split(',')[1]),
                           link_length_list_dict[k][i]) for i in range(len(link_list_js))]
        netDict[k].add_weighted_edges_from(weighted_edges)

        pathDict[k] = nx.all_pairs_dijkstra_path(netDict[k])

        od_route_dict = {}
        for od in od_pairs:
            origi = od[0]
            desti = od[1]
            key = OD_pair_label_dict_refined[str((origi, desti))]
            route = str(pathDict[k][origi][desti]).replace("[", "").replace(", ", "->").replace("]", "")
            od_route_dict[key] = route

        od_link_dict = {}
        for idx in range(len(od_route_dict)):
            od_link_list = []
            od_node_list = od_route_dict[idx + 1].split('->')
            for i in range(len(od_node_list)):
                if i < len(od_node_list) - 1:
                    od_link_list.append(link_label_dict_[od_node_list[i] + '->' + od_node_list[i + 1]])
            od_link_dict[idx] = od_link_list
        od_link_dict_dict[k] = od_link_dict

    jacob = np.zeros((numODpairs, numLinks, numClasses))
    for i in range(numODpairs):
        for j in range(numLinks):
            for k in range(numClasses):
                if j in od_link_dict_dict[k][i]:
                    jacob[i, j, k] = 1

    return jacob
def GetShortestPathMST(self, goal):
    ''' Find the shortest path Minimum-Spanning Tree '''
    if self.g != None:
        if self.UseNetworkX:
            import networkx as nx
            self.sp_mst = nx.all_pairs_dijkstra_path(self.g)
            self.dist = nx.all_pairs_dijkstra_path_length(self.g)
        else:
            from pygraph.algorithms.minmax import shortest_path
            self.sp_mst, self.dist = shortest_path(self.g, '(%d,%d)' % (goal[0], goal[1]))
        return self.sp_mst, self.dist
    else:
        print 'CreateExpRiskGraph first before calling GetShortestPathMST(goal)'
        return None, None
def reset_topology(self):
    if self.status.topo_type == 'tree':
        self.topology = self._create_topology(self.core, self.k, self.h)
    elif self.status.topo_type == 'rocket':
        self.topology = self._parse_rocketfuel_topology()
    self.clients = {node: self.topology.node[node] for node in self.topology.node
                    if self.topology.node[node]['type'] == 'leaf'}
    self.pops = {node: self.topology.node[node] for node in self.topology.node
                 if self.topology.node[node]['type'] == 'root'}
    self.routers = {node: self.topology.node[node] for node in self.topology.node
                    if self.topology.node[node]['type'] in ['root', 'intermediate']}
    self.shortest_path = self._symmetrify_paths(nx.all_pairs_dijkstra_path(self.topology))
    self.neighbors2 = {node: self._neighbors_of_neighbors(node) for node in self.topology.node}
    self.place_caches()
def _all_paris_shortest_path(self):
    with open(OFP_ALL_PAIRS_SHOREST_PATH, 'w') as outp:
        try:
            shortest_path = nx.all_pairs_dijkstra_path(self.net)
        except Exception as e:
            self.logger.info("_all_paris_shortest_path %s", e)
        finally:
            for src in shortest_path.keys():
                for dst in shortest_path[src]:
                    # outp.write("%s->%s %s\n" % (self._hostname_Check(src),
                    #                             self._hostname_Check(dst),
                    #                             [self._hostname_Check(i) for i in shortest_path[src][dst]]))
                    outp.write("%s->%s " % (self._hostname_Check(src),
                                            self._hostname_Check(dst)))
                    for each_node in shortest_path[src][dst]:
                        outp.write("%s-" % self._hostname_Check(each_node))
                    outp.write("\n")