def route_signal(sat_and_ids, route_from, route_to):
    G = networkx.Graph()

    for x, y in itertools.permutations(sat_and_ids, r=2):
        sat_x, sid_x = x
        sat_y, sid_y = y

        in_sight = is_in_sight(sat_x, sat_y)

        if in_sight is not None:
            G.add_edge(sid_x, sid_y, weight=in_sight)

    for sat, sid in sat_and_ids:
        in_sight = can_transmit(route_from, sat)

        if in_sight is not None:
            G.add_edge('SRC', sid, weight=in_sight)

    for sat, sid in sat_and_ids:
        in_sight = can_transmit(route_to, sat)

        if in_sight is not None:
            G.add_edge('DST', sid, weight=in_sight)

    print networkx.shortest_path_length(G, 'SRC', 'DST', weight='weight')
    return networkx.shortest_path(G, 'SRC', 'DST', weight='weight')
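A minimal, hypothetical driver for the example above (the helper names is_in_sight and can_transmit, the stub bodies, and the satellite tuples are all stand-ins, not part of the original project): both helpers are assumed to return a link weight when a connection is possible and None otherwise, so 'SRC' and 'DST' act as virtual endpoints and the whole routing problem reduces to a single shortest-path query.

import itertools
import networkx

def is_in_sight(sat_a, sat_b):
    # stub: every pair of satellites sees each other, weight = separation
    return abs(sat_a - sat_b)

def can_transmit(ground_station, sat):
    # stub: both ground stations can reach every satellite with weight 1.0
    return 1.0

sats = [(100, 'S1'), (200, 'S2'), (300, 'S3')]
path = route_signal(sats, route_from='GND-A', route_to='GND-B')
# path is a node list such as ['SRC', 'S1', 'DST']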
Code Example #2
File: closeness.py Project: nkfly/sna-hw3
def edge_update(graph, edge, cc, dd):
	# Incrementally update the closeness values (cc) and the cached distance
	# dictionary (dd) after the edge (u, v) has been added to the graph.
	u = edge[0]
	v = edge[1]
	change = 0
	if u not in dd:
		dd[u] = {}
		dd[u][v] = 1
		dd[u][u] = 0
	if v not in dd:
		dd[v] = {}
		dd[v][u] = 1
		dd[v][v] = 0
	for s in graph.nodes():
		if u not in dd[s]:
			try:
				dd[s][u] = nx.shortest_path_length(graph, s, u)
				dd[u][s] = dd[s][u]
			except nx.NetworkXNoPath:
				pass
		if v not in dd[s]:
			try:
				dd[s][v] = nx.shortest_path_length(graph, s, v)
				dd[v][s] = dd[s][v]
			except nx.NetworkXNoPath:
				pass
		if u in dd[s] and v in dd[s]:
			if math.fabs(dd[s][u] - dd[s][v]) > 1:
				# The new edge shortens some paths through s: recompute them.
				dd[s] = nx.shortest_path_length(graph, s)
				change = change - cc[s]
				cc[s] = 0
				for n in dd[s]:
					if n != s:
						cc[s] = cc[s] + 1.0 / dd[s][n]
				change = change + cc[s]
	return cc, dd
Code Example #3
File: csr13.py Project: pstjuste/pt_analysis
def get_followers_dist(g, dg, follow):

    if follow == -1: return -1

    no_of_paths = 0
    for u in dg.nodes():

        if not g.has_node(u):
            print "no_source"
            continue

        for v in dg.neighbors(u):
            if u == v: continue

            if g.has_node(v):
                try:
                    print nx.shortest_path_length(g, source=u, target=v)
                    no_of_paths += 1
                except nx.exception.NetworkXNoPath as err:
                    print "no_path"
            else:
                print "no_target"

    print >> sys.stderr, "no of paths", no_of_paths
    return no_of_paths
Code Example #4
File: sigcomm2012.py Project: pstjuste/pt_analysis
def get_followers_dist(g, dg, follow):

    if follow == -1: return -1

    if len(dg) == 0:
        dg = nx.read_edgelist(sys.argv[2], create_using=dg)

    no_of_paths = 0
    for u in dg.nodes():

        if not g.has_node(u):
            print "no_source"
            continue

        for v in dg.successors(u):
            if u == v: continue

            if g.has_node(v):
                try:
                    print nx.shortest_path_length(g, source=u, target=v)
                    no_of_paths += 1
                except nx.exception.NetworkXNoPath as err:
                    print "no_path"
            else:
                print "no_target"

    print >> sys.stderr, "no of paths", no_of_paths
    return no_of_paths
Code Example #5
    def verify_path_len(self):
        """
        Make sure that the finger links are optimal in length.
        (That they are the shortest paths possible)
        """

        # Iterate over all nodes:
        for nd in self.nodes:
            for f in self.dht_succ_fingers:
                best_succ_f = nd.get_best_succ_finger(f)
                # Calculate shortest path on graph:
                spath_len = nx.shortest_path_length(self.graph,\
                        self.vec_to_graph[nd.ind],\
                        self.vec_to_graph[best_succ_f.lindex])

                # Check if the path we have to best_succ_f equals exactly
                # spath_len:
                if best_succ_f.path_len != spath_len:
                    return False

            for f in self.dht_pred_fingers:
                best_pred_f = nd.get_best_pred_finger(f)
                # Calculate shortest path on graph:
                spath_len = nx.shortest_path_length(self.graph,\
                        self.vec_to_graph[nd.ind],\
                        self.vec_to_graph[best_pred_f.lindex])

                # Check if the path we have to best_pred_f equals exactly
                # spath_len:
                if best_pred_f.path_len != spath_len:
                    return False

        return True
Code Example #6
File: calc_patrol_line.py Project: hxgqh/jailmonitor
    def calc_2_lines_point_shortest_path(self, start_line, end_line):
        p11, p12 = self.line_dict[start_line]
        p21, p22 = self.line_dict[end_line]

        d1 = nx.shortest_path_length(self.G, source=p11, target=p21)
        d2 = nx.shortest_path_length(self.G, source=p11, target=p22)
        d3 = nx.shortest_path_length(self.G, source=p12, target=p21)
        d4 = nx.shortest_path_length(self.G, source=p12, target=p22)

        min_d = min([d1, d2, d3, d4])
        if d1 == min_d:
            return p11, p21

        if d2 == min_d:
            return p11, p22

        if d3 == min_d:
            return p12, p21

        if d4 == min_d:
            return p12, p22
Code Example #7
File: playerx.py Project: charlesyuan314/awap-2015
 def heuristic(n):
     t = (graph.degree(n), min([nx.shortest_path_length(graph, n, s) for s in self.stations]))
     # geometric mean of these two
     # closest to orders
     return (
         -sum([nx.shortest_path_length(graph, n, order_node) for order_node in order_nodes]),
         (t[0] * t[1]) ** 0.5,  # 0.5, not (1 / 2): under Python 2 integer division would make the exponent 0
     )
Code Example #8
File: spawn_camp.py Project: wildertm/jytwai
def get_chokes(instance, choke_candidates):
    #prevent writing over base space.
    used_set = set()
    start, finish = instance.level.botSpawnAreas[instance.game.enemyTeam.name]
    for i, j in itertools.product(range(int(start.x), int(finish.x)), range(int(start.y), int(finish.y))):
        node_index = regressions2.get_node_index(instance, Vector2(i,j))
        used_set.add(node_index)
            
    choke_dict = {}
    master_chokes = set()
    flag_node = regressions2.get_node_index(instance, instance.game.team.flag.position)
    spawn_node = regressions2.get_node_index(instance, get_enemy_base(instance))

    shortest_length = nx.shortest_path_length(instance.graph, source=spawn_node, target=flag_node, weight="choke_covered")
    choke_count = 0
    while shortest_length == 0.0:
        if len(choke_candidates) == 0:
            print "RAN OUT OF CANDIDATES!"
            break
        
        choke_count += 1
        
        one_choke = set()
        choke_center = choke_candidates.pop()
        choke_vector = regressions2.get_node_vector(instance, choke_center)
        
        #Ignore potential chokes too far from their spawn.
        while (choke_vector.distance((get_enemy_base(instance))) > 5.0 or choke_center in used_set) and len(choke_candidates) > 0:
            choke_vector = regressions2.get_node_vector(instance, choke_center)
            choke_center = choke_candidates.pop()
        if len(choke_candidates) == 0:
            print "RAN OUT OF CANDIDATES!"
            return choke_dict, master_chokes
        if choke_vector.distance((get_enemy_base(instance))) > 5.0:
            print "RAN OUT OF CANDIDATES, LAST CANDIDATE DIDN'T WORK!"
            return choke_dict, master_chokes
    
        one_choke.add(choke_center)
        for x in range(4):
            neighbors = set()
            for node in one_choke:
                neighbors2 = instance.graph.neighbors(node)
                if neighbors2 is not None:
                    for neighbor2 in neighbors2:
                        if neighbor2 not in used_set:
                            neighbors.add(neighbor2)
            one_choke = one_choke.union(neighbors)
            used_set = used_set.union(one_choke)
        for node in one_choke:
            instance.graph.node[node]["choke_covered"] = 1.0
            neighbors = instance.graph.neighbors(node)
            for neighbor in neighbors:
                instance.graph.edge[node][neighbor]["choke_covered"] = 1.0
        choke_dict[choke_center] = { "nodes": one_choke, "redundancy": 0}
        master_chokes = master_chokes.union(one_choke)
        shortest_length = nx.shortest_path_length(instance.graph, source=spawn_node, target=flag_node, weight="choke_covered")
        
    return choke_dict, master_chokes
Code Example #9
def closeness_vitality(grafo, vertice, aresta=False):
    """Computes the closeness vitality of a node in a graph.

    Returns the closeness vitality for the specified node or edge
    of the given graph.

    Parameters
    ----------
    grafo: networkx graph
        Graph for which the closeness vitality is to be computed.
    vertice: identifier
        Identifier of the node for which the measure is wanted.
    aresta: tuple
        Identifiers of the nodes forming the edge for which the centrality measure is wanted.
    """

    g_foo=nx.copy.deepcopy(grafo)
    dists=nx.shortest_path_length(g_foo, weighted=True)
    vertices=g_foo.nodes()
    pares=[]
    soma_dists1=0
    for v1 in vertices:
        for v2 in vertices:
            if set((v1,v2)) not in pares:
                soma_dists1+=dists[v1][v2]
                pares.append(set((v1,v2)))
    Iwg=soma_dists1

    if aresta:
        g_foo.remove_edge(aresta[0],aresta[1])
        dists=nx.shortest_path_length(g_foo, weighted=True)
        vertices=g_foo.nodes()
        pares=[]
        soma_dists2=0
        for v1 in vertices:
            for v2 in vertices:
                if set((v1,v2)) not in pares:
                    soma_dists2+=dists[v1][v2]
                    pares.append(set((v1,v2)))
        Iwg2=soma_dists2
        return Iwg-Iwg2

    g_foo.remove_node(vertice)

    dists=nx.shortest_path_length(g_foo, weighted=True)
    vertices=g_foo.nodes()
    pares=[]
    soma_dists2=0
    for v1 in vertices:
        for v2 in vertices:
            if set((v1,v2)) not in pares:
                soma_dists2+=dists[v1][v2]
                pares.append(set((v1,v2)))
    Iwg2=soma_dists2

    return Iwg-Iwg2
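The measure above is the drop in the Wiener sum (the total of all-pairs shortest-path distances) when the node or edge is removed. The function itself relies on the pre-2.0 `weighted=True` interface; the sketch below is an assumption-laden illustration (not part of the original module) that reproduces the same idea with networkx 2+, where the all-pairs form yields (node, distance-dict) pairs, and checks it against the built-in closeness_vitality.

import networkx as nx

def wiener_sum(g):
    # total of all-pairs shortest-path lengths over unordered node pairs
    total = 0
    for _, lengths in nx.shortest_path_length(g, weight='weight'):
        total += sum(lengths.values())
    return total / 2.0

G = nx.cycle_graph(5)
H = G.copy()
H.remove_node(0)
print(wiener_sum(G) - wiener_sum(H))   # closeness vitality of node 0
print(nx.closeness_vitality(G)[0])     # built-in value, for comparison (5.0)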
Code Example #10
def _min_cycle(G, orth, weight=None):
    """
    Computes the minimum weight cycle in G, orthogonal to the vector orth
    as per [p. 338, 1]
    """
    T = nx.Graph()

    nodes_idx = {node: idx for idx, node in enumerate(G.nodes())}
    idx_nodes = {idx: node for node, idx in nodes_idx.items()}

    nnodes = len(nodes_idx)

    # Add 2 copies of each edge in G to T. If edge is in orth, add cross edge;
    # otherwise in-plane edge
    if weight is not None:
        for u, v in G.edges():
            uidx, vidx = nodes_idx[u], nodes_idx[v]
            edge_w = G[u][v][weight]
            if frozenset((u, v)) in orth:
                T.add_edges_from(
                    [(uidx, nnodes + vidx), (nnodes + uidx, vidx)], weight=edge_w)

            else:
                T.add_edges_from(
                    [(uidx, vidx), (nnodes + uidx, nnodes + vidx)], weight=edge_w)

        all_shortest_pathlens = nx.shortest_path_length(T, weight='weight')

    else:
        for u, v in G.edges():
            uidx, vidx = nodes_idx[u], nodes_idx[v]
            if frozenset((u, v)) in orth:
                T.add_edges_from(
                    [(uidx, nnodes + vidx), (nnodes + uidx, vidx)])

            else:
                T.add_edges_from(
                    [(uidx, vidx), (nnodes + uidx, nnodes + vidx)])

        all_shortest_pathlens = nx.shortest_path_length(T)

    cross_paths_w_lens = {
        n: all_shortest_pathlens[n][nnodes + n] for n in range(nnodes)}

    # Now compute shortest paths in T, which translates to cyles in G
    min_path_startpoint = min(cross_paths_w_lens, key=cross_paths_w_lens.get)
    min_path = nx.shortest_path(
        T, source=min_path_startpoint, target=nnodes + min_path_startpoint, weight='weight')

    # Now we obtain the actual path, re-map nodes in T to those in G
    min_path_nodes = [
        node if node < nnodes else node - nnodes for node in min_path]
    # Now remove the edges that occur two times
    mcycle_pruned = _path_to_cycle(min_path_nodes)

    return {frozenset((idx_nodes[u], idx_nodes[v])) for u, v in mcycle_pruned}
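A small self-contained illustration (assumed, not taken from the module above) of the node-doubling construction used in _min_cycle: every node u gets a copy u + nnodes, edges in orth become cross edges between the two layers, and a shortest path in the lifted graph from a node to its own copy corresponds to a cycle in G that crosses orth an odd number of times.

import networkx as nx

G = nx.cycle_graph(3)              # the triangle 0-1-2
orth = {frozenset((0, 1))}
nnodes = len(G)

T = nx.Graph()
for u, v in G.edges():
    if frozenset((u, v)) in orth:
        T.add_edges_from([(u, nnodes + v), (nnodes + u, v)])   # cross edges
    else:
        T.add_edges_from([(u, v), (nnodes + u, nnodes + v)])   # in-plane edges

lifted = nx.shortest_path(T, 0, nnodes + 0)
print([n % nnodes for n in lifted])   # e.g. [0, 1, 2, 0]: the triangle, crossing orth exactly once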
Code Example #11
File: mwpm.py Project: jacobmarks/QTop
def AssociatedExternal(node, Dual, External):
    associate = External.iterkeys().next()
    min_dist = nx.shortest_path_length(Dual, node, External[associate]['measure']) + 1

    for candidate in External:
        distance = nx.shortest_path_length(Dual, node, External[candidate]['measure']) + 1
        if distance < min_dist:
            min_dist = distance
            associate = candidate
    return associate
Code Example #12
File: p2e2.py Project: ryanefoley/repo1
def get_sigma(G, s, v, predecessors):
  if s == v:
    return 1

  l = nx.shortest_path_length(G,s,v)
  sigma_v = 0
  for u in G.neighbors(v):
    if nx.shortest_path_length(G,s,u) < l:
      predecessors.add(u)
      sigma_v += get_sigma(G, s, u, predecessors)
  return sigma_v
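A quick check (assumed, not from the original file) of the recursion above: sigma counts the number of shortest s-v paths by summing the counts of the neighbours of v that lie strictly closer to s. In a 4-cycle there are exactly two shortest paths between opposite nodes.

import networkx as nx

G = nx.cycle_graph(4)
print(get_sigma(G, 0, 2, set()))   # 2: the paths 0-1-2 and 0-3-2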
Code Example #13
File: run.py Project: vaishakbelle/APPROXWMI
def extract_shortestpath_subgraph(g, nodes):
    newg = nx.DiGraph()
    for beg in nodes:
        for end in nodes:
            if beg != end:
                newg.add_edge(beg,
                              end,
                              duration=nx.shortest_path_length(g,source=beg,target=end,weight='duration'),
                              min_duration=nx.shortest_path_length(g,source=beg,target=end,weight='min_duration'),
                              max_duration=nx.shortest_path_length(g,source=beg,target=end,weight='max_duration'))
    return newg
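A hedged usage sketch (the graph, node names and durations are made up): the function builds the metric closure restricted to the given nodes, so each new edge carries the shortest-path cost under each of the three weights. The node set must be mutually reachable, otherwise shortest_path_length raises NetworkXNoPath.

import networkx as nx

g = nx.DiGraph()
g.add_edge('a', 'b', duration=2, min_duration=1, max_duration=3)
g.add_edge('b', 'c', duration=4, min_duration=2, max_duration=6)
g.add_edge('a', 'c', duration=9, min_duration=9, max_duration=9)
g.add_edge('c', 'a', duration=5, min_duration=5, max_duration=5)

newg = extract_shortestpath_subgraph(g, ['a', 'c'])
print(newg['a']['c'])   # {'duration': 6, 'min_duration': 3, 'max_duration': 9} (via 'b')
print(newg['c']['a'])   # {'duration': 5, 'min_duration': 5, 'max_duration': 5}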
Code Example #14
def random_pairs_dist(data1, data, el):
    """
    Chooses random pairs of branches in the skeleton and computes the distances
    between them.
    
    Parameters
    ------------
    data1 : list of pathes for the break-ups dictionaries. Used here for the 
            estimation of the necessary number of pairs.
    data : list of pathes for the graphs
    el : list of length scales
    
    Return
    -------
    d_ran : list of distances along the skeleton between random branches.
    """
    d_ran = list()
    u=0
    for b, g in zip(data1, data):
        br = np.load(b).item().keys()
        gr = nx.read_gpickle(g)
        edg = list(nx.edges(gr))
        number = nx.get_edge_attributes(gr, 'number')
        num_dict = {}
        for k in number:
            for v in number[k]:
                num_dict.setdefault(v, []).append(k)
        a = len(list(br))
        if a > 1500: #We cannot use infinitely many pairs here because of the computing time.
            a = 1500
        for j in range(a):
            e1 = random.choice(edg)
            e2 = random.choice(edg)
            if (e1==e2):
                continue
            n1 = e1[0]
            n2 = e1[1]
            m1 = e2[0]
            m2 = e2[1]
            try: 
                p_min = p_max = nx.shortest_path_length(gr, n1, m1, 'length')
            except nx.NetworkXNoPath:
                continue    
            for i in itertools.product((n1,n2), (m1,m2)):
                p = nx.shortest_path_length(gr, i[0], i[1], 'length')
                if p < p_min:
                    p_min = p
                if p > p_max:
                    p_max = p
            d = (p_min+p_max)/(2. * el[u])
            d_ran.append(d)
        u+=1
    return d_ran
Code Example #15
def nxShortestPath(nxGraph, nxPos, startPt, endPt, Dijk=0):
    if Dijk == 0:
        nxList = nx.shortest_path(nxGraph, source=startPt, target=endPt)
        score = nx.shortest_path_length(nxGraph, source=startPt, target=endPt)
        dist = nx.shortest_path_length(nxGraph, source=startPt, target=endPt, weight="distance")
    elif Dijk == 1:
        nxList = nx.dijkstra_path(nxGraph, source=startPt, target=endPt, weight="weight")
        score = nx.dijkstra_path_length(nxGraph, source=startPt, target=endPt, weight="weight")
        dist = nx.dijkstra_path_length(nxGraph, source=startPt, target=endPt, weight="distance")

    nxH = nx.subgraph(nxGraph, nxList)
    return nxList, nxH, score, dist
Code Example #16
File: test_generic.py Project: c0ns0le/zenoss-4
 def test_single_source_shortest_path_length(self):
     l=nx.shortest_path_length(self.cycle,0)
     assert_equal(l,{0:0,1:1,2:2,3:3,4:3,5:2,6:1})
     assert_equal(l,nx.single_source_shortest_path_length(self.cycle,0))
     l=nx.shortest_path_length(self.grid,1)
     assert_equal(l[16],6)
     # now with weights
     l=nx.shortest_path_length(self.cycle,0,weighted=True)
     assert_equal(l,{0:0,1:1,2:2,3:3,4:3,5:2,6:1})
     assert_equal(l,nx.single_source_dijkstra_path_length(self.cycle,0))
     l=nx.shortest_path_length(self.grid,1,weighted=True)
     assert_equal(l[16],6)
Code Example #17
File: test_generic.py Project: c0ns0le/zenoss-4
 def test_all_pairs_shortest_path_length(self):
     l=nx.shortest_path_length(self.cycle)
     assert_equal(l[0],{0:0,1:1,2:2,3:3,4:3,5:2,6:1})
     assert_equal(l,nx.all_pairs_shortest_path_length(self.cycle))
     l=nx.shortest_path_length(self.grid)
     assert_equal(l[1][16],6)
     # now with weights
     l=nx.shortest_path_length(self.cycle,weighted=True)
     assert_equal(l[0],{0:0,1:1,2:2,3:3,4:3,5:2,6:1})
     assert_equal(l,nx.all_pairs_dijkstra_path_length(self.cycle))
     l=nx.shortest_path_length(self.grid,weighted=True)
     assert_equal(l[1][16],6)
Code Example #18
File: TMI.py Project: udemirezen/MotifRetrieval
def normalize_step_weight(graph):
    "Changes the edge weights in the graph proportional to the longest path."
    longest_path_len = max(nx.shortest_path_length(graph, "ROOT").values())
    # add normalized path length as weight to edges.
    for category in "ABCEDFGHJKLMNPQRSTUVWXZ":
        # for each category, find out how long the longest path is.
        cat_longest_path_len = max(nx.shortest_path_length(graph, category).values()) + 1
        # normalize the stepsize
        stepsize = float(longest_path_len) / cat_longest_path_len
        # traverse tree for this category and assign stepsize to edges as weight attribute
        for a, b in nx.dfs_edges(graph, category):
            graph[a][b]["weight"] = stepsize
Code Example #19
File: main.py Project: KPLauritzen/Project-Euler
def main():
    for i in range(size):
        for j in range(size):
            G.add_edge((i,j), (i+1,j), weight=mat[i,j])
            G.add_edge((i,j), (i,j+1), weight=mat[i,j])


    for i in range(size):
        G.remove_node((i,size))
        G.remove_node((size,i))
    
    print nx.shortest_path_length(G, (0,0), (size-1,size-1), weight='weight')
Code Example #20
File: common.py Project: jacobmarks/QTop
 def distance(self, type, node1, node2):
     if node1 in self.Dual[type].nodes() and node2 in self.Dual[type].nodes():
         return nx.shortest_path_length(self.Dual[type], node1, node2)
     elif node1 in self.Dual[type].nodes() and node2 not in self.Dual[type].nodes():
         node2 = self.External[type][node2]['measure']
         return nx.shortest_path_length(self.Dual[type], node1, node2) + 1
     elif node1 not in self.Dual[type].nodes() and node2 in self.Dual[type].nodes():
         node1 = self.External[type][node1]['measure']
         return nx.shortest_path_length(self.Dual[type], node1, node2) + 1
     else:
         node1 = self.External[type][node1]['measure']
         node2 = self.External[type][node2]['measure']
         return nx.shortest_path_length(self.Dual[type], node1, node2) + 2
Code Example #21
File: util.py Project: MamadouDoumbia/pyannote
def complete_mpg(g):

    G = propagate_constraints(g)

    log = log_mpg(G)
    complete = nx.Graph()

    # all track nodes of interest
    # ie speaker/head node not subtrack
    tnodes = [(n,d) for n,d in G.nodes_iter(data=True) \
                    if isinstance(n, TrackNode) \
                    and n.modality in ['speaker', 'head'] \
                    and not d.get(SUBTRACK, False)]

    # all identity nodes
    inodes = [(n,d) for n,d in G.nodes_iter(data=True) \
                    if isinstance(n, IdentityNode)]

    # tnode/tnode shortest path (with forbidden identity nodes)
    _log = nx.Graph(log)
    _log.remove_nodes_from(zip(*inodes)[0])
    _shortest = nx.shortest_path_length(_log, weight=PROBABILITY)
    for i, (n, d) in enumerate(tnodes):
        complete.add_node(n, **d)
        for N, D in tnodes[i+1:]:
            if G.has_edge(n, N):
                data = dict(G[n][N])
            else:
                data = {PROBABILITY: np.exp(-_shortest[n][N])}
            complete.add_edge(n, N, **data)

    # inode/tnodes shortest path (with forbidden other identity nodes)
    for i, (n, d) in enumerate(inodes):
        complete.add_node(n, **d)
        _log = nx.Graph(log)
        _log.remove_nodes_from([m for j,(m,_) in enumerate(inodes) if j != i])
        _shortest = nx.shortest_path_length(_log, source=n, weight=PROBABILITY)
        for N, D in tnodes:
            if G.has_edge(n, N):
                data = dict(G[n][N])
            else:
                data = {PROBABILITY: np.exp(-_shortest[N])}
            complete.add_edge(n, N, **data)

    # inode/inode constraint
    for i, (n, d) in enumerate(inodes):
        for m,_ in inodes[i+1:]:
            G.add_edge(n, m, **{PROBABILITY: 0})

    return complete
Code Example #22
File: test_generic.py Project: jianantian/networkx
 def test_shortest_path_length(self):
     assert_equal(nx.shortest_path_length(self.cycle, 0, 3), 3)
     assert_equal(nx.shortest_path_length(self.grid, 1, 12), 5)
     assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4), 4)
     # now with weights
     assert_equal(nx.shortest_path_length(self.cycle, 0, 3,
                                          weight='weight'),
                  3)
     assert_equal(nx.shortest_path_length(self.grid, 1, 12,
                                          weight='weight'),
                  5)
     assert_equal(nx.shortest_path_length(self.directed_cycle, 0, 4,
                                          weight='weight'),
                  4)
     # weights and method specified
     assert_equal(nx.shortest_path_length(self.cycle, 0, 3, weight='weight',
                                          method='dijkstra'),
                  3)
     assert_equal(nx.shortest_path_length(self.cycle, 0, 3, weight='weight',
                                          method='bellman-ford'),
                  3)
     # confirm bad method rejection
     assert_raises(ValueError,
                   nx.shortest_path_length,
                   self.cycle,
                   method='SPAM')
     # confirm absent source rejection
     assert_raises(nx.NodeNotFound, nx.shortest_path_length, self.cycle, 8)
Code Example #23
File: path_graph.py Project: balajipandian/taco
def get_unreachable_kmers(K, source=None, sink=None):
    '''
    Path graphs created with k > 2 can yield fragmented paths. Test for
    these by finding unreachable kmers from source or sink
    '''
    if source is None:
        source = SOURCE
    if sink is None:
        sink = SINK
    allnodes = set(K)
    # unreachable from source
    a = allnodes - set(nx.shortest_path_length(K, source=source).keys())
    # unreachable from sink
    b = allnodes - set(nx.shortest_path_length(K, target=sink).keys())
    return a | b
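A minimal sketch of how this might be called (the graph and kmer labels are invented; source and sink are passed explicitly so the module-level SOURCE/SINK constants are not needed). It assumes a networkx version in which the single-source and single-target forms of shortest_path_length return dictionaries, as current releases do.

import networkx as nx

K = nx.DiGraph()
K.add_edges_from([('src', 'AAA'), ('AAA', 'AAT'), ('AAT', 'snk')])
K.add_node('GGG')   # a fragment not connected to the main path

print(get_unreachable_kmers(K, source='src', sink='snk'))   # {'GGG'}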
Code Example #24
def maketree(g, T, ww, sortedgel):
	# Kruskal-style spanning-tree construction: an edge is added to T only if its
	# endpoints are not already connected (checked by attempting a shortest path).
	# n (the number of nodes) is assumed to be defined at module level in the original script.
	for e in sortedgel:
		if len(T.edges()) < (n - 1):
			try:
				nx.shortest_path_length(T, *e)
			except:
				T.add_edge(*e)
		else:
			break

	if len(T.edges()) != n - 1:
		print "Oh no of the no no !!!"
Code Example #25
File: Evaluation.py Project: hfsun/DBCP
def EndtoEndComm(graph,source,target,switch2controller):
    path = nx.shortest_path(graph,source,target)
    
    #domain_controller = switch2controller[graph.nodes().index(source)]
    domain_controller = switch2controller[graph.nodes().index(source)]
    l_s2c = nx.shortest_path_length(graph,source,domain_controller) #calculate the latency of the packet-in delivery, which is sent from the source switch to its controller.
    l_c2s = 0
    #calculate the worst-case latency as the combination of 2 parts: controller-to-controller latency and controller-to-switch latency.
    for node in path:
        #calculate the latency of inter-controller communication
        c2c = nx.shortest_path_length(graph,domain_controller,switch2controller[graph.nodes().index(node)])
        #calculate the latency of the rule delivery, which is sent from controller to the switch.
        c2s = nx.shortest_path_length(graph,switch2controller[graph.nodes().index(node)],node)
        l_c2s = c2c+c2s if l_c2s < c2c+c2s else l_c2s
    return l_s2c+l_c2s    
Code Example #26
File: test_generic.py Project: jianantian/networkx
 def test_shortest_path_length_target(self):
     answer = {0: 1, 1: 0, 2: 1}
     sp = dict(nx.shortest_path_length(nx.path_graph(3), target=1))
     assert_equal(sp, answer)
     # with weights
     sp = nx.shortest_path_length(nx.path_graph(3), target=1,
                                  weight='weight')
     assert_equal(sp, answer)
     # weights and method specified
     sp = nx.shortest_path_length(nx.path_graph(3), target=1,
                                  weight='weight', method='dijkstra')
     assert_equal(sp, answer)
     sp = nx.shortest_path_length(nx.path_graph(3), target=1,
                                  weight='weight', method='bellman-ford')
     assert_equal(sp, answer)
Code Example #27
File: molecule_class.py Project: smerz1989/np-mc
def getDihedralsFromAtoms(atomlist,bondlist,dihedrals):
	"""Given a list of Atoms find all the Dihedral objects in dihedrals which contains these atoms.

	Parameters
	----------
	atomlist : Atom List
		A list of Atoms used to search through the Dihedral List.
	bondlist : Bond List
		A List of Bond objects used to get connectivity of atoms.  The connectivity is used to find the possible dihedral combinations of the Atoms in atoms.
	dihedrals : Dihedral List
		A List of Dihedrals which is searched through.

	Returns
	-------
	dihedral_list : Dihedral List
		A list of Dihedrals that are associated with the Atoms in atomlist.
	"""
	mol_graph = molecule2graph(atomlist,bondlist)
	dihedral_combos=[]
	#Get possible dihedrals by finding atom pairs whose shortest path is exactly 3 bonds
	for atom1,atom2 in permutations(mol_graph.__iter__(),r=2):
		if(ntwkx.shortest_path_length(mol_graph,source=atom1,target=atom2)==3):
				dihedral_combos.append(ntwkx.shortest_path(mol_graph,source=atom1,target=atom2))	  
	dihedral_list=[]
	for dihedral in dihedrals:
		if([dihedral.atom1,dihedral.atom2,dihedral.atom3,dihedral.atom4] in dihedral_combos):
			dihedral_list.append(dihedral)
	return dihedral_list
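The core trick above is that two atoms lie at the ends of a dihedral exactly when their shortest path in the molecular graph is 3 bonds long, in which case the path itself is the 4-atom chain. A plain-networkx illustration of that idea (no Atom/Bond/molecule2graph machinery; nodes are just integers):

import networkx as nx
from itertools import permutations

mol = nx.Graph([(0, 1), (1, 2), (2, 3), (2, 4)])   # a small branched molecule
dihedral_combos = [nx.shortest_path(mol, a, b)
                   for a, b in permutations(mol, 2)
                   if nx.shortest_path_length(mol, a, b) == 3]
print(dihedral_combos)   # [[0, 1, 2, 3], [0, 1, 2, 4], [3, 2, 1, 0], [4, 2, 1, 0]]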
Code Example #28
def is_destination_reachable_from_source(noc_rg, source_node, destination_node):
    """
    checks if destination is reachable from the local port of the source node
    the search starts from the local port
    :param noc_rg: NoC routing graph
    :param source_node: source node id
    :param destination_node: destination node id
    :return: True if there is a path else, False
    """
    # the Source port should be input port since this is input of router
    # (which will be connected to PE's output port)
    source = str(source_node)+str('L')+str('I')
    # the destination port should be output port since this is output of router to PE
    # (which will be connected to PE's input port)
    destination = str(destination_node)+str('L')+str('O')
    if has_path(noc_rg, source, destination):
        if Config.RotingType == 'MinimalPath':
            path_length = shortest_path_length(noc_rg, source, destination)
            minimal_hop_count = manhattan_distance(source_node, destination_node)
            if (path_length/2) == minimal_hop_count:
                return True
        else:
            return True
    else:
        return False
Code Example #29
File: explorer_scene.py Project: pierreloicq/sakia
    def twopi_layout(nx_graph, center=None):
        """
        Render the twopi layout. Ported from C code available at
        https://github.com/ellson/graphviz/blob/master/lib/twopigen/circle.c

        :param networkx.MultiDiGraph nx_graph: the networkx graph
        :param str center: the centered node
        :return:
        """
        if len(nx_graph.nodes()) == 0:
            return {}

        if len(nx_graph.nodes()) == 1:
            return {nx_graph.nodes()[0]: (0, 0)}
        #nx_graph = nx_graph.to_undirected()

        data = ExplorerScene._init_layout(nx_graph)
        if not center:
            center = networkx.center(nx_graph)[0]
        ExplorerScene._set_parent_nodes(nx_graph, data, center)
        ExplorerScene._set_subtree_size(nx_graph, data)
        data[center]['span'] = 2 * math.pi
        ExplorerScene._set_subtree_spans(nx_graph, data, center)
        data[center]['theta'] = 0.0
        ExplorerScene._set_positions(nx_graph, data, center)

        distances = networkx.shortest_path_length(nx_graph.to_undirected(), center)
        nx_pos = {}
        for node in nx_graph.nodes():
            hyp = distances[node] + 1
            theta = data[node]['theta']
            nx_pos[node] = (hyp * math.cos(theta) * 100, hyp * math.sin(theta) * 100)
        return nx_pos
Code Example #30
def participation_coefficient(graph, partition):
    '''
    Computes the participation coefficient for each node.

    ------
    Inputs
    ------
    graph = networkx graph
    partition = modularity partition of graph

    ------
    Output
    ------
    Dictionary mapping each node to its participation coefficient.

    '''
    
    pc_dict = {}
    all_nodes = set(graph.nodes())
    paths = nx.shortest_path_length(G=graph)
    for m in partition.keys():
        mod_list = set(partition[m])
        between_mod_list = list(set.difference(all_nodes, mod_list))
        for source in mod_list:
            degree = float(nx.degree(G=graph, nbunch=source))
            count = 0
            for target in between_mod_list:
                if paths[source][target] == 1:
                    count += 1
            bm_degree = count
            pc = 1 - (bm_degree / degree)**2
            pc_dict[source] = pc
    return pc_dict
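A small independent check of the formula used above (the graph and partition are invented; this does not call the function itself, which indexes the pre-2.0 dict-of-dicts return of nx.shortest_path_length): the nodes at distance 1 outside a node's own module are simply its neighbours in other modules.

import networkx as nx

graph = nx.barbell_graph(4, 0)            # two 4-cliques joined by a single edge
partition = {0: [0, 1, 2, 3], 1: [4, 5, 6, 7]}

node = 3                                  # the clique node that touches the bridge
degree = float(graph.degree(node))
bm_degree = sum(1 for nbr in graph[node] if nbr not in partition[0])
print(1 - (bm_degree / degree) ** 2)      # 1 - (1/4)^2 = 0.9375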
Code Example #31
File: LausNetw.py Project: yyan162/DemEstMeth
    def GdL_create_network(self, estim_param):
        '''
        Creates the graph of Gare de Lausanne
        '''
        self.G = nx.MultiDiGraph()
        ''' ---------------------------------------------------
                            Defines the vertices
        ----------------------------------------------------'''

        # Dictionary used to plot the graph. Includes all the nodes of the network.
        self.positions = {
            'NW': (-0.7, 20),
            'NWM': (1, 20),
            'NE': (7, 20),
            'NEM': (9, 20),
            'SW': (0, -3),
            'SE': (8, -3),
            'N': (3, 20),
            '1D': (-2.5, 14),
            '1C': (3, 14),
            'BAR': (-0.8, 16.1),
            'nww': (-1.4, 18),
            'KIOSK': (0.9, 12),
            'kioskh': (0, 12),
            'SHOP': (-1.5, -3),
            '1h1': (-2, 14),
            '1h2': (-1.7, 14),
            '1d': (-0.9, 14),
            'nwh': (0, 17),
            '1wh': (0, 16),
            '1w': (0, 14),
            '1wc': (0.9, 14),
            '1c': (1.4, 14),
            '1h3': (1.8, 14),
            '1h4': (2.4, 14),
            'nh': (3, 19),
            'h': (3, 18),
            '34B': (5.5, 9),
            '34C': (3, 9),
            '34A': (10.5, 9),
            '34D': (-2.5, 9),
            '56B': (5.5, 6),
            '56C': (3, 6),
            '56D': (-2.5, 6),
            '56A': (10.5, 6),
            '78B': (5.5, 3),
            '78C': (3, 3),
            '78A': (10.5, 3),
            '78D': (-2.5, 3),
            '9C': (3, 0),
            '9D': (-2.5, 0),
            'nw': (-0.7, 18),
            'sw': (0, -1.5),
            'nwm': (1, 18),
            'ne': (7, 16),
            'neh1': (7, 19),
            'neh2': (7, 18),
            'nem': (9, 16),
            'se': (8, -1.5),
            '1eh': (8, 15),
            '1e': (8, 14),
            '1AB': (10.5, 14),
            '70FE': (10.5, 16),
            '70fe': (9.5, 16),
            '1ab': (9.5, 14),
            '34d': (-0.9, 9),
            '34c': (0.9, 9),
            '34b': (7.1, 9),
            '34a': (8.9, 9),
            '56d': (-0.9, 6),
            '56c': (0.9, 6),
            '56b': (7.1, 6),
            '56a': (8.9, 6),
            '78d': (-0.9, 3),
            '78c': (0.9, 3),
            '78b': (7.1, 3),
            '78a': (8.9, 3),
            '9d': (-0.9, 0),
            '9c': (0.9, 0),
            '9h1': (-0.6, 0),
            '9h2': (-0.3, 0),
            '9h3': (0.3, 0),
            '9h4': (0, 1),
            '34w': (0, 9),
            '56w': (0, 6),
            '78w': (0, 3),
            '9w': (0, 0),
            '34e': (8, 9),
            '56e': (8, 6),
            '56h': (8, 4),
            '78e': (8, 3)
        }

        self.node_labels = {
            'NW': 'NW',
            'NWM': 'NWM',
            'NE': 'NE',
            'NEM': 'NEM',
            'SW': 'SW',
            'SE': 'SE',
            'N': 'N',
            '1D': '1D',
            '1C': '1C',
            'BAR': 'BAR',
            'nww': 'nww',
            'KIOSK': 'KIOSK',
            'kioskh': 'kioskh',
            'SHOP': 'SHOP',
            '1h1': '1h1',
            '1h2': '1h2',
            '1d': '1d',
            'nwh': 'nwh',
            '1wh': '1wh',
            '1w': '1w',
            '1wc': '1wc',
            '1c': '1c',
            '1h3': '1h3',
            '1h4': '1h4',
            'nh': 'nh',
            'h': 'h',
            '34B': '34B',
            '34C': '34C',
            '34A': '34A',
            '34D': '34D',
            '56B': '56B',
            '56C': '56C',
            '56D': '56D',
            '56A': '56A',
            '78B': '78B',
            '78C': '78C',
            '78A': '78A',
            '78D': '78D',
            '9C': '9C',
            '9D': '9D',
            'nw': 'nw',
            'sw': 'sw',
            'nwm': 'nwm',
            'ne': 'ne',
            'neh1': 'neh1',
            'neh2': 'neh2',
            'nem': 'nem',
            'se': 'se',
            '1eh': '1eh',
            '1e': '1e',
            '1AB': '1AB',
            '70FE': '70FE',
            '70fe': '70fe',
            '1ab': '1ab',
            '34d': '34d',
            '34c': '34c',
            '34b': '34b',
            '34a': '34a',
            '56d': '56d',
            '56c': '56c',
            '56b': '56b',
            '56a': '56a',
            '78d': '78d',
            '78c': '78c',
            '78b': '78b',
            '78a': '78a',
            '9d': '9d',
            '9c': '9c',
            '9h1': '9h1',
            '9h2': '9h2',
            '9h3': '9h3',
            '9h4': '9h4',
            '34w': '34w',
            '56w': '56w',
            '78w': '78w',
            '9w': '9w',
            '34e': '34e',
            '56e': '56e',
            '56h': '56h',
            '78e': '78e'
        }
        self.centroids_labels = {
            'NW': 'NW',
            'NWM': 'NWM',
            'NE': 'NE',
            'NEM': 'NEM',
            'SW': 'SW',
            'SE': 'SE',
            'N': 'N',
            '1D': '1D',
            '1C': '1C',
            'BAR': 'BAR',
            'KIOSK': 'KIOSK',
            'SHOP': 'SHOP',
            '34B': '34B',
            '34C': '34C',
            '34A': '34A',
            '34D': '34D',
            '56B': '56B',
            '56C': '56C',
            '56D': '56D',
            '56A': '56A',
            '78B': '78B',
            '78C': '78C',
            '78A': '78A',
            '78D': '78D',
            '9C': '9C',
            '9D': '9D',
            '1AB': '1AB',
            '70FE': '70FE'
        }
        self.not_centroids_labels = {
            'nww': 'nww',
            'kioskh': 'kioskh',
            '1h1': '1h1',
            '1h2': '1h2',
            '1d': '1d',
            'nwh': 'nwh',
            '1wh': '1wh',
            '1w': '1w',
            '1wc': '1wc',
            '1c': '1c',
            '1h3': '1h3',
            '1h4': '1h4',
            'nh': 'nh',
            'h': 'h',
            'nw': 'nw',
            'sw': 'sw',
            'nwm': 'nwm',
            'ne': 'ne',
            'neh1': 'neh1',
            'neh2': 'neh2',
            'nem': 'nem',
            'se': 'se',
            '1eh': '1eh',
            '1e': '1e',
            '70fe': '70fe',
            '1ab': '1ab',
            '34d': '34d',
            '34c': '34c',
            '34b': '34b',
            '34a': '34a',
            '56d': '56d',
            '56c': '56c',
            '56b': '56b',
            '56a': '56a',
            '78d': '78d',
            '78c': '78c',
            '78b': '78b',
            '78a': '78a',
            '9d': '9d',
            '9c': '9c',
            '9h1': '9h1',
            '9h2': '9h2',
            '9h3': '9h3',
            '9h4': '9h4',
            '34w': '34w',
            '56w': '56w',
            '78w': '78w',
            '9w': '9w',
            '34e': '34e',
            '56e': '56e',
            '56h': '56h',
            '78e': '78e'
        }

        #We define the nodes, classifying them as centroids (platform and not platform), ASE_measurement nodes, VS_measurement_nodes (West and East PU) and other nodes.
        self.centroids = [
            'NW', 'NWM', 'NE', 'NEM', 'SW', 'SE', 'N', 'KIOSK', 'BAR', 'SHOP',
            '1D', '1C', '70FE', '1AB', '34D', '34C', '34B', '34A', '56D',
            '56C', '56B', '56A', '78D', '78C', '78B', '78A', '9D', '9C'
        ]
        Centroids_Platform = [
            '1D', '1C', '70FE', '1AB', '34D', '34C', '34B', '34A', '56D',
            '56C', '56B', '56A', '78D', '78C', '78B', '78A', '9D', '9C'
        ]
        Centroids_No_Platform = [
            'NW', 'NWM', 'NE', 'NEM', 'SW', 'SE', 'N', 'KIOSK', 'BAR', 'SHOP'
        ]
        Centroids_Entrance_Exit = ['NW', 'NWM', 'NE', 'NEM', 'SW', 'SE', 'N']
        Centroids_Shop = ['KIOSK', 'BAR', 'SHOP']

        Platforms = {'1', '3/4', '5/6', '7/8', '9', '70'}

        Centroid_Types_Detail = ['platform', 'entrance', 'shop']
        Centroid_Types_RCh = ['platform', 'non-platform']

        self.ASE_measurement_nodes = [
            'nw', 'nww', 'nwm', '1c', 'nh', 'neh1', 'ne', 'nem', 'sw', '9h2',
            '9h3', 'se'
        ]
        self.VS_measurement_nodes = [
            '1h1', '1h4', '70fe', '1ab', '34d', '34c', '34b', '34a', '56d',
            '56c', '56b', '56a', '78d', '78c', '78b', '78a', '9d', '9c'
        ]  # TINF nodes
        self.other_nodes = [
            'h', '1wc', 'kioskh', '1h2', '1d', 'nwh', '1wh', '1w', '1h3',
            'neh2', '1eh', '1e', '9h1', '9h4', '34w', '56w', '78w', '9w',
            '34e', '56e', '56h', '78e'
        ]
        VS_west_nodes = [
            '1wh', '1d', '1wc', '34d', '34c', '56d', '56c', '78d', '78c', '9h4'
        ]
        VS_east_nodes = ['1e', '34b', '34a', '56b', '56a', '56h']
        VS_nodes = VS_west_nodes + VS_east_nodes

        #We create dictionaries for the centroids
        self.centroids_dict = {}
        self.centroids_dict_rev = {}
        for idx, node in enumerate(self.centroids):
            self.centroids_dict[idx] = node
            self.centroids_dict_rev[node] = idx

        self.centroids_p_dict = {}
        for idx, node in enumerate(Centroids_Platform):
            self.centroids_p_dict[idx] = node

        self.centroids_np_dict = {}
        for idx, node in enumerate(Centroids_No_Platform):
            self.centroids_np_dict[idx] = node

        self.centroids_ee_dict = {}
        for idx, node in enumerate(Centroids_Entrance_Exit):
            self.centroids_ee_dict[idx] = node

        self.shops_dict = {}
        for idx, node in enumerate(Centroids_Shop):
            self.shops_dict[idx] = node

        self.centroid_types_det_dict = {}
        self.centroid_types_det_dict_rev = {}
        for idx, node in enumerate(Centroid_Types_Detail):
            self.centroid_types_det_dict[idx] = node
            self.centroid_types_det_dict_rev[node] = idx

        self.centroid_types_RCh_dict = {}
        self.centroid_types_RCh_dict_rev = {}
        for idx, node in enumerate(Centroid_Types_RCh):
            self.centroid_types_RCh_dict[idx] = node
            self.centroid_types_RCh_dict_rev[node] = idx

        self.platforms_dict = {}
        self.platforms_dict_rev = {}
        for idx, node in enumerate(Platforms):
            self.platforms_dict[idx] = node
            self.platforms_dict_rev[node] = idx
        ''' ---------------------------------------------------
                Adds the vertices to the graph object
        ----------------------------------------------------'''

        self.G.add_nodes_from(Centroids_No_Platform, type='Centroids')
        self.G.add_nodes_from(self.ASE_measurement_nodes, type='ASE')
        self.G.add_nodes_from(Centroids_Platform, type='Platforms')
        self.G.add_nodes_from(self.VS_measurement_nodes, type='VS')
        self.G.add_nodes_from(self.other_nodes, type='Other')

        self.nodes = self.G.nodes()
        ''' ---------------------------------------------------
                Adds the edges to the graph object
        ----------------------------------------------------'''

        #We add the edges that are not ramps or escalators to the graph
        self.G.add_edges_from([('34d', '34w'), ('56d', '56w'), ('78d', '78w'),
                               ('34w', '34c'), ('56w', '56c'), ('78w', '78c'),
                               ('kioskh', 'KIOSK'), ('1w', '1d')],
                              {'length': 3})
        self.G.add_edges_from([('34b', '34e'), ('56b', '56e'), ('78b', '78e'),
                               ('34e', '34a'), ('56e', '56a'), ('78e', '78a')],
                              {'length': 5})
        self.G.add_edges_from([('34b', '34e'), ('56b', '56e'), ('78b', '78e'),
                               ('34e', '34a'), ('56e', '56a'), ('78e', '78a')],
                              {'length': 5})
        self.G.add_edges_from([('34w', '56w'), ('34e', '56e')],
                              {'length': 15.5})
        self.G.add_edges_from([('56w', '78w')], {'length': 14.4})
        self.G.add_edges_from([('56e', '56h'), ('56h', '78e'),
                               ('neh2', 'neh1')], {'length': 7.2})
        self.G.add_edges_from([('34w', 'kioskh'), ('kioskh', '1w')],
                              {'length': 11})
        self.G.add_edges_from([('1h2', '1h1'), ('1h1', '1D'), ('1c', '1h3'),
                               ('1h3', '1h4'), ('1h4', '1C'), ('h', 'nh'),
                               ('neh1', 'NE')],
                              {'length': 2})  # Check length maybe
        self.G.add_edges_from([('1w', 'BAR'), ('nh', 'N')], {'length': 8})
        self.G.add_edges_from([('1h3', 'h')], {'length': 30})
        self.G.add_edges_from([('h', 'neh2')], {'length': 75})
        self.G.add_edges_from([('78C', '78B')], {'length': 82})
        self.G.add_edges_from([('neh2', '70fe')], {'length': 75})
        self.G.add_edges_from([('70fe', '70FE')], {'length': 10})
        self.G.add_edges_from([('1ab', '1AB')], {'length': 10})
        self.G.add_edges_from([('neh2', '1ab')], {'length': 40})
        self.G.add_edges_from([('nww', 'NW')], {'length': 60})
        self.G.add_edges_from([('nww', '1h2')], {'length': 10})
        self.G.add_edges_from([('34e', '1e'), ('nem', 'NEM')], {'length': 22})
        self.G.add_edges_from([('SW', 'sw'), ('sw', '9w')], {'length': 5})
        self.G.add_edges_from([('9w', '9h2'), ('9h2', '9h1'), ('9h1', '9d')],
                              {'length': 3.5})
        self.G.add_edges_from([('9w', '9h3'), ('9h3', '9c')], {'length': 7})
        self.G.add_edges_from([('9w', '9h4'), ('9h4', '78w')], {'length': 8.6})
        self.G.add_edges_from([('9h1', 'SHOP'), ('nwm', 'NWM')], {'length': 6})
        self.G.add_edges_from([('SE', 'se'), ('se', '78e')], {'length': 5.5})
        self.G.add_edges_from([('1w', '1wc')], {'length': 3.0})
        self.G.add_edges_from([('nw', 'nwh'), ('nwm', 'nwh'), ('nwh', '1wh'),
                               ('ne', '1eh'), ('nem', '1eh'), ('1eh', '1e')],
                              {'length': 1.0})
        self.G.add_edges_from([('1wh', '1w')], {'length': 17.5})

        self.edge_labels_simple = dict([((
            u,
            v,
        ), d['length']) for u, v, d in self.G.edges(data=True)])

        self.edges = self.G.edges()

        #We add the edges reversed
        for edge in self.edges:
            self.G.add_edges_from([edge[::-1]],
                                  self.G.get_edge_data(edge[0], edge[1])[0])

        #We add the edges that are ramps or escalators and thus have a different weight in each direction
        self.G.add_edges_from([('34d', '34D'), ('56d', '56D'), ('78d', '78D'),
                               ('9d', '9D'), ('34b', '34B'), ('56b', '56B'),
                               ('78b', '78B'), ('56a', '56A'), ('1d', '1h2'),
                               ('ne', 'neh2'), ('nw', 'NW')],
                              {'length': 23.06})  # Stairs up
        self.G.add_edges_from([('34D', '34d'), ('56D', '56d'), ('78D', '78d'),
                               ('9D', '9d'), ('34B', '34b'), ('56B', '56b'),
                               ('78B', '78b'), ('56A', '56a'), ('1h2', '1d'),
                               ('neh2', 'ne'), ('NW', 'nw')],
                              {'length': 21.51})  # Stairs down
        self.G.add_edges_from(
            [('34a', '34A'), ('78a', '78A')],
            {'length': 25.46})  # Stairs up plus some corridor
        self.G.add_edges_from(
            [('34A', '34a'), ('78A', '78a')],
            {'length': 23.91})  # Stairs down plus some corridor
        self.G.add_edges_from([('34c', '34C'), ('56c', '56C'), ('78c', '78C'),
                               ('9c', '9C')], {'length': 40.05})  # Ramps up
        self.G.add_edges_from([('34C', '34c'), ('56C', '56c'), ('78C', '78c'),
                               ('9C', '9c')], {'length': 32.97})  # Ramps down
        self.G.add_edges_from([('1wc', '1c')], {'length': 33.8})  # Ramp up
        self.G.add_edges_from([('1c', '1wc')], {'length': 25.84})  # Ramp down

        self.edges = self.G.edges()

        self.edge_labels_duplicated = dict([((
            u,
            v,
        ), d['length']) for u, v, d in self.G.edges(data=True)])

        self.number_of_edges = len(self.edges)

        #We create a list of the edges associated to ASE sensors
        self.edges_ASE = [
            ('nw', 'nwh'),  # nwIn
            ('nwm', 'nwh'),  # nwmIn
            #('1c', '1wc') ,  # 1cIn MALFUNCTIONING
            ('nh', 'h'),  # nhIn
            ('neh1', 'neh2'),  # neh1In
            ('ne', '1eh'),  # neIn
            ('nem', '1eh'),  # nemIn
            ('sw', '9w'),  # swIn
            ('9h2', '9w'),  # gh2In
            ('9h3', '9w'),  # gh3In
            ('se', '78e'),  # seIn
            #('nww', '1h2') , #nwwIn MALFUNCTIONING
            ('nw', 'NW'),  # nwOut
            ('nwm', 'NWM'),  # nwmOut
            #('1c', '1h3'), #1cOut MALFUNCTIONING
            ('nh', 'N'),  # nhOut
            ('neh1', 'NE'),  # neh1Out
            ('ne', 'neh2'),  # neOut
            ('nem', 'NEM'),  # nemOut
            ('sw', 'SW'),  # swOut
            ('9h2', '9h1'),  # g9h2Out
            ('9h3', '9c'),  # 9h3Out
            ('se', 'SE'),  # seOut
            #('nww', 'NW') #nwwOut MALFUNCTIONING
        ]
        self.number_of_edges_ASE = len(self.edges_ASE)

        #We create a list of the edges associated to TINF
        self.edges_TINF = [
            ('1D', '1h1'),
            ('34d', '34w'),
            ('56d', '56w'),
            ('78d', '78w'),
            ('9d', '9h1'),
            ('1C', '1h4'),
            ('34c', '34w'),
            ('56c', '56w'),
            ('78c', '78w'),
            ('9c', '9h3'),
            ('1AB', '1ab'),
            ('70FE', '70fe'),
            ('34b', '34e'),
            ('56b', '56e'),
            ('78b', '78e'),
            ('34a', '34e'),
            ('56a', '56e'),
            ('78a', '78e'),
        ]

        self.edges_TINF_origins = [
            '1D', '34D', '56D', '78D', '9D', '1C', '34C', '56C', '78C', '9C',
            '1AB', '70FE', '34B', '56B', '78B', '34A', '56A', '78A'
        ]

        self.number_of_edges_TINF = len(self.edges_TINF)

        VS_inflow_edges = [('1wh', '1w'), ('1d', '1w'), ('1wc', '1w'),
                           ('34d', '34w'), ('34c', '34w'), ('56d', '56w'),
                           ('56c', '56w'), ('78d', '78w'), ('78c', '78w'),
                           ('9h4', '78w'), ('1e', '34e'), ('34b', '34e'),
                           ('34a', '34e'), ('56b', '56e'), ('56a', '56e'),
                           ('56h', '56e')]
        VS_outflow_edges = [('1wh', 'nwh'), ('1d', '1h2'), ('1wc', '1c'),
                            ('34d', '34D'), ('34c', '34C'), ('56d', '56D'),
                            ('56c', '56C'), ('78d', '78D'), ('78c', '78C'),
                            ('9h4', '9w'), ('1e', '1eh'), ('34b', '34B'),
                            ('34a', '34A'), ('56b', '56B'), ('56a', '56A'),
                            ('56h', '78e')]

        #         #We create a list of the edges associated to historical data
        #         self.edges_sales_points = [
        #         ('KIOSK', 'kioskh'),
        #         ('SHOP', '9h1'),
        #         ('BAR', '1w'),
        #         ]
        #         self.number_of_edges_HIST = len(self.edges_sales_points)

        #We create dictionaries (for easier bookkeeping)
        self.edges_dict = {}
        self.edges_dict_rev = {}
        self.edges_reverse_dict = {}
        for idx, edge in enumerate(self.G.edges()):
            self.edges_reverse_dict[edge] = edge[::-1]
            self.edges_dict[idx] = edge
            self.edges_dict_rev[edge] = idx

        self.edges_ASE_dict = {}
        self.edges_ASE_dict_rev = {}
        for idx, edge in enumerate(self.edges_ASE):
            self.edges_ASE_dict[idx] = edge
            self.edges_ASE_dict_rev[edge] = idx

        self.edges_TINF_dict = {}
        self.edges_TINF_dict_rev = {}
        for idx, edge in enumerate(self.edges_TINF):
            self.edges_TINF_dict[idx] = edge
            self.edges_TINF_dict_rev[edge] = idx

        self.edges_TINF_origins_dict = {}
        #self.edges_TINF_origins_dict_rev = {}
        for idx, edge in enumerate(self.edges_TINF_origins):
            self.edges_TINF_origins_dict[idx] = edge
            #self.edges_TINF_origins_dict_rev[edge] = idx
#         self.edges_HIST_dict = {}
#         for idx, edge in enumerate(self.edges_sales_points):
#             self.edges_HIST_dict[idx] = edge

        self.VS_inflow_edges_dict = {}
        self.VS_inflow_edges_dict_rev = {}
        for idx, edge in enumerate(VS_inflow_edges):
            self.VS_inflow_edges_dict[idx] = edge
            self.VS_inflow_edges_dict_rev[edge] = idx

        self.VS_outflow_edges_dict = {}
        self.VS_outflow_edges_dict_rev = {}
        for idx, edge in enumerate(VS_outflow_edges):
            self.VS_outflow_edges_dict[idx] = edge
            self.VS_outflow_edges_dict_rev[edge] = idx

        # We define the areas. They are characterized by the nodes lying on their borders. They can overlap.
        areas = [[
            '9h4', '78d', '56d', '34d', '1d', 'BAR', '1wh', '1wc', 'KIOSK',
            '34c', '56c', '78c'
        ], ['56h', '56b', '34b', '1e', '34a', '56a']]

        #We create a dictionary of the areas
        self.areas_dict = {}
        for idx, area in enumerate(areas):
            self.areas_dict[idx] = area

        ## Code to generate all routes and write them in routes.txt (postprocessing needed to select the feasible routes)
        if estim_param.forceGenerateRoutes:
            routes = []
            path = nx.all_pairs_dijkstra_path(self.G, weight='length')
            for source in self.centroids:
                for target in self.centroids:
                    if source is not target:
                        routes.append(path[source][target])
            with open(estim_param.path_routes_file, 'w') as file:
                for item in routes:
                    file.write("{}\n".format(item))

        #We load the routes from a precomputed file
        self.routes = []
        with open(estim_param.path_routes_file, 'r') as routesfile:
            for line in routesfile:
                line = line[1:-2]
                line = line.replace(',', '')
                line = line.replace("'", '')
                line = line.split()
                self.routes.append(line)

        self.number_of_routes = len(self.routes)

        # We generate all possible subroutes
        path = nx.all_pairs_dijkstra_path(self.G, weight='length')

        self.subroutes_west = []
        for source in VS_west_nodes:
            for target in VS_west_nodes:
                if source is not target:
                    self.subroutes_west.append(path[source][target])

        self.subroutes_east = []
        for source in VS_east_nodes:
            for target in VS_east_nodes:
                if source is not target:
                    self.subroutes_east.append(path[source][target])

        self.subroutes_VS = self.subroutes_west + self.subroutes_east

        #We create dictionaries for the routes and subroutes
        self.routes_dict = {}
        self.routes_dict_rev = {}

        for idx, route in enumerate(self.routes):
            self.routes_dict[idx] = route
            self.routes_dict_rev[str(route)] = idx

        self.subroutes_VS_dict = {}
        self.subroutes_VS_dict_rev = {}
        for idx, route in enumerate(self.subroutes_VS):
            self.subroutes_VS_dict[idx] = route
            self.subroutes_VS_dict_rev[str(route)] = idx

        self.VS_nodes_dict = {}
        self.VS_nodes_dict_rev = {}
        for idx, node in enumerate(VS_nodes):
            self.VS_nodes_dict[idx] = node
            self.VS_nodes_dict_rev[node] = idx

        # We compute the distances between edges and routes
        self.distances_edge_route = np.zeros(
            (len(self.edges), len(self.routes)))

        for e in range(len(self.edges)):
            for r in range(len(self.routes)):
                # if the edge is on the route, we compute the distance between the start of the route and the start of the edge
                if '(' + str(self.edges[e][0]) + ',' + str(
                        self.edges[e][1]) + ')' in [
                            '(' + str(self.routes[r][i - 1]) + ',' +
                            str(self.routes[r][i]) + ')'
                            for i in range(1, len(self.routes[r]))
                        ]:
                    self.distances_edge_route[e][r] = nx.shortest_path_length(
                        self.G,
                        source=self.routes[r][0],
                        target=self.edges[e][0],
                        weight='length')
                else:
                    self.distances_edge_route[e][r] = -1

        self.distances_edge_route2 = np.zeros(
            (len(self.edges_ASE), len(self.routes)))

        # Exit links
        self.exit_centroids_exit_edges = {
            'SW': ('sw', 'SW'),
            'NW': ('nw', 'NW'),
            'NWM': ('nwm', 'NWM'),
            'SE': ('se', 'SE'),
            'NEM': ('nem', 'NEM'),
            'N': ('nh', 'N'),
            'NE': ('neh1', 'NE')
            #'NW': ('nww', 'NW'), negligibly small
        }

        # dictionaries for route choice

        self.route_orig_dict = {}
        self.route_dest_dict = {}

        self.routes_from_centroid = defaultdict(list)
        self.routes_to_centroid = defaultdict(list)

        for route_key in self.routes_dict.keys():
            orig_centroid_key = self.centroids_dict_rev[
                self.routes_dict[route_key][0]]
            dest_centroid_key = self.centroids_dict_rev[
                self.routes_dict[route_key][-1]]
            self.route_orig_dict[route_key] = orig_centroid_key
            self.route_dest_dict[route_key] = dest_centroid_key

            self.routes_from_centroid[orig_centroid_key].append(route_key)
            self.routes_to_centroid[dest_centroid_key].append(route_key)

        # dictionaries for postprocessing of VisioSafe

        self.subroute_orig_dict = {}
        self.subroute_dest_dict = {}

        self.subroutes_from_node = defaultdict(list)
        self.subroutes_to_node = defaultdict(list)

        for subroute_key in self.subroutes_VS_dict.keys():
            orig_centroid_key = self.VS_nodes_dict_rev[
                self.subroutes_VS_dict[subroute_key][0]]
            dest_centroid_key = self.VS_nodes_dict_rev[
                self.subroutes_VS_dict[subroute_key][-1]]
            self.subroute_orig_dict[subroute_key] = orig_centroid_key
            self.subroute_dest_dict[subroute_key] = dest_centroid_key

            self.subroutes_from_node[orig_centroid_key].append(subroute_key)
            self.subroutes_to_node[dest_centroid_key].append(subroute_key)

        # linking track numbers to platform names
        self.track_platform_dict = {
            1: '1',
            3: '3/4',
            4: '3/4',
            5: '5/6',
            6: '5/6',
            7: '7/8',
            8: '7/8',
            9: '9',
            70: '70'
        }

        self.centroid_platform_dict = {
            '1D': '1',
            '1C': '1',
            '70FE': '70',
            '1AB': '1',
            '34D': '3/4',
            '34C': '3/4',
            '34B': '3/4',
            '34A': '3/4',
            '56D': '5/6',
            '56C': '5/6',
            '56B': '5/6',
            '56A': '5/6',
            '78D': '7/8',
            '78C': '7/8',
            '78B': '7/8',
            '78A': '7/8',
            '9D': '9',
            '9C': '9'
        }

        # Structural components of Lausanne useful for Circos and other highly aggregated plots
        self.structural_labels = [
            'P1',
            'P34',
            'P56',
            'P78',
            'P9',
            'P70',
            'Metro',
            'North',
            'South',
            'Shops',
        ]
        #Dictionary that aggregates the centroids in these structural zones:
        self.structural_centroids_dict = {
            '1D': 0,
            '1C': 0,
            '70FE': 5,
            '1AB': 0,
            '34D': 1,
            '34C': 1,
            '34B': 1,
            '34A': 1,
            '56D': 2,
            '56C': 2,
            '56B': 2,
            '56A': 2,
            '78D': 3,
            '78C': 3,
            '78B': 3,
            '78A': 3,
            '9D': 4,
            '9C': 4,
            'NW': 7,
            'NWM': 6,
            'NE': 7,
            'NEM': 6,
            'SW': 8,
            'SE': 8,
            'N': 7,
            'KIOSK': 9,
            'BAR': 9,
            'SHOP': 9
        }

        self.ASE_edge_names_dict = {
            'ASE9ab_in': ('9h3', '9w'),
            'ASE9cde_in': ('sw', '9w'),
            'ASE9fgh_in': ('9h2', '9w'),
            'ASE4_out': ('1c', '1wc'),
            'ASE5a_in': ('nw', 'nwh'),
            'ASE10_in': ('nwm', 'nwh'),
            'ASE8_in': ('se', '78e'),
            'ASE2_out': ('ne', '1eh'),
            'ASE2de_out': ('nem', '1eh'),
            'ASE3_in': ('nh', 'h'),
            'ASE1_in': ('neh1', 'neh2'),
            'ASE6_in': ('nww', '1h2'),
            'ASE9ab_out': ('9h3', '9c'),
            'ASE9cde_out': ('sw', 'SW'),
            'ASE9fgh_out': ('9h2', '9h1'),
            'ASE4_in': ('1c', '1h3'),
            'ASE5a_out': ('nw', 'NW'),
            'ASE10_out': ('nwm', 'NWM'),
            'ASE8_out': ('se', 'SE'),
            'ASE2_in': ('ne', 'neh2'),
            'ASE2de_in': ('nem', 'NEM'),
            'ASE3_out': ('nh', 'N'),
            'ASE1_out': ('neh1', 'NE'),
            'ASE6_out': ('nww', 'NW'),
        }

        #Reversed dictionary:
        self.ASE_edge_names_dict_rev = {}
        for key in self.ASE_edge_names_dict.keys():
            self.ASE_edge_names_dict_rev[self.ASE_edge_names_dict[key]] = key

        self.edges_sens_correction = [
            ('nw', 'nwh'),  # nwIn
            ('nwm', 'nwh'),  # nwmIn
            ('nh', 'h'),  # nhIn
            ('neh1', 'neh2'),  # neh1In
            ('nw', 'NW'),  # nwOut
            ('nwm', 'NWM'),  # nwmOut
            ('nh', 'N'),  # nhOut
            ('neh1', 'NE'),  # neh1Out

            # following links tentatively considered
            ('sw', '9w'),  # swIn
            ('9h2', '9w'),  # gh2In
            ('9h3', '9w'),  # gh3In
            ('se', '78e'),  # seIn
            ('sw', 'SW'),  # swOut
            ('9h2', '9h1'),  # g9h2Out
            ('9h3', '9c'),  # 9h3Out
            ('se', 'SE'),  # seOut
        ]
Code example #32
0
File: metrics.py Project: huayu-zhang/gp-bench
 def shortest_path_length(self, u, v):
     return nx.shortest_path_length(self.G, u, v)
Code example #33
0
def min_tree(input_file_path):
    file_dir = os.path.dirname(input_file_path)
    file = os.path.basename(input_file_path)
    file_name, file_type = file.split('_')
    out_file_path = os.path.join(file_dir, file_name)
    test_path = os.path.join(file_dir, 'steps', 'try_')

    if file_type == 'pickle':
        G = pj.load_pickle(input_file_path)

    reduced_graph = remove_relay(G)

    #    plt.figure(1)
    #    pj.draw_graph(reduced_graph, out_file_path+"_terminals")
    fig = 1
    pj.draw_graph(reduced_graph, test_path + "terminal" + str(fig))
    fig += 1
    #terminals = reduced_graph.nodes()
    #print (terminals)

    #finding the sink (Base Station)
    for node in reduced_graph.nodes(data=True):
        if node[1]['type'] == 'BS':
            sink_id = node[1]['id']
            break

    print(nx.is_biconnected(reduced_graph))
    if nx.is_connected(reduced_graph):

        #CMST begins================================================

        #initialize:----------------------------------
        Tree = nx.Graph()

        #getting hop counts and max hop counts
        hop_tuples = []
        for node in reduced_graph.nodes():
            reduced_graph.nodes[node]['hops'] = nx.shortest_path_length(
                reduced_graph, sink_id, node)
            if reduced_graph.nodes[node]['hops'] > 1:
                hop_tuples += [(node, reduced_graph.nodes[node]['hops'])]
        hop_tuples = sorted(hop_tuples, key=lambda x: x[1])
        max_hop_count = hop_tuples[-1][1]

        single_hop = []
        #finding and connecting roots of top sub-trees
        for sink, branch_root in reduced_graph.edges(sink_id):
            single_hop += [branch_root]
            temporary = G.subgraph([sink] +
                                   reduced_graph.edges[sink,
                                                       branch_root]['link'] +
                                   [branch_root])
            Tree.add_nodes_from(temporary.nodes(data=True))
            Tree.add_edges_from(temporary.edges(data=True))
#        plt.figure(2)
#        pj.draw_graph(Tree, out_file_path+"_tree")

        pj.draw_graph(Tree, test_path + "tree" + str(fig))
        fig += 1
        #updating reduced tree - removing unrequired edges
        # is this needed?
        for i in list(combinations(single_hop, 2)):
            if reduced_graph.has_edge(i[0], i[1]):
                reduced_graph.remove_edge(i[0], i[1])
#        plt.figure(1)
#        pj.draw_graph(reduced_graph, out_file_path+"_terminals")

        pj.draw_graph(reduced_graph, test_path + "terminal" + str(fig))
        fig += 1
        #UPDATES
        #defining branches, their loads and their components
        branches = {}
        dfs_edges = list(nx.dfs_edges(Tree, source=sink_id))
        for parent, child in dfs_edges:
            if parent == sink_id:
                branch = child
                branches[branch] = {}
                branches[branch]['nodes'] = set([child])
                branches[branch]['load'] = 0
                if Tree.nodes[child]['type'] == 'S':
                    branches[branch]['load'] += 1
            else:
                branches[branch]['nodes'] = set([child
                                                 ]) | branches[branch]['nodes']
                if Tree.nodes[child]['type'] == 'S':
                    branches[branch]['load'] += 1
        #print (dfs_edges)
        #print (branches)

        #UPDATES
        #making growth sets, parent sets and hop sets
        all_sets = {}
        hop_set = {}
        #print(hop_tuples)
        for node, hop in hop_tuples:
            if hop in hop_set:
                hop_set[hop] = set([node]) | hop_set[hop]
            else:
                hop_set[hop] = set([node])

            all_sets[node] = {'growth': set([]), 'parents': set([])}
            for vertex, adj_vert in reduced_graph.edges(node):
                #if parent
                if reduced_graph.nodes[adj_vert]['hops'] < reduced_graph.nodes[
                        vertex]['hops']:
                    all_sets[node]['parents'] = set(
                        [adj_vert]) | all_sets[node]['parents']

                elif reduced_graph.nodes[adj_vert][
                        'hops'] > reduced_graph.nodes[vertex]['hops']:
                    all_sets[node]['growth'] = set(
                        [adj_vert]) | all_sets[node]['growth']

        #print(all_sets)
        #print(hop_set)

        #iteration

        for h in range(2, max_hop_count + 1):
            done = []
            left_tuple = []
            #directly connecting nodes with single parent
            for node in hop_set[h]:
                if len(all_sets[node]['parents']) == 1:
                    done += [node]
                    # 'parents' is a set; take its single element explicitly
                    only_parent = next(iter(all_sets[node]['parents']))
                    temp = G.subgraph([node] +
                                      reduced_graph.edges[node,
                                                          only_parent]['link'] +
                                      [only_parent])
                    Tree.add_nodes_from(temp.nodes(data=True))
                    Tree.add_edges_from(temp.edges(data=True))
                else:
                    left_tuple += [(node, len(all_sets[node]['growth']))]

#            plt.figure(2)
#            pj.draw_graph(Tree, out_file_path+"_tree")

            pj.draw_graph(Tree, test_path + "tree" + str(fig))
            fig += 1
            #updating reduced tree - removing unrequired edges
            # is this needed?
            for i in done:
                for j in hop_set[h]:
                    if reduced_graph.has_edge(i, j):
                        reduced_graph.remove_edge(i, j)
#            plt.figure(1)
#            pj.draw_graph(reduced_graph, out_file_path+"_terminals")

            pj.draw_graph(reduced_graph, test_path + "terminal" + str(fig))
            fig += 1
            #updating branches, their loads and their components
            dfs_edges = list(nx.dfs_edges(Tree, source=sink_id))
            for parent, child in dfs_edges:
                if parent == sink_id:
                    branch = child
                    branches[branch] = {}
                    branches[branch]['nodes'] = set([child])
                    branches[branch]['load'] = 0
                    if Tree.nodes[child]['type'] == 'S':
                        branches[branch]['load'] += 1
                else:
                    branches[branch]['nodes'] = set(
                        [child]) | branches[branch]['nodes']
                    if Tree.nodes[child]['type'] == 'S':
                        branches[branch]['load'] += 1

            while len(left_tuple) > 0:
                left_tuple = sorted(left_tuple, key=lambda x: x[0])
                left_tuple = sorted(left_tuple, key=lambda x: x[1])
                node = left_tuple[0][0]
                left_tuple_temp = left_tuple[1:]
                left_tuple = []
                metric_tuple = []
                for branch in branches:
                    if len(branches[branch]['nodes']
                           & all_sets[node]['parents']) > 0:
                        #generate the search set if h<max_hop_count
                        for parent in (branches[branch]['nodes']
                                       & all_sets[node]['parents']):
                            ss = set([])
                            relay_set = set([])
                            temp_redgraph = reduced_graph.copy()
                            for i in done:
                                if temp_redgraph.has_edge(node, i):
                                    temp_redgraph.remove_edge(node, i)
                            for p in all_sets[node]['parents']:
                                if temp_redgraph.has_edge(node, p):
                                    temp_redgraph.remove_edge(node, p)
                            for c in all_sets[node]['growth']:
                                flag = 0
                                for cp in all_sets[c]['parents']:
                                    if cp != node and flag == 0:
                                        for path in nx.all_simple_paths(
                                                temp_redgraph,
                                                source=cp,
                                                target=sink_id,
                                                cutoff=h):
                                            if len(
                                                    set(path)
                                                    & branches[branch]['nodes']
                                            ) == 0:
                                                flag = 1
                                                break
                                if flag == 0:
                                    ss = ss | set([c])
                                    relay_set = relay_set | set(
                                        reduced_graph.edges[node, c]['link'])
                            #print ('ss = '+str(ss))
                            #TODO - replace with link weight
                            relay_set = relay_set | set(
                                reduced_graph.edges[node, parent]['link'])
                            relay_count = len(relay_set -
                                              branches[branch]['nodes'])
                            #case1 metric
                            #metric = branches[branch]['load']+1+len(ss)+relay_count
                            #case2 metric
                            metric = branches[branch]['load'] + 1 + len(ss)
                            # checking if branches are fusing
                            relay_flag = 0
                            for b in branches:
                                if b != branch:
                                    if len(relay_set
                                           & branches[b]['nodes']) > 0:
                                        relay_flag = 1
                                        break
                            if (relay_flag == 0):
                                metric_tuple += [(metric, relay_count, branch,
                                                  parent, ss)]
                #TODO - devise how to choose the best parent within a branch
                metric_tuple = sorted(metric_tuple, key=lambda y: y[1])
                metric_tuple = sorted(metric_tuple, key=lambda y: y[0])
                #TODO - add search set addition and remove them from further hop metrix and their links too
                temp = G.subgraph(
                    [node] +
                    reduced_graph.edges[node, metric_tuple[0][3]]['link'] +
                    [metric_tuple[0][3]])
                Tree.add_nodes_from(temp.nodes(data=True))
                Tree.add_edges_from(temp.edges(data=True))
                for n in metric_tuple[0][4]:
                    temp = G.subgraph([n] +
                                      reduced_graph.edges[n, node]['link'] +
                                      [node])
                    Tree.add_nodes_from(temp.nodes(data=True))
                    Tree.add_edges_from(temp.edges(data=True))
#                plt.figure(2)
#                pj.draw_graph(Tree, out_file_path+"_tree")

                pj.draw_graph(Tree, test_path + "tree" + str(fig))
                fig += 1
                #updating reduced tree - removing unrequired edges
                # is this needed?
                for i in hop_set[h]:
                    if reduced_graph.has_edge(node, i):
                        reduced_graph.remove_edge(node, i)
                for p in all_sets[node]['parents']:
                    if p != metric_tuple[0][3]:
                        reduced_graph.remove_edge(node, p)
                for n in metric_tuple[0][4]:
                    hop_set[h + 1] = hop_set[h + 1] - set([n])
                    for i in hop_set[h + 1]:
                        if reduced_graph.has_edge(n, i):
                            reduced_graph.remove_edge(n, i)
                    for p in all_sets[n]['parents']:
                        if p != node:
                            reduced_graph.remove_edge(n, p)
#                plt.figure(1)
#                pj.draw_graph(reduced_graph, out_file_path+"_terminals")

                pj.draw_graph(reduced_graph, test_path + "terminal" + str(fig))
                fig += 1

                done += [node]

                #updating branches, their loads and their components
                dfs_edges = list(nx.dfs_edges(Tree, source=sink_id))
                for parent, child in dfs_edges:
                    if parent == sink_id:
                        branch = child
                        branches[branch] = {}
                        branches[branch]['nodes'] = set([child])
                        branches[branch]['load'] = 0
                        if Tree.nodes[child]['type'] == 'S':
                            branches[branch]['load'] += 1
                    else:
                        branches[branch]['nodes'] = set(
                            [child]) | branches[branch]['nodes']
                        if Tree.nodes[child]['type'] == 'S':
                            branches[branch]['load'] += 1
                #TODO - may need to change these for higher hop count nodes as well
                #updating growth sets and parent sets
                for n, a in left_tuple_temp:
                    all_sets[n] = {'growth': set([]), 'parents': set([])}
                    for vertex, adj_vert in reduced_graph.edges(n):
                        #if parent
                        if reduced_graph.nodes[adj_vert][
                                'hops'] < reduced_graph.nodes[vertex]['hops']:
                            all_sets[n]['parents'] = set(
                                [adj_vert]) | all_sets[n]['parents']

                        elif reduced_graph.nodes[adj_vert][
                                'hops'] > reduced_graph.nodes[vertex]['hops']:
                            all_sets[n]['growth'] = set(
                                [adj_vert]) | all_sets[n]['growth']

                    left_tuple += [(n, len(all_sets[n]['growth']))]

        plt.figure(fig)
        pj.draw_graph(reduced_graph, out_file_path + "_skeletontree")
        plt.figure(fig + 1)
        pj.draw_graph(Tree, out_file_path + "_fulltree")
        pj.store_pickle(Tree, out_file_path + "balancedtree_pickle")
    else:
        print(
            "Terminals of the given graph do not lie in a single connected component. Thus it cannot be processed further."
        )
Code example #34
0
 def get_distance(self, source, target):
     """
     Computes the shortest distance from source to target.
     Source is likely to be Pacman, while targets can be the ghosts for example.
     """
     return nx.shortest_path_length(self.graph, source, target)
Code example #35
0
import networkx

G = networkx.DiGraph()

for a in open("06_input.txt").readlines():
    G.add_edge(*[x.strip() for x in a.split(')')])

print(networkx.transitive_closure(G).size())

print(networkx.shortest_path_length(G.to_undirected(), "YOU", "SAN") - 2)
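
# Hedged illustration (not part of the solution above) of why part 2 subtracts 2:
# shortest_path_length counts edges between YOU and SAN, but the orbital
# transfers happen between the objects they orbit, so both endpoint hops drop out.
toy = networkx.Graph()
toy.add_edges_from([("COM", "B"), ("B", "YOU"), ("B", "C"), ("C", "SAN")])
assert networkx.shortest_path_length(toy, "YOU", "SAN") - 2 == 1  # one transfer: B -> C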
Code example #36
0
 def find_shortest_length(self, start, end):
     return (nx.shortest_path_length(self.G, start, end, weight="weight"))
Code example #37
0
File: epidemic.py Project: Shin4y/SEIRepidemics
 def howFarFromHome(self, node, G):
     return nx.shortest_path_length(G, self.location, node)
Code example #38
0
####            for ng in G2.neighbors(node):
####                if ng not in lt and ng in lt2:
####                    newG.add_edge(i+115,j+115)
####
####print(newG.edges())
##
##from networkx.algorithms import bipartite
##col=bipartite.color(newG)
##print(col)
##
sentr={}
for i in prsen:
    sentr[i]=True
for i in prsen:
    for j in prsen:
        if i!=j:
            dst=nx.shortest_path_length(G2,i[0],j[0])
            for lt in e.values():
                if i[0] in lt:
                    szi=len(lt)
                if j[0] in lt:
                    szj=len(lt)
            sz=min(szi,szj)
            if dst<sz/3:
                if i[1]<j[1]:
                    sentr[i]=False
                else:
                    sentr[j]=False
print(sentr)
                
Code example #39
0
prevalence_outcomes = [{}]*clusters
for cluster in range(clusters):
    prevalence_outcomes[cluster] = copy.deepcopy({node: 0 for node in graphs[cluster].nodes()})
    for infected_node in prevalences[cluster]:
        prevalence_outcomes[cluster][infected_node] = 1
    IV_weights  = [{node: np.random.normal(logit(IV_ps[2*cluster//clusters]), 1, 1)[0]  for node in range(n)} for cluster in range(clusters)]
    IV_nodes    = set.union(*[{((cluster, node), expit(IV_weights[cluster][node])) for node in set(range(n)).difference(infecteds[cluster])} for cluster in range(clusters)])
    IV_infected = set([])
    for IV_node in IV_nodes:
        if random.random() < IV_node[1]:
            IV_infected.add(IV_node[0])

    mins = {node: 0 for node in range(n)}
    sums = {node: 0 for node in range(n)}
    for node in range(n):
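        # For each node, gather hop distances to every infected node in its
        # connected component (excluding itself); these feed the 1/min(distance)
        # and sum(1/distance) exposure covariates computed below.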
        paths = [nx.shortest_path_length(graphs[cluster],node,neighbor) for neighbor in set(nx.node_connected_component(graphs[cluster], node)).intersection(prevalences[cluster]).difference(set([node]))]
        if len(paths) == 0:
            mins[node] = 0
            sums[node] = 0
        if len(paths) > 0:
            mins[node] = 1 / min(paths)
            sums[node] = sum([1/path for path in paths])

    components = [len(C) for C in nx.connected_components(graphs[cluster])]
    datas[cluster] = pd.DataFrame({
        # main informations relevant to basic analysis
        "Trt":                  (2*cluster//clusters),
        "Cluster":              cluster,
        "Prevalences":          prevalence_outcomes[cluster],

        # Degree-based covariates
Code example #40
0
def create_g_30000_240000_1():
    graph = GraphAlgo()
    g_nx = nx.DiGraph()
    file = "../data/Graph_on_circle/G_30000_240000_1.json"
    graph.load_from_json(file)
    try:
        with open(file, 'r') as file:
            load_file = json.load(file)

        for vertex in load_file["Nodes"]:
            g_nx.add_node(vertex["id"])

        for edge in load_file["Edges"]:
            g_nx.add_weighted_edges_from([(edge["src"], edge["dest"],
                                           edge["w"])])
    except Exception as ex:
        print("couldn't save to jason", ex)
        return False
    finally:
        file.close()

    # graph.plot_graph()

    print("g_30000_240000_1 result:")
    shortest_path_result = []
    start_time = time.perf_counter()
    dist, path = graph.shortest_path(2, 8)
    end_time = time.perf_counter()
    record = end_time - start_time
    shortest_path_result.append(record)
    # print("shortest_path(2, 8)=", path)
    # print("distance=", dist)

    shortest_path_networkx_result = []
    start_time = time.perf_counter()
    path = nx.shortest_path(G=g_nx, source=2, target=8)
    dist = nx.shortest_path_length(G=g_nx, source=2, target=8)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("g_30000_240000_1 networkx result:")
    # print("shortest_path(2,8)", path)
    # print("distance=", dist)
    shortest_path_networkx_result.append(record)

    start_time = time.perf_counter()
    dist, path = graph.shortest_path(3, 9)
    end_time = time.perf_counter()
    record = end_time - start_time
    shortest_path_result.append(record)
    # print("shortest_path(3, 9)=", path)
    # print("distance=", dist)

    start_time = time.perf_counter()
    path = nx.shortest_path(G=g_nx, source=3, target=9)
    dist = nx.shortest_path_length(G=g_nx, source=3, target=9)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("g_30000_240000_1 networkx result:")
    # print("shortest_path(3,9)", path)
    # print("distance=", dist)
    shortest_path_networkx_result.append(record)

    start_time = time.perf_counter()
    dist, path = graph.shortest_path(4, 7)
    end_time = time.perf_counter()
    record = end_time - start_time
    shortest_path_result.append(record)
    # print("shortest_path(4, 7)=", path)
    # print("distance=", dist)

    start_time = time.perf_counter()
    path = nx.shortest_path(G=g_nx, source=4, target=7)
    dist = nx.shortest_path_length(G=g_nx, source=4, target=7)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("g_30000_240000_1 networkx result:")
    # print("shortest_path(4,7)", path)
    # print("distance=", dist)
    shortest_path_networkx_result.append(record)

    start_time = time.perf_counter()
    dist, path = graph.shortest_path(5, 4)
    end_time = time.perf_counter()
    record = end_time - start_time
    shortest_path_result.append(record)
    # print("shortest_path(5, 4)=", path)
    # print("distance=", dist)

    start_time = time.perf_counter()
    path = nx.shortest_path(G=g_nx, source=5, target=4)
    dist = nx.shortest_path_length(G=g_nx, source=5, target=4)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("g_30000_240000_1 networkx result:")
    # print("shortest_path(5,4)", path)
    # print("distance=", dist)
    shortest_path_networkx_result.append(record)

    start_time = time.perf_counter()
    dist, path = graph.shortest_path(5, 2)
    end_time = time.perf_counter()
    record = end_time - start_time
    shortest_path_result.append(record)
    # print("shortest_path(5, 2)=", path)
    # print("distance=", dist)
    result = sum(shortest_path_result) / len(shortest_path_result)
    print("shortest path record:", result)

    start_time = time.perf_counter()
    path = nx.shortest_path(G=g_nx, source=5, target=2)
    dist = nx.shortest_path_length(G=g_nx, source=5, target=2)
    end_time = time.perf_counter()
    record = end_time - start_time
    shortest_path_networkx_result.append(record)
    # print("g_30000_240000_1 networkx result:")
    # print("shortest_path(5,2)", path)
    # print("distance=", dist)
    result = sum(shortest_path_networkx_result) / len(
        shortest_path_networkx_result)
    print("networkx shortest path record:", result)

    start_time = time.perf_counter()
    path = graph.connected_components()
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("connected_components=", path)
    print("connected components record:", record)

    start_time = time.perf_counter()
    cc = nx.strongly_connected_components(G=g_nx)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("g_30000_240000_1 networkx result:")
    # print("strongly_connected_components(G=g)", cc)
    print("networkx connected components record:", record)

    connected_component_result = []
    start_time = time.perf_counter()
    cc = graph.connected_component(1)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("connected_component(1)", cc)
    connected_component_result.append(record)

    start_time = time.perf_counter()
    cc = graph.connected_component(2)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("connected_component(2)", cc)
    connected_component_result.append(record)

    start_time = time.perf_counter()
    cc = graph.connected_component(3)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("connected_component(3)", cc)
    connected_component_result.append(record)

    start_time = time.perf_counter()
    cc = graph.connected_component(4)
    end_time = time.perf_counter()
    record = end_time - start_time
    # print("connected_component(4)", cc)
    connected_component_result.append(record)

    start_time = time.perf_counter()
    cc = graph.connected_component(5)
    end_time = time.perf_counter()
    record = end_time - start_time
    connected_component_result.append(record)
    # print("connected_component(5)", cc)
    result = sum(connected_component_result) / len(connected_component_result)
    print("connected component record:", result)
Code example #41
0
        costless_warehouse_id = None
        costless_warehouse_cost = INF
        for ii in range(len(warehouses_ids_list)):
            warehouse_id = warehouses_ids_list[ii]
            warehouse_supply = warehouse_supply_list[ii]
            # print(warehouse_id, " ", warehouse_supply)

            if warehouse_supply > 0:

                curr_path = nx.shortest_path(graph,
                                             source=truck_curr_node,
                                             target=warehouse_id,
                                             weight="cost")
                curr_path_length = nx.shortest_path_length(
                    graph,
                    source=truck_curr_node,
                    target=warehouse_id,
                    weight="cost")
                if _log_alg:  # log or not
                    save_path_algo.write(
                        "--- current W id: {}\n".format(warehouse_id))
                    save_path_algo.write(
                        "--- path to current W: {}\n".format(curr_path))
                    save_path_algo.write(
                        "--- path cost: {}\n\n".format(curr_path_length))
                if curr_path_length < costless_warehouse_cost:
                    costless_warehouse_cost = curr_path_length
                    costless_warehouse_id = warehouse_id
                    path_to_go = curr_path
                # save_path_algo.write("--- GOTO: costless warehouse to go: {}\n".format(costless_warehouse_id))
        if _log_alg:
Code example #42
0
File: dijkstra.py Project: integzz/notes-bio
import networkx as nx
from path_ring import make_ring_lattice


def shortest_path_dijkstra(G, source):
    """A fast version of Dijkstra's algorithm for equal edges."""
    new_dist = 0
    dist = {}
    nextlevel = {source}
    while nextlevel:
        thislevel = nextlevel
        nextlevel = set()
        for v in thislevel:
            if v not in dist:
                dist[v] = new_dist
                nextlevel.update(G[v])
        new_dist += 1
    return dist


lattice = make_ring_lattice(10, 4)
d1 = shortest_path_dijkstra(lattice, 0)
print(d1)
# {0: 0, 8: 1, 1: 1, 2: 1, 9: 1, 6: 2, 7: 2, 3: 2, 4: 2, 5: 3}
d2 = nx.shortest_path_length(lattice, 0)
print(d2)
# {0: 0, 8: 1, 1: 1, 2: 1, 9: 1, 6: 2, 7: 2, 3: 2, 4: 2, 5: 3}
# %timeit shortest_path_dijkstra(lattice, 0)
# 1.51 ms ± 9.93 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each)
# %timeit nx.shortest_path_length(lattice, 0)
# 3.7 ms ± 49.2 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)
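# Hedged aside (not in the original file): for unweighted graphs the closer
# networkx comparison is nx.single_source_shortest_path_length, which also runs
# a plain breadth-first search from the source:
# d3 = nx.single_source_shortest_path_length(lattice, 0)
# assert d3 == d1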
Code example #43
0
def shortest_path(m, n):
    return nx.shortest_path_length(G, m, n)
Code example #44
0
File: epidemic.py Project: Shin4y/SEIRepidemics
    def tooFarAway(self, G, node):
        if nx.shortest_path_length(G, node, self.home) > 6:
            return True

        return False
Code example #45
0
 def __targets_to_source_distances(self, graph, node_index):
     if self.edge_type == 'unweighted':
         __dicts = nx.shortest_path_length(graph, target=node_index)
     else:
         __dicts = nx.shortest_path_length(graph, target=node_index, weight=self.weight_name)
     return __dicts
Code example #46
0
def path_length(G, u, v):
    return nx.shortest_path_length(G, v, u, weight='path_length')
Code example #47
0
            elif text[i] == '|':
                current_node = start_node

            elif text[i] == '$':
                return current_node, i

        i += 1

    return current_node, i


# expr = '''^WNE$'''
# expr = '''^ENWWW(NEEE|SSE(EE|N))$'''
# expr = '''^ENNWSWW(NEWS|)SSSEEN(WNSE|)EE(SWEN|)NNN$'''
# expr = '''^ESSWWN(E|NNENN(EESS(WNSE|)SSS|WWWSSSSE(SW|NNNE)))$'''
expr = open('input.txt').read().strip()

start_node = (0, 0)
ROUTES.add_node(start_node)
end_node, _ = parse_tree(expr[1:], start_node)

import matplotlib.pyplot as plt

nx.draw_networkx(ROUTES, pos={n: n for n in ROUTES.nodes()})
plt.show()

shortest_paths = nx.shortest_path_length(ROUTES, start_node)
print('solution 1:', max(shortest_paths.values()))

print(len(list(filter(lambda k: shortest_paths[k] >= 1000, shortest_paths))))
Code example #48
0
File: dec15.py Project: mortenlj/advent_of_code
def part2(graph, lr):
    return networkx.shortest_path_length(graph, (0, 0), lr, "risk")
Code example #49
0
G = nx.Graph()
for x in range(NUM):
    G.add_node(chr(ord('0') + x))

for x in range(NUM):
    paths = find_paths(chr(ord('0') + x), grid)
    for p in paths:
        G.add_edge(chr(ord('0') + x), p, weight=paths[p])

tsp = 9999999999999
for path in permutations(G.nodes()):
    cost = 0
    if path[0] != '0':
        continue
    for i in range(len(path) - 1):
        cost += nx.shortest_path_length(G, path[i], path[i + 1], 'weight')
    if cost < tsp:
        tsp = cost
        #print(tsp,path)
print('part1', tsp)

tsp = 9999999999999
for path in permutations(G.nodes()):
    cost = 0
    if path[0] != '0':
        continue
    path += ('0', )
    for i in range(len(path) - 1):
        cost += nx.shortest_path_length(G, path[i], path[i + 1], 'weight')
    if cost < tsp:
        tsp = cost
Code example #50
0
File: route_calculator.py Project: ianphil/pyRoute13
 def get_path_distance(self, source: str, target: str):
     return nx.shortest_path_length(self._G,
                                    source=source,
                                    target=target,
                                    weight="distance")
Code example #51
0
File: graph_test.py Project: Chai-Jin/MyCode
# if os.path.exists("test.gpickle.gz"):
#     start_time = time.time()
#     G = nx.read_gpickle("test.gpickle.gz")
#     print time.time() - start_time
# else:
w = 'score'
G = nx.Graph()
# Graph.add_path was removed from networkx; nx.add_path is the current API
nx.add_path(G, [0, 1], weight=1, score=100)
nx.add_path(G, [1, 2], weight=2, score=200)
nx.add_path(G, [1, 3], weight=1, score=100)
nx.add_path(G, [2, 4], weight=3, score=300)
nx.add_path(G, [3, 5], weight=1, score=100)
nx.add_path(G, [5, 4], weight=1, score=100)
f = 2
t = 5
print(nx.shortest_path_length(G, f, t, weight=w))
print([p for p in nx.all_shortest_paths(G, f, t, weight=w)])
#
# with open(tf_network_path) as nfh:
#     G = nx.DiGraph()
#     cnt = 0
#     for line in nfh:
#         (a, b, score, pvalue) = line.split("\t")
#         G.add_path([a.lower(), b.lower()])
#         cnt += 1
#         if cnt % 10000 == 0:
#             print cnt
#     shorts = [p for p in nx.all_shortest_paths(G, source=key_from, target=key_to)]
#     with open(tf_shorts_path, 'w') as ph:
#         json.dump(shorts, ph)
Code example #52
0
    def comprehensive_test_tasklet():
        """Comprehensive test."""
        successes = 0

        try:
            yield 0

            g = nx.Graph()  # Construct a graph for the current topology.
            for c in sorted(all_cables,
                            key=lambda x:
                            (x.src.entity.name, x.dst.entity.name)):
                assert c.src, "cable {} has no source".format(c)
                assert c.dst, "cable {} has no destination".format(c)

                g.add_node(c.src.entity.name, entity=c.src.entity)
                g.add_node(c.dst.entity.name, entity=c.dst.entity)

                g.add_edge(c.src.entity.name,
                           c.dst.entity.name,
                           latency=c.latency)

            initial_wait = 5 + nx.diameter(g)
            api.simlog.info(
                "Waiting for at least %d seconds for initial routes to converge...",
                initial_wait)
            yield initial_wait * 1.1

            for round in itertools.count():
                api.simlog.info("=== Round %d ===", round + 1)
                num_actions = rand.randint(1, 3)
                for i in range(num_actions):
                    yield rand.random() * 2  # Wait 0 to 2 seconds.
                    action, u, v = pick_action(g, rand)
                    if action == "del":
                        api.simlog.info(
                            "\tAction %d/%d: remove link %s -- %s" %
                            (i + 1, num_actions, u, v))
                        g.remove_edge(u, v)
                        g.nodes[u]["entity"].unlinkTo(g.nodes[v]["entity"])
                    elif action == "add":
                        api.simlog.info("\tAction %d/%d: add link %s -- %s" %
                                        (i + 1, num_actions, u, v))
                        g.add_edge(u, v)
                        g.nodes[u]["entity"].linkTo(g.nodes[v]["entity"])
                    else:
                        assert False, "unknown action {}".format(action)

                # Wait for convergence.
                max_latency = nx.diameter(g) * 1.01
                yield max_latency

                # Send pair-wise pings.
                assert nx.is_connected(g), "BUG: network partition"
                expected = defaultdict(dict)  # dst -> src -> time
                deadline = defaultdict(dict)  # dst -> src -> time

                lengths = dict(nx.shortest_path_length(g))
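                # With no source/target argument, shortest_path_length yields
                # (node, distance_dict) pairs; dict() materialises them into a
                # node -> {node: hop count} lookup used for the latency bounds below.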
                for s in sorted(all_hosts, key=lambda h: h.name):
                    for d in sorted(all_hosts, key=lambda h: h.name):
                        if s is d:
                            continue

                        s.ping(d, data=round)
                        latency = lengths[s.name][d.name]
                        deadline[d][s] = api.current_time() + latency
                        expected[d][s] = api.current_time() + latency * 1.01

                # Wait for ping to propagate.
                yield max_latency

                for dst in expected:
                    rxed = dst.rxed_pings
                    for src in set(expected[dst].keys()) | set(rxed.keys()):
                        if src not in rxed:
                            api.simlog.error(
                                "\tFAILED: Missing ping: %s -> %s", src, dst)
                            return

                        assert rxed[src]
                        rx_packets = [packet for packet, _ in rxed[src]]
                        if src not in expected[dst]:
                            api.simlog.error(
                                "\tFAILED: Extraneous ping(s): %s -> %s %s",
                                src, dst, rx_packets)
                            return

                        if len(rx_packets) > 1:
                            api.simlog.error(
                                "\tFAILED: Duplicate ping(s): %s -> %s %s",
                                src, dst, rx_packets)
                            return

                        rx_packet = rx_packets[0]
                        assert isinstance(rx_packet, Ping)
                        if rx_packet.data != round:
                            api.simlog.error(
                                "\tFAILED: Ping NOT from current round %d: %s -> %s %s",
                                round, src, dst, rx_packet)
                            return

                        _, actual_time = rxed[src][0]
                        late = actual_time - expected[dst][src]
                        if late > 0:
                            api.simlog.error(
                                "\tFAILED: Ping late by %g sec: %s -> %s %s",
                                actual_time - deadline[dst][src], src, dst,
                                rx_packet)
                            return

                    dst.reset()

                api.simlog.info("\tSUCCESS!")
                successes += 1
        except Exception as e:
            api.simlog.error("Exception occurred: %s" % e)
            traceback.print_exc()
        finally:
            sys.exit()
Code example #53
0
def remove_Cluster(G,target_node,r):
    g = nx.ego_graph(G,target_node,radius = r, undirected = True, distance = "weight") # gets the subgraph within the radius

    node_list = list(g.nodes) # gets list of nodes around the supernode within the radius
    cities = [x for x in node_list if G.nodes[x]["is_city"] == True] # gets a list within the nodes that are cities
    target_list = [] # list of nodes that have 1 edge connecting to a node outside the radius
    keep_list = [] # list of nodes that we want to keep inside the radius
    remove_list = [] # list of nodes within the radius that will be removed
    check_list = [] # list of nodes inside the radius that have 2 edges connecting to 2 diff nodes outside the radius
    check_list2 = []
    
    keep_list = keep_list + cities # want to keep cities
    if target_node not in keep_list:
        keep_list.append(target_node)# adds center node to the keep list
    check_nodes = [x for x in node_list if x not in keep_list] # gets nodes that are not city nodes or the target node
    for i in check_nodes: # loops through list of nodes to check
        e = list(G.edges(i))
        num_outside_nodes = len([x[1] for x in e if x[1] not in node_list])    
        if num_outside_nodes > 2:
            keep_list.append(i)
        elif num_outside_nodes == 2:
            check_list.append(i)
        elif num_outside_nodes == 1:
            target_list.append(i)
        elif num_outside_nodes == 0:
            remove_list.append(i) 
        
    for i in check_list:
        outside_nodes = [x for x in (list(sum(list(G.edges(i)), ()))) if x not in node_list] # returns the nodes connected to the specific node that are not in the list of nodes within the radius of the target node
        path = nx.shortest_path(G, source = outside_nodes[0] , target = outside_nodes[1], weight = "weight") # finds the shortest path between the two nodes outside the radius
        if set(path) != set([outside_nodes[0],outside_nodes[1],i]): #checks to see if the shortest path between the two outside nodes are the 2 and the specific node
            for n in outside_nodes: # loops through each outside node
                path_to_center = nx.shortest_path(G, source = i, target = target_node, weight = "weight") # shortest path from this node to the center node
                closest_keep = [p for p in path_to_center if p in keep_list] # nodes along that path that are being kept
                for j in closest_keep:
                    if not(G.has_edge(j,n)):
                        new_dist = G[i][n]["weight"] + nx.shortest_path_length(G, source = i, target = j, weight = "weight") # new edge weight: edge (i, n) plus the shortest path from i to the kept node j
                        G.add_edge(n,j,weight = new_dist, gmv = 0)
                        break
        else:
            keep_list.append(i)
            check_list2.append(i)
            
    for i in target_list:
        node = [x for x in (list(sum(list(G.edges(i)), ()))) if x not in node_list] # returns the nodes connected to the specific node that are not in the list of nodes within the radius of the target node
        n = node[0]
        path_to_center = nx.shortest_path(G, source = i, target = target_node, weight = "weight")
        closest_keep = [p for p in path_to_center if p in keep_list] # nodes along that path that are being kept
        for j in closest_keep:
            if not(G.has_edge(j,n)):
                new_dist = G[i][n]["weight"] + nx.shortest_path_length(G, source = i, target = j, weight = "weight") # new edge weight: edge (i, n) plus the shortest path from i to the kept node j
                G.add_edge(n,j,weight = new_dist, gmv = 0)
                break
        
    for i in keep_list: # for nodes inside the radius but not directly connected to the center node, ensures there is a path to it
        path = nx.shortest_path(G, i, target_node, weight = "weight")
        if not(all(item in keep_list for item in path)):
            length = nx.shortest_path_length(G, i , target_node, weight = "weight")
            G.add_edge(i,target_node, weight = length, gmv = 0)
        
    for i in check_list2:
        check_list.remove(i)
    
    G.remove_nodes_from(target_list)
    G.remove_nodes_from(remove_list)     
    G.remove_nodes_from(check_list)      
Code example #54
0
def kamada_kawai_layout(G,
                        dist=None,
                        pos=None,
                        weight='weight',
                        scale=1,
                        center=None,
                        dim=2):
    """Position nodes using Kamada-Kawai path-length cost-function.

    Parameters
    ----------
    G : NetworkX graph or list of nodes
        A position will be assigned to every node in G.

    dist : float (default=None)
        A two-level dictionary of optimal distances between nodes,
        indexed by source and destination node.
        If None, the distance is computed using shortest_path_length().

    pos : dict or None  optional (default=None)
        Initial positions for nodes as a dictionary with node as keys
        and values as a coordinate list or tuple.  If None, then use
        circular_layout() for dim >= 2 and a linear layout for dim == 1.

    weight : string or None   optional (default='weight')
        The edge attribute that holds the numerical value used for
        the edge weight.  If None, then all edge weights are 1.

    scale : number (default: 1)
        Scale factor for positions.

    center : array-like or None
        Coordinate pair around which to center the layout.

    dim : int
        Dimension of layout.

    Returns
    -------
    pos : dict
        A dictionary of positions keyed by node

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> pos = nx.kamada_kawai_layout(G)
    """
    import numpy as np

    G, center = _process_params(G, center, dim)
    nNodes = len(G)

    if dist is None:
        dist = dict(nx.shortest_path_length(G, weight=weight))
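    # Node pairs with no connecting path keep the large default below (1e6),
    # which pushes disconnected components far apart in the resulting layout.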
    dist_mtx = 1e6 * np.ones((nNodes, nNodes))
    for row, nr in enumerate(G):
        if nr not in dist:
            continue
        rdist = dist[nr]
        for col, nc in enumerate(G):
            if nc not in rdist:
                continue
            dist_mtx[row][col] = rdist[nc]

    if pos is None:
        if dim >= 2:
            pos = circular_layout(G, dim=dim)
        else:
            pos = {n: pt for n, pt in zip(G, np.linspace(0, 1, len(G)))}
    pos_arr = np.array([pos[n] for n in G])

    pos = _kamada_kawai_solve(dist_mtx, pos_arr, dim)

    pos = rescale_layout(pos, scale=scale) + center
    return dict(zip(G, pos))
Code example #55
0
for i in range(N_RA):
    edges = [(i, t) for t in super_synapses[i]]

    DG.add_edges_from(edges)

distances = np.zeros(N_RA, np.int32)

#distances = np.empty(N_RA, np.int32)
#distances.fill(np.nan)

for i in range(N_RA):
    if i not in training_neurons:
        smallest_distance = 10000
        for j in training_neurons:
            try:
                d = nx.shortest_path_length(DG, source=j, target=i)
                if d < smallest_distance:
                    smallest_distance = d
            except nx.NetworkXNoPath:
                continue

        if smallest_distance == 10000:
            print "No path to training neurons was found for neuron", i
        else:
            distances[i] = smallest_distance

indsorted = np.argsort(first_spike_times[(mature_indicators > 0)
                                         & (first_spike_times > 0)])

plt.figure()
plt.plot(
Code example #56
0
def FDT2CDG(cfg: nx.DiGraph, if_id, node_infos):
    leafs = []
    control_dep_edge = {}
    cdg_edges = []

    idoms = nx.algorithms.immediate_dominators(cfg, "0")
    print("后向支配关系:", idoms)

    # collect all leaf nodes of the current cfg
    for cfg_node in cfg.nodes:
        if cfg.out_degree(cfg_node) == 0:  # leaf node
            leafs.append(cfg_node)

    cfg.add_node("EXIT_POINT", label="EXIT_POINT")
    for leaf_node in leafs:
        cfg.add_edge(leaf_node, "EXIT_POINT")

    get_graph_png(cfg, "cfg")

    reverse_cfg = cfg.reverse()

    get_graph_png(reverse_cfg, "reverse")

    ifdoms = nx.algorithms.immediate_dominators(reverse_cfg, "EXIT_POINT")
    del ifdoms["EXIT_POINT"]
    print("前向支配关系:", ifdoms)

    # FDT
    fdt = nx.DiGraph(name=cfg.graph["name"])
    fdt.add_nodes_from(reverse_cfg.nodes)
    for s in ifdoms:
        fdt.add_edge(ifdoms[s], s)

    get_graph_png(fdt, "fdt")

    # CDG = CFG + FDT: compute control dependences related to conditional statements
    for id in if_id:
        ifdom = ifdoms[str(id)]
        print("ifdom of {} is {}".format(id, ifdom))
        cfg_paths = nx.all_simple_paths(cfg,
                                        source=str(id),
                                        target="EXIT_POINT")

        # Y is control dependent on X ⇔ there is a path in the CFG from X to Y that doesn’t contain the immediate
        # forward dominator of X
        for path in list(cfg_paths):
            for node in path[1:-1]:
                node_info = node_infos[node]
                if node_info.type != NodeType.ENDIF and node_info.type != NodeType.ENDLOOP:
                    if node != ifdom:
                        key = "{}-{}".format(node, str(id))
                        if key not in control_dep_edge:
                            control_dep_edge[key] = 1
                            length = nx.shortest_path_length(
                                cfg, str(id), node)
                            cdg_edges.append({
                                'from': node,
                                "to": str(id),
                                'color': 'red',
                                'distance': length
                            })
                            print("{} 控制依赖于 {}, 距离是: {}".format(
                                node, id, length))
                    else:
                        break

    control_dep_edge.clear()
    for cdg_edge in cdg_edges:
        from_node = cdg_edge["from"]
        to_node = cdg_edge["to"]
        distance = cdg_edge["distance"]
        if from_node not in control_dep_edge:
            control_dep_edge[from_node] = {"to": to_node, "distance": distance}
        else:
            old_distance = control_dep_edge[from_node]["distance"]
            if old_distance > distance:
                control_dep_edge[from_node] = {
                    "to": to_node,
                    "distance": distance
                }

    for from_node in control_dep_edge:
        cfg.add_edge(from_node, control_dep_edge[from_node]["to"], color="red")

    get_graph_png(cfg, "cdg")
Code example #57
0
File: d13p1.py Project: gergely-elias/advent_of_code
input_lines = list(fileinput.input())

designer_number = int(input_lines[0].strip())


def wall(x, y):
    return (bin(x * x + 3 * x + 2 * x * y + y + y * y +
                designer_number)[2:].count("1") % 2 == 1)


source = (1, 1)
target = (31, 39)

maze = networkx.Graph()
diagonal = 0
distance = float("inf")
while 2 * diagonal - sum(source) - sum(target) < distance:
    for x in range(0, diagonal + 1):
        y = diagonal - x
        if not wall(x, y):
            maze.add_node((x, y))
            if (x - 1, y) in maze.nodes():
                maze.add_edge((x, y), (x - 1, y))
            if (x, y - 1) in maze.nodes():
                maze.add_edge((x, y), (x, y - 1))
    if diagonal >= sum(target) and networkx.has_path(maze, source, target):
        distance = min(networkx.shortest_path_length(maze, source, target),
                       distance)
    diagonal += 1
print(distance)
Code example #58
0
 def get_shortest_path_dict(self):
     ''' returns the dict of dicts of shortest path lengths; creates it if it does not yet exist '''
     if self.shortest_path_dict is None:
         self.shortest_path_dict = dict(nx.shortest_path_length(self))
     return self.shortest_path_dict
Code example #59
0

def get_nodes_from_db(graph):
    graph = connect_graph()
    matcher = NodeMatcher(graph)
    nodes = list(matcher.match('COORDINATE'))

    nodes_data = []
    for node in nodes:
        node_json = {'longitude': node['longitude'], 'latitude': node['latitude'], 'pk': node['pk']}
        nodes_data.append(node_json)

    return nodes_data

if __name__ == '__main__':
    neo4j_graph = connect_graph()
    G = nx.MultiDiGraph()
    add_nodes(G, get_nodes_from_db(neo4j_graph))
    add_rels(G, get_rels_from_db(neo4j_graph))

    site_pairs = get_sites()
    for site_pair in site_pairs:
        node_id_from = site_pair['node_id_from']
        node_id_to = site_pair['node_id_to']
        try:
            path = nx.shortest_path_length(G, node_id_from, node_id_to, weight='time')
            print(node_id_from, node_id_to)
        except Exception as e:
            print(e)
            continue
Code example #60
0
maxX = len(rows[0])
maxY = len(rows)
goals = dict()
distances = dict()
G = networkx.generators.classic.grid_2d_graph(maxY, maxX)

for y, row in enumerate(rows):
    for x, pos in enumerate(row):
        if pos == "#":
            G.remove_node((y,x))
        if pos.isdigit():
            goals[int(pos)] = (y,x)

for y in range(8):
    for x in range(8):
        distances[y,x] = networkx.shortest_path_length(G, goals[y], goals[x])
        distances[x,y] = distances[y,x]

shortest = -1
for path in permutations(range(1,8)):
    l = [0] + list(path)
    pathDist = 0
    for i in range(len(l)-1):
        pathDist += distances[l[i+1], l[i]]
    if shortest > 0:
        shortest = min(pathDist, shortest)
    else:
        shortest = pathDist

print "Part one: " + str(shortest)