Code example #1
File: flownetworks.py Project: makslevental/clrs
def edmondskarp(G: nx.Graph, s, t):
    # Edmonds-Karp max flow: repeatedly augment along paths found in the
    # residual graph RG until no augmenting path from s to t remains.
    RG = G.copy()

    # initialise flow on every edge of the original graph
    for u, v in G.edges():
        G[u][v]['flow'] = 0

    path = isthereapath(RG, s, t)
    while len(path) > 0:
        # bottleneck residual capacity along the augmenting path
        path_cp = min(RG[u][v]['capacity'] for u, v in path)
        for u, v in path:
            if G.has_edge(u, v):
                # forward edge: push flow and shrink residual capacity
                G[u][v]['flow'] += path_cp
                RG[u][v]['capacity'] -= path_cp
                if RG[u][v]['capacity'] == 0:
                    RG.remove_edge(u, v)
                # grow (or create) the reverse residual edge
                if RG.has_edge(v, u):
                    RG[v][u]['capacity'] += path_cp
                else:
                    RG.add_edge(v, u, capacity=path_cp)
            else:
                # this edge is a "cancelling" flow:
                # residual capacity of the original edge goes up and the
                # cancelling "capacity" goes down
                G[v][u]['flow'] -= path_cp
                RG[u][v]['capacity'] -= path_cp
                if RG[u][v]['capacity'] == 0:
                    RG.remove_edge(u, v)
                # restore the forward residual edge, re-creating it if it was
                # removed earlier when it became saturated
                if RG.has_edge(v, u):
                    RG[v][u]['capacity'] += path_cp
                else:
                    RG.add_edge(v, u, capacity=path_cp)
        path = isthereapath(RG, s, t)

    return RG
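
The isthereapath helper lives elsewhere in flownetworks.py and is not shown here. The BFS version and the tiny capacity graph below are assumptions for illustration only, sketching a plausible contract (return the augmenting path as a list of (u, v) edges, or an empty list), not the project's actual implementation.

import networkx as nx
from collections import deque

def isthereapath(RG, s, t):
    # hypothetical stand-in for the project's helper of the same name:
    # BFS over the residual graph, returning the augmenting path as a list
    # of (u, v) edges, or [] when t is unreachable from s
    parents = {s: None}
    q = deque([s])
    while q:
        u = q.popleft()
        if u == t:
            path = []
            while parents[u] is not None:
                path.append((parents[u], u))
                u = parents[u]
            return list(reversed(path))
        for v in RG.neighbors(u):
            if v not in parents:
                parents[v] = u
                q.append(v)
    return []

# assumed usage: a directed capacity graph with a 'capacity' attribute per edge
G = nx.DiGraph()
G.add_edge('s', 'a', capacity=3)
G.add_edge('a', 't', capacity=2)
G.add_edge('s', 't', capacity=1)
RG = edmondskarp(G, 's', 't')
print(G['s']['a']['flow'], G['a']['t']['flow'], G['s']['t']['flow'])  # 2 2 1 (max flow 3)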
Code example #2
def test_algorithms(algorithms, graph: nx.Graph):
    print()
    print("Testing graph with {0} nodes and {1} edges".format(graph.number_of_nodes(), graph.number_of_edges()))
    results = []
    for algorithm, name in algorithms:
        # make a copy of the graph in case the algorithm mutates it
        graph_copy = graph.copy()
        start_time = time.time()
        result = len(algorithm.get_fbvs(graph_copy))
        print("{0}: {1}, time: {2}".format(name, result, time.time() - start_time))
        results.append(result)
    # every algorithm should report the same result size
    assert results.count(results[0]) == len(results), "The algorithms' results are not the same!"
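
The harness above only needs algorithms to be an iterable of (algorithm, name) pairs where each algorithm object exposes a get_fbvs(graph) method returning a sized collection. The dummy class and call below are invented purely to illustrate that interface; they are not taken from the project.

import time
import networkx as nx

class DummyAlgorithm:
    # hypothetical stand-in: anything with a get_fbvs(graph) -> collection method fits
    def get_fbvs(self, graph):
        return set()  # placeholder result, for interface illustration only

# assumed call shape: a list of (algorithm instance, display name) pairs
algorithms = [(DummyAlgorithm(), 'dummy')]
test_algorithms(algorithms, nx.cycle_graph(5))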
Code example #3
File: allpairs.py Project: makslevental/clrs
def johnson(G: nx.Graph):
    # Johnson's all-pairs shortest paths: attach a dummy source to every node,
    # compute Bellman-Ford potentials h, reweight all edges to be non-negative,
    # then run Dijkstra from each original node and undo the reweighting.
    Gp = G.copy()

    # dummy source; nodes are assumed to be labelled 0..n-1, so -1 is unused
    Gp.add_node(-1)
    Gp.add_edges_from([(-1, i, {'weight': 0}) for i in range(len(G.nodes))])
    D = bellmanford(Gp, -1)
    DD = {}
    if D:  # no negative-weight cycle was detected
        # h[v] is the Bellman-Ford distance from the dummy source to v
        h = len(Gp.nodes) * [None]
        for v, d in Gp.nodes(data=True):
            h[v] = d['distance']

        # reweight: w'(u, v) = w(u, v) + h[u] - h[v] >= 0
        for u, v in Gp.edges():
            Gp[u][v]['weight'] += (h[u] - h[v])

        # Dijkstra from every original node, then recover the true distances
        for u in G.nodes:
            dijkstra(Gp, u)
            for v, d in Gp.nodes(data=True):
                DD[(u, v)] = d['distance'] + h[v] - h[u]

    return DD
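
The bellmanford and dijkstra helpers live elsewhere in the makslevental/clrs project; the code above expects them to write a 'distance' attribute onto each node, with bellmanford returning a falsy value when a negative cycle is reachable. The stand-ins and the small graph below are assumptions for illustration only, built on networkx's own shortest-path routines.

import networkx as nx

def bellmanford(G, s):
    # hypothetical stand-in for the project's helper: store single-source
    # distances as a 'distance' node attribute; False signals a negative cycle
    try:
        dist = dict(nx.single_source_bellman_ford_path_length(G, s))
    except nx.NetworkXUnbounded:
        return False
    nx.set_node_attributes(G, dist, 'distance')
    return True

def dijkstra(G, s):
    # hypothetical stand-in with the same contract (non-negative weights assumed)
    dist = dict(nx.single_source_dijkstra_path_length(G, s))
    nx.set_node_attributes(G, dist, 'distance')

# nodes must be labelled 0..n-1 so that h can be indexed as a list
G = nx.DiGraph()
G.add_weighted_edges_from([(0, 1, 3), (1, 2, -2), (0, 2, 4), (2, 0, 5)])
DD = johnson(G)
print(DD[(0, 2)])  # shortest distance from 0 to 2 goes via 0->1->2, i.e. 1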
Code example #4
from networkx import Graph

entries = Graph()
# add_edge creates the endpoint nodes implicitly, so no add_node calls are needed
entries.add_edge('@davidmartinb', '@emartinborregon')

entries2 = entries.copy()
entries2.add_edge('@davidmartinb', '@test')
entries.add_edge('@davidmartinb', '@emartinborregon')

# nodes present in the copy but not in the original
print(set(entries2.nodes()) - set(entries.nodes()))

# edges present in the copy but not in the original
edges1 = entries.edges()
entries2.remove_edges_from(edges1)
print(entries2.edges())
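
As an aside, the edge-difference step can also be done with networkx's built-in nx.difference, which returns the edges present in the first graph but not the second; it requires both graphs to share the same node set, hence the add_nodes_from call in this sketch.

import networkx as nx

entries.add_nodes_from(entries2.nodes())  # nx.difference needs identical node sets
diff = nx.difference(entries2, entries)
print(diff.edges())  # only ('@davidmartinb', '@test') exists in entries2 alone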
Code example #5
def nX_consolidate_parallel(networkX_graph: nx.Graph,
                            buffer_dist: float = 14) -> nx.Graph:
    if not isinstance(networkX_graph, nx.Graph):
        raise TypeError('This method requires an undirected networkX graph.')

    logger.info('Consolidating network by parallel edges.')
    g_copy = networkX_graph.copy()

    # create an STRtree
    tree = _create_strtree(networkX_graph)

    # setup template for new node names
    n_n_template = uuid.uuid4().hex.upper()[0:3]
    n_n_count = 0

    # keep track of removed nodes
    removed_nodes = set()

    # keep track of manually split node locations for post-processing
    merge_pairs = []

    # iterate origin graph
    for n, n_d in tqdm(networkX_graph.nodes(data=True),
                       disable=checks.quiet_mode):
        # skip if already consolidated from an adjacent node
        if n in removed_nodes:
            continue
        # get all other nodes within buffer distance
        js = tree.query(geometry.Point(n_d['x'], n_d['y']).buffer(buffer_dist))
        # if only self-node, then continue
        if len(js) <= 1:
            continue

        # new parent node name - only used if match found
        parent_node_name = None
        # keep track of the uids to be consolidated
        node_group = set()
        # delay removals until after each iteration of loop to avoid in-place modification errors
        removals = []

        # iterate each node's neighbours
        # check if any of the neighbour's neighbours are within the buffer distance of other direct neighbours
        # if so, parallel set of edges may have been found
        nbs = list(nx.neighbors(g_copy, n))
        for j_point in js:
            j = j_point.uid
            # ignore self-node
            if j == n:
                continue
            # only review if not already in the removed nodes,
            if j in removed_nodes:
                continue
            # matching can happen in one of several situations, so use a flag
            matched = False
            # cross check n's neighbours against j's neighbours
            # if they have respective neighbours within buffer dist of each other, then merge
            j_nbs = list(nx.neighbors(g_copy, j))
            for n_nb in nbs:
                # skip this neighbour if already removed
                if n_nb in removed_nodes:
                    continue
                # if j is a direct neighbour to n, then ignore
                if n_nb == j:
                    continue
                # get the n node neighbour and create a point
                n_nb_point = geometry.Point(g_copy.nodes[n_nb]['x'],
                                            g_copy.nodes[n_nb]['y'])
                # compare against j node neighbours
                for j_nb in j_nbs:
                    # skip this neighbour if already removed
                    if j_nb in removed_nodes:
                        continue
                    # don't match against origin node
                    if j_nb == n:
                        continue
                    # if the respective neighbours are the same node, then match
                    if n_nb == j_nb:
                        matched = True
                        break
                    # otherwise, get the j node neighbour and create a point
                    j_nb_point = geometry.Point(g_copy.nodes[j_nb]['x'],
                                                g_copy.nodes[j_nb]['y'])
                    # check whether the neighbours are within the buffer distance of each other
                    if n_nb_point.distance(j_nb_point) < buffer_dist:
                        matched = True
                        break
                    # if not, then check along length of lines
                    # this is necessary for situations where certain lines are broken by other links
                    # i.e. where nodes are out of lock-step
                    # check first for j_nb point against n - n_nb line geom
                    response = _find_parallel(g_copy, n, n_nb, j_nb,
                                              buffer_dist)
                    if response is not None:
                        removal_pair, merge_pair = response
                        removals.append(removal_pair)
                        merge_pairs.append(merge_pair)
                        matched = True
                        break
                    # similarly check for n_nb point against j - j_nb line geom
                    response = _find_parallel(g_copy, j, j_nb, n_nb,
                                              buffer_dist)
                    if response is not None:
                        removal_pair, merge_pair = response
                        removals.append(removal_pair)
                        merge_pairs.append(merge_pair)
                        matched = True
                        break

                # break out if match found
                if matched:
                    break

            # if successful match, go ahead and add a new parent node, and merge n and j
            if matched:
                if parent_node_name is None:
                    parent_node_name = f'{n_n_template}_{n_n_count}'
                    n_n_count += 1
                node_group.update([n, j])
                removed_nodes.update([n, j])

        for s, e in removals:
            # in some cases, the edge may not exist anymore
            if (s, e) in g_copy.edges():
                g_copy.remove_edge(s, e)

        if not node_group:
            continue

        g_copy = _dissolve_adjacent(g_copy, parent_node_name, node_group)

    for pair in merge_pairs:
        # in some cases one of the pair of nodes may not exist anymore
        if pair[0] in g_copy and pair[1] in g_copy:
            parent_node_name = f'{n_n_template}_{n_n_count}'
            n_n_count += 1
            g_copy = _dissolve_adjacent(g_copy, parent_node_name, pair)

    return g_copy
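
A minimal usage sketch: the loop above reads projected 'x' and 'y' coordinates from every node, so the input graph must carry those attributes in the same units as buffer_dist. The helpers it calls (_create_strtree, _find_parallel, _dissolve_adjacent) are internal to the surrounding module, and any further requirements they impose (such as the edge 'geom' attribute used below) are assumptions here rather than facts taken from this excerpt.

from shapely import geometry
import networkx as nx

# node attributes 'x' and 'y' hold projected coordinates (same units as buffer_dist)
G = nx.Graph()
G.add_node('a', x=0.0, y=0.0)
G.add_node('b', x=10.0, y=0.0)
# assumed edge geometry convention, required by the module's internal helpers
G.add_edge('a', 'b', geom=geometry.LineString([(0, 0), (10, 0)]))

# consolidated = nX_consolidate_parallel(G, buffer_dist=14)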