Example #1
    def test_others(self):
        (P, D) = nx.bellman_ford(self.XG, 's')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)
        (P, D) = nx.goldberg_radzik(self.XG, 's')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)

        G = nx.path_graph(4)
        assert_equal(nx.bellman_ford(G, 0),
                     ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
        assert_equal(nx.goldberg_radzik(G, 0),
                     ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
        assert_equal(nx.bellman_ford(G, 3),
                     ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0}))
        assert_equal(nx.goldberg_radzik(G, 3),
                     ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0}))

        G = nx.grid_2d_graph(2, 2)
        pred, dist = nx.bellman_ford(G, (0, 0))
        assert_equal(sorted(pred.items()),
                     [((0, 0), None), ((0, 1), (0, 0)),
                      ((1, 0), (0, 0)), ((1, 1), (0, 1))])
        assert_equal(sorted(dist.items()),
                     [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
        pred, dist = nx.goldberg_radzik(G, (0, 0))
        assert_equal(sorted(pred.items()),
                     [((0, 0), None), ((0, 1), (0, 0)),
                      ((1, 0), (0, 0)), ((1, 1), (0, 1))])
        assert_equal(sorted(dist.items()),
                     [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
Example #2
    def test_others(self):
        (P, D) = nx.bellman_ford(self.XG, 's')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)
        (P, D) = nx.goldberg_radzik(self.XG, 's')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)

        G = nx.path_graph(4)
        assert_equal(nx.bellman_ford(G, 0),
                     ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
        assert_equal(nx.goldberg_radzik(G, 0),
                     ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
        assert_equal(nx.bellman_ford(G, 3),
                     ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0}))
        assert_equal(nx.goldberg_radzik(G, 3),
                     ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0}))

        G = nx.grid_2d_graph(2, 2)
        pred, dist = nx.bellman_ford(G, (0, 0))
        assert_equal(sorted(pred.items()),
                     [((0, 0), None), ((0, 1), (0, 0)),
                      ((1, 0), (0, 0)), ((1, 1), (0, 1))])
        assert_equal(sorted(dist.items()),
                     [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
        pred, dist = nx.goldberg_radzik(G, (0, 0))
        assert_equal(sorted(pred.items()),
                     [((0, 0), None), ((0, 1), (0, 0)),
                      ((1, 0), (0, 0)), ((1, 1), (0, 1))])
        assert_equal(sorted(dist.items()),
                     [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
Example #3
    def test_bellman_ford(self):
        # single node graph
        G = nx.DiGraph()
        G.add_node(0)
        assert_equal(nx.bellman_ford(G, 0), ({0: None}, {0: 0}))
        assert_raises(KeyError, nx.bellman_ford, G, 1)

        # negative weight cycle
        G = nx.cycle_graph(5, create_using=nx.DiGraph())
        G.add_edge(1, 2, weight=-7)
        for i in range(5):
            assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
        G = nx.cycle_graph(5)  # undirected Graph
        G.add_edge(1, 2, weight=-3)
        for i in range(5):
            assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
        # no negative cycle but negative weight
        G = nx.cycle_graph(5, create_using=nx.DiGraph())
        G.add_edge(1, 2, weight=-3)
        assert_equal(nx.bellman_ford(G, 0), ({0: None, 1: 0, 2: 1, 3: 2, 4: 3}, {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}))

        # not connected
        G = nx.complete_graph(6)
        G.add_edge(10, 11)
        G.add_edge(10, 12)
        assert_equal(
            nx.bellman_ford(G, 0), ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})
        )

        # not connected, with a component not containing the source that
        # contains a negative cost cycle.
        G = nx.complete_graph(6)
        G.add_edges_from([("A", "B", {"load": 3}), ("B", "C", {"load": -10}), ("C", "A", {"load": 2})])
        assert_equal(
            nx.bellman_ford(G, 0, weight="load"),
            ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}),
        )

        # multigraph
        P, D = nx.bellman_ford(self.MXG, "s")
        assert_equal(P["v"], "u")
        assert_equal(D["v"], 9)
        P, D = nx.bellman_ford(self.MXG4, 0)
        assert_equal(P[2], 1)
        assert_equal(D[2], 4)

        # other tests
        (P, D) = nx.bellman_ford(self.XG, "s")
        assert_equal(P["v"], "u")
        assert_equal(D["v"], 9)

        G = nx.path_graph(4)
        assert_equal(nx.bellman_ford(G, 0), ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
        assert_equal(nx.bellman_ford(G, 3), ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0}))

        G = nx.grid_2d_graph(2, 2)
        pred, dist = nx.bellman_ford(G, (0, 0))
        assert_equal(sorted(pred.items()), [((0, 0), None), ((0, 1), (0, 0)), ((1, 0), (0, 0)), ((1, 1), (0, 1))])
        assert_equal(sorted(dist.items()), [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
Example #4
 def test_multigraph(self):
     P, D = nx.bellman_ford(self.MXG, 's')
     assert_equal(P['v'], 'u')
     assert_equal(D['v'], 9)
     P, D = nx.goldberg_radzik(self.MXG, 's')
     assert_equal(P['v'], 'u')
     assert_equal(D['v'], 9)
     P, D = nx.bellman_ford(self.MXG4, 0)
     assert_equal(P[2], 1)
     assert_equal(D[2], 4)
     P, D = nx.goldberg_radzik(self.MXG4, 0)
     assert_equal(P[2], 1)
     assert_equal(D[2], 4)
 def test_multigraph(self):
     P, D = nx.bellman_ford(self.MXG, 's')
     assert_equal(P['v'], 'u')
     assert_equal(D['v'], 9)
     P, D = nx.goldberg_radzik(self.MXG, 's')
     assert_equal(P['v'], 'u')
     assert_equal(D['v'], 9)
     P, D = nx.bellman_ford(self.MXG4, 0)
     assert_equal(P[2], 1)
     assert_equal(D[2], 4)
     P, D = nx.goldberg_radzik(self.MXG4, 0)
     assert_equal(P[2], 1)
     assert_equal(D[2], 4)
Example #6
 def test_single_node_graph(self):
     G = nx.DiGraph()
     G.add_node(0)
     assert_equal(nx.bellman_ford(G, 0), ({0: None}, {0: 0}))
     assert_equal(nx.goldberg_radzik(G, 0), ({0: None}, {0: 0}))
     assert_raises(KeyError, nx.bellman_ford, G, 1)
     assert_raises(KeyError, nx.goldberg_radzik, G, 1)
Example #7
def sssp_test(impl, G, source):
    pred, expected_dists = nx.bellman_ford(G, source, weight="weight")
    actual = impl(G, source)
    actual_without_unreachable = dict(
        filter(lambda (k, v): v != float("Inf") and v != sys.maxint,
               actual.iteritems()))
    return actual_without_unreachable, expected_dists
Example #8
def get_skip_lengths(system):
    """
    :param system: a system
    :return: length of the skip connection for each projection in the system, i.e. the length
        of the longest path between the projection's origin and termination nodes
    """
    import networkx as nx

    g = system.make_graph()
    for u, v, d in g.edges(data=True):
        d['weight'] = -1

    lengths = {}
    for pop in system.populations:
        lengths[pop.name] = nx.bellman_ford(g, pop.name)

    result = []
    for p in system.projections:
        result.append(-lengths[p.origin.name][1][p.termination.name])

    # print some additional information ...
    print('longest skip connection: {}'.format(max(result)))
    for i in range(len(system.projections)):
        if result[i] == max(result):
            print([
                system.projections[i].origin.name,
                system.projections[i].termination.name
            ])
    print('longest path: {}'.format(nx.dag_longest_path_length(g)))
    print(nx.dag_longest_path(g))

    return result
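The snippet above relies on the NetworkX 1.x call nx.bellman_ford, which was removed in NetworkX 2.0. Below is a minimal sketch of the same negate-the-weights longest-path trick with the 2.x replacement bellman_ford_predecessor_and_distance; the four-node DAG and variable names are purely illustrative:

import networkx as nx

g = nx.DiGraph()
g.add_edges_from([('a', 'b'), ('b', 'c'), ('a', 'c'), ('c', 'd')])
for u, v, d in g.edges(data=True):
    d['weight'] = -1  # negate unit weights so a shortest path is a longest hop path

# In NetworkX 2.x, pred maps each node to a *list* of predecessors.
pred, dist = nx.bellman_ford_predecessor_and_distance(g, 'a')
longest_hops = {n: -d for n, d in dist.items()}
print(longest_hops['d'])  # 3, via a -> b -> c -> d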
Example #9
def SubSolve(G,pi):
    #print "pi=",pi
    G.edge[1][2]["cost"]-=pi[1]
    G.edge[1][3]["cost"]-=pi[1]
    G.edge[1]['t']["cost"]-=pi[1]
    G.edge[2][3]["cost"]-=pi[2]
    G.edge[2]['t']["cost"]-=pi[2]
    G.edge[3]['t']["cost"]-=pi[3]

    #print G.edges(data=True)
    
    pred, dist = nx.bellman_ford(G,'s',weight="cost")
    new_chemin=[]
    chemin=['t']
    predecesseur = pred['t']
    chemin.append(predecesseur)
    k=predecesseur
    while predecesseur != 's':
        predecesseur = pred[k]
        chemin.append(predecesseur)
        k=predecesseur

    chemin.reverse()

    G.edge[1][2]["cost"]+=pi[1]
    G.edge[1][3]["cost"]+=pi[1]
    G.edge[1]['t']["cost"]+=pi[1]
    G.edge[2][3]["cost"]+=pi[2]
    G.edge[2]['t']["cost"]+=pi[2]
    G.edge[3]['t']["cost"]+=pi[3]
    
    L=2*dist['t']+sum(pi[i] for i in [1,2,3])
    #print "chemin optimal=",chemin,"de cout",dist['t']
    print "L=",L
    return L,chemin
 def test_single_node_graph(self):
     G = nx.DiGraph()
     G.add_node(0)
     assert_equal(nx.bellman_ford(G, 0), ({0: None}, {0: 0}))
     assert_equal(nx.goldberg_radzik(G, 0), ({0: None}, {0: 0}))
     assert_raises(KeyError, nx.bellman_ford, G, 1)
     assert_raises(KeyError, nx.goldberg_radzik, G, 1)
Example #11
def algorithm_ford_bellman(graph):
    graph.add_edge(1, 2, weight=1)
    graph.add_edge(1, 3, weight=2)
    graph.add_edge(1, 9, weight=8)
    graph.add_edge(1, 10, weight=5)
    graph.add_edge(2, 4, weight=4)
    graph.add_edge(2, 3, weight=3)
    graph.add_edge(3, 4, weight=6)
    graph.add_edge(4, 14, weight=4)
    graph.add_edge(4, 5, weight=5)
    graph.add_edge(5, 14, weight=3)
    graph.add_edge(5, 7, weight=1)
    graph.add_edge(5, 6, weight=8)
    graph.add_edge(7, 14, weight=2)
    graph.add_edge(6, 8, weight=3)
    graph.add_edge(6, 3, weight=7)
    graph.add_edge(6, 9, weight=7)
    graph.add_edge(7, 15, weight=6)
    graph.add_edge(8, 15, weight=4)
    graph.add_edge(8, 13, weight=3)
    graph.add_edge(8, 11, weight=9)
    graph.add_edge(9, 13, weight=8)
    graph.add_edge(9, 10, weight=4)
    graph.add_edge(10, 11, weight=3)
    graph.add_edge(11, 12, weight=6)
    graph.add_edge(12, 13, weight=5)
    graph.add_edge(12, 15, weight=1)
    graph.add_edge(13, 15, weight=2)
    result = dict(sorted(nx.bellman_ford(graph, 12)[1].items()))
    for k, v in result.items():
        print('Point ' + str(k) + ":\t" + str(v))
Example #12
def main():
    dagLength = int(input())
    while dagLength != 0:
        dag = []
        for i in range(dagLength):
            content = input()
            node = []
            node = content.split()
            if len(node) != 0:
                node = list(map(int, node))  # materialize so later list comparisons work
            dag += [node]
        temp = [x for x in dag if x != []]
        if len(temp) == 0:
            print(0)
        elif dag[0] == []:
            print(0)
        else:
            G = nx.DiGraph()
            for i in range(len(dag) - 1):
                for j in dag[i]:
                    G.add_edge(i, j)
            for n in G:
                for nbr in G[n]:
                    G[n][nbr]['weight'] = -1
            pred, dist = nx.bellman_ford(G, 0)
            # dist holds non-positive distances (all weights are -1);
            # the longest path length from node 0 is the negated minimum.
            print(-min(dist.values()))
        dagLength = int(input())
Example #13
    def getTree(self, node, reverse=False):
        """
        Get the tree reachable from a start node.
        :param node:    A start node
        :param reverse: Should the graph be reversed (upstream search)
        :return:        A list of nodes and a list of edges
        """
        if self.dirty:
            self.createGraph()

        if reverse:
            my_graph = self.graph.reverse()
        else:
            my_graph = self.graph

        # Returns pred, weight
        pred, _ = nx.bellman_ford(my_graph, node)
        edges = [(v, u, my_graph[v][u]) for (u, v) in pred.items()
                 if v is not None]
        nodes = [
            my_graph.node[n] for n in set(pred.keys() + pred.values())
            if n is not None
        ]

        return nodes, edges
def peptideSequencing(spectralVector, proteins=None):
    
    if proteins is None:
        proteins = proteinMass
    graph = nx.DiGraph()
    maxIndex = len(spectralVector)
    graph.add_nodes_from(xrange(maxIndex))

    for idx in xrange(maxIndex):
        # Ignore nodes with no incoming edges except the 1st one.
        if idx > 0 and len(graph.in_edges(idx)) == 0:
            continue
        
        for p, mass in proteins.iteritems():
            if idx + mass < len(spectralVector):
                try:
                    graph.add_edge(idx, idx+mass,{'amino': p,
                                              'weight': -1 * spectralVector[idx+mass]})
                except IndexError as e:
                    pass
    
    pred, dist = nx.bellman_ford(graph, 0)
    proteinLookup = {v:k for k,v in proteins.iteritems()}    
    idx = len(spectralVector)-1
    path = []
    while idx > 0:
        path.append(proteinLookup[idx-pred[idx]])
        idx = pred[idx]
    return ''.join(path[::-1])
Example #15
    def test_bellman_ford(self):
        # single node graph
        G = nx.DiGraph()
        G.add_node(0)
        assert_equal(nx.bellman_ford(G, 0), ({0: None}, {0: 0}))

        # negative weight cycle
        G = nx.cycle_graph(5, create_using = nx.DiGraph())
        G.add_edge(1, 2, weight = -7)
        assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, 0)
        G = nx.cycle_graph(5)
        G.add_edge(1, 2, weight = -7)
        assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, 0)

        # not connected
        G = nx.complete_graph(6)
        G.add_edge(10, 11)
        G.add_edge(10, 12)
        assert_equal(nx.bellman_ford(G, 0),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))

        # not connected, with a component not containing the source that
        # contains a negative cost cycle.
        G = nx.complete_graph(6)
        G.add_edges_from([('A', 'B', {'load': 3}),
                          ('B', 'C', {'load': -10}),
                          ('C', 'A', {'load': 2})])
        assert_equal(nx.bellman_ford(G, 0, weight = 'load'),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))

        # multigraph
        P, D = nx.bellman_ford(self.MXG,'s')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)
        P, D = nx.bellman_ford(self.MXG4, 0)
        assert_equal(P[2], 1)
        assert_equal(D[2], 4)

        # other tests
        (P,D)= nx.bellman_ford(self.XG,'s')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)

        G=nx.path_graph(4)
        assert_equal(nx.bellman_ford(G,0),
                     ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
        G=nx.grid_2d_graph(2,2)
        pred,dist=nx.bellman_ford(G,(0,0))
        assert_equal(sorted(pred.items()),
                     [((0, 0), None), ((0, 1), (0, 0)), 
                      ((1, 0), (0, 0)), ((1, 1), (0, 1))])
        assert_equal(sorted(dist.items()),
                     [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
Example #16
 def compute_paths(self):
     self.shortest_paths = list()
     for src in xrange(self.parameters.nodes_in):
         try:
             (pred, dist) = nx.bellman_ford(self, src)
         except:
             continue
         for tar in xrange(self.parameters.nodes_end_middle,\
                 self.parameters.nodes_total):
             if dist.has_key(tar):
                 self.shortest_paths.append(dist[tar])
Example #17
    def test_not_connected(self):
        G = nx.complete_graph(6)
        G.add_edge(10, 11)
        G.add_edge(10, 12)
        assert_equal(nx.bellman_ford(G, 0),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
        assert_equal(nx.goldberg_radzik(G, 0),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))

        # not connected, with a component not containing the source that
        # contains a negative cost cycle.
        G = nx.complete_graph(6)
        G.add_edges_from([('A', 'B', {'load': 3}),
                          ('B', 'C', {'load': -10}),
                          ('C', 'A', {'load': 2})])
        assert_equal(nx.bellman_ford(G, 0, weight='load'),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
        assert_equal(nx.goldberg_radzik(G, 0, weight='load'),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
Example #18
 def getTree(self,node,reverse=False):
     if self.dirty:
         self.createGraph()
     
     if reverse:
         myGraph = self.graph.reverse()
     else:
         myGraph = self.graph
         
     # Returns pred, weight
     pred, _ = nx.bellman_ford(myGraph, node)
     edges = [(v,u,myGraph[v][u]) for (u,v) in pred.items() if v is not None]
     
     return edges
def prog_20(fname):
    graph = nx.DiGraph()
    f = open(fname)
    value, num = map(int, f.readline().strip().split())
    for line in f:
        e1,e2,weight = map(int, line.strip().split())
        graph.add_weighted_edges_from([(e1,e2,weight)])
    graph.add_nodes_from(xrange(1,value+1))
    f.close()

    pred, dist = nx.bellman_ford(graph,1)

    for i in xrange(1,value+1):
        print dist.get(i, 'x'),
Example #20
    def test_not_connected(self):
        G = nx.complete_graph(6)
        G.add_edge(10, 11)
        G.add_edge(10, 12)
        assert_equal(nx.bellman_ford(G, 0),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
        assert_equal(nx.goldberg_radzik(G, 0),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))

        # not connected, with a component not containing the source that
        # contains a negative cost cycle.
        G = nx.complete_graph(6)
        G.add_edges_from([('A', 'B', {'load': 3}),
                          ('B', 'C', {'load': -10}),
                          ('C', 'A', {'load': 2})])
        assert_equal(nx.bellman_ford(G, 0, weight='load'),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
        assert_equal(nx.goldberg_radzik(G, 0, weight='load'),
                     ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
                      {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
Example #21
def _bellman_ford_path(G, source, target, weight):
    "Returns shortest path using bellman_ford algorithm."
    pred, dist = nx.bellman_ford(G, source, weight)
    if target not in pred:
        raise nx.NetworkXNoPath("Node %s not reachable from %s." %
                                (source, target))
    # Since predecessors are given, build path backwards, then reverse.
    path = []
    curr = target
    while curr != source:
        path.append(curr)
        curr = pred[curr]
    path.append(source)
    path.reverse()
    return path
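A minimal usage sketch on a hypothetical three-edge graph, assuming the NetworkX 1.x nx.bellman_ford signature used above (the third positional argument is the weight key):

import networkx as nx

G = nx.DiGraph()
G.add_weighted_edges_from([('s', 'a', 2), ('a', 'b', -1), ('s', 'b', 4)])
# The negative edge rules out Dijkstra; Bellman-Ford still applies.
print(_bellman_ford_path(G, 's', 'b', 'weight'))  # expected: ['s', 'a', 'b']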
Example #22
def objective(graph, centers):
    """Calculate the k-center objective: the largest distance from any node to its nearest center.

    :param graph: Graph
    :param centers: list
    :return: float
    """
    if centers:
        # For big k networkx.floyd_warshall_numpy can be faster:
        # distance = networkx.floyd_warshall_numpy(graph)
        # return distance[numpy.ix_([graph.nodes().index(c) for c in centers])].min(axis=0).max(axis=1)[0,0]
        distance = {c: networkx.bellman_ford(graph, c)[1] for c in centers}
        return max([min([distance[c].get(n, float('inf')) for c in centers]) for n in graph.nodes_iter()])
    else:
        return float("inf")
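A quick sanity check of objective() on a five-node path graph; this sketch assumes the NetworkX 1.x API that the function itself uses (nodes_iter, bellman_ford), where edges without an explicit weight default to 1:

import networkx

G = networkx.path_graph(5)      # 0 - 1 - 2 - 3 - 4, unit weights
print(objective(G, [0, 4]))     # expected: 2 (node 2 is two hops from either center)
print(objective(G, []))         # expected: inf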
Example #23
def _bellman_ford_path(G, source, target, weight):
    "Returns shortest path using bellman_ford algorithm."
    pred, dist = nx.bellman_ford(G, source, weight)
    if target not in pred:
        raise nx.NetworkXNoPath(
            "Node %s not reachable from %s." % (source, target))
    # Since predecessors are given, build path backwards, then reverse.
    path = []
    curr = target
    while curr != source:
        path.append(curr)
        curr = pred[curr]
    path.append(source)
    path.reverse()
    return path
Example #24
    def predict(self, words):
        """
        use Viterbi to predict best sequence
        :param words:
        :param word_features:
        :return:
        """
        number_of_words = len(words)
        number_of_states = len(self.tags)
        tags = list(self.tags)

        V = nx.DiGraph()

        # initialize
        for j, tag in enumerate(tags):
            features = self.get_features(words[0], tag, self.START)
            feature_weights = sum((self.feature_weights[x] for x in features))

            V.add_edge(self.START, "%s_0" % (tags[j]), weight=-feature_weights)

        # iterate
        for i in xrange(1, number_of_words):
            for j, tag in enumerate(tags):
                for k, previous_tag in enumerate(tags):
                    features = self.get_features(words[i], tag, previous_tag)
                    feature_weights = sum(
                        (self.feature_weights[x] for x in features))

                    V.add_edge("%s_%s" % (tags[k], i - 1),
                               "%s_%s" % (tags[j], i),
                               weight=-feature_weights)

        # add END node
        for j, tag in enumerate(tags):
            V.add_edge("%s_%s" % (tags[j], number_of_words - 1),
                       self.END,
                       weight=1.0)

        # find shortest path
        predecessors, edge_weights = nx.bellman_ford(V, self.START)

        current = self.END
        best_path = []
        while current != self.START:
            best_path.append(predecessors[current])
            current = predecessors[current]

        return [node.split('_')[0] for node in best_path[::-1][1:]]
Example #25
def _get_longest_paths(g, source_nodes):
    ''' Get the longest path starting from each node in 'source_nodes'.
        Found with bellman_ford() after setting every edge weight to -1.
    '''

    ng = copy.deepcopy(g)
    for u, v in ng.edges():
        ng[u][v]["weight"] = -1

    ret = {}
    for cn in source_nodes:
        pred, dist = nx.bellman_ford(ng, cn, weight="weight")
        path = _get_path(pred, dist)
        assert path[0] == cn
        assert len(path) - 1 == -dist[path[-1]]
        ret[cn] = path

    return ret
Example #26
def _get_longest_paths(g, source_nodes):
    ''' Get the longest path starting from each node in 'source_nodes'.
        Found with bellman_ford() after setting every edge weight to -1.
    '''

    ng = copy.deepcopy(g)
    for u, v in ng.edges():
        ng[u][v]["weight"] = -1

    ret = {}
    for cn in source_nodes:
        pred, dist = nx.bellman_ford(ng, cn, weight="weight")
        path = _get_path(pred, dist)
        assert path[0] == cn
        assert len(path) - 1 == -dist[path[-1]]
        ret[cn] = path

    return ret
Example #27
    def adm_weights(self, req, **kwargs):
        src = kwargs['method'][11:].split('-')[0]
        dst = kwargs['method'][11:].split('-')[1]
        
        graph = self.bqoe_path_spp.get_graph()
        for u,v,d in graph.edges(data=True):
            p1 = self.bqoe_path_spp.host_from_switch(u)
            p2 = self.bqoe_path_spp.host_from_switch(v)
            if ((p1 == "a2" and p2 == "a3") or (p1 == "c2" and p2 == "c1") or (p1 == "m5" and p2 == "m1") or (p1 == "a1" and p2 == "a4") or (p1 == "m3" and p2 == "m2")):
                d['weight'] = 1000
            elif (p1 == "m5" and p2 == "m4"):
                d['weight'] = 3
            else:
                d['weight'] = 1
        
        min_splen = 100000000
        min_sp = []
        if dst == "all":
            destinations_array = ["cdn1", "cdn2", "cdn3", "ext1"]
            random.shuffle(destinations_array)
            for dest in destinations_array:
                prev, dist = nx.bellman_ford(graph,source=src,weight='weight')
                sp = []
                sp.append(dest)
                pv = prev[dest]
                while pv != src:
                    sp.append(pv)
                    pv = prev[pv]
                sp.append(src)

                splen = dist[dest] 
                if splen < min_splen:
                    min_sp = sp
                    min_splen = splen

        humanmin_sp = []
        for elem in min_sp:
            humanmin_sp.append(self.bqoe_path_spp.host_from_switch(elem))

        result = dict(dst = humanmin_sp[0], dest_ip=self.bqoe_path_spp.ip_from_host(humanmin_sp[0]), path = humanmin_sp)
        self.bqoe_path_spp.deploy_any_path(humanmin_sp)

        body = json.dumps(result, indent=4)
        return Response(content_type='application/json', body=body)
Example #28
def plot_big_graph(task):
    args = resolve_args(task._algorithm, *task._args)
    data = args[1]

    fig = pylab.figure(figsize=(5, 5))
    pylab.axis('off')
    ax = fig.add_subplot(111)
    ax.xaxis.set_major_locator(pylab.NullLocator())
    ax.yaxis.set_major_locator(pylab.NullLocator())
    ax.set_aspect('equal')

    pos = networkx.get_node_attributes(data, 'pos')
    nodes = data.nodes().copy()
    for c in task._result:
        nodes.remove(c)

    distance = {c: networkx.bellman_ford(data, c)[1] for c in task._result}
    node_colors = [
        COLORS[min(range(len(task._result)), key=lambda i: distance[task._result[i]].get(n, float('inf')))]
        for n in nodes
    ]

    networkx.draw_networkx(data, pos, with_labels=False, node_size=5, nodelist=nodes, node_color=node_colors,
                           linewidths=0)
    networkx.draw_networkx_nodes(data, pos, with_labels=False, node_size=100, node_color=COLORS,
                                 nodelist=task._result, node_shape='p')

    x = [p[0] for p in pos.values()]
    y = [p[1] for p in pos.values()]

    minx = min(x)
    maxx = max(x)
    extrax = 0.1 * (maxx - minx)
    miny = min(y)
    maxy = max(y)
    extray = 0.1 * (maxy - miny)
    ax.set_ylim([miny - extray, maxy + extray])
    ax.set_xlim([minx - extrax, maxx + extrax])

    stream = io.BytesIO()
    fig.savefig(stream, format='png', bbox_inches='tight', pad_inches=0)

    return stream.getvalue()
 def test_negative_weight_cycle(self):
     G = nx.cycle_graph(5, create_using=nx.DiGraph())
     G.add_edge(1, 2, weight=-7)
     for i in range(5):
         assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
         assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
     G = nx.cycle_graph(5)  # undirected Graph
     G.add_edge(1, 2, weight=-3)
     for i in range(5):
         assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
         assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
     G = nx.DiGraph([(1, 1, {'weight': -1})])
     assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, 1)
     assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1)
     # no negative cycle but negative weight
     G = nx.cycle_graph(5, create_using=nx.DiGraph())
     G.add_edge(1, 2, weight=-3)
     assert_equal(nx.bellman_ford(G, 0), ({
         0: None,
         1: 0,
         2: 1,
         3: 2,
         4: 3
     }, {
         0: 0,
         1: 1,
         2: -2,
         3: -1,
         4: 0
     }))
     assert_equal(nx.goldberg_radzik(G, 0), ({
         0: None,
         1: 0,
         2: 1,
         3: 2,
         4: 3
     }, {
         0: 0,
         1: 1,
         2: -2,
         3: -1,
         4: 0
     }))
Example #30
    def getTree(self, node, reverse=False):
        """
        Get the edges reachable from a start node.
        :param node:    A start node
        :param reverse: Should the graph be reversed (upstream search)
        :return:        A list of edges
        """
        if self.dirty:
            self.createGraph()

        if reverse:
            my_graph = self.graph.reverse()
        else:
            my_graph = self.graph

        # Returns pred, weight
        pred, _ = nx.bellman_ford(my_graph, node)
        edges = [(v, u, my_graph[v][u]) for (u, v) in pred.items() if v is not None]

        return edges
Example #31
def gonzalez(k, graph, randomized=True, heuristic=None, bellman_ford=True):
    """This function gives a 2-approximation for the k-center problem on a graph.
    See "Clustering to minimize the maximum intercluster distance" by
    Teofilo F. Gonzalez for more details.

    :param k: int
    :param graph: Graph
    :return: list
    """

    def distance(node, target):
        try:
            # return networkx.dijkstra_path_length(graph, node, target)
            return networkx.astar_path_length(graph, node, target, heuristic=heuristic)
        except networkx.NetworkXNoPath:
            return float('inf')

    if randomized:
        result = [random.choice(graph.nodes())]
    else:
        result = [graph.nodes()[0], ]
    for l in range(k - 1):
        dist = 0
        head = None
        if bellman_ford:
            distance = {c: networkx.bellman_ford(graph, c)[1] for c in result}
            head = max([
                (n, min([(c, distance[c].get(n, float('inf'))) for c in result], key=lambda i: i[1])[1])
                for n in graph.nodes_iter()
            ], key=lambda i: i[1])[0]
        else:
            for node in graph.nodes():
                tmp_dist = min(distance(node, target) for target in result)
                if tmp_dist > dist:
                    dist = tmp_dist
                    head = node
        if head:
            result.append(head)
        else:
            return result
    return result
 def _all_shortest_paths(self):
 
     """ Find all shortest paths from every node to destination """
      # Make a reversed graph (reversing all the edges) so we can solve a single-destination shortest-paths problem.
     
     _reverse_graph =  self._G.reverse(copy=True)
     _reverse_pred, _dist = nx.bellman_ford(_reverse_graph,'t') 
     print time.ctime(), "reverse_pred, & dist by using bellman ford "
     _pred = defaultdict(dict)
     for node, neighbor in _reverse_pred.iteritems():
         _pred[neighbor]=node
     for counter, node in enumerate(self._G.nodes()):
         try:
             self._G.node[node]['target_distance']=_dist[node]
         except KeyError:
             self._G.node[node]['target_distance']=float('inf')
         _path=self._get_path_from_predecessors((_reverse_pred), destination=node)
         path = list(reversed([(value, key) for key,value in _path]))
         self._G.node[node]['path']=path
         
     self.shortest_path_distance = self._G.node[self.source]['target_distance']
Example #33
    def find_shortest_paths(self):
        """
        Shortest path detection between two nodes.

        If graph has negative weights, Bellman Ford algorithm is used for
        path detection, otherwise Djikstra Algorithm is used.

        :return A list with sequence of nodes that are included in path. If
        there is no path between these nodes None value is returned.
        """
        if not self.weight:
            weight = None
        else:
            weight = 'weight'
        if self.graph.has_negative_weights and self.graph.is_weighted:
            pred = nx.bellman_ford(self.graph.graph,
                                   self.source,
                                   weight='weight')
            sequen = [self.target]
            path_sequence = self.create_path_sequence(pred[0], self.target,
                                                      sequen)
            self.path_length = pred[1][self.target]
            return path_sequence
        else:
            paths = nx.all_shortest_paths(self.graph.graph,
                                          self.source,
                                          self.target,
                                          weight=weight)
            try:
                p = []
                for path in paths:
                    p.append(path)
            except nx.NetworkXNoPath:
                return None
            else:
                self.path_length = nx.shortest_path_length(self.graph.graph,
                                                           self.source,
                                                           self.target,
                                                           weight=weight)
                return p
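The same dispatch idea, Bellman-Ford only when negative weights are present and Dijkstra otherwise, can be sketched with a single nx.shortest_path call. This helper is illustrative and not part of the class above; it assumes NetworkX >= 2.1, where shortest_path accepts a method argument:

import networkx as nx

def shortest_path_auto(G, source, target, weight='weight'):
    # Fall back to Bellman-Ford only if some edge weight is negative;
    # otherwise Dijkstra is the cheaper choice.
    has_negative = any(d.get(weight, 1) < 0 for _, _, d in G.edges(data=True))
    method = 'bellman-ford' if has_negative else 'dijkstra'
    return nx.shortest_path(G, source, target, weight=weight, method=method)

G = nx.DiGraph()
G.add_weighted_edges_from([('s', 'a', 2), ('a', 't', -1), ('s', 't', 4)])
print(shortest_path_auto(G, 's', 't'))  # expected: ['s', 'a', 't']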
Example #34
    def _all_shortest_paths(self):
        """ Find all shortest paths from every node to destination """
        # Make a reversed graph (reversing all the edges) so we can solve a single-destination shortest-paths problem.

        _reverse_graph = self._G.reverse(copy=True)
        _reverse_pred, _dist = nx.bellman_ford(_reverse_graph, 't')
        print(time.ctime(), "reverse_pred, & dist by using bellman ford ")
        _pred = defaultdict(dict)
        for node, neighbor in _reverse_pred.items():
            _pred[neighbor] = node
        for counter, node in enumerate(self._G.nodes()):
            try:
                self._G.node[node]['target_distance'] = _dist[node]
            except KeyError:
                self._G.node[node]['target_distance'] = float('inf')
            _path = self._get_path_from_predecessors((_reverse_pred),
                                                     destination=node)
            path = list(reversed([(value, key) for key, value in _path]))
            self._G.node[node]['path'] = path

        self.shortest_path_distance = self._G.node[
            self.source]['target_distance']
Example #35
def routeOptimizer(topEvents, relWeight):
	# Gets a list of the top events from multiple areas, creates a weighted graph, finds best route

	l = len(topEvents)
	fullDist = 1.0*lldist( topEvents[0]['venLat'], topEvents[0]['venLon'], topEvents[l-1]['venLat'], topEvents[l-1]['venLon']) # lat/lon distance from start to end	
	
	# MUST MAKE SURE START AND END ARE IN THE LIST
	DG = nx.DiGraph() # create a new directed graph
	
	for i in topEvents:
		DG.add_node(i['ind'],date=i['date'])
		DG.add_node(i['ind'],venue=i['venue'])
		DG.add_node(i['ind'],band=i['band'])
  		for j in topEvents:
        		day1 = to_datetime(i['date']).date()
        		day2 = to_datetime(j['date']).date()
        		deltaDay = day2 - day1
			dist = 1.0*lldist( i['venLat'], i['venLon'], j['venLat'], j['venLon'] ) 
			#dist = 1.0*osrmDist( i['venLat'], i['venLon'], j['venLat'], j['venLon'] ) 
			tooFar = 500 # in miles
			#tooFar = 300000 # in 10ths of seconds, about 8 hours
        		if (deltaDay.days > 0):
				if (dist/deltaDay.days < tooFar): # only link two events if the second is forward in time, and they're not too far
            				#wght = dist*np.exp((float(j['rank'])/rank_norm)-1)
            				wght = ((1.0-relWeight/100.0) *(dist/fullDist) - (relWeight/100.0)*(1-float(j['rank'])/rank_norm))
		#			wght = np.exp(wght) # map wght to between 0 and 1
	#				if (wght <= 0): wght = (j['rank']/rank_norm)*(dist/fullDist)
					print i['ind'],j['ind'],wght
            				DG.add_weighted_edges_from([(i['ind'],j['ind'],wght)])

	pred, dist = nx.bellman_ford(DG,'Start','weight')
	node = 'End'
	nodeList = []
	while (node != 'Start'):
		nodeList.append(node)
		node = pred[node]
	#path = nx.shortest_path(DG,'Start','End','weight') # the nodes are labeled by ind, so that's all this will return at the moment
	#print path
	return nodeList
Example #36
def routeOptimizer(topEvents, relWeight):
	# Gets a list of the top events from multiple areas, creates a weighted graph, finds best route

	l = len(topEvents)
	fullDist = 1.0*lldist( topEvents[0]['venLat'], topEvents[0]['venLon'], topEvents[l-1]['venLat'], topEvents[l-1]['venLon']) # lat/lon distance from start to end	
	
	# MUST MAKE SURE START AND END ARE IN THE LIST
	DG = nx.DiGraph() # create a new directed graph
	
	for i in topEvents:
		DG.add_node(i['ind'],date=i['date'])
		DG.add_node(i['ind'],venue=i['venue'])
		DG.add_node(i['ind'],band=i['band'])
  		for j in topEvents:
        		day1 = to_datetime(i['date']).date()
        		day2 = to_datetime(j['date']).date()
        		deltaDay = day2 - day1
			dist = 1.0*lldist( i['venLat'], i['venLon'], j['venLat'], j['venLon'] ) 
			#dist = 1.0*osrmDist( i['venLat'], i['venLon'], j['venLat'], j['venLon'] ) 
			tooFar = 500 # in miles
			#tooFar = 300000 # in 10ths of seconds, about 8 hours
        		if (deltaDay.days > 0):
				if (dist/deltaDay.days < tooFar): # only link two events if the second is forward in time, and they're not too far
            				#wght = dist*np.exp((float(j['rank'])/rank_norm)-1)
            				wght = ((1.0-relWeight/100.0) *(dist/fullDist) - (relWeight/100.0)*(1-float(j['rank'])/rank_norm))
		#			wght = np.exp(wght) # map wght to between 0 and 1
	#				if (wght <= 0): wght = (j['rank']/rank_norm)*(dist/fullDist)
					print i['ind'],j['ind'],wght
            				DG.add_weighted_edges_from([(i['ind'],j['ind'],wght)])

	pred, dist = nx.bellman_ford(DG,'Start','weight')
	node = 'End'
	nodeList = []
	while (node != 'Start'):
		nodeList.append(node)
		node = pred[node]
	#path = nx.shortest_path(DG,'Start','End','weight') # the nodes are labeled by ind, so that's all this will return at the moment
	#print path
	return nodeList
Example #37
def paths_to_leaves(digraph, sort_paths=False):
  '''Return paths from the root of the graph to each leaf. A path is a list of commits'''
  if len(digraph.nodes()) > 0:
    root = [n for n in digraph.nodes() if digraph.in_degree(n) == 0][0]
    leaves = [n for n in digraph.nodes() if digraph.out_degree(n) == 0]
    neg_graph = nx.DiGraph(digraph)
    for u, v in neg_graph.edges():
      neg_graph[u][v]['weight'] = -1
    pred, dist = nx.bellman_ford(neg_graph, root)
    dist_paths = []
    for leaf in leaves:
      path = [leaf]
      curr = leaf
      while pred[curr] is not None:
        curr = pred[curr]
        path.append(curr)
      path.reverse()
      dist_paths.append((-dist[leaf], path))
    if sort_paths is True:
      dist_paths = sorted(dist_paths, key=lambda x:-x[0])
    return dist_paths
  else:
    return None
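A quick usage sketch of paths_to_leaves() on a hypothetical three-edge DiGraph, assuming the NetworkX 1.x nx.bellman_ford API that the function uses:

import networkx as nx

G = nx.DiGraph([(1, 2), (1, 3), (3, 4)])  # root 1, leaves 2 and 4
print(paths_to_leaves(G, sort_paths=True))
# expected: [(2, [1, 3, 4]), (1, [1, 2])], longest root-to-leaf path first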
Example #38
 def test_negative_weight_cycle(self):
     G = nx.cycle_graph(5, create_using=nx.DiGraph())
     G.add_edge(1, 2, weight=-7)
     for i in range(5):
         assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
         assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
     G = nx.cycle_graph(5)  # undirected Graph
     G.add_edge(1, 2, weight=-3)
     for i in range(5):
         assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
         assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
     G = nx.DiGraph([(1, 1, {'weight': -1})])
     assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, 1)
     assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1)
     # no negative cycle but negative weight
     G = nx.cycle_graph(5, create_using=nx.DiGraph())
     G.add_edge(1, 2, weight=-3)
     assert_equal(nx.bellman_ford(G, 0),
                  ({0: None, 1: 0, 2: 1, 3: 2, 4: 3},
                   {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}))
     assert_equal(nx.goldberg_radzik(G, 0),
                  ({0: None, 1: 0, 2: 1, 3: 2, 4: 3},
                   {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}))
Example #39
    def __init__(self, graph, type_whitelist=('depot', 'customer', 'station')):
        """Compute shortest and most efficient path.

           Only nodes with labels matching type_whitelist will be considered.
        """
        self._cache = list()
        self._graph = graph
        self._type_whitelist = type_whitelist

        # from each depot, customer, station ...
        for src_coor, src_data in graph.nodes_iter(data=True):
            if src_data['type'] in type_whitelist:
                # get shortest paths starting from src_coor
                shortest_path = nx.single_source_dijkstra_path(graph,
                                                               src_coor,
                                                               weight='lenght')
                # get most energy-efficient paths from src_coor
                greenest_t = nx.bellman_ford(graph, src_coor, weight='energy')
                g_pred, g_energy = greenest_t

                # ... to other depot, customer, destination
                for dest_coor, dest_data in graph.nodes_iter(data=True):
                    if dest_data['type'] in type_whitelist \
                       and dest_coor != src_coor \
                       and dest_coor in shortest_path \
                       and dest_coor in g_energy:
                        # unroll the path from predecessors dictionary
                        greenest_path = list()
                        coor_to_add = dest_coor
                        while coor_to_add is not None:
                            greenest_path.append(coor_to_add)
                            coor_to_add = g_pred[coor_to_add]
                        greenest_path = list(reversed(greenest_path))

                        self._add((*src_coor, src_data['type']),
                                  (*dest_coor, dest_data['type']),
                                  greenest_path, shortest_path[dest_coor])
def prog_30(fname):
    graph = nx.DiGraph()
    f = open(fname)
    ns,es = map(int, f.readline().strip().split())
    graph.add_nodes_from(range(1,ns+1))
    for line in f:
        e1,e2,w = map(int, line.strip().split())
        graph.add_weighted_edges_from([(e1,e2,w)])
    f.close()

    pres,dist = nx.bellman_ford(graph,1);
    # print pres
    for i in xrange(1,ns+1):
        if i in dist:
            print dist[i],
        else:
            print 'x',

    with open('result.dat','w') as f:
        for i in xrange(1,ns+1):
            if i in dist:
                f.write(str(dist[i])+'\t')
            else:
                f.write('x\t')
    def get_best_variants(self, weights):

        iweight = {}
        for iw in weights:
            identifier = iw["name"]
            weight = iw["weight"]
            iweight[identifier] = weight

        add = {}
        add_ends = []
        for i,model in enumerate(nx.weakly_connected_components(self.graph)):
            source = "source_{}".format(i + 1)
            sink = "sink_{}".format(i + 1)
            ends = [k for k,v in self.graph.out_degree(model).items() 
                    if self.graph.node[k].get('ftype', "") == "exon_out" and v == 0]
            for end in ends:
                add_ends.append((end, sink))

            for node in model:
                if self.graph.node[node].get('ftype', "") ==  "exon_in":
                    add.setdefault(source, []).append(node)

        for p in add_ends:
            self.graph.add_path(p, ftype="sink")

        for source, targets in add.items():
            for target in targets:
                self.graph.add_path((source, target), ftype='source')
                self._set_source_weight((source, target), weights)

        for n1,n2 in self.graph.edges():
            self.graph.edge[n1][n2]['weight'] = -0.01
            d = self.graph.edge[n1][n2]
            for k,v in self.max_id_value.items():
                if v > 0:
                    if k in d:
                        w = d[k] / float(v) * iweight[k]
                        self.graph.edge[n1][n2]['weight'] -= w 

            if self.graph.edge[n1][n2]['ftype'] == "exon":
                self.logger.debug("Edge: %s, %s", n1, n2)
                for k,v in d.items():
                    self.logger.debug("Key: %s, value: %s", k, v)

        for n1,n2 in self.graph.edges():
            d = self.graph.edge[n1][n2]

        for source, targets in add.items():
            sink = source.replace("source", "sink")
            try:
                pred,dis = nx.bellman_ford(self.graph, source)
                if not pred.has_key(sink):
                    continue
                t = sink
                best_variant = []
                while pred[t]:
                    best_variant.append(pred[t])
                    t = pred[t]

                p = re.compile(r'(.+):(\d+)([+-])')
                model = []
                strand = "+"
                for i in range(0, len(best_variant) - 1, 2):
                    n1,n2 = best_variant[i:i+2]
                    e = self._nodes_to_exon(n1, n2)
                    if e:
                        strand = e.strand
                        model.append(e)
                if strand == "+":
                    model = model[::-1]
                #print model
                if len(model) > 0:
                    yield model
    
            except Exception as e:
                self.logger.warning("Failed: %s", self.graph.edge[source].keys())
                self.logger.warning("%s", e)
                raise
Example #42
#rosalind_ba5b

import networkx as nx

#read data
f = open('rosalind_ba5b.txt').read().rstrip().split()
n = int(f[0])
m = int(f[1])

G = nx.DiGraph()
v = iter(f[2:])
for i in range(n):
    for j in range(m+1):
        w = int(next(v))
        G.add_edge((i, j), (i+1, j), weight=-w)
print(next(v))
for i in range(n+1):
    for j in range(m):
        w = int(next(v))
        G.add_edge((i, j), (i, j+1), weight=-w)

s, t = (0, 0), (n, m)
print(-nx.bellman_ford(G, s)[1][t])
Example #43
#read data
f = open('rosalind_ba5d.txt')
s = int(f.readline().rstrip())
t = int(f.readline().rstrip())

G = nx.DiGraph()
for line in f:
    l = line.rstrip().split('->')
    u  = int(l[0])
    v, w = map(int, l[1].split(':'))
    
    # sign-reverse weight, thus turn a longest path problem to
    # a shortest path search with bellman_ford
    G.add_edge(u, v, weight=-w)  

bf = nx.bellman_ford(G, s)
longest = -bf[1][t]
path = []
while t != None:
    path.append(t)
    t = bf[0][t]

path = path[::-1]

'''
longest = 0
path = []
for p in nx.all_simple_paths(G, s, t):
    len_p = 0
    for i in range(len(p) - 1):
        len_p += G[p[i]][p[i+1]]['weight']
Example #44
#read data
f = open('rosalind_ba5d.txt')
s = int(f.readline().rstrip())
t = int(f.readline().rstrip())

G = nx.DiGraph()
for line in f:
    l = line.rstrip().split('->')
    u = int(l[0])
    v, w = map(int, l[1].split(':'))

    # sign-reverse weight, thus turn a longest path problem to
    # a shortest path search with bellman_ford
    G.add_edge(u, v, weight=-w)

bf = nx.bellman_ford(G, s)
longest = -bf[1][t]
path = []
while t != None:
    path.append(t)
    t = bf[0][t]

path = path[::-1]
'''
longest = 0
path = []
for p in nx.all_simple_paths(G, s, t):
    len_p = 0
    for i in range(len(p) - 1):
        len_p += G[p[i]][p[i+1]]['weight']
    if len_p > longest:
Example #45
FILE = open_local_file('data.txt').read()
G = nx.DiGraph()

# Set up the pyramid data
FILE = FILE.split('\n')
for i in range(0, len(FILE)):
    FILE[i] = FILE[i].split()

children = [1]
for l, row in enumerate(FILE):
    node_num = max(children) + 1

    for x in row:
        if (l != len(FILE) - 1):
            parent = children.pop(0)

            G.add_edge(parent, node_num, weight=-int(x))
            if node_num not in children:
                children.append(node_num)

            node_num += 1

            G.add_edge(parent, node_num, weight=-int(x))
            if node_num not in children:
                children.append(node_num)

for child, w in zip(children, FILE[-1]):
    G.add_edge(child, 0, weight=-int(w))

pred, dist = nx.bellman_ford(G, 1)
print - dist[0]
Example #46
def bf(G):
    bf = nx.bellman_ford(G, 1)[1]
    return [bf.get(n, "x") for n in sorted(G.nodes())]
Example #47
def main(argv):
    inputfile = ''
    outputfile = ''
    graphfile = ''
    source = 0
    destination = 0
    budget = 0

    try:
        opts, args = getopt.getopt(argv,"h:i:o:s:d:b:g:",["gfile=","ifile=",\
          "ofile="])
    except getopt.GetoptError:
        print 'tcf.py -i <intermediate_output_file> -o <outputfile> -s <source>'\
           + '-d <destination> -b <budget> -g <graphfile>'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'tcf.py -i <intermediate_output_file> -o <outputfile> -s' \
            + ' <source> -d <destination> -b <budget> -g <graphfile>'
            sys.exit()
        elif opt in ("-i", "--ifile"):
            inputfile = arg
        elif opt in ("-o", "--ofile"):
            outputfile = arg
        elif opt in ("-s"):
            source = int(arg)
        elif opt in ("-d"):
            destination = int(arg)
        elif opt in ("-b"):
            budget = int(arg)
        elif opt in ("-g"):
            graphfile = arg

#   Time constrained bitmap

    print graphfile
    f = open(graphfile, 'r')
    max_nodes = int(f.readline())
    f.close()

    graph = nx.DiGraph()
    TCGraph = nx.DiGraph()

    print "graphfile is", graphfile
    g = np.loadtxt(graphfile, dtype=int, skiprows=1, ndmin=2)

    # create graph from input file
    for i in range(1, max_nodes + 1):
        graph.add_node(i)
        TCGraph.add_node(i)

    for i in range(0, len(g)):
        graph.add_edge(g[i][0], g[i][1], weight=g[i][2])
        graph.add_edge(g[i][1], g[i][0], weight=g[i][2])

    # Dijkstra's to find shortest distance to source and destination from
    # each node

    to_source = nx.single_source_dijkstra_path_length(graph, source)
    to_dest = nx.single_source_dijkstra_path_length(graph, destination)

    # For each edge, check to see if shortest path using this edge is
    # within the time budget
    for edge in graph.edges(data=True):
        if ((to_source[edge[0]] + edge[2]['weight'] + to_dest[edge[1]]) <=
                budget):
            TCGraph.add_edge(edge[0], edge[1], weight=edge[2]['weight'])

    # print the graph
    f = open(inputfile, 'w')
    f.write("Source : " + str(source) + '\n')
    f.write("Destination : " + str(destination) + '\n')
    f.write("Budget : " + str(budget) + '\n')
    f.write("Set of edges is\n")
    for e in TCGraph.edges(data=True):
        f.write(str(e[0]) + ' ' + str(e[1]) + ' ' + str(e[2]['weight']) + '\n')

    f.close()

    # Loop removal using Suurballe's Algorithm

    print "input file", inputfile
    a = np.loadtxt(inputfile, dtype = int , delimiter = " ", skiprows = 4,\
        ndmin = 2)
    if len(a) == 0:
        f = open(outputfile, 'w')
        f.write("Source : " + str(source) + '\n')
        f.write("Destination : " + str(destination) + '\n')
        f.write("Budget : " + str(budget) + '\n')
        f.write("Set of edges is\n")
        f.close()
        sys.exit(0)

    NG = nx.DiGraph()

    for i in range(0, len(a)):
        NG.add_edge(a[i][0], a[i][1], weight=a[i][2])
        NG.add_edge(a[i][1], a[i][0], weight=a[i][2])

    remaining = deque(nx.nodes(NG))  # remaining nodes to run suurballe's from
    remaining.remove(source)
    remaining.remove(destination)  # these shouldn't be included

    # Split all nodes to in and out nodes connected by zero-weight edge
    working_graph = split_graph(NG)

    # add our fake node between the source and the destination with edges of
    # latency 0

    working_graph.add_node(dummy)
    working_graph.add_edge(str(source) + "_out", dummy, weight=0)
    working_graph.add_edge(str(destination) + "_out", dummy, weight=0)

    while len(remaining) > 0:
        # combine i in and i out
        i = remaining.popleft()
        working_graph = combine_node(working_graph, i)

        # step 1: run bellman ford from i to dummy, store path in a list
        # Total Latency is given by dest[dummy]
        pred, dest = nx.bellman_ford(working_graph, i, weight='weight')

        if (not (dummy in dest)):
            print "Error: dummy not reachable from ", i
            sys.exit(0)

        # step 2: get this shortest path and invert the edges in NG
        path = []
        temp = dummy
        while (temp != i):
            path.append([pred[temp], temp])
            temp = pred[temp]

        for e in path:
            temp_weight = working_graph.edge[e[0]][e[1]]['weight']
            temp_weight *= -1
            working_graph.remove_edge(e[0], e[1])

            if (working_graph.has_edge(e[1], e[0])):
                temp_lat = working_graph.edge[e[1]][e[0]]['weight']
                working_graph.add_edge(e[1], e[0], weight = temp_weight, original\
                   = temp_lat)
            else:
                working_graph.add_edge(e[1], e[0], weight = temp_weight, original\
                  = -1)

        # step 3: run bellman ford from i to dummy on new graph, store path in
        # a dictionary.  Add each latency to total latency.
        pred_inv, dest_inv = nx.bellman_ford(working_graph, i, weight='weight')

        # If bellman ford says distance to dummy is negative infinity, remove
        # node i from remaining and from NG and continue to next node in remaining
        # if total latency of both paths is higher than the budget, remove node i
        # from NG and continue to the next node in remaining

        if((not (dummy in dest_inv)) or ((dest[dummy] + dest_inv[dummy]) \
            > budget)):
            NG.remove_node(i)
            for e in path:
                if (working_graph.edge[e[1]][e[0]]['original'] == -1):
                    w = working_graph.edge[e[1]][e[0]]['weight']
                    w *= -1
                    working_graph.remove_edge(e[1], e[0])
                    working_graph.add_edge(e[0], e[1], weight=w)
                else:
                    temp_lat = working_graph.edge[e[1]][e[0]]['original']
                    w = working_graph.edge[e[1]][e[0]]['weight']
                    w *= -1
                    working_graph.add_edge(e[1], e[0], weight = temp_lat, \
                       original = -1)
                    working_graph.add_edge(e[0], e[1], weight=w)

        else:
            path_new = {}
            temp = dummy
            while (temp != i):
                path_new[pred_inv[temp]] = temp
                temp = pred_inv[temp]

        # iterate over path 1 list -- for each item, see if its inverse exists in
        # the dictionary -- if yes remove both (makes this path edge-disjoint)
            for e in path:
                if (e[1] in path_new):
                    if (path_new[e[1]] == e[0]):
                        path.remove(e)
                        del path_new[e[1]]
        # Else, remove path 1 node from remaining.
                else:
                    if (remaining.count(e[0]) > 0):
                        remaining.remove(e[0])

        # also switch inverse back on graph
                if (working_graph.edge[e[1]][e[0]]['original'] == -1):
                    w = working_graph.edge[e[1]][e[0]]['weight']
                    w *= -1
                    working_graph.remove_edge(e[1], e[0])
                    working_graph.add_edge(e[0], e[1], weight=w)
                else:
                    temp_lat = working_graph.edge[e[1]][e[0]]['original']
                    w = working_graph.edge[e[1]][e[0]]['weight']
                    w *= -1
                    working_graph.add_edge(e[1], e[0], weight = temp_lat, \
                       original = -1)
                    working_graph.add_edge(e[0], e[1], weight=w)

        # Create list from dictionary. Iterate over and remove each node from
        # remaining.

            for n in path_new.keys():
                if (remaining.count(n) > 0):
                    remaining.remove(n)

        # split i into i in and i out
        working_graph = split_node(working_graph, i)

    # print the graph
    f = open(outputfile, 'w')
    f.write("Source : " + str(source) + '\n')
    f.write("Destination : " + str(destination) + '\n')
    f.write("Budget : " + str(budget) + '\n')
    f.write("Set of edges is\n")
    for e in NG.edges():
        f.write(str(e[0]) + '->' + str(e[1]) + '\n')

    f.close()
Example #48
# rosalind_sdag
# shortest paths in a weighted DAG

import networkx as nx

lines = open('rosalind_sdag.txt').read().rstrip().split('\n')
n, _m = [int(i) for i in lines[0].split()]
nodes = [i+1 for i in range(n)]
edges = [tuple([int(i) for i in j.split()]) for j in lines[1:]]

G = nx.DiGraph()
G.add_nodes_from(nodes)
G.add_weighted_edges_from(edges)


pred, d = nx.bellman_ford(G, source=1, weight='weight')

result = [str(d.get(n, 'x')) for n in nodes]    
    
print(' '.join(result))
open('rosalind_sdag_sub.txt', 'wt').write(' '.join(result))
Example #49
def find_path(digraph, start="USD"):
    path = nx.bellman_ford(digraph, start, return_negative_cycle=True)
    return path
Example #50
nx.is_isolate(G, 5)  # True

# HITS
nx.hits(G, max_iter=1000)  # cannot converge?

# maximal independent set
nx.maximal_independent_set(G)

# shortest path
nx.shortest_path(G)  # need "predecessors_iter"
nx.all_pairs_shortest_path(G)
nx.all_pairs_shortest_path_length(G)

nx.predecessor(G, 1)
nx.predecessor(G, 1, 378)

nx.dijkstra_path(G, 1, 300)
nx.dijkstra_path_length(G, 1, 300)
nx.single_source_dijkstra_path(G, 1)
nx.single_source_dijkstra_path_length(G, 1)
nx.all_pairs_dijkstra_path(G)
nx.all_pairs_dijkstra_path_length(G)

nx.bellman_ford(G, 1)

# Traversal
list(nx.dfs_edges(G))
list(nx.dfs_edges(G, 1))
nx.dfs_tree(G)  # return a networkx graph
list(nx.bfs_edges(G, 1))
Beispiel #51
0
# rosalind_sdag
# shortest paths in a DAG (Rosalind SDAG)

import networkx as nx

lines = open('rosalind_sdag.txt').read().rstrip().split('\n')
n, _m = [int(i) for i in lines[0].split()]
nodes = [i + 1 for i in range(n)]
edges = [tuple([int(i) for i in j.split()]) for j in lines[1:]]

G = nx.DiGraph()
G.add_nodes_from(nodes)
G.add_weighted_edges_from(edges)

pred, d = nx.bellman_ford(G, source=1, weight='weight')

result = [str(d.get(n, 'x')) for n in nodes]

print(' '.join(result))
open('rosalind_sdag_sub.txt', 'wt').write(' '.join(result))
Beispiel #52
0
import uuid

import networkx as nx


def johnson(G, weight='weight', new_weight=None):
    """Compute shortest paths between all nodes in a weighted graph using
    Johnson's algorithm.

    Parameters
    ----------
    G : NetworkX graph

    weight: string, optional (default='weight')
        Edge data key corresponding to the edge weight.

    new_weight: string, optional (default=None)
        Edge data key corresponding to the new edge weight after graph transformation.

    Returns
    -------
    distance : dictionary
       Dictionary, keyed by source and target, of shortest paths.

    Raises
    ------
    NetworkXError
       If given graph is not weighted.

    Examples
    --------
    >>> import networkx as nx
    >>> graph = nx.DiGraph()
    >>> graph.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
    ... ('0', '2', 2), ('1', '2', 4), ('2', '3', 1)])
    >>> paths = nx.johnson(graph, weight='weight')
    >>> paths['0']['2']
    ['0', '1', '2']

    Notes
    -----
    Johnson's algorithm is suitable even for graphs with negative weights. It
    works by using the Bellman–Ford algorithm to compute a transformation of
    the input graph that removes all negative weights, allowing Dijkstra's
    algorithm to be used on the transformed graph.

    It may be faster than the Floyd-Warshall algorithm on sparse graphs.
    Algorithm complexity: O(V^2 log V + V E).

    See Also
    --------
    floyd_warshall_predecessor_and_distance
    floyd_warshall_numpy
    all_pairs_shortest_path
    all_pairs_shortest_path_length
    all_pairs_dijkstra_path
    bellman_ford
    """
    if not nx.is_weighted(G, weight=weight):
        raise nx.NetworkXError('Graph is not weighted.')

    new_node = nx.utils.generate_unique_node()
    G.add_weighted_edges_from((new_node, node, 0) for node in G.nodes())

    # Calculate distance of shortest paths
    dist = nx.bellman_ford(G, source=new_node, weight=weight)[1]

    delete = False
    if new_weight is None:
        delete = True
        new_weight = uuid.uuid1()

    for u, v, w in G.edges(data=True):
        w[new_weight] = w[weight] + dist[u] - dist[v]

    G.remove_node(new_node)
    all_pairs_path = nx.all_pairs_dijkstra_path(G, weight=new_weight)

    if delete:
        for u, v, w in G.edges(data=True):
            if new_weight in w:
                w.pop(new_weight)

    return all_pairs_path
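The reweighting described in the Notes can be checked in isolation: with h taken from Bellman-Ford distances out of a virtual zero-weight source, every reweighted edge w'(u, v) = w(u, v) + h(u) - h(v) is non-negative as long as no negative cycle exists. A small self-contained sketch (plain Python, not the routine above) on the docstring's example graph:

edges = [('0', '3', 3), ('0', '1', -5), ('0', '2', 2), ('1', '2', 4), ('2', '3', 1)]
nodes = {u for u, v, w in edges} | {v for u, v, w in edges}

# Bellman-Ford distances from a virtual source joined to every node with weight 0.
h = {n: 0 for n in nodes}
for _ in range(len(nodes)):
    for u, v, w in edges:
        if h[u] + w < h[v]:
            h[v] = h[u] + w

reweighted = {(u, v): w + h[u] - h[v] for u, v, w in edges}
assert all(w >= 0 for w in reweighted.values())
print(reweighted)   # all values are >= 0, so Dijkstra can run on the new weights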
Beispiel #53
0
def main(argv):
   inputfile = ''
   outputfile = ''
   graphfile = ''
   source = 0
   destination = 0
   budget = 0
   

   try:
      opts, args = getopt.getopt(argv,"hi:o:s:d:b:g:",["gfile=","ifile=",\
        "ofile="])
   except getopt.GetoptError:
      print 'tcf.py -i <intermediate_output_file> -o <outputfile> -s <source>'\
         + ' -d <destination> -b <budget> -g <graphfile>'
      sys.exit(2)
   for opt, arg in opts:
      if opt == '-h':
         print 'tcf.py -i <intermediate_output_file> -o <outputfile> -s' \
         + ' <source> -d <destination> -b <budget> -g <graphfile>'
         sys.exit()
      elif opt in ("-i", "--ifile"):
         inputfile = arg
      elif opt in ("-o", "--ofile"):
         outputfile = arg
      elif opt in ("-s"):
         source = int(arg)
      elif opt in ("-d"):
         destination = int(arg)
      elif opt in ("-b"):
         budget = int(arg)
      elif opt in ("-g"):
         graphfile = arg

#  Time constrained bitmap

   print graphfile
   f = open(graphfile, 'r')
   max_nodes = int(f.readline())
   f.close()
    
   graph = nx.DiGraph()
   TCGraph = nx.DiGraph()

   print "graphfile is", graphfile
   g = np.loadtxt(graphfile, dtype = int , skiprows = 1, ndmin = 2)

   # create graph from input file
   for i in range(1, max_nodes+1):
      graph.add_node(i)
      TCGraph.add_node(i)

   for i in range(0, len(g)):
      graph.add_edge(g[i][0], g[i][1], weight = g[i][2])
      graph.add_edge(g[i][1], g[i][0], weight = g[i][2])

   # Dijkstra's to find shortest distance to source and destination from
   # each node

   to_source = nx.single_source_dijkstra_path_length(graph,source)
   to_dest = nx.single_source_dijkstra_path_length(graph,destination)
   
   # For each edge, check to see if shortest path using this edge is 
   # within the time budget
   for edge in graph.edges(data=True):
      if((to_source[edge[0]] + edge[2]['weight'] + to_dest[edge[1]]) <= budget):
         TCGraph.add_edge(edge[0],edge[1], weight = edge[2]['weight'])

   # print the graph
   f = open(inputfile,'w')
   f.write("Source : " + str(source) + '\n')
   f.write("Destination : " + str(destination) + '\n')
   f.write("Budget : " + str(budget) + '\n')
   f.write("Set of edges is\n")
   for e in TCGraph.edges(data=True):
      f.write(str(e[0]) + ' ' + str(e[1]) + ' ' + str(e[2]['weight']) + '\n')

   f.close()

   # Loop removal using Suurballe's Algorithm
  
   print "input file", inputfile
   a = np.loadtxt(inputfile, dtype = int , delimiter = " ", skiprows = 4,\
       ndmin = 2)
   if len(a) == 0:
      f = open(outputfile,'w')
      f.write("Source : " + str(source) + '\n')
      f.write("Destination : " + str(destination) + '\n')
      f.write("Budget : " + str(budget) + '\n')
      f.write("Set of edges is\n")
      f.close()
      sys.exit(0)
  
   NG = nx.DiGraph()
    
   for i in range(0, len(a)):
      NG.add_edge(a[i][0], a[i][1], weight = a[i][2])
      NG.add_edge(a[i][1], a[i][0], weight = a[i][2])

   remaining = deque(nx.nodes(NG)) # remaining nodes to run suurballe's from
   remaining.remove(source)
   remaining.remove(destination) # these shouldn't be included

   # Split all nodes to in and out nodes connected by zero-weight edge
   working_graph = split_graph(NG)

   # add our fake node between the source and the destination with edges of 
   # latency 0
  
   working_graph.add_node(dummy)
   working_graph.add_edge(str(source) + "_out", dummy, weight = 0)
   working_graph.add_edge(str(destination) + "_out", dummy, weight = 0) 

   while len(remaining) > 0:
      # combine i in and i out
      i = remaining.popleft()
      working_graph = combine_node(working_graph, i)
      
      # step 1: run bellman ford from i to dummy, store path in a list
      # Total Latency is given by dest[dummy]
      pred, dest = nx.bellman_ford(working_graph, i, weight='weight')
      
      if(not (dummy in dest)):
          print "Error: dummy not reachable from ", i
          sys.exit(0)     
 
      # step 2: get this shortest path and invert the edges in NG
      path = []
      temp = dummy
      while(temp != i):
          path.append([pred[temp],temp])
          temp = pred[temp]
      
      for e in path:
          temp_weight = working_graph.edge[e[0]][e[1]]['weight'] 
          temp_weight*=-1
          working_graph.remove_edge(e[0],e[1])
          
          if(working_graph.has_edge(e[1],e[0])):
              temp_lat = working_graph.edge[e[1]][e[0]]['weight'] 
              working_graph.add_edge(e[1], e[0], weight = temp_weight, original\
                 = temp_lat)
          else:
              working_graph.add_edge(e[1], e[0], weight = temp_weight, original\
                = -1)
          
      
      # step 3: run bellman ford from i to dummy on new graph, store path in
      # a dictionary.  Add each latency to total latency.
      pred_inv, dest_inv = nx.bellman_ford(working_graph, i, weight='weight')

      # If bellman ford says distance to dummy is negative infinity, remove
      # node i from remaining and from NG and continue to next node in remaining
      # if total latency of both paths is higher than the budget, remove node i
      # from NG and continue to the next node in remaining
      
      if((not (dummy in dest_inv)) or ((dest[dummy] + dest_inv[dummy]) \
          > budget)):
         NG.remove_node(i)
         for e in path:
             if(working_graph.edge[e[1]][e[0]]['original'] == -1):
                  w =  working_graph.edge[e[1]][e[0]]['weight']
                  w *= -1
                  working_graph.remove_edge(e[1],e[0])
                  working_graph.add_edge(e[0],e[1], weight = w)
             else:
                  temp_lat =  working_graph.edge[e[1]][e[0]]['original']
                  w =  working_graph.edge[e[1]][e[0]]['weight']
                  w*=-1
                  working_graph.add_edge(e[1], e[0], weight = temp_lat, \
                     original = -1)
                  working_graph.add_edge(e[0],e[1], weight = w)

      else:
         path_new = {}
         temp = dummy
         while(temp != i):
             path_new[pred_inv[temp]] = temp
             temp = pred_inv[temp]

      # iterate over path 1 list -- for each item, see if its inverse exists in
      # the dictionary -- if yes remove both (makes this path edge-disjoint)  
         for e in path:
             if(e[1] in path_new):
                  if(path_new[e[1]] == e[0]):
                      path.remove(e)
                      del path_new[e[1]]
      # Else, remove path 1 node from remaining.      
             else:
                  if(remaining.count(e[0])>0):
                      remaining.remove(e[0])
                      
      # also switch inverse back on graph
             if(working_graph.edge[e[1]][e[0]]['original'] == -1):
                  w =  working_graph.edge[e[1]][e[0]]['weight']
                  w*=-1
                  working_graph.remove_edge(e[1],e[0])
                  working_graph.add_edge(e[0],e[1], weight = w) 
             else:
                  temp_lat =  working_graph.edge[e[1]][e[0]]['original']
                  w =  working_graph.edge[e[1]][e[0]]['weight']
                  w*=-1
                  working_graph.add_edge(e[1], e[0], weight = temp_lat, \
                     original = -1)
                  working_graph.add_edge(e[0],e[1], weight = w)
      
      # Create list from dictionary. Iterate over and remove each node from
      # remaining. 
          
         for n in path_new.keys():
             if(remaining.count(n)>0):
                      remaining.remove(n)

      # split i into i in and i out      
      working_graph = split_node(working_graph, i) 


   # print the graph
   f = open(outputfile,'w')
   f.write("Source : " + str(source) + '\n')
   f.write("Destination : " + str(destination) + '\n')
   f.write("Budget : " + str(budget) + '\n')
   f.write("Set of edges is\n")
   for e in NG.edges():
      f.write(str(e[0]) + '->' + str(e[1]) + '\n')

   f.close()
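The core of steps 1-3 above is Suurballe-style edge reversal: find one shortest path, reverse its edges with negated weights, find a second shortest path, then drop edge pairs used in opposite directions so the two paths become edge-disjoint. A compact, self-contained sketch of just that idea (hypothetical helpers with a plain dict-based Bellman-Ford; it assumes the source reaches the destination, no negative cycles, and no antiparallel edges on the first path):

def bellman_ford_pred(edges, nodes, src):
    """Plain Bellman-Ford over a dict {(u, v): weight}; returns a predecessor map."""
    dist = {n: float('inf') for n in nodes}
    pred = {n: None for n in nodes}
    dist[src] = 0
    for _ in range(len(nodes) - 1):
        for (u, v), w in edges.items():
            if dist[u] + w < dist[v]:
                dist[v], pred[v] = dist[u] + w, u
    return pred

def path_edges(pred, src, dst):
    """Walk predecessors back from dst to src and return the path's edge list."""
    out, v = [], dst
    while v != src:
        out.append((pred[v], v))
        v = pred[v]
    return out

def two_disjoint_paths(edges, src, dst):
    nodes = {x for e in edges for x in e}
    p1 = path_edges(bellman_ford_pred(edges, nodes, src), src, dst)
    work = dict(edges)
    for u, v in p1:                           # reverse path edges, negate weights
        work[(v, u)] = -work.pop((u, v))
    p2 = path_edges(bellman_ford_pred(work, nodes, src), src, dst)
    overlap = {(u, v) for u, v in p1 if (v, u) in p2}
    return ([e for e in p1 if e not in overlap] +
            [e for e in p2 if (e[1], e[0]) not in overlap])

The union returned here still has to be untangled into two explicit paths; the full implementation above does that while also restoring the reversed edges and the split in/out nodes.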
Beispiel #54
0
    def test_bellman_ford(self):
        # single node graph
        G = nx.DiGraph()
        G.add_node(0)
        assert_equal(nx.bellman_ford(G, 0), ({0: None}, {0: 0}))
        assert_raises(KeyError, nx.bellman_ford, G, 1)

        # negative weight cycle
        G = nx.cycle_graph(5, create_using=nx.DiGraph())
        G.add_edge(1, 2, weight=-7)
        for i in range(5):
            assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)

        # negative weight cycle that user asks for
        G = nx.cycle_graph(5, create_using=nx.DiGraph())
        G.add_edge(1, 2, weight=-7)
        assert_equal(nx.bellman_ford(G, 0, return_negative_cycle=True), ({
            0: 4,
            1: 0,
            2: 1,
            3: 2,
            4: 3
        }, {
            0: -3,
            1: 1,
            2: -6,
            3: -5,
            4: -4
        }))

        G = nx.cycle_graph(5)  # undirected Graph
        G.add_edge(1, 2, weight=-3)
        for i in range(5):
            assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)

        # no negative cycle but negative weight
        G = nx.cycle_graph(5, create_using=nx.DiGraph())
        G.add_edge(1, 2, weight=-3)
        assert_equal(nx.bellman_ford(G, 0), ({
            0: None,
            1: 0,
            2: 1,
            3: 2,
            4: 3
        }, {
            0: 0,
            1: 1,
            2: -2,
            3: -1,
            4: 0
        }))

        # not connected
        G = nx.complete_graph(6)
        G.add_edge(10, 11)
        G.add_edge(10, 12)
        assert_equal(nx.bellman_ford(G, 0), ({
            0: None,
            1: 0,
            2: 0,
            3: 0,
            4: 0,
            5: 0
        }, {
            0: 0,
            1: 1,
            2: 1,
            3: 1,
            4: 1,
            5: 1
        }))

        # not connected, with a component not containing the source that
        # contains a negative cost cycle.
        G = nx.complete_graph(6)
        G.add_edges_from([('A', 'B', {
            'load': 3
        }), ('B', 'C', {
            'load': -10
        }), ('C', 'A', {
            'load': 2
        })])
        assert_equal(nx.bellman_ford(G, 0, weight='load'), ({
            0: None,
            1: 0,
            2: 0,
            3: 0,
            4: 0,
            5: 0
        }, {
            0: 0,
            1: 1,
            2: 1,
            3: 1,
            4: 1,
            5: 1
        }))

        # multigraph
        P, D = nx.bellman_ford(self.MXG, 's')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)
        P, D = nx.bellman_ford(self.MXG4, 0)
        assert_equal(P[2], 1)
        assert_equal(D[2], 4)

        # other tests
        (P, D) = nx.bellman_ford(self.XG, 's')
        assert_equal(P['v'], 'u')
        assert_equal(D['v'], 9)

        G = nx.path_graph(4)
        assert_equal(nx.bellman_ford(G, 0), ({
            0: None,
            1: 0,
            2: 1,
            3: 2
        }, {
            0: 0,
            1: 1,
            2: 2,
            3: 3
        }))
        assert_equal(nx.bellman_ford(G, 3), ({
            0: 1,
            1: 2,
            2: 3,
            3: None
        }, {
            0: 3,
            1: 2,
            2: 1,
            3: 0
        }))

        G = nx.grid_2d_graph(2, 2)
        pred, dist = nx.bellman_ford(G, (0, 0))
        assert_equal(sorted(pred.items()), [((0, 0), None), ((0, 1), (0, 0)),
                                            ((1, 0), (0, 0)),
                                            ((1, 1), (0, 1))])
        assert_equal(sorted(dist.items()), [((0, 0), 0), ((0, 1), 1),
                                            ((1, 0), 1), ((1, 1), 2)])
Beispiel #55
0
    def enrichedConcepts(self,
                         AnnotationData,
                         Tentities,
                         model=None,
                         **kwargs):
        """	
		The enrichedConcepts method for fuzzy concept enrichment analysis tool. This 
		method allows fuzzy enriched annotations contributing to a given process using 
		their semantic similarity scores computed based on a selected concept semantic 
		similarity model. 
		
		Arguments:
		
			AnnotationData (dict): A dictionary with entity as key and set of concepts
			as value, representing the reference or background dataset.
			
			Tentities: Set of targeted entities (targets) representing the system under
			consideration.
			
			model (tuple): The entity semantic similarity measure to be used. Refer to 
			the Supplementary for more details on symbols used for different measures.
			 
			**kwargs can be used to set different parameters needed for the model chosen
			as well as other parameters associated to entity retrieval model, including
			
			score (float >= 0.0): The threshold score providing the semantic similarity 
			degree at which entities are considered to be semantically close or similar in 
			the ontology structure and it is set to 0.3 by default.
			
			stream (int 0 or 1): An Enum parameter taking values of 0 or 1. It is set to 1
			to output results on the screen, to 0 to output results in a file.			
		"""
        # Check other Concept Semantic Similarity parameters here
        self.parameterChecks(**kwargs)
        stream = kwargs['stream'] if 'stream' in kwargs else 1
        if not Tentities or not isinstance(Tentities, (list, set, tuple)):
            print(
                InputError(
                    'Tentities - Type or Value Error',
                    'Tentities should be a non-empty list, set or tuple of entities.\n\nPlease refer to the tool documentation, fix this issue and try again ...\n'
                ))
            sys.exit(3)
        if not AnnotationData or not isinstance(AnnotationData, dict):
            print(
                InputError(
                    'AnnotationData - Type or Value Error',
                    'AnnotationData should be a non-empty dict mapping entities to sets of concepts.\n\nPlease refer to the tool documentation, fix this issue and try again ...\n'
                ))
            sys.exit(4)
        if not 'score' in kwargs: kwargs['score'] = 0.3
        elif not isinstance(
                kwargs['score'],
            (float, int)) or kwargs['score'] > 1.0 or kwargs['score'] < 0.0:
            print(
                InputError(
                    'score parameter - Value or Type Error',
                    'The value of the parameter score, if provided,\nshould be a positive float <= 1, by default set to 0.3.\n\nPlease refer to the tool documentation, fix this issue and try again ...\n'
                ))
            sys.exit(5)
        if not 'pvalue' in kwargs: kwargs['pvalue'] = 0.05
        elif not isinstance(
                kwargs['pvalue'],
                float) or kwargs['pvalue'] > 1.0 or kwargs['pvalue'] < 0.0:
            print(
                InputError(
                    'pvalue parameter - Value or Type Error',
                    'The value of the parameter pvalue, if provided,\nshould be a positive float <= 1, by default set to 0.05.\n\nPlease refer to the tool documentation, fix this issue and try again ...\n'
                ))
            sys.exit(7)

        self.Background = {}
        self.EntityMissing = {}
        if isinstance(AnnotationData, dict):
            for ent in AnnotationData:
                self.Background[ent] = set()
                self.EntityMissing[ent] = set()
                for tt in AnnotationData[ent]:
                    if tt in self.alt_id and self.alt_id[tt] in self.DagStr:
                        self.Background[ent].add(self.alt_id[tt])
                    elif tt in self.Dag and self.Dag.index(tt) in self.DagStr:
                        self.Background[ent].add(self.Dag.index(tt))
                    else:  # Term is either obsolete or does not exist in the onto!
                        self.EntityMissing[ent].add(tt)
                self.Background[ent].discard(self.oroot)
                if not self.Background[ent]: self.Background.pop(ent, False)
        else:
            print(
                InputError(
                    'AnnotationData - Type Error',
                    'AnnotationData should be either a dictionary: key (entity)/value (ontology annotation) mapping\nor a string representing a full path to an annotation file.\n\nPlease refer to the tool documentation, fix this issue and try again ...\n'
                ))
            sys.exit(9)
        if not self.Background:
            print(
                InputError(
                    'AnnotationData - Value Error',
                    'Sorry the background map is empty because concepts provided\ncould not be mapped to the ontology. Please check the type of the set of concept targets.'
                ))
            sys.exit(10)

        self.Targets = set()
        self.TargetMissing = set()
        for p in Tentities:
            if p in self.Background: self.Targets.add(p)
            else: self.TargetMissing.add(p)

        if not self.Targets:
            print(
                InputError(
                    'Tentities - Value Error',
                    'Sorry, none of the target entities provided could be mapped! Check the type of the set of concept targets.'
                ))
            sys.exit(9)

        now = time.time()
        print(
            "\nComputing different parameters and concept frequency now, this may take time..."
        )
        DicLevels = nx.bellman_ford(self.DagStr, self.oroot)
        self.DicLevels = DicLevels[-1]
        del DicLevels
        self.deep = -min(list(set(self.DicLevels.values())))

        self.search(model, **kwargs)

        # computing p-values using hypergeometric distribution and corrected p-values by Bonferroni
        tn = time.time()
        print("\nComputing p-value now, this may take time...", end=',')
        Bonf = len(self.TargetConcepts)
        N = len(self.Background)
        n = len(self.Targets)
        P03 = {}
        for t in self.fouts:
            Pv = 1.0
            Pv = 1 - dst.hypergeom.cdf(self.fouts[t][0] - 1, N, self.fouts[t]
                                       [1], n) if self.fouts[t][0] > 0 else 1.0
            if Pv <= 0.0:
                Pv = 0.0  # The Pv can become negative because of floating point
            if Pv >= 1.0:
                Pv = 1.0  # The Pv can go greater than 1.0 because of floating point
            PvBf = Bonf * Pv  # Bonferroni multiple correction
            if PvBf >= 1.0: PvBf = 1.0
            if Pv < kwargs['pvalue']: P03[t] = (Pv, PvBf)
        print("\rComputing p-value done, time elapsed: %d %s" %
              (int(round(time.time() - tn)), 'seconds...'))

        if not P03:
            print(
                "\n\nUnfortunately, no enriched concept has been identified for\nthreshold or cutoff of %.5f indicated."
                % (kwargs['pvalue'], ))
            print(
                "\n***************************************************************\n"
            )
            sys.exit(10)

        print(
            "\nFiltering identified enriched concepts now, this may take time..."
        )
        # Filtering set of significant terms
        Fl03 = set(P03.keys())
        for t in Fl03.copy():
            if not t in Fl03: continue
            Fl03 -= nx.ancestors(self.DagStr,
                                 t)  # Remove ancestors of the term t

        Fl03 = set([t for t in Fl03 if P03[t][1] < kwargs['pvalue']])
        outs = []
        if not Fl03:  # No term passed the Bonferroni correction
            Tmp = set([t for t in P03 if P03[t][0] < kwargs['pvalue']])
            print(
                "\n\nUnfortunately, no enriched concept passed the Bonferroni correction for\nthe threshold or cutoff of %.5f indicated."
                % (kwargs['pvalue'], ))
            if Tmp:
                for t in Tmp:
                    outs.append([
                        self.Dag[t], -self.DicLevels[t], P03[t][0], P03[t][1]
                    ])
                outs = sorted(outs, key=lambda x: (x[-1], -x[1]))
                outputfile = 'EntityIdentificationResults%d.txt' % (
                    random.randint(0, 100000), )
                print(
                    "\nGeneral statistics for each target concept can be found in the file: [%s]"
                    % (outputfile, ))
                headers = ['Concept-ID', 'Level', 'p-value', 'Adj-p-values']
                fp = open(outputfile, 'w')
                fp.write(
                    tabs(outs,
                         headers,
                         tablefmt='plain',
                         floatfmt="1.2e",
                         stralign="center"))
                fp.close()
            sys.exit()

        # building output
        for t in Fl03:  # Concept-ID, Term Level, p-value, Bonferroni correction
            outs.append(
                [self.Dag[t], -self.DicLevels[t], P03[t][0], P03[t][1]])
        outs = sorted(outs, key=lambda x: (x[-1], -x[1]))
        sts = [model[0].capitalize(), model[1].capitalize()
               ] if len(model) == 2 else [model[0].capitalize(), '']

        print("\nIdentifying fuzzy enriched concepts using : %s" %
              ("[conceptenrichment.py module]", ))
        print("Total number of concets in the target set : %d" % (Bonf, ))
        print("Number of enriched GO terms detected      : %d" % (len(outs), ))
        print(
            "Semantic Similarity approaches used is    : {}-{}\n".format(*sts))

        headers = ['Concept-ID', 'Level', 'p-value', 'Adj-p-values']
        if stream:
            print(
                tabs(outs,
                     headers,
                     tablefmt='grid',
                     floatfmt="1.5e",
                     stralign="center"))
        else:
            outputfile = 'EntityIdentificationResults%d.txt' % (random.randint(
                0, 100000), )
            print(
                "\nGenerale Statistics for each target concepts can be found in the file: [%s]"
                % (outputfile, ))
            fp = open(outputfile, 'w')
            fp.write(
                tabs(outs,
                     headers,
                     tablefmt='plain',
                     floatfmt="1.2e",
                     stralign="center"))
            fp.close()

        print("\nProcessing accomplished on %s" %
              str(time.asctime(time.localtime())))
        print("Total time elapsed is approximately: %d %s" %
              (int(round((time.time() - now) / 60)), 'minutes.'))
        print(
            "\n************************************************************************************\n"
        )
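The p-value computed above is the upper tail of the hypergeometric distribution, P(X >= k) = 1 - cdf(k - 1), followed by a Bonferroni correction. A standalone sketch with made-up numbers (N background entities, K of them annotated with the concept, n targets, k annotated targets, 25 tested concepts):

from scipy import stats as dst

N, K, n, k = 1000, 40, 50, 8             # illustrative values only
Pv = 1 - dst.hypergeom.cdf(k - 1, N, K, n) if k > 0 else 1.0
Pv = min(max(Pv, 0.0), 1.0)              # guard against floating-point drift
PvBf = min(1.0, 25 * Pv)                 # Bonferroni multiple-testing correction
print(Pv, PvBf)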
Beispiel #56
0
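# (tail of an integer-mass -> amino-acid lookup table; the leading entries are truncated in this listing)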
 156: 'R', 101: 'T', 186: 'W', 99: 'V', 163: 'Y'} #, 4: 'X', 5: 'Z'}

spec = np.array(['0'] + open('rosalind_ba11e.txt').readline().rstrip().split(), dtype=int)
m = len(spec)

G = nx.DiGraph()
G.add_nodes_from(range(m))

# use sign-reversed weight of the target node for each edge weight
for i in range(m-1):
    for j in range(i+1, m):
        d = j - i 
        if d in mass_aa.keys():
            G.add_edge(i, j, {'weight': -spec[j], 'label': mass_aa[d]})


# A Bellman-Ford search and predecessor reconstruction to find the shortest path
# (i.e. the maximum-score path) in the graph with sign-reversed weights
v = m-1
path = nx.bellman_ford(G, 0)[0]


result = ''
u = path[v]
while u is not None:
    result = G[u][v]['label'] + result
    v = u
    u = path[v]

print(result)
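The sign-reversal trick above turns the maximum-score path problem into a shortest-path problem, which is safe here because the spectrum graph is a DAG, so the negated weights cannot form a negative cycle. A tiny sketch (assuming a networkx release that still exposes nx.bellman_ford):

import networkx as nx

H = nx.DiGraph()
H.add_weighted_edges_from([(0, 1, -5), (1, 2, -7), (0, 2, -3)])  # scores 5, 7, 3, negated

pred, dist = nx.bellman_ford(H, 0)
print(-dist[2])   # 12: the path 0 -> 1 -> 2 has the highest total score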
Beispiel #57
0
    def retrieveEntity(self,
                       AnnotationData,
                       Tconcepts,
                       model=('nunivers', 'universal'),
                       **kwargs):
        """
		The retrieveEntity method of the protein functional classification tool. This
		method identifies entities (e.g. genes or proteins) fuzzily contributing to a
		given process, using semantic similarity scores computed with a selected
		concept semantic similarity model.
		
		Arguments:
		
			AnnotationData (dict): A dictionary with entity as key and set of concepts
			as value.
			
			Tconcepts: Set of concepts for which associated entities need to be identified.
			
			model (tuple): The entity semantic similarity measure to be used. 
			Refer to the Supplementary for more details on symbols used for different 
			measures.
			 
			**kwargs can be used to set different parameters needed for the model chosen
			as well as other parameters associated to entity retrieval model, including
			
			score (float > 0.0): The threshold score providing the semantic similarity 
			degree at which entities are considered to be semantically close or similar in 
			the ontology structure and it is set to 0.3 by default.
			
			stream (int 0 or 1): An Enum parameter taking values of 0 or 1. It is set to 1
			to output results on the screen, to 0 to output results in a file.			
		"""

        if not 'score' in kwargs: kwargs['score'] = 0.3
        stream = kwargs['stream'] if 'stream' in kwargs else 1

        # Check other Concept Semantic Similarity parameters here
        self.parameterChecks(**kwargs)

        self.Background = {}
        self.EntityMissing = {}
        if isinstance(AnnotationData, dict):
            for ent in AnnotationData:
                self.Background[ent] = set()
                self.EntityMissing[ent] = set()
                for tt in AnnotationData[ent]:
                    if tt in self.alt_id and self.alt_id[tt] in self.DagStr:
                        self.Background[ent].add(self.alt_id[tt])
                    elif tt in self.Dag and self.Dag.index(tt) in self.DagStr:
                        self.Background[ent].add(self.Dag.index(tt))
                    else:  # Term is either obsolete or does not exist in the onto!
                        self.EntityMissing[ent].add(tt)
                self.Background[ent].discard(self.oroot)
                if not self.Background[ent]: self.Background.pop(ent, False)
        else:
            print(
                InputError(
                    'AnnotationData - Value Error',
                    'AnnotationData should be either a dictionary: key (entity)/value (ontology annotation) mapping\nor a string representing a full path to an annotation file.\n\nPlease refer to the tool documentation, fix this issue and try again ...\n'
                ))
            sys.exit(2)
        if not self.Background:
            print(
                InputError(
                    'Annots - Type Error',
                    'Sorry the background map is empty. Please check the type of the set of concept targets.'
                ))
            sys.exit(3)

        self.Targets = set()
        if isinstance(Tconcepts, (list, set, tuple)):
            for tt in Tconcepts:
                if tt in self.alt_id and self.alt_id[tt] in self.DagStr:
                    self.Targets.add(self.alt_id[tt])
                elif tt in self.Dag and self.Dag.index(tt) in self.DagStr:
                    self.Targets.add(self.Dag.index(tt))
        else:
            print(
                InputError(
                    'Tconcepts - Type Error',
                    'Tconcepts should be a list, set or a tuple of concepts.\n\nPlease refer to the tool documentation, fix this issue and try again ...\n'
                ))
            sys.exit(4)
        if not self.Targets:
            print(
                InputError(
                    'Tconcepts - Value Error',
                    'Please, the target set should not be empty! Check the type of the set of concept targets.'
                ))
            sys.exit(5)

        DicLevels = nx.bellman_ford(self.DagStr, self.oroot)
        self.DicLevels = DicLevels[-1]
        del DicLevels
        self.deep = -min(list(set(self.DicLevels.values())))

        now = time.time()
        P03, New = self.search(model, **kwargs)

        VideKey = set([t for t in New if not New[t]])
        for t in VideKey:
            New.pop(t, False)

        sts = [model[0].capitalize(), model[1].capitalize()
               ] if len(model) == 2 else [model[0].capitalize(), '']

        print("\nFuzzy Identification of proteins/genes using    %s:" %
              ("[Running entityidentification.py]", ))
        print("Total number of possible target GO IDs in the list: %d" %
              (len(self.Targets), ))
        print("Semantic Similarity approaches used is  : {}-{}, ".format(*sts))

        outputfile = 'EntityIdentificationResults%d.txt' % (random.randint(
            0, 100000), )

        outs = []
        if not New:  #Prot, high SS, AvgSS, termI
            print(
                "\n\nUnfortunately, no entity reached the threshold or cutoff of %.5f indicated. However,\nAll set of proteins and associated SS scores are in the file: %s"
                % (kwargs['score'], outputfile))
            for t in self.fouts:  #self.fouts[t].append((ent, csim[-1][1], mean([c[1] for c in csim]), csim[-1][0]))
                outs.append([self.Dag[t], -self.DicLevels[t]] +
                            list(self.fouts[t][0][:-1]))
                for i in range(1, len(self.fouts[t])):
                    outs.append(['', ''] + list(self.fouts[t][i][:-1]))
            fw = open(outputfile, 'w')
            headers = ['Concept-ID', 'Level', 'Entity', 'high SS', 'Avg SS']
            fw.write(
                tabs(outs,
                     headers,
                     tablefmt='plain',
                     floatfmt=".5f",
                     stralign="center"))
            fw.close()
            print(
                "\n***************************************************************\n"
            )
            sys.exit()

        for t in New:  # GO ID, GO term, Term Level, Number of proteins, Average SS, p-value, Bonferroni correction
            if not New[t]: continue
            outs.append([
                self.Dag[t], -self.DicLevels[t], P03[t][-2], New[t][0][0],
                round(New[t][0][1], 5),
                round(P03[t][-1], 5), P03[t][0], P03[t][1]
            ])
            for i in range(1, len(New[t])):
                outs.append([
                    '', '', '', New[t][i][0],
                    round(New[t][i][1], 5), '', '', ''
                ])

        headers = [
            'Concept-ID', 'Level', '# of Entities', 'Entity', 'ESSS', 'Avg SS',
            'p-value', 'Cp-values'
        ]
        if stream:
            print(
                tabs(outs,
                     headers,
                     tablefmt='grid',
                     floatfmt=".5f",
                     stralign="center"))
        else:
            print(
                "\nGenerale Statistics for each target GO ID can be found in the file: [%s]"
                % (outputfile, ))
            fp = open(outputfile, 'w')
            fp.write(
                tabs(outs,
                     headers,
                     tablefmt='plain',
                     floatfmt=".5f",
                     stralign="center"))
            fp.close()
        print("\nLegend of Entity Identification:")
        print("----------------------------------------")
        print(
            "Cp-values stand for corrected p-values done using Bonferroni multiple correction model."
        )

        print("\nProcessing accomplished on %s" %
              str(time.asctime(time.localtime())))
        print("Total time elapsed is approximately %.2f %s" %
              (time.time() - now, 'seconds'))
        print(
            "\n***************************************************************\n"
        )
Beispiel #58
0
def generate_rip_graph(size):
    
    network_graph = rip_graph(size)
    
    rip_first_iteration(network_graph)
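    # NOTE: 'convergence' is assumed to be maintained elsewhere (e.g. by
    # rip_update_distance_matrix); it is not defined in this snippet.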
    
    while not convergence:
        rip_broadcast(network_graph)
        rip_update_distance_matrix(network_graph)
        
    return network_graph


if __name__ == '__main__':

    network_graph = generate_rip_graph(10)

    # Comparing with bellman-ford for testing
    for n, nattr in network_graph.nodes(data=True):  # For each node n and attribute nattr
        print "Our RIP:"
        print "(%d,%s)" % (n, nattr['best_weights_vector'])
        print "(%d,%s)" % (n, nattr['default_next_hop'])

        pred, dist = nx.bellman_ford(network_graph, n)
        print "Bellman_ford:"
        print sorted(dist.items())
        print sorted(pred.items())
        #break
        
    draw_graph(network_graph)
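For the comparison above, it helps to see the update rule that both RIP and Bellman-Ford converge to: D[i][j] = min over neighbours k of (w(i, k) + D[k][j]). A minimal sketch (distance_vector is a hypothetical helper, not one of the rip_* functions):

import networkx as nx

def distance_vector(G, weight='weight'):
    """Iterate the Bellman update until no distance estimate improves."""
    INF = float('inf')
    D = {i: {j: (0 if i == j else INF) for j in G} for i in G}
    changed = True
    while changed:
        changed = False
        for i in G:
            for k in G[i]:                      # neighbours of i
                w = G[i][k].get(weight, 1)
                for j in G:
                    if w + D[k][j] < D[i][j]:
                        D[i][j] = w + D[k][j]
                        changed = True
    return D

print(distance_vector(nx.path_graph(4))[0])     # {0: 0, 1: 1, 2: 2, 3: 3}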