def betweenness_centrality(network,weighted=False):

    betweenness = {}
    nodes = network.nodes()

    shortest_paths = {}
    for pre_lpu in nodes:
        shortest_paths[pre_lpu] = {}
        for post_lpu in nodes:
            if weighted:
                shortest_paths[pre_lpu][post_lpu] = list(nx.all_shortest_paths(network,pre_lpu,post_lpu,weight='weight'))
            else:
                shortest_paths[pre_lpu][post_lpu] = list(nx.all_shortest_paths(network,pre_lpu,post_lpu))

    for n in nodes:
        b = 0
        for pre_lpu in nodes:
            for post_lpu in nodes:
                if pre_lpu != post_lpu and pre_lpu != n and post_lpu !=n:
                    p = 0
                    pi = 0
                    for path in shortest_paths[pre_lpu][post_lpu]:
                        p += 1
                        pi += n in path

                    b += float(pi) / p
        betweenness[n] = b
    return betweenness
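
A minimal usage sketch for the brute-force implementation above (graph choice is illustrative; the graph must be connected, because nx.all_shortest_paths raises NetworkXNoPath for unreachable pairs):

import networkx as nx

G = nx.karate_club_graph()
b = betweenness_centrality(G)
# Rough cross-check: the loop above visits ordered (source, target) pairs, so on an
# undirected graph its values come out at about twice networkx's unnormalized result.
b_ref = nx.betweenness_centrality(G, normalized=False)
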
def trunkroute(G,nodelist): 
    nodes = len(nodelist)
    for i in range(0,(nodes-1)):
        startname = nodelist[i]
        endname = nodelist[i+1]
 
        try:
            #pathshort = nx.dijkstra_path(G,startname,endname,'weight') # The function below returns a generator object; this one returns a list, and I have not yet worked out how to turn the object into a list.
            pathshort = nx.all_shortest_paths(G,startname,endname,'weight') # With weight, selection is by distance: pick out a shortest route.
            #psl=nx.dijkstra_path_length(G,startname,endname) # Returns the length (distance) of the shortest path. It is not quite the same as the path obtained with all_shortest_paths(); still an open question.
            pathshortseq = []
            pathshortseq = pathsequence(pathshort) # No sorting is needed, but the sorting function strips the generator object and produces an array of lists.
            pathshortlong = []
            pathshortlong=pathadddistance(pathshortseq,G) # Add the shortest-distance data between nodes.
            #displayformatpath(pathshort,psl)
            displayformatpath(pathshortlong,'Short') # Display the route nodes and distances in the terminal.
            #trunkselectdisplay(pathshortseq) # Add fibre-cable selection info (shortest length).
        except nx.NetworkXNoPath:
            print '**********Short Route Attention! %s to %s have no route in this G.**********'%(startname.encode('GB2312'),endname.encode('GB2312'))
        try:
            pathall = nx.all_shortest_paths(G,startname,endname) # Without weight, "shortest" means the fewest nodes; with weight, selection is by distance.
            pathallseq = []
            pathallseq = pathsequence(pathall) # Convert the route-set object into an array, sort it, and return the array.
            #allnodedist = nx.all_pairs_dijkstra_path_length(G) # Returns a dict of direct distances between all node pairs. Moved into a global variable.
            #pathallseqlong = pathadddistance(allnodedist,pathallseq) # Add a distance int to each route in the set. Returns the route array with distances.
            pathallseqlong = []
            pathallseqlong = pathadddistance(pathallseq,G) # Add a distance int to each route in the set. Returns the route array with distances.
            displayformatpath(pathallseqlong) # Pass the node names into the function to make error output easier.
            #trunkselectdisplay(pathallseq) # Add fibre-cable selection info (fewest nodes).
        except nx.NetworkXNoPath:
            print '**********Route Attention! %s to %s have no route in this G.**********'%(startname.encode('GB2312'),endname.encode('GB2312'))
        #print pathshortseq
        #print pathshortlong
        if len(pathshortseq) != 0 and len(pathallseq) != 0:
            trunkselectdisplay(pathshortseq,pathallseq) # Invoke the fibre-cable selection function.
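
A self-contained sketch of the comparison trunkroute performs (distance-weighted versus hop-count shortest routes), without the project-specific helpers such as pathsequence and displayformatpath; the graph and node names below are made up for illustration:

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([("A", "B", 5.0), ("B", "C", 5.0), ("A", "C", 12.0)])

by_distance = list(nx.all_shortest_paths(G, "A", "C", weight="weight"))
by_hops = list(nx.all_shortest_paths(G, "A", "C"))

print(by_distance)  # [['A', 'B', 'C']]  - 10 units via B beats the 12-unit direct link
print(by_hops)      # [['A', 'C']]       - the direct link has the fewest hops
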
Example #3
 def test_all_shortest_paths(self):
     G = nx.Graph()
     nx.add_path(G, [0, 1, 2, 3])
     nx.add_path(G, [0, 10, 20, 3])
     assert_equal([[0, 1, 2, 3], [0, 10, 20, 3]],
                  sorted(nx.all_shortest_paths(G, 0, 3)))
     # with weights
     G = nx.Graph()
     nx.add_path(G, [0, 1, 2, 3])
     nx.add_path(G, [0, 10, 20, 3])
     assert_equal([[0, 1, 2, 3], [0, 10, 20, 3]],
                  sorted(nx.all_shortest_paths(G, 0, 3, weight='weight')))
     # weights and method specified
     G = nx.Graph()
     nx.add_path(G, [0, 1, 2, 3])
     nx.add_path(G, [0, 10, 20, 3])
     assert_equal([[0, 1, 2, 3], [0, 10, 20, 3]],
                  sorted(nx.all_shortest_paths(G, 0, 3, weight='weight',
                                               method='dijkstra')))
     G = nx.Graph()
     nx.add_path(G, [0, 1, 2, 3])
     nx.add_path(G, [0, 10, 20, 3])
     assert_equal([[0, 1, 2, 3], [0, 10, 20, 3]],
                  sorted(nx.all_shortest_paths(G, 0, 3, weight='weight',
                                               method='bellman-ford')))
Example #4
    def concrete_path_exists(self, o1, o2):
        try:
            m1 = o1.leaf_model_name
            m2 = o2.leaf_model_name
        except AttributeError:
            # One of the nodes is not in the dependency graph
            # No dependency
            return False

        # FIXME: Dynamic dependency check
        G = self.model_dependency_graph[False]
        paths = all_shortest_paths(G, m1, m2)

        try:
            any(paths)
            paths = all_shortest_paths(G, m1, m2)
        except NetworkXNoPath:
            # Easy. The two models are unrelated.
            return False

        for p in paths:
            path_verdict = True
            src_object = o1
            da = None

            for i in range(len(p) - 1):
                src = p[i]
                dst = p[i + 1]
                edge_label = G[src][dst]
                sa = edge_label['src_accessor']
                da = edge_label['dst_accessor']
                try:
                    dst_object = getattr(src_object, sa)
                    if dst_object and dst_object.leaf_model_name != dst and i != len(
                            p) - 2:
                        raise AttributeError
                except AttributeError as e:
                    self.log.debug(
                        'Could not check object dependencies, making conservative choice', src_object = src_object, sa = sa, o1 = o1, o2 = o2)
                    return True
                src_object = dst_object

            if src_object and ((not da and src_object == o2) or (
                    da and src_object == getattr(o2, da))):
                return True

            # Otherwise try other paths

        return False
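
The double call to all_shortest_paths above works around the fact that the result is a one-shot generator. A small standalone sketch of the same existence check that computes the paths only once (graph and node labels are made up):

import networkx as nx

G = nx.path_graph(4)   # 0-1-2-3
G.add_node(99)         # isolated node with no path to the others

def shortest_paths_or_empty(g, u, v):
    # Materialise the generator once; an unreachable pair raises NetworkXNoPath.
    try:
        return list(nx.all_shortest_paths(g, u, v))
    except nx.NetworkXNoPath:
        return []

print(shortest_paths_or_empty(G, 0, 3))   # [[0, 1, 2, 3]]
print(shortest_paths_or_empty(G, 0, 99))  # []
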
Example #5
    def findshortestpath(self, startrotmat4, goalrotmat4, base):
        self.__addstartgoal(startrotmat4, goalrotmat4, base)

        # startgrip = random.select(self.startnodeids)
        # goalgrip = random.select(self.goalnodeids)
        startgrip = self.startnodeids[0]
        goalgrip = self.goalnodeids[0]
        self.shortestpaths = nx.all_shortest_paths(self.regg, source = startgrip, target = goalgrip)
        self.directshortestpaths = []
        # directshortestpaths removed the repeated start and goal transit
        try:
            for path in self.shortestpaths:
                print path
                for i, pathnode in enumerate(path):
                    if pathnode.startswith('start') and i < len(path)-1:
                        continue
                    else:
                        self.directshortestpaths.append(path[i-1:])
                        break
                for i, pathnode in enumerate(self.directshortestpaths[-1]):
                    if i > 0 and pathnode.startswith('goal'):
                        self.directshortestpaths[-1]=self.directshortestpaths[-1][:i+1]
                        break
        except:
            print "No path found!"
            pass
Example #6
def main(json_file, output_prefix, source, target):
    
    with open(json_file) as data_file:    
        data = json.load(data_file)

    G = json_graph.node_link_graph(data, directed=False)

    print "Finished Reading in Graph: {0}".format(datetime.datetime.now())

    id_seq = networkx.get_node_attributes(G, "sequence")

    seq_id = { seq : node_id for node_id, seq in id_seq.items()}

    print "Created inverse lookup table: {0}".format(datetime.datetime.now())

    if ',' in target:
        targets = target.split(',')
    else:
        targets = [target]

    for target in targets:
        paths = networkx.all_shortest_paths(G, seq_id[source], seq_id[target])

        with open("{0}_paths_{1}_{2}.txt".format(output_prefix, source, target), 'w') as o:
            for path in paths:
                o.write(",".join( [id_seq[node_id] for node_id in path ] ))
                o.write("\n")

    print "Output paths: {0}".format(datetime.datetime.now())
Example #7
def baconise(g, actorA, actorB):
	actorA = actorA.replace("_", " ")
	actorB = actorB.replace("_", " ")

	if not(actorA in g.nodes()):
		actorA = findClosest(g, actorA)

	if not(actorB in g.nodes()):
		actorB = findClosest(g, actorB)

	lists = nx.all_shortest_paths(g, actorA, actorB)
	i = 0
	actors = []
	lista = None
	for lista in lists:
		printPath(g, lista)
		actors = actors + lista
		i = i + 1

	print "found " + str(i) + " paths of length " + str(len(lista))
	return list(set(actors))
Example #8
 def shortest_paths(self, v1, v2):
     try:
         l = nx.shortest_path_length(self.graph, v1, v2)
         paths = nx.all_shortest_paths(self.graph, v1, v2)
     except:
         paths = []
     return paths
    def _calculate_shortest_paths(self, env, action_size):
        s_next_s_action = {}
        G = nx.DiGraph()

        for s in range(env.n_locations):
          for a in range(action_size):
            next_s = env.transition_graph[s, a]
            if next_s >= 0:
              s_next_s_action[(s, next_s)] = a
              G.add_edge(s, next_s)

        best_action = np.zeros((env.n_locations, action_size), dtype=float)  # np.float was removed from recent NumPy
        for i in range(env.n_locations):
          if i == env.terminal_state_id:
            continue
          if env.shortest_path_distances[i, env.terminal_state_id] == -1:
            continue
          for path in nx.all_shortest_paths(G, source=i, target=env.terminal_state_id):
            action = s_next_s_action[(i, path[1])]
            best_action[i, action] += 1

        action_sum = best_action.sum(axis=1, keepdims=True)
        action_sum[action_sum == 0] = 1  # prevent divide-by-zero
        shortest_path_actions = best_action / action_sum

        return shortest_path_actions
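
The core idea of _calculate_shortest_paths in isolation: for a given state, the favoured first moves are the second nodes of the shortest paths to the terminal state. A small sketch with a made-up graph:

import networkx as nx

G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
first_hops = {path[1] for path in nx.all_shortest_paths(G, source=0, target=3)}
print(first_hops)  # {1, 2}: both successors of 0 lie on a shortest path to 3
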
Example #10
    def all_shortest_paths(self, source, target):
        """
        Generator which yields all shortest paths between the source
        and target types.

        Parameters:
        source   The source type.
        target   The target type.

        Yield: generator(steps)

        steps Yield: tuple(source, target, rules)

        source   The source type for this step of the information flow.
        target   The target type for this step of the information flow.
        rules    The list of rules creating this information flow step.
        """
        s = self.policy.lookup_type(source)
        t = self.policy.lookup_type(target)

        if self.rebuildsubgraph:
            self._build_subgraph()

        self.log.info("Generating all shortest information flow paths from {0} to {1}...".
                      format(s, t))

        with suppress(NetworkXNoPath, NodeNotFound):
            # NodeNotFound: the type is valid but not in graph, e.g.
            # excluded or disconnected due to min weight
            # NetworkXNoPath: no paths or the target type is
            # not in the graph
            for path in nx.all_shortest_paths(self.subG, s, t):
                yield self.__generate_steps(path)
Example #11
def getAllPaths():
    #import matplotlib.pyplot as plt
    
    g = nx.read_weighted_edgelist("hb.txt")   
    
    #print g["ASPA0085"]["HOHA0402"]
    
    
         
    fp = open("allpaths.txt", 'w')
    
    try:
        counter = 1
        for eachPath in nx.all_shortest_paths(g, u"ASPA0085", u"GLUA0194"):
            if not isValidPath(eachPath):
                continue
            fp.write("path%d" % counter)
            for eachResidue in eachPath:
                fp.write('%10s' % eachResidue)
            fp.write('\n')
            counter += 1
    except nx.exception.NetworkXNoPath:
        fp.write("No connected pathway\n")
    finally:
        fp.close()
Example #12
    def all_shortest_paths(self, source, target):
        """
        Generator which yields all shortest paths between the source
        and target types.

        Parameters:
        source   The source type.
        target   The target type.

        Yield: generator(steps)

        steps Yield: tuple(source, target, rules)

        source   The source type for this step of the information flow.
        target   The target type for this step of the information flow.
        rules    The list of rules creating this information flow step.
        """
        if self.rebuildgraph:
            self._build_graph()

        if source in self.G and target in self.G:
            try:
                paths = nx.all_shortest_paths(self.G, source, target)
            except nx.exception.NetworkXNoPath:
                pass
            else:
                for p in paths:
                    yield self.__get_steps(p)
Example #13
def shortestInOutPaths(model, input, output):
    shortestPaths = []
    inputnodes = []
    outputnodes = []
    colCount = 0

    for i in input.T:
        if i.any() == 1 and not colCount in inputnodes:
            inputnodes.append(colCount)
        colCount = colCount + 1

    colCount = 0
    for i in output.T:
        if i.any() == 1 and not colCount in outputnodes:
            outputnodes.append(colCount)
        colCount = colCount + 1


    for i in inputnodes:
        for o in outputnodes:
            #if i != o: NOT SURE?!?!?!!?!??!?!?!?
                #from perturbed to the inputs
            shortestPaths.append(networkx.all_shortest_paths(model.myGraph, i, o))

    return shortestPaths
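
Note that nx.all_shortest_paths returns a generator, so the list built above holds one-shot iterators; materialising each result keeps it reusable. A tiny sketch of the difference:

import networkx as nx

G = nx.cycle_graph(4)                 # 0-1-2-3-0
gen = nx.all_shortest_paths(G, 0, 2)  # generator: consumed after one pass
paths = list(gen)                     # materialised: safe to store and reuse
print(paths)                          # two equal-length paths, via 1 and via 3
print(list(gen))                      # [] - the generator is already exhausted
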
def search_path_to_target(nx_graph, opts):
    """
        Try to find a path to the target resource 
    """
    target_candidates = []
    possible_paths = []
    H = None
    
    # first step, find possible candidates for target
    for node, data in nx_graph.nodes_iter(data=True):
        if fnmatch.fnmatch(node, '*%s*' % (opts.target)):
            target_candidates.append(node)
            
    for target_candidate in target_candidates:
        has_path = nx.has_path(nx_graph, source='.', target=target_candidate)
        if has_path:
            # build the graph for the targeted resource, just for graphical purposes
            H=nx.DiGraph()
            
            # find all possible path
            possible_paths = nx.all_shortest_paths(nx_graph, source='.', target=target_candidate)
            for path in possible_paths:
                print "[+] Possible path to '%s': %s" % (target_candidate, path)
                H.add_edges_from(zip(path[::], path[1::]))
                
    return possible_paths, H
Example #15
 def run(self):
     while True:
         flow = yield self.src.store.get()
         #print flow
         if flow.des == self.src.id:
             flow.end_time = self.env.now
             self.out.store.put(flow)
             #print flow,
             #print " -> arriving time %.8f" % self.env.now
             continue
         # compute the next hop
         
         paths = nx.all_shortest_paths(self.topo,self.src.id,flow.des)
         next_hops = []
         for path in paths:
             if len(path)>1:
                 next_hops.append(path[1])
         next_hop = random.choice(next_hops)
         #print next_hop,
         # forwarding to next_hop
         target = self.src.id
         for sw in self.god.all_nodes:
             if sw.id == next_hop:
                 target = sw
                 break
         #print target
         if target:
             flow.src = target.id
             yield self.env.timeout(flow.size*1.0/self.rate)
             target.store.put(flow)
 def get_minimal_pathway(self, g, source, target, steps_cut_off):
     vector_shortest = [s_path for s_path in nx.all_shortest_paths(g, source, target) if len(s_path) == steps_cut_off]
     random.shuffle(vector_shortest)
     try:
         return vector_shortest[0][1]
     except:
         return []
Example #17
def sorted_paths(graph, station, order_node):
    """Return a list of all paths from station to order_node, but sorted"""
    shortest_length = len(nx.shortest_path(graph, station, order_node))
    all_paths = list(filter(lambda s: len(s) < shortest_length + 5,
            nx.all_shortest_paths(graph, station, order_node)))
    all_paths.sort(key=len)
    return (path for path in all_paths)
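
As written, the length filter above is a no-op, since every path yielded by nx.all_shortest_paths already has the minimal length. If the intent is simple paths up to a few hops longer than the shortest, one option (a sketch, not the author's code) is nx.shortest_simple_paths, which yields simple paths in order of increasing length:

import networkx as nx
from itertools import takewhile

def paths_within(graph, source, target, slack=4):
    # Keep simple paths whose hop count exceeds the shortest by at most `slack`.
    shortest = nx.shortest_path_length(graph, source, target)
    return list(takewhile(lambda p: len(p) - 1 <= shortest + slack,
                          nx.shortest_simple_paths(graph, source, target)))
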
Example #18
 def di_route(self, u, v):
     start = time.clock()
     route = None
     if nx.has_path(self.g,u,v):
         print("....di_route")
         bw = nx.get_edge_attributes(self.g,'weight')
         routes = nx.all_shortest_paths(self.g,u,v) # generator
         high_bw = 0
         for r in routes:
             if bw.has_key((r[0],r[1])):
                 min_bw = bw[(r[0],r[1])]
             else:
                 min_bw = bw[(r[1],r[0])]
             num = len(r)
             for i in range(1,num-1):
                 if bw.has_key((r[i],r[i+1])) and min_bw>bw[(r[i],r[i+1])]:
                     min_bw = bw[(r[i],r[i+1])]
                 elif bw.has_key((r[i+1],r[i])) and min_bw>bw[(r[i+1],r[i])]:
                     min_bw = bw[(r[i+1],r[i])]
             if min_bw > high_bw:
                 high_bw = min_bw
                 route = r
     cost = time.clock()-start
     print "cost_di:", cost
     return route
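
A compact restatement of the selection rule in di_route: among the equal-cost shortest routes, keep the one with the largest bottleneck bandwidth. The sketch below assumes an undirected graph whose 'weight' edge attribute stores bandwidth, as di_route does:

import networkx as nx

def bottleneck(g, route):
    # Smallest link bandwidth along the route.
    return min(g[u][v]['weight'] for u, v in zip(route, route[1:]))

def widest_shortest_route(g, u, v):
    if not nx.has_path(g, u, v):
        return None
    return max(nx.all_shortest_paths(g, u, v), key=lambda r: bottleneck(g, r))
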
Example #19
    def all_shortest_paths(self, source, target):
        """
        Generator which yields all shortest domain transition paths
        between the source and target types.

        Parameters:
        source   The source type.
        target   The target type.

        Yield: generator(steps)

        steps    A generator that returns the tuple of
                 source, target, and rules for each
                 domain transition.
        """
        s = self.policy.lookup_type(source)
        t = self.policy.lookup_type(target)

        if self.rebuildsubgraph:
            self._build_subgraph()

        self.log.info("Generating all shortest domain transition paths from {0} to {1}...".
                      format(s, t))

        with suppress(NetworkXNoPath, NodeNotFound):
            # NodeNotFound: the type is valid but not in graph, e.g. excluded
            # NetworkXNoPath: no paths or the target type is
            # not in the graph
            for path in nx.all_shortest_paths(self.subG, s, t):
                yield self.__generate_steps(path)
    def get_degree_of_separation_visualisation(self, author1, author2):
        
        if author1 == '' or author2 == '':
            return Graph()
        
        if author1 == author2:
            return Graph()
        
        # Compute all the shortest paths from author1 to author2
        try:
            list_of_paths = all_shortest_paths(self.authors_graph, self.author_idx[author1], self.author_idx[author2])
        except NetworkXError as e:
            return "Not found"

        g = Graph()
        # Add the shortest paths to the graph
        try:
            for path in list_of_paths:
                g.add_path(path)
        except NetworkXNoPath as e:
            return Graph()

        # Add attributes to nodes
        for i in g.nodes():
            g.node[i]['name']=self.authors[i].name
        print g.nodes(data=True)
        return g
Example #21
def shortest_transfer(a, b, linegraph, offset=1):
    """
    Calculate shortest transfer path(s) for each pair of lines between stations.

    Arguments
    ---------
    a - LineStation,
        LineStation object origin
    b - Linestation,
        Linestation object destination
    linegraph - networkx.Graph()
        networkx graph object with connectivity of the lines
    offset - int [default = 1]
        will return solutions  <= shortest+offset
    """
    alines = a.lines
    blines = b.lines

    somesolutions = list()
    least_transfers = list()
    for a in alines:
        for b in blines:
            solutions = nx.all_shortest_paths(linegraph, a, b)
            for solution in solutions:
                somesolutions.append(solution)
    short = 25
    for solution in somesolutions:
        if len(solution) < short:
            short = len(solution)
    for solution in somesolutions:
        if len(solution) <= short + offset:
            least_transfers.append(solution)
    return least_transfers
Example #22
def network(inF, gene1, gene2):
    G = nx.Graph()

    inFile = open(inF)
    S = set()
    for line in inFile:
        line = line.strip()
        fields = line.split('\t')
        S.add(fields[0])
        S.add(fields[1])
    inFile.close()
    L = list(S)

    G.add_nodes_from(L)

    inFile =open(inF)
    for line in inFile:
        line = line.strip()
        fields = line.split('\t')
        G.add_edge(fields[0], fields[1])
    inFile.close()
    
    print(G.number_of_nodes())
    print(G.number_of_edges())

    #s = nx.shortest_path(G, 'RBM20', 'EGR1')
    s = nx.all_shortest_paths(G, gene1, gene2)
    ouFile = open(inF + '-' + gene1 + '-' + gene2, 'w')
    for x in s:
        ouFile.write('\t'.join(x) + '\n')
    ouFile.close()
Example #23
    def all_shortest_paths(self, source, target):
        """
        Generator which yields all shortest domain transition paths
        between the source and target types.

        Parameters:
        source   The source type.
        target   The target type.

        Yield: generator(steps)

        steps    A generator that returns the tuple of
                 source, target, and rules for each
                 domain transition.
        """
        if self.rebuildgraph:
            self._build_graph()

        if source in self.G and target in self.G:
            try:
                paths = nx.all_shortest_paths(self.G, source, target)
            except nx.exception.NetworkXNoPath:
                pass
            else:
                for p in paths:
                    yield self.__get_steps(p)
Example #24
def compute_distance_on_graph(G, s_id, t_id):
    """ Computes the sum of the length in the shortest path between <tt>s</tt> and <tt>t</tt>.
    If the shortest path are more than one the shorter in edge lengths is considered
    <tt>return</tt> a double value of the length of the shortestpath between <tt>s</tt> and <tt>t</tt>
    """
    all_sp = list(nx.all_shortest_paths(G, source=s_id, target=t_id))

    min_sp = float("inf")

    for sp in all_sp:
        curr_length = 0
        for s_index in range(0, len(sp)-1):
            t_index = s_index+1

            s_id = sp[s_index]
            t_id = sp[t_index]

            s = G.node[s_id]
            t = G.node[t_id]

            curr_length += compute_euclidean_distance(s, t)

        min_sp = min(min_sp, curr_length)

    return min_sp
	def do_path(self, args):
		"Display the shortest path between two nodes"

		arglist = args.split(" ")

		if arglist[0] and arglist[1]:
			#Grab the args
			node1=arglist[0].upper()
			node2=arglist[1].upper()
		else:
			print "[-] Error: Args Needed"

		#ensure they exist
		if G.has_node(node1) and G.has_node(node2):
			if (nx.has_path(G,node1,node2)):
				print "[*] Shortest Paths from %s to %s" %(node1,node2)
				#Get the shortest paths
				paths = nx.all_shortest_paths(G, node1, node2)

				#Print all paths in pretty format
				for p in paths:
					outputpath = "[*] "
					for n in p:
						outputpath+=n+" -> "
					print outputpath[:-4]
			else:
				print "[-] No path exist :("
		else:
			print "[-] Node %s or %s does not exist :(" % (node1, node2)
Example #26
    def onDone(self, node, onDone):
        nodeidx = self.nodes.index(node)
        
        for (start, goal, execData) in node.getConnectedStartGoalPairs():
            self.connectivityGraph.add_node((nodeidx, start))
            self.connectivityGraph.add_node((nodeidx+1, goal))
            self.connectivityGraph.add_edge((nodeidx, start), 
                                            (nodeidx+1, goal), execData=execData)
            #print 'ADD', node, hash(start), hash(goal)
            if node == self.nodes[0]:
                #print 'N0', node
                self.connectivityGraph.add_edge('start', (nodeidx, start))
            if node == self.nodes[-1]:
                #print 'N1', node
                self.connectivityGraph.add_edge((nodeidx+1, goal), 'end')
        if nx.has_path(self.connectivityGraph, 'start', 'end'):
            for path in nx.all_shortest_paths(self.connectivityGraph, 'start', 'end'):
                #import IPython; IPython.embed()
                self.addConnectedStartGoalPair((path[1][1], path[-2][1], None))

        self.connectIdx(nodeidx)
        self.connectIdx(nodeidx+1)
        if self.isPaused() or self.isRunOnce():
            onDone()
            return
        if len(self.getConnectedStartGoalPairs()) == 0:
            self.components.subnode.chooseAndRun(onDone)
        else:
            onDone()
            return
Example #27
 def get_author_distance(self,AuthorID1,AuthorID2):
     coauthors = {}
     G=nx.Graph()
     #numberOfAuthors=[]
     #numberOfAuthors=self.search_author_by_name("")
     #if AuthorID1>len(numberOfAuthors) or AuthorID2>len(numberOfAuthors) or AuthorID1<0 or AuthorID2<0:
     #    return "Author Not Found"
     for p in self.publications:
         for a in p.authors:
             for a2 in p.authors:
                 if a != a2:
                     #try:
                     #    coauthors[a].add(a2)
                     #except KeyError:
                     #    coauthors[a] = set([a2])
                     G.add_edge(self.authors[a].name,self.authors[a2].name)
  #   newpath = self.find_shortest_path(coauthors, AuthorID1, AuthorID2)
     newpaths = nx.all_shortest_paths(G, source=AuthorID1, target=AuthorID2)
     #print [p for p in nx.all_shortest_paths(G, source=AuthorID1, target=AuthorID2)]
     #if newpaths==None: 
     #    return []  
     #else:
     #    return newpaths
     try: 
         return [p for p in newpaths]
     except :
         return []
Example #28
 def find_all_shortestpaths(self, src=None, dst=None):
     """
     Find all the shortest paths between two nodes.
     """                
     path_list = []
     for path in nx.all_shortest_paths(self.snet_topo, src, dst):
         path_list.append(path)
     return path_list
 def search(self,source,target):
     try:
         #XXX https://networkx.github.io/documentation/latest/reference/algorithms.html
         ret = list(nx.all_shortest_paths(self.G, source, target))
             
     except:
             ret = None
     return ret
Example #30
 def createPaths(self, start, end, numPaths):
     paths = []
     for p in range(numPaths):
         path = random.choice(list(nx.all_shortest_paths(self.G, start, end)))
         paths.append(path)
     # print "=====path====="
     # print path
     return paths
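
Since the set of equal-cost shortest paths does not change between draws, computing it once avoids re-running the search numPaths times; a small standalone sketch under that assumption:

import random
import networkx as nx

def create_paths(graph, start, end, num_paths):
    # Enumerate the shortest paths once, then sample from them with replacement.
    all_paths = list(nx.all_shortest_paths(graph, start, end))
    return [random.choice(all_paths) for _ in range(num_paths)]
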
Example #31
 def findMatch(self, diff_list, maxdiff, mindiff):
     # then find indices of team_id that matches the max and min averages
     maxdiff_ind_list = [i for i,j in enumerate(diff_list) if j==maxdiff]
     mindiff_ind_list = [i for i,j in enumerate(diff_list) if j==mindiff]
     print 'max min diff list', maxdiff_ind_list, mindiff_ind_list
     # ref http://stackoverflow.com/questions/2597104/break-the-nested-double-loop-in-python
     # for breaking out of double loops
     for (match_by_round, max_ind, min_ind) in product(self.match_by_round_list, maxdiff_ind_list, mindiff_ind_list):
         round_list = match_by_round[GAME_TEAM]
         round_id = match_by_round['round_id']
         max_team_id = max_ind + 1
         min_team_id = min_ind + 1
         try:
             # note match_ind is just the position in the round_list, and not the
             # round_id/game_count_id
             match_ind = round_list.index({home_CONST:max_team_id, away_CONST:min_team_id})
         except ValueError:
             # if index is not found
             continue
         else:
             print '===='
             #print 'matchG edge list attributes before swap',nx.get_edge_attributes(self.matchG,'gamecount_id')
             # first delete edge from matchG graph before adding new edge based on swap
             if not self.removeGraphEdgeAttribute(max_team_id, min_team_id, round_id):
                 print 'Possible Error: Not able to remove graph edge between', max_team_id, min_team_id, round_id
             # do swap
             round_list[match_ind] = {home_CONST:min_team_id, away_CONST:max_team_id}
             self.metrics_list[max_ind] -= 1
             self.metrics_list[min_ind] += 1
             # add swapped edge to directed graph
             self.addGraphEdgeAttribute(min_team_id, max_team_id, round_id)
             print '--------------------'
             print 'home away SWAPPED', min_team_id, max_team_id
             print 'new metrics list', self.metrics_list
             print 'targethome',self.targethome_count_list
             #print 'matchG edge list attributes AFTER swap',nx.get_edge_attributes(self.matchG,'gamecount_id')
             foundFlag = True
             break;
     else:
         gamecount_id_attrib = nx.get_edge_attributes(self.matchG,'gamecount_id')
         current_cost = self.computeCostFunction(self.metrics_list)
         bestcost = large_CONST  # specify a very large number
         print '+++++++No simple pair to swap found, going to search for multiple edges++++++++++++++++++'
         print 'current cost for self metrics =',self.metrics_list, current_cost
         foundFlag = False  #set default
         for (max_ind, min_ind) in product(maxdiff_ind_list, mindiff_ind_list):
             max_team_id = max_ind+1
             min_team_id = min_ind+1
             if nx.has_path(self.matchG, max_team_id, min_team_id):
                 print '++'
                 print 'OK there is some path between', max_team_id, min_team_id
                 paths_gen = nx.all_shortest_paths(self.matchG, max_team_id, min_team_id)
                 for path in paths_gen:
                     # for each path, we are going to emulate how cost function
                     # would change if home and away teams were swapped
                     # note that the path itself begins with the home team
                     # in the directed graph, but after the emulated swap, the
                     # first team_id in the path is the away team
                     # i.e.  if path is [10,5,8] which is H-A 10-5, H-A 5-8
                     # after the swap H-A 8-5, H-A 5-10
                     # we won't be doing the actual swap, but for metrics calculation
                     # the away team is considered the 'new' home team.
                     print 'Path=',path
                     tempmetrics_list = list(self.metrics_list)
                     # ref on use of zip to get homeaway pairs from path list
                     # https://groups.google.com/forum/#!topic/networkx-discuss/PgfA5nhh1VM
                     temporig_list = []
                     for (home_id, away_id) in zip(path[0:],path[1:]):
                         # do emulated swap and adjust metrics
                         tempmetrics_list[home_id-1] -= 1
                         tempmetrics_list[away_id-1] += 1
                         # get the gamecount id, corresponding to current edge
                         # if the gamecount id is a list, just get the first element
                         temporig_list.append({home_CONST:home_id,
                                               away_CONST:away_id,
                                               'gamecount_id':gamecount_id_attrib[(home_id,away_id)][0]})
                     tempcost = self.computeCostFunction(tempmetrics_list)
                     print 'temp metrics and cost w list', tempmetrics_list, tempcost, temporig_list
                     if tempcost < bestcost:
                         bestcost = tempcost
                         bestorig_list = list(temporig_list)
                 print 'prelim best path for teams, cost', max_team_id, min_team_id, bestorig_list, bestcost
                 foundFlag = True
             else:
                 print 'no path between', max_team_id, min_team_id,' but trying other'
         if foundFlag:
             gamecount_indexer = dict((p['round_id'],i) for i,p in enumerate(self.match_by_round_list))
             print '@@@@'
             print 'best path found between', max_team_id, min_team_id, bestorig_list, ' with cost=',bestcost
             for edgematch in bestorig_list:
                 home_id = edgematch[home_CONST]
                 away_id = edgematch[away_CONST]
                 gamecount_id = edgematch['gamecount_id']
                 gamecount_index = gamecount_indexer.get(gamecount_id)
                 match_by_round = self.match_by_round_list[gamecount_index]
                 round_list = match_by_round[GAME_TEAM]
                 try:
                     # note match_ind is just the position in the round_list, and not the
                     # round_id/game_count_id
                     match_ind = round_list.index({home_CONST:home_id, away_CONST:away_id})
                 except ValueError:
                     # if index is not found
                     print "ERROR: best path component cannot be found in current match list", home_id, away_id
                     continue
                 else:
                     # first delete edge from matchG graph before adding new edge based on swap
                     if not self.removeGraphEdgeAttribute(home_id, away_id, gamecount_id):
                         print 'Possible Error(2): Not able to remove graph edge between', max_team_id, min_team_id
                         continue
                     self.addGraphEdgeAttribute(away_id, home_id, gamecount_id)
                     # do swap
                     round_list[match_ind] = {home_CONST:away_id, away_CONST:home_id}
                     self.metrics_list[home_id-1] -= 1
                     self.metrics_list[away_id-1] += 1
                     # add swapped edge to directed graph
             print 'updated metrics list', self.metrics_list
             print '@@@@@'
     return foundFlag
Example #32
def obtain_fingerprint_of_nodes(G1, G2, candidate_nodes_set_of_G1,
                                candidate_nodes_set_of_G2, s_previous):
    '''
	Returns the fingerprints of nodes in 'candidate_nodes_set_of_G1' and 'candidate_nodes_set_of_G2'.
	A node fingerprint is defined as follows:
	[degree of node, distance between the node and each node in 's_previous']

	Parameters
	----------

	G1,G2 : networkx graph

	candidate_nodes_set_of_G1, candidate_nodes_set_of_G2 : List
		Consists of the nodes that will be handled.

	s_previous : List
		Contains the anchor nodes which are used to obtain the distances.

	Returns
	-------

	fingerprint_of_G1 : A dictionary
		A dictionary with the nodes in 'candidate_nodes_set_of_G1' as the keys and the fingerprints as the values; each value is a list consisting of the degree and the distances to the nodes in 's_previous'.
		{G1_node1 : [degree,distance_1,distance_2,...,distance_m],G1_node2 : [degree, distance_1,distance_2,...,distance_m],....}

	fingerprint_of_G2 : A dictionary
		A dictionary with the nodes in 'candidate_nodes_set_of_G2' as the keys and the fingerprints as the values; each value is a list consisting of the degree and the distances to the nodes in 's_previous'.
		{G2_node1 : [degree,distance_1,distance_2,...,distance_m],G2_node2 : [degree, distance_1,distance_2,...,distance_m],....}

	'''
    #fingerprint = {node1 : [degree,length1,length2,...],node2 : [degree,length1,..],...}
    fingerprint_of_G1 = {}
    fingerprint_of_G2 = {}
    degree_of_G1 = G1.degree()
    degree_of_G2 = G2.degree()

    #generate the fingerprint of nodes in G1
    for node in candidate_nodes_set_of_G1:
        fingerprint = []
        path = []
        fingerprint.append(degree_of_G1[node])
        for item in s_previous:
            path = [p for p in nx.all_shortest_paths(G1, node, item[0])]
            if len(path) == 0:
                fingerprint.append(0)
            else:
                fingerprint.append(len(path[0]))
        fingerprint_of_G1[node] = fingerprint
    #generate the fingerprint of nodes in G2
    for node in candidate_nodes_set_of_G2:
        fingerprint = []
        path = []
        fingerprint.append(degree_of_G2[node])
        for item in s_previous:
            path = [p for p in nx.all_shortest_paths(G2, node, item[1])]
            if len(path) == 0:
                fingerprint.append(0)
            else:
                fingerprint.append(len(path[0]))
        fingerprint_of_G2[node] = fingerprint
    return fingerprint_of_G1, fingerprint_of_G2
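
The distance entry appended above is len(path[0]), i.e. the node count of one shortest path; networkx can return the hop count directly, and a try/except keeps disconnected pairs from raising (the len(path) == 0 branch above is never reached, because all_shortest_paths raises instead of yielding nothing). A hedged sketch:

import networkx as nx

def distance_entry(g, u, v):
    # Hop count between u and v, or 0 when no path exists (the original appends
    # the node count of a shortest path, i.e. hop count + 1).
    try:
        return nx.shortest_path_length(g, u, v)
    except nx.NetworkXNoPath:
        return 0
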
Example #33
    def _packet_in_handler(self, ev):
        # If you hit this you might want to increase
        # the "miss_send_length" of your switch
        if ev.msg.msg_len < ev.msg.total_len:
            self.logger.debug("packet truncated: only %s of %s bytes",
                              ev.msg.msg_len, ev.msg.total_len)
        msg = ev.msg
        datapath = msg.datapath
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        in_port = msg.match['in_port']

        pkt = packet.Packet(msg.data)
        eth = pkt.get_protocols(ethernet.ethernet)[0]

        if eth.ethertype == ether_types.ETH_TYPE_LLDP:
            # ignore lldp packet
            return

        arp_pkt = pkt.get_protocol(arp.arp)
        ip_pkt = pkt.get_protocol(ipv4.ipv4)
        ip_pkt_6 = pkt.get_protocol(ipv6.ipv6)

        dst = eth.dst
        src = eth.src

        dpid = datapath.id
        self.mac_to_port.setdefault(dpid, {})

        self.logger.debug("packet in %s %s %s %s", dpid, src, dst, in_port)

        self.mac_to_port[dpid][src] = in_port

        if src not in self.net:
            self.net.add_node(src)
            self.net.add_edges_from([(dpid, src, {
                'port': msg.match['in_port']
            })])
            self.net.add_edge(src, dpid)

        if isinstance(ip_pkt_6, ipv6.ipv6):  # Drop the IPV6 Packets.
            match = parser.OFPMatch(eth_type=eth.ethertype)
            actions = []
            self.add_flow(datapath, 1, match, actions)
            print "\033[91m" + "IPv6" + "\033[0m"
            return

        elif isinstance(arp_pkt, arp.arp):
            self.logger.debug("ARP processing")
            src_ip = arp_pkt.src_ip
            dst_ip = arp_pkt.dst_ip

        elif isinstance(ip_pkt, ipv4.ipv4):
            self.logger.debug("IPV4 processing")
            src_ip = ip_pkt.src
            dst_ip = ip_pkt.dst

        else:
            print "\033[91m" + "exit Nonetype" + "\033[0m"
            return

        print "Packet from " + "\033[92m" + "Eth src: " + "\033[0m" + src + " to " + "\033[92m" + "Eth dst: " + "\033[0m" + dst
        print "Packet from " + "\033[92m" + "IP src: " + "\033[0m" + src_ip + " to " + "\033[92m" + "IP dst: " + "\033[0m" + dst_ip

        self.arp_table.setdefault(src_ip, {})
        if not eth.src in self.arp_table[src_ip]:
            print "\033[93m" + "ip src not in arp table" + "\033[0m"
            self.arp_table[src_ip] = eth.src
            print "\033[92m" + "IP: " + "\033[0m" + src_ip + "\033[92m" + " Eth: " + "\033[0m" + self.arp_table[
                src_ip] + "\033[92m" + " added" + "\033[0m"
        if not dst_ip in self.arp_table:
            print "\033[93m" + "ip dst not in arp table" + "\033[0m"
            return
        else:
            self.arp_table.setdefault(dst_ip, {})
            dst = self.arp_table[dst_ip]
            if dst == None:
                print "\033[93m" + "Mac dst not in arp table" + "\033[0m"
                return
            elif dst in self.net:
                print "\033[94m" + "Eth dst in net" + "\033[0m"
                if (src, dst) not in self.s_path:
                    paths = nx.all_shortest_paths(self.net,
                                                  source=src,
                                                  target=dst)
                    i = 0
                    for path in paths:
                        i += 1
                    paths = nx.all_shortest_paths(self.net,
                                                  source=src,
                                                  target=dst)
                    n = random.randint(1, i)
                    i = 0
                    for path in paths:
                        i += 1
                        if i == n:
                            print "\033[95m" + "Path" + "\033[0m"
                            next = path[path.index(dpid) + 1]
                            out_port = self.net[dpid][next]['port']
                            self.s_path[src, dst] = path
                        else:
                            continue
                else:
                    path = self.s_path[src, dst]
                    if dpid in path:
                        print "\033[95m" + "Path" + "\033[0m"
                        print path
                        next = path[path.index(dpid) + 1]
                        out_port = self.net[dpid][next]['port']
                    else:
                        return

        actions = [parser.OFPActionOutput(out_port)]

        match = parser.OFPMatch(in_port=in_port, eth_dst=dst, eth_src=src)
        # verify if we have a valid buffer_id, if yes avoid to send both
        # flow_mod & packet_out
        if msg.buffer_id != ofproto.OFP_NO_BUFFER:
            self.add_flow(datapath, 1, match, actions, msg.buffer_id)
            return
        else:
            self.add_flow(datapath, 1, match, actions)
        data = None
        if msg.buffer_id == ofproto.OFP_NO_BUFFER:
            data = msg.data

        out = parser.OFPPacketOut(datapath=datapath,
                                  buffer_id=msg.buffer_id,
                                  in_port=in_port,
                                  actions=actions,
                                  data=data)
        datapath.send_msg(out)
Example #34
def normalTraffic(G,demandslist,linkdict1,metric,num,maxhops):
	dem=demandslist
	linkdict=linkdict1
	G=G
	metric=metric
	cnt=0
	failed=[]
	for demand in dem:
		try:
		
#		print(demand)
#		if demand["terrafic-level"]=="Y18.09":
			
#			b=re.search(source_pattern,demand["Source"])
#
#			Source=b.group(0)
#			
#			cnt=cnt+1
#			print(Source)
#			print(demand["Destination"])
			results=nx.all_shortest_paths(G,demand["Source"],demand["Destination"],weight=metric,method="dijkstra")

			allpath=[]
			for result in results:
				if len(result)<maxhops:
					linkspath=[]
					for i in list(range(1,len(result))):
						st=result[i-1]
						nd=result[i]
						ln=st+"--"+nd
						linkspath.append(ln)
					allpath.append(linkspath)

			n=min(len(allpath),num)
			for path in allpath[:n]:
				for link in path:
					if link in linkdict:
						linkdict[link]=linkdict[link]+int(demand["Traffic"])/n
					else:
						linkdict[link]=int(demand["Traffic"])/n

		except:
			failed.append(demand)

	return linkdict,failed
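
The load-spreading inside the loop, stated in isolation: split each demand evenly over up to `num` equal-cost shortest paths and accumulate per-link load. Names below are illustrative, node labels are assumed to be strings (as in the code above), and `weight` is whichever edge attribute holds the IGP metric:

import networkx as nx

def spread_demand(G, src, dst, traffic, num, linkdict, weight='weight'):
    paths = list(nx.all_shortest_paths(G, src, dst, weight=weight))[:num]
    if not paths:
        return linkdict
    share = traffic / len(paths)
    for path in paths:
        for st, nd in zip(path, path[1:]):
            link = st + "--" + nd
            linkdict[link] = linkdict.get(link, 0) + share
    return linkdict
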
Example #35
def srlg_failure(routerlist,failover,linkdictzero,srlgdict,dem,IGPlinks,linknormal,metric,num,maxhops):
	destinations=[]
	failed_by_seg=defaultdict(lambda:[])
	for pop in routerlist:
		if pop[0:2]=="sr":
			destinations.append(pop)




	wctnode=copy.deepcopy(linkdictzero)
	for key,value in wctnode.items():
		wctnode[key]=[]
	temp=copy.deepcopy(linkdictzero)

	for key in sorted(srlgdict.keys()):
		seg_failed=[]
		G=nx.MultiGraph()
		G.add_weighted_edges_from(IGPlinks,weight=metric)
		
		demlistkey=dem.copy()
		print(key)
		linkdict=copy.deepcopy(linkdictzero)
		


	# Rebuild the topology without this SRLG key: take the links it affects out of service.


		#delnode=Router.nodes.get(name=key)
		#for node in delnode.isislink.match():

		for t in srlgdict[key]:
			try:
				
				G.remove_edge(t[0],t[1])
				
			except:
				pass


		
			if t[1] in failover.keys():
				print("fail")

			

				for demand in demlistkey:

					if demand["Source"]==t[1]:
						demlistkey.remove(demand)

						for s in failover[t[1]]:
							newdic={}
							newdic["Traffic"]=float(s[1])*float(demand["Traffic"])/100
							newdic["TrafficLevel"]=demand["TrafficLevel"]
							newdic["ServiceClass"]=demand["ServiceClass"]
							newdic["Destination"]=demand["Destination"]
							newdic["Source"]=s[0]
							newdic["Name"]=demand["Name"]
							demlistkey.append(newdic)
					

				




			elif t[1] in destinations:

				
				for demand in demlistkey:


					if demand["Destination"]==t[1]:
						demlistkey.remove(demand)
						seg_failed.append(demand)


				
				


			
				
				

			elif  t[1] not in failover.keys() and (t[1][0:2]=="px" or t[1][0:2]=="tx"):
				print("ptnofail")

				for demand in demlistkey:
					

					if demand["Source"]==t[1]:
						demlistkey.remove(demand)
						seg_failed.append(demand)
					


			else:
				pass
				
	    
		print("demlist compelete!")
		print(len(demlistkey))			
	   
		for demand in demlistkey:

		
			try:
		
#		print(demand)
#		if demand["terrafic-level"]=="Y18.09":
			
#				b=re.search(source_pattern,demand["Source"])
#
#				Source=b.group(0)
				
				
	#			print(Source)
	#			print(demand["Destination"])
				results=nx.all_shortest_paths(G,demand["Source"],demand["Destination"],weight=metric,method="dijkstra")

				allpath=[]
				for result in results:
					if len(result)<maxhops:
						linkspath=[]
						for i in list(range(1,len(result))):
							st=result[i-1]
							nd=result[i]
							ln=st+"--"+nd
							linkspath.append(ln)
						allpath.append(linkspath)

				n=min(len(allpath),num)
				for path in allpath[:n]:
					for link in path:
						if link in linkdict:
							linkdict[link]=linkdict[link]+int(demand["Traffic"])/n
						else:
							linkdict[link]=int(demand["Traffic"])/n

			except:
				seg_failed.append(demand)
	




		failed_by_seg[key]=seg_failed





		for key1 in linkdict.keys():
			
			if linkdict[key1]> linknormal[key1]:
				
			
				if temp[key1]<=linkdict[key1]:


					
					if temp[key1]==linkdict[key1] :
						temp[key1]=linkdict[key1]
						wctnode[key1].append(key)
					elif temp[key1]<linkdict[key1]:
						temp[key1]=linkdict[key1]
						wctnode[key1]=[]
						wctnode[key1].append(key)	
						
				
			


	return temp,wctnode,failed_by_seg
def assign_flow_rates_heuristic(data, tm, mipgap):
    flow_rates = []
    od_pairs = []
    for i in range(1, len(nodes) + 1):
        for j in range(1, len(nodes) + 1):
            if i != j:
                flow_rates.append(tm[i - 1, j - 1])
                od_pairs.append((i, j))

    # Compute multipath routing between all node pairs:
    links = {}
    for link in data['links']:
        links[link['source'], link['destination']] = {
            'legit_load': 0,
            'capacity': link['capacity']
        }
    G = nx.DiGraph([(link['source'], link['destination'])
                    for link in data['links']])
    all_paths = {}
    for (src, dst) in od_pairs:
        all_paths[src, dst] = []
        try:
            paths = [
                p for p in nx.all_shortest_paths(G, source=src, target=dst)
            ]
        except nx.exception.NetworkXNoPath:
            print("Error: No path found between %d and %d." % (src, dst),
                  file=sys.stderr)
            sys.exit(1)
        max_flow = 0
        path_capacities = []
        for path in paths:
            capacity = path_capacity(links, path)
            path_capacities.append(capacity)
            max_flow += capacity
        for i in range(len(paths)):
            weight = path_capacities[i] * 1.0 / max_flow
            all_paths[src, dst].append((paths[i], weight))

    # Compute sort information on OD pairs:
    node_infos = {}
    for node in data['nodes']:
        node_infos[node['id']] = {
            'fanout': 0,
            'fanin': 0,
            'connectivity': 0,
            'nb_paths': 0
        }

    for link in data['links']:
        node_infos[link['destination']]['connectivity'] += 1
        node_infos[link['destination']]['fanin'] += link['capacity']
        node_infos[link['source']]['connectivity'] += 1
        node_infos[link['source']]['fanout'] += link['capacity']

    for (src, dst) in all_paths:
        for (path, _) in all_paths[src, dst]:
            for node in path[1:-1]:
                node_infos[node]['nb_paths'] += 1

    od_pair_infos = {}
    for (src, dst) in od_pairs:
        src_infos = node_infos[src]
        dst_infos = node_infos[dst]
        m1 = min(src_infos['fanout'], dst_infos['fanin'])
        m2 = min(src_infos['connectivity'], dst_infos['connectivity'])
        if src_infos['nb_paths'] == dst_infos['nb_paths'] == 0:
            m3 = float("Inf")
        else:
            m3 = 1.0 / max(src_infos['nb_paths'], dst_infos['nb_paths'])
        od_pair_infos[(src, dst)] = {'m1': m1, 'm2': m2, 'm3': m3}

    # Sort OD pairs:
    def make_comparator(od_pair_infos):
        def compare(od1, od2):
            pair1_infos = od_pair_infos[od1]
            pair2_infos = od_pair_infos[od2]
            if pair1_infos['m1'] == pair2_infos['m1']:
                if pair1_infos['m2'] == pair2_infos['m2']:
                    if pair1_infos['m3'] == pair2_infos['m3']:
                        return 0
                    elif pair1_infos['m3'] > pair2_infos['m3']:
                        return 1
                    else:
                        return -1
                elif pair1_infos['m2'] > pair2_infos['m2']:
                    return 1
                else:
                    return -1
            elif pair1_infos['m1'] > pair2_infos['m1']:
                return 1
            else:
                return -1

        return compare

    flow_rates.sort(reverse=True)
    # sorted() has no cmp= argument on Python 3; functools.cmp_to_key (requires
    # "import functools") adapts the comparator.
    od_pairs = sorted(od_pairs,
                      key=functools.cmp_to_key(make_comparator(od_pair_infos)),
                      reverse=True)

    # Route flows between OD pairs:
    route_flows_multipaths(links, all_paths, od_pairs, flow_rates)

    # Write link loads to YAML and compute objective value:
    objective = 0
    for link in data['links']:
        link['legit_load'] = links[link['source'],
                                   link['destination']]['legit_load']
        max_link_load = link['legit_load'] / link['capacity']
        if max_link_load > objective:
            objective = max_link_load

    return objective
Example #37
 def test_all_shortest_paths_raise(self):
     G = nx.Graph()
     nx.add_path(G, [0, 1, 2, 3])
     G.add_node(4)
     # node 4 is unreachable from 0, so consuming the generator must raise
     assert_raises(nx.NetworkXNoPath, list, nx.all_shortest_paths(G, 0, 4))
Example #38
def group_betweenness_centrality(G, C, normalized=True, weight=None):
    r"""Compute the group betweenness centrality for a group of nodes.

    Group betweenness centrality of a group of nodes $C$ is the sum of the
    fraction of all-pairs shortest paths that pass through any vertex in $C$

    .. math::

       c_B(C) =\sum_{s,t \in V-C; s<t} \frac{\sigma(s, t|C)}{\sigma(s, t)}

    where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
    shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
    those paths passing through some node in group $C$. Note that
    $(s, t)$ are not members of the group ($V-C$ is the set of nodes
    in $V$ that are not in $C$).

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    C : list or set
      C is a group of nodes which belong to G, for which group betweenness
      centrality is to be calculated.

    normalized : bool, optional
      If True, group betweenness is normalized by `2/((|V|-|C|)(|V|-|C|-1))`
      for graphs and `1/((|V|-|C|)(|V|-|C|-1))` for directed graphs where `|V|`
      is the number of nodes in G and `|C|` is the number of nodes in C.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.

    Raises
    ------
    NodeNotFound
       If node(s) in C are not present in G.

    Returns
    -------
    betweenness : float
       Group betweenness centrality of the group C.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    The measure is described in [1]_.
    The algorithm is an extension of the one proposed by Ulrik Brandes for
    betweenness centrality of nodes. Group betweenness is also mentioned in
    his paper [2]_ along with the algorithm. The importance of the measure is
    discussed in [3]_.

    The number of nodes in the group must be a maximum of n - 2 where `n`
    is the total number of nodes in the graph.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    .. [2] Ulrik Brandes:
       On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.72.9610&rep=rep1&type=pdf
    .. [3] Sourav Medya et. al.:
       Group Centrality Maximization via Network Design.
       SIAM International Conference on Data Mining, SDM 2018, 126–134.
       https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
    """
    betweenness = 0  # initialize betweenness to 0
    V = set(G)  # set of nodes in G
    C = set(C)  # set of nodes in C (group)
    if len(C - V) != 0:  # element(s) of C not in V
        raise nx.NodeNotFound('The node(s) ' + str(list(C - V)) + ' are not '
                              'in the graph.')
    V_C = V - C  # set of nodes in V but not in C
    # accumulation
    for pair in combinations(V_C, 2):  # (s, t) pairs of V_C
        try:
            paths = 0
            paths_through_C = 0
            for path in nx.all_shortest_paths(G, source=pair[0],
                                              target=pair[1], weight=weight):
                if set(path) & C:
                    paths_through_C += 1
                paths += 1
            betweenness += paths_through_C / paths
        except nx.exception.NetworkXNoPath:
            betweenness += 0
    # rescaling
    v, c = len(G), len(C)
    if normalized:
        scale = 1 / ((v - c) * (v - c - 1))
        if not G.is_directed():
            scale *= 2
    else:
        scale = None
    if scale is not None:
        betweenness *= scale
    return betweenness
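
A small usage sketch of the function above on a path graph (illustrative only, assuming the function and its imports are available):

import networkx as nx

G = nx.path_graph(6)  # 0-1-2-3-4-5
# 4 of the 6 (s, t) pairs drawn from {0, 1, 4, 5} route through {2, 3},
# so the unnormalized group betweenness is 4.0.
print(group_betweenness_centrality(G, [2, 3], normalized=False))
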
Example #39
    print("----------------------------")

    if not nx.is_connected(GA):
        exit()

    # Manual entering of world reference node by user
    # map_node = request_map_node_input(GA)
    map_node = 'C0'  # for debugging purposes

    # Iterate all nodes in graph
    for node in tqdm(GA.nodes):

        # print "--------------------------------------------------------"
        # print('Solving for ' + node + "...")

        paths = list(nx.all_shortest_paths(GA, node, map_node))

        if paths == []:
            paths = [[node]]

        # print(paths)

        transformations_for_path = []

        for path in paths:
            T = np.array(
                [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]],
                dtype=float)  # np.float was removed from recent NumPy

            for i in range(1, len(path)):
Example #40
    target = sys.argv[1]
    local_storage = json.load(open("localStorage/user", 'r'))
    print(local_storage["name"])
    '''
    for user in array_users:
        if user["name"] != local_storage["name"]:
            if approx.local_node_connectivity(G, local_storage["name"], user["name"]) != 0:
                for p in nx.all_shortest_paths(G, local_storage["name"], user["name"]):
                    print(p)
                    
    '''

    print(target)
    #edgeList = nx.all_shortest_paths(G, local_storage["name"], array_users[0]["name"])[0]
    if approx.local_node_connectivity(G, local_storage["name"], target) != 0:
        for p in nx.all_shortest_paths(G, local_storage["name"], target):
            for x in range(0, len(list(p)) - 1):
                G.edges[p[x], p[x + 1]]['color'] = 'red'

    #nx.draw_random(G, **options)
    #plt.subplot(222)
edges = G.edges()
print(edges.data('color'))
colors = [G[u][v]['color'] for u, v in edges]

pos = nx.nx_pydot.graphviz_layout(G)

nx.draw(G,
        pos,
        with_labels=True,
        font_weight='bold',
def relocate_planner(robot_height, robot_pose, target_id, N, R, H, X, Y, x_min,
                     x_max, y_min, y_max):
    import sys
    import numpy as np
    import random as rn
    import networkx as nx
    import VFHplus_mobile as vfh
    import matplotlib
    import matplotlib.pyplot as plt

    # import VFHplus_change_radius as vfh
    # distance(x, y): compute the Euclidean distance between 2D points x and y
    def distance(point_one, point_two):
        return ((point_one[0] - point_two[0])**2 +
                (point_one[1] - point_two[1])**2)**0.5

    # unique_list(sequence): remove duplicated elements in the input list 'sequence'
    def unique_list(seq):
        seen = set()
        seen_add = seen.add
        return [x for x in seq if not (x in seen or seen_add(x))]

    def invisible_volume(object_pose, object_height, camera_pose,
                         camera_height, radius, mean_r):
        d = distance(object_pose, camera_pose)
        l = (object_height *
             (d + 2 * radius)) / (camera_height - object_height)
        x = (2 * radius * (d + 2 * radius + l)) / (d + mean_r)
        y = (2 * radius * (d + 2 * radius)) / d
        z = (((x - y)**2) / 2 + l**2)**0.5
        return object_height * (x * z - y * z + l * y) / 2

    edges_add = []
    edges_all = []
    objects = []
    walls = []

    min_len = 10000
    min_weight = min_len * 10

    mean_r = np.mean(R)
    robot_radius = max(R)
    R.append(robot_radius)

    x_min = x_min - 4 * mean_r
    y_min = y_min - 4 * mean_r
    x_max = x_max + 4 * mean_r
    y_max = y_max + 4 * mean_r

    # Generating bounding box (Wall)
    X_t = list(
        np.linspace(x_min, x_max, int(np.ceil(
            (x_max - x_min) / (4 * mean_r)))))
    Y_t = list(
        np.linspace(y_min, y_max, int(np.ceil(
            (y_max - y_min) / (4 * mean_r)))))

    X_w = X_t + [x_max] * len(Y_t)
    X_w = X_w + X_t
    Y_w = [y_max] * len(X_t) + Y_t
    Y_w = Y_w + [y_min] * len(X_t)

    M = len(X_w)
    R_wall = [2 * np.sqrt(mean_r**2)] * M
    for i in range(0, M):
        walls.append([X_w[i], Y_w[i]])

    for i in range(0, N):
        objects.append([X[i], Y[i]])
    objects.append(robot_pose)

    # Create an empty graph
    G = nx.Graph()
    # N+1 because the robot base pose is also a node
    all_nodes = list(range(0, N + 1))
    G.add_nodes_from(all_nodes)

    # Connect edges of the graph using VFH+
    #  Description: for each pair of node i and node j, check if an edge (i, j) can be connected between them (i is not equal to j)
    #               Note that (i, j) and (j, i) are different (directed edges a.k.a. "arrows")
    #               (i, j) connected?: the end-effector can move any object from Object i's pose to Object j's pose without collision (if Objects i and j are removed)
    nodes_done = []
    for i in all_nodes:
        # Exclude myself (node i) and previously checked nodes
        nodes_wo_me = list(set(all_nodes) - set([i]) - set(nodes_done))
        nodes_done.append(i)
        for j in nodes_wo_me:
            # end_pose (the end-effector pose):
            # the end-effector is located at node j's pose (picking from node j's location-> go to i's location)
            end_pose = objects[j]
            edge = (
                i, j
            )  #A path i to j: any object can move from i to j and j to i
            # VFH+ checks within d_max (between Object i and the end-effector)
            d_max = distance(objects[i], end_pose)
            # d_max = distance(objects[i], end_pose)+0.09
            # Except i and j, other objects are regarded as obstacles
            obstacles_sub = list(set(all_nodes) - set([i, j]) - set([N]))
            # radius (to compute the augmented radius): choose the largest radius among all objects because any object in the scene should be able to move between i and j without collision
            radius = max(R)
            # Run VFH+ (in VFHplus_mobile.py): the wall is included as obstacles
            if i == target_id and j == N:
                _, _, _, collision_free = vfh.influence(
                    len(all_nodes) - 2 + M - 1, objects[i],
                    [objects[k] for k in obstacles_sub] + walls, end_pose,
                    d_max, [R[k] for k in obstacles_sub] + R_wall, radius, 1)
                # if collision_free == 1:
                #     edges_add.append(edge)
                if collision_free == 1:
                    _, _, _, collision_free_r = vfh.influence(
                        len(all_nodes) - 2 + M - 1, objects[j],
                        [objects[k]
                         for k in obstacles_sub] + walls, objects[i], d_max,
                        [R[k] for k in obstacles_sub] + R_wall, radius, 1)
                    if collision_free_r == 1:
                        edges_add.append(edge)
            else:
                _, _, _, collision_free = vfh.influence(
                    len(all_nodes) - 2 + M - 1, objects[i],
                    [objects[k] for k in obstacles_sub] + walls, end_pose,
                    d_max, [R[k] for k in obstacles_sub] + R_wall, radius, 0)
                # if collision_free == 1:
                #     edges_add.append(edge)
                if collision_free == 1:
                    _, _, _, collision_free_r = vfh.influence(
                        len(all_nodes) - 2 + M - 1, objects[j],
                        [objects[k]
                         for k in obstacles_sub] + walls, objects[i], d_max,
                        [R[k] for k in obstacles_sub] + R_wall, radius, 0)
                    if collision_free_r == 1:
                        edges_add.append(edge)
    G.add_edges_from(edges_add)

    # Find accessible objects
    #  Description: the nodes connected to the robot node can be accessed by the robot since there are paths between the robot node and its neighbors
    all_sources = [N]  #robot node
    accessible_nodes = list(G.neighbors(N))
    accessible_nodes.sort()

    # print 'Accessible: ', accessible_nodes
    # print 'Target: ', target_id

    if target_id < 0:
        uncovered_volume = []
        for j in range(0, len(accessible_nodes)):
            uncovered_volume.append(
                invisible_volume(objects[accessible_nodes[j]],
                                 H[accessible_nodes[j]], robot_pose,
                                 robot_height, R[accessible_nodes[j]], mean_r))
        node_next = accessible_nodes[uncovered_volume.index(
            max(uncovered_volume))]
        path = [node_next]
        accessibility = 0
    else:
        # Find the min-hop path between each pair of a visible (accessible) object and the target
        for source in all_sources:
            if nx.has_path(G, source, target_id):
                # Find all min-hop paths (all ties)
                paths = list(
                    nx.all_shortest_paths(G, source, target_id, weight=None))
                path_weights = [0] * len(paths)
                # Compute the Euclidean distance of each path for tie breaking
                for i in range(0, len(paths)):
                    for j in range(0, len(paths[i]) - 1):
                        path_weights[i] = path_weights[i] + distance(
                            objects[paths[i][j]], objects[paths[i][j + 1]])

                # Choose the path with the minimum Euclidean distance if there are multiple min-hop paths
                idx = path_weights.index(min(path_weights))
                if len(paths[idx]) < min_len:
                    path = paths[idx]
                    min_len = len(path)
                    min_weight = min(path_weights)
                elif len(paths[idx]
                         ) == min_len and min(path_weights) < min_weight:
                    path = paths[idx]
                    min_len = len(path)
                    min_weight = min(path_weights)
        # If a path was found (min_len stays 10000 when none is found), drop the robot node from the front of it
        if min_len < 10000:
            path.pop(0)  # remove the robot node from the path
        else:
            path = [-1]
            # sys.exit('No path found to the target')

        if path[0] == target_id:
            accessibility = 1
        else:
            accessibility = -1

    relocate_id = path[0]
    relocate_coordinates = objects[relocate_id]

    # Print
    #print('Target accessibility (0=unaccessible, 1=accessible): %d' % accessibility)
    #print('Relocate Object %d at (%f, %f)' % (relocate_id, relocate_coordinates[0], relocate_coordinates[1]))
    #
    # if 0:
    #     # Plot the final graph
    #     plt.figure(2)
    #
    #     # Plot the configuration (Fig. 8a in the paper)
    #     plt.subplot(121)
    #
    #     plt.gcf()
    #     ax = plt.gca()
    #
    #     all_nodes = list(range(0, N))
    #     all_obstacles = list(set(all_nodes) - set([target_id]))
    #     for i in all_obstacles:
    #         obstacle_plot = plt.Circle((objects[i][0], objects[i][1]), R[i], color='r', clip_on=False)
    #         ax.add_artist(obstacle_plot)
    #
    #     #source_plot = plt.Circle((objects[path[0]][0], objects[path[0]][1]), R[path[0]], color='gray', clip_on=False)
    #     #ax.add_artist(source_plot)
    #
    #     target_plot = plt.Circle((objects[target_id][0], objects[target_id][1]), R[target_id], color='lime', clip_on=False)
    #     ax.add_artist(target_plot)
    #
    #     for i in range(0, M):
    #         wall_plot = matplotlib.patches.Rectangle((walls[i][0] - 0.04, walls[i][1] - 0.04), 0.08, 0.08, color='sienna', clip_on=False)
    #         ax.add_patch(wall_plot)
    #
    #     plt.plot()
    #     plt.axis('scaled')
    #
    #     plt.xlim(min(X_w) - max(R), max(X_w) + max(R))
    #     plt.ylim(min(Y_w) - max(R), max(Y_w) + max(R))
    #
    #     # Plot the graph (Fig. 8b in the paper)
    #     plt.subplot(122)
    #
    #     pos = {}
    #     for i in all_nodes:
    #         pos[all_nodes[i]] = objects[i]
    #
    #     labels = {}
    #     for node_name in all_nodes:
    #         labels[node_name] = str(node_name)
    #
    #     nodes = nx.draw_networkx_nodes(G, pos, all_nodes, node_color='r', node_size=500)
    #     nodes_t = nx.draw_networkx_nodes(G, pos, nodelist=[target_id], node_color='lime', node_size=500)
    #     #if path:
    #     #    nodes_s = nx.draw_networkx_nodes(G, pos, nodelist=[path[0]], node_color='gray', node_size=500)
    #     #    nodes_s.set_edgecolor('black')
    #     nodes.set_edgecolor('black')
    #     nodes_t.set_edgecolor('black')
    #     nx.draw_networkx_labels(G, pos, labels, font_size=20)
    #
    #     path_edges = []
    #     for i in range(0, len(path) - 1):
    #         path_edges.append((path[i], path[i + 1]))
    #     nx.draw_networkx_edges(G, pos, edgelist=edges_add, width=0.5, arrowsize=20)
    #     nx.draw_networkx_edges(G, pos, edgelist=path_edges, width=2, edge_color='red', arrowsize=20)
    #
    #     plt.xlim(min(X) - 2 * max(R), max(X) + 2 * max(R))
    #     plt.ylim(min(Y) - 2 * max(R), max(Y) + 2 * max(R))
    #     plt.axis('off')
    #     plt.axis('scaled')
    #     plt.plot()
    #
    #     while True:
    #         try:
    #             plt.show()
    #         except UnicodeDecodeError:
    #             continue
    #         break

    return [accessibility, relocate_id, relocate_coordinates], path
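# A small, self-contained sketch of the tie-breaking idea used above: among all
# min-hop paths returned by nx.all_shortest_paths, keep the one whose summed
# Euclidean length is smallest. The coordinates and graph are assumptions.
import networkx as nx

coords = {0: (0.0, 0.0), 1: (0.0, 1.0), 2: (1.0, 0.2), 3: (1.0, 1.0)}
G_demo = nx.Graph([(0, 1), (1, 3), (0, 2), (2, 3)])

def euclid(a, b):
    return ((coords[a][0] - coords[b][0]) ** 2 +
            (coords[a][1] - coords[b][1]) ** 2) ** 0.5

ties = list(nx.all_shortest_paths(G_demo, 0, 3))       # two 2-hop paths
best = min(ties, key=lambda p: sum(euclid(u, v) for u, v in zip(p, p[1:])))
print(best)  # [0, 2, 3], the tie with the smaller Euclidean length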
def parallelProperties(name):
    print name
    #creating multi directed graph
    MG = nx.MultiGraph()

    #reading file and adding nodes - edges
    file = None
    if pathToFiles != None:
        file = open(pathToFiles + "/" + name, "r")
    else:
        file = open("./" + name, "r")
    listOfInteractions = []  # I will save interactions to rebuild the directed graph
    for line in file:
        splittedLine = line.split("\t")
        node1 = splittedLine[0]
        node2 = splittedLine[1]
        listOfInteractions.append(node1 + ":" + node2)
        MG.add_edge(node1, node2)

    file.close()
    #####################################
    ##
    ## dict to save measures
    ##
    #####################################
    dictProp = {}
    for node in MG.nodes():
        dictProp[node] = {
            "average_shortest_path_length": '',
            "clustering_coefficient": '0',
            "closeness_centrality": '',
            "eccentricity": '',
            "stress": '0',
            "edge_count": '',
            "In_degree": '0',
            "Out_degree": '0',
            "Betweenness_centrality": '',
            "Neighborhood_conectivity": ''
        }

    file = None
    if pathToFiles != None:
        file = open(pathToFiles + "/" + name, "r")
    else:
        file = open("./" + name, "r")
    ####################################################################
    ##
    ## for in degree and out degree
    ##
    ####################################################################
    for line in file:
        splittedLine = line.split("\t")
        node1 = splittedLine[0]
        node2 = splittedLine[1]
        dictProp[node1]["Out_degree"] = str(
            int(dictProp[node1]["Out_degree"]) + 1)
        dictProp[node2]["In_degree"] = str(
            int(dictProp[node2]["In_degree"]) + 1)

    file.close()

    # we will look at each connected-component subgraph
    subGS = list(nx.connected_component_subgraphs(MG))
    # now we will rebuild these graphs as multidigraphs
    for subG in subGS:
        #first step: create a multidigraph
        md = nx.MultiDiGraph()
        whitoutSL = nx.MultiGraph()  #a graph without selfloops
        directed = nx.DiGraph()
        MDNoSelfLoop = nx.MultiDiGraph()  #a graph without selfloops
        #the second step is to loop over the edges, searching for the direction of interaction
        for edge in nx.edges(subG):
            nodeX, nodeY = edge

            # if it is a self-interaction
            if nodeX == nodeY:
                md.add_edge(nodeX, nodeY)
                directed.add_edge(nodeX, nodeY)
            else:
                # if it is not a self-interaction, look for the directions (whether A:B and/or B:A exist) and add the corresponding edges
                if nodeX + ":" + nodeY in listOfInteractions:
                    md.add_edge(nodeX, nodeY)
                    directed.add_edge(nodeX, nodeY)
                    whitoutSL.add_edge(nodeX, nodeY)
                    MDNoSelfLoop.add_edge(nodeX, nodeY)
                if nodeY + ":" + nodeX in listOfInteractions:
                    md.add_edge(nodeY, nodeX)
                    whitoutSL.add_edge(nodeY, nodeX)
                    directed.add_edge(nodeY, nodeX)
                    MDNoSelfLoop.add_edge(nodeY, nodeX)

        ####################################################################
        ##
        ##							Metrics
        ##
        ####################################################################

        for node in md.nodes():
            ####################################################################
            ##
            ##					Edge count
            ##
            ####################################################################
            dictProp[node]["edge_count"] = str(
                int(dictProp[node]["Out_degree"]) +
                int(dictProp[node]["In_degree"]))

            ####################################################################
            ##
            ##					average shortest path length
            ##
            ####################################################################

            # at this point we have directed subgraphs, so now it is time to compute the average shortest path length within each subgraph

            # first we compute the shortest path lengths from this node, then average them
            shortestPaths = nx.shortest_path_length(md, source=node)

            summatory = 0
            cont = 0
            for item in shortestPaths.items():
                summatory += float(item[1])
                cont += 1
            if (cont - 1) != 0:
                dictProp[node]["average_shortest_path_length"] = str(
                    summatory / (cont - 1))
                #print node,(summatory/(cont-1))
            else:
                dictProp[node]["average_shortest_path_length"] = "0"
            ####################################################################
            ##
            ##					eccentricity
            ##
            ####################################################################
            higher = 0
            for paths in shortestPaths.items():
                if int(paths[1]) > higher:
                    higher = int(paths[1])
            dictProp[node]["eccentricity"] = str(higher)

        ####################################################################
        ##
        ##					closeness centrality
        ##
        ####################################################################

        for item in (nx.closeness_centrality(md, normalized=False)).items():
            dictProp[item[0]]["closeness_centrality"] = str(item[1])

        ####################################################################
        ##
        ##					neighborhood connectivity
        ##
        ####################################################################

        for item in (nx.average_neighbor_degree(whitoutSL)).items():
            dictProp[item[0]]["Neighborhood_conectivity"] = str(item[1])

        ####################################################################
        ##
        ##					stress centrality
        ##
        ####################################################################
        for Source in md.nodes():
            for Target in md.nodes():
                if Source != Target:
                    try:
                        for path in nx.all_shortest_paths(md,
                                                          source=Source,
                                                          target=Target):
                            if len(path) > 2:
                                for N in path[1:-1]:
                                    dictProp[N]["stress"] = str(
                                        int(dictProp[N]["stress"]) + 1)
                    except nx.NetworkXNoPath:
                        pass

        ####################################################################
        ##
        ##					betweenness centrality
        ##
        ####################################################################
        for item in (nx.betweenness_centrality(md)).items():
            dictProp[item[0]]["Betweenness_centrality"] = str(item[1])

        ####################################################################
        ##
        ##					clustering coefficient
        ##
        ####################################################################

        for node in MDNoSelfLoop.nodes():
            inPlusOut = float(dictProp[node]["Out_degree"]) + float(
                dictProp[node]["In_degree"])
            division = (len(whitoutSL.neighbors(node)) *
                        (len(whitoutSL.neighbors(node)) - 1))
            if len(whitoutSL.neighbors(node)) > 1:  # if the node has at least two neighbours
                connectedNeighbors = 0
                neighbors = whitoutSL.neighbors(node)
                for neighbor in neighbors:
                    #print neighbor
                    neighborsOfNeighbors = MDNoSelfLoop.neighbors(neighbor)
                    #print neighbor, neighborsOfNeighbors
                    for n in neighborsOfNeighbors:
                        #print n
                        if n in neighbors:
                            connectedNeighbors += 1
                dictProp[node]["clustering_coefficient"] = str(
                    float(connectedNeighbors) / division)

    outFile = None
    if Result != None:
        outFile = open(Result + "/" + name[:-4] + ".csv", "w")
    else:
        outFile = open("./" + name[:-4] + ".csv", "w")

    outFile.write(
        "\"AverageShortestPathLength\",\"BetweennessCentrality\",\"ClosenessCentrality\",\"ClusteringCoefficient\",\"Eccentricity\",\"EdgeCount\",\"Indegree\",\"name\",\"NeighborhoodConnectivity\",\"Outdegree\",\"Stress\"\n"
    )
    for item in dictProp.items():
        node = item[0]
        outFile.write("\"" + dictProp[node]["average_shortest_path_length"] +
                      "\",\"" + dictProp[node]["Betweenness_centrality"] +
                      "\",\"" + dictProp[node]["closeness_centrality"] +
                      "\",\"" + dictProp[node]["clustering_coefficient"] +
                      "\",\"" + dictProp[node]["eccentricity"] + "\",\"" +
                      dictProp[node]["edge_count"] + "\",\"" +
                      dictProp[node]["In_degree"] + "\",\"" + node + "\",\"" +
                      dictProp[node]["Neighborhood_conectivity"] + "\",\"" +
                      dictProp[node]["Out_degree"] + "\",\"" +
                      dictProp[node]["stress"] + "\"\n")

    outFile.close()
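# A compact sketch of the stress-centrality counting used above: every node
# strictly inside any shortest path between an ordered (source, target) pair
# gets one count. The toy graph is an assumption for illustration.
import networkx as nx

G_demo = nx.path_graph(4)   # 0-1-2-3
stress = {n: 0 for n in G_demo}
for s in G_demo:
    for t in G_demo:
        if s != t:
            for p in nx.all_shortest_paths(G_demo, s, t):
                for mid in p[1:-1]:
                    stress[mid] += 1
print(stress)  # {0: 0, 1: 4, 2: 4, 3: 0}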
Example #43
0
 def all_shortest_paths(self, source, target, weight=None):
     return nx.all_shortest_paths(self.composite_graph, (0, source),
                                  (1, target), weight)
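 # Usage sketch (the surrounding class is not shown, so this is an assumption):
 # the wrapper forwards to networkx and therefore returns a generator, so a
 # caller would typically materialize it, e.g.
 #     paths = list(layered.all_shortest_paths('u', 'v', weight='weight'))
 # where `layered` is a hypothetical instance exposing the method above.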
Example #44
0
def node_failure(routerlist,failover,linkdictzero,dem,IGPlink,linknormal,metric,num,maxhops):
	

		
	wctnode=copy.deepcopy(linkdictzero)
	for key,value in wctnode.items():
		wctnode[key]=[]
	temp=copy.deepcopy(linkdictzero)
	failed_by_node=defaultdict(lambda:[])

	for key in sorted(routerlist):
		
		G=nx.MultiGraph()
		G.add_weighted_edges_from(IGPlink,weight=metric)
		print(key)
		demlistkey=[]
		node_failed=[]
		linkdict=copy.deepcopy(linkdictzero)


		


		if key[0:2]=="ar" or key[0:2]=="er":

			print("cdn or er")
			for demand in dem:
				if demand["Source"] in list(G[key]):
					pass
					node_failed.append(demand)
					
					
				else:
					demlistkey.append(demand)




			for node in G[key]:
				# print(node)
				if node in failover.keys():
					for demand in dem:
						# print("fail")
						if demand["Source"]==node:
							for s in failover[node]:
								newdic={}
								newdic["Traffic"]=float(s[1])*float(demand["Traffic"])/100
								newdic["TrafficLevel"]=demand["TrafficLevel"]
								newdic["ServiceClass"]=demand["ServiceClass"]
								newdic["Destination"]=demand["Destination"]
								newdic["Source"]=s[0]
								newdic["Name"]=demand["Name"]
								demlistkey.append(newdic)
								# node_failed.remove(demand)
						
				

		elif key in failover.keys():
			print("fail")

		

			for demand in dem:

				if demand["Source"]==key:

					for s in failover[key]:
						newdic={}
						newdic["Traffic"]=float(s[1])*float(demand["Traffic"])/100
						newdic["TrafficLevel"]=demand["TrafficLevel"]
						newdic["ServiceClass"]=demand["ServiceClass"]
						newdic["Destination"]=demand["Destination"]
						newdic["Source"]=s[0]
						newdic["Name"]=demand["Name"]
						demlistkey.append(newdic)
				else:
					demlistkey.append(demand)

			print("dem compelete!")




		elif key in [d for d in routerlist if d[0:2]=="sr"]:
			node_failed=[d for d in dem if d["Destination"]==key]
			print("dest")


		
			
			print("dem compelete!")

		elif  key not in failover.keys() and (key[0:2]=="px" or key[0:2]=="tx"):
			print("ptnofail")

			for demand in dem:
				

				if demand["Source"]==key:
					pass
					node_failed.append(demand)
				else:
					demlistkey.append(demand)
			print("dem compelete!")


		else:
			print("pop")
			demlistkey=dem
			print("dem compelete!")		







		print(len(demlistkey))


		G.remove_node(key)
	   
		
		
		
		
		for demand in demlistkey:

		
			try:
		
#		print(demand)
#		if demand["terrafic-level"]=="Y18.09":
			
#				b=re.search(source_pattern,demand["Source"])
#
#				Source=b.group(0)
				
				
	#			print(Source)
	#			print(demand["Destination"])
				results=nx.all_shortest_paths(G,demand["Source"],demand["Destination"],weight=metric,method="dijkstra")
				
				allpath=[]
				for result in results:
					if len(result)<maxhops:
	#				print(result)
					
						linkspath=[]
	
						for i in list(range(1,len(result))):
							st=result[i-1]
							nd=result[i]
							ln=st+"--"+ nd
								
							linkspath.append(ln)
							
						
						allpath.append(linkspath)
	#				print(allpath)
				
				n=min(len(allpath),num)
	#			print(n)
				for path in allpath[:n]:
				
					for link in path:
						if link in linkdict:
							
							linkdict[link]=linkdict[link]+int(demand["Traffic"])/n
						else:
							
							linkdict[link]= int(demand["Traffic"])/n
					  
			except:
				# could not route this demand after the failure; count it as failed
				node_failed.append(demand)
	




		failed_by_node[key]=node_failed
		
		for key1 in linkdict.keys():
			
			if linkdict[key1]> linknormal[key1]:
				
			
				if temp[key1]<=linkdict[key1]:


					
					if temp[key1]==linkdict[key1] :
						temp[key1]=linkdict[key1]
						wctnode[key1].append(key)
					elif temp[key1]<linkdict[key1]:
						temp[key1]=linkdict[key1]
						wctnode[key1]=[]
						wctnode[key1].append(key)	
						
				
#		G=nx.MultiGraph()
#		G.add_weighted_edges_from(IGPlink,weight=metric)		
		
	

	return temp,wctnode,failed_by_node
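# A minimal sketch of the per-link load accumulation above: a demand is split
# evenly across (at most num) equal-cost shortest paths and added to every link
# of each chosen path. The graph, demand value and link naming are assumptions.
import networkx as nx

G_demo = nx.Graph()
G_demo.add_weighted_edges_from([("a", "b", 1), ("b", "d", 1),
                                ("a", "c", 1), ("c", "d", 1)])
traffic, num_demo = 10, 2
link_load = {}
ecmp_paths = list(nx.all_shortest_paths(G_demo, "a", "d", weight="weight"))
n_used = min(len(ecmp_paths), num_demo)
for p in ecmp_paths[:n_used]:
    for u, v in zip(p, p[1:]):
        link = u + "--" + v
        link_load[link] = link_load.get(link, 0) + traffic / n_used
print(link_load)  # each link on the two equal-cost paths carries 5.0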
Example #45
0
        print("Unexpected error:", sys.exc_info())  # sys.exc_info()返回出错信息
        input('press enter key to exit')


if __name__ == '__main__':
    path_input = input("Enter the path of the data file to be cleaned: ")
    # input_flag = os.path.exists(path_input)
    while (True):
        input_flag = os.path.exists(path_input)
        if (input_flag):
            break
        else:
            path_input = input("Please re-enter the path of the data file to be cleaned: ")
    read(path_input)
    print("Graph construction finished")
    G = nx.read_gml('Project111111111111.txt', label='id')
    print("Available nodes to choose from:", G.nodes())
    while (True):
        source = int(input("Choose any node as the source (shortest paths are printed as node ids): "))
        target = int(input("Choose any node as the target (shortest paths are printed as node ids): "))
        try:
            print([
                p
                for p in nx.all_shortest_paths(G, source=source, target=target)
            ])
        except:
            print()
        else:
            source = int(input("Choose any node as the source (shortest paths are printed as node ids): "))
            target = int(input("Choose any node as the target (shortest paths are printed as node ids): "))
    def answer(source_node_ID,
               target_node_type,
               association_node_type,
               use_json=False,
               threshold=0.2,
               n=20):
        """
		Answers the question what X are similar to Y based on overlap of common Z nodes. X is target_node_type,
		Y is source_node_ID, Z is association_node_type. The relationships are automatically determined in
		SimilarNodesInCommon by looking for 1-hop relationships and popping the FIRST one (you are warned).
		:param source_node_ID: actual name in the KG
		:param target_node_type: kinds of nodes you want returned
		:param association_node_type: kind of node you are computing the Jaccard overlap on
		:param use_json: print the results in standardized format
		:param threshold: only return results where jaccard is >= this threshold
		:param n: number of results to return (default 20)
		:return: response (or printed text)
		"""

        # Initialize the response class
        response = FormatOutput.FormatResponse(5)
        # add the column names for the row data
        response.message.table_column_names = [
            "source name", "source ID", "target name", "target ID",
            "Jaccard index"
        ]

        # Initialize the similar nodes class
        similar_nodes_in_common = SimilarNodesInCommon.SimilarNodesInCommon()

        # get the description
        source_node_description = RU.get_node_property(source_node_ID, 'name')

        # get the source node label
        source_node_label = RU.get_node_property(source_node_ID, 'label')

        # Get the nodes in common
        node_jaccard_tuples_sorted, error_code, error_message = similar_nodes_in_common.get_similar_nodes_in_common_source_target_association(
            source_node_ID, target_node_type, association_node_type, threshold)

        # reduce to the top n results (n defaults to 20)
        if len(node_jaccard_tuples_sorted) > n:
            node_jaccard_tuples_sorted = node_jaccard_tuples_sorted[0:n]

        # make sure that the input node isn't in the list
        node_jaccard_tuples_sorted = [
            i for i in node_jaccard_tuples_sorted if i[0] != source_node_ID
        ]

        # check for an error
        if error_code is not None or error_message is not None:
            if not use_json:
                print(error_message)
                return
            else:
                response.add_error_message(error_code, error_message)
                response.print()
                return

        #### If use_json not specified, then return results as a fairly plain list
        if not use_json:
            to_print = "The %s's involving similar %ss as %s are: \n" % (
                target_node_type, association_node_type,
                source_node_description)
            for other_disease_ID, jaccard in node_jaccard_tuples_sorted:
                to_print += "%s\t%s\tJaccard %f\n" % (
                    other_disease_ID,
                    RU.get_node_property(other_disease_ID, 'name'), jaccard)
            print(to_print)

        #### Else if use_json requested, return the results in the Translator standard API JSON format
        else:

            #### Create the QueryGraph for this type of question
            query_graph = QueryGraph()
            source_node = QNode()
            source_node.id = "n00"
            source_node.curie = source_node_ID
            source_node.type = source_node_label
            association_node = QNode()
            association_node.id = "n01"
            association_node.type = association_node_type
            association_node.is_set = True
            target_node = QNode()
            target_node.id = "n02"
            target_node.type = target_node_type
            query_graph.nodes = [source_node, association_node, target_node]

            #source_association_relationship_type = "unknown1"
            edge1 = QEdge()
            edge1.id = "en00-n01"
            edge1.source_id = "n00"
            edge1.target_id = "n01"
            #edge1.type = source_association_relationship_type

            #association_target_relationship_type = "unknown2"
            edge2 = QEdge()
            edge2.id = "en01-n02"
            edge2.source_id = "n01"
            edge2.target_id = "n02"
            #edge2.type = association_target_relationship_type

            query_graph.edges = [edge1, edge2]

            #### DONT Suppress the query_graph because we can now do the knowledge_map with v0.9.1
            response.message.query_graph = query_graph

            #### Create a mapping dict with the source curie and node types and edge types. This dict is used for reverse lookups by type
            #### for mapping to the QueryGraph. There is a potential point of failure here if there are duplicate node or edge types. FIXME
            response._type_map = dict()
            response._type_map[source_node.curie] = source_node.id
            response._type_map[association_node.type] = association_node.id
            response._type_map[target_node.type] = target_node.id
            response._type_map["e" + edge1.source_id + "-" +
                               edge1.target_id] = edge1.id
            response._type_map["e" + edge2.source_id + "-" +
                               edge2.target_id] = edge2.id

            #### Extract the sorted IDs from the list of tuples
            node_jaccard_ID_sorted = [
                id for id, jac in node_jaccard_tuples_sorted
            ]

            # print(RU.return_subgraph_through_node_labels(source_node_ID, source_node_label, node_jaccard_ID_sorted, target_node_type,
            #										[association_node_type], with_rel=[], directed=True, debug=True))

            # get the entire subgraph
            g = RU.return_subgraph_through_node_labels(source_node_ID,
                                                       source_node_label,
                                                       node_jaccard_ID_sorted,
                                                       target_node_type,
                                                       [association_node_type],
                                                       with_rel=[],
                                                       directed=False,
                                                       debug=False)

            # extract the source_node_number
            for node, data in g.nodes(data=True):
                if data['properties']['id'] == source_node_ID:
                    source_node_number = node
                    break

            # Get all the target numbers
            target_id2numbers = dict()
            node_jaccard_ID_sorted_set = set(node_jaccard_ID_sorted)
            for node, data in g.nodes(data=True):
                if data['properties']['id'] in node_jaccard_ID_sorted_set:
                    target_id2numbers[data['properties']['id']] = node

            for other_disease_ID, jaccard in node_jaccard_tuples_sorted:
                target_name = RU.get_node_property(other_disease_ID, 'name')
                to_print = "The %s %s involves similar %ss as %s with similarity value %f" % (
                    target_node_type, target_name, association_node_type,
                    source_node_description, jaccard)

                # get all the shortest paths between source and target
                all_paths = nx.all_shortest_paths(
                    g, source_node_number, target_id2numbers[other_disease_ID])

                # get all the nodes on these paths
                #try:
                if 1 == 1:
                    rel_nodes = set()
                    for path in all_paths:
                        for node in path:
                            rel_nodes.add(node)

                    if rel_nodes:
                        # extract the relevant subgraph
                        sub_g = nx.subgraph(g, rel_nodes)

                        # add it to the response
                        res = response.add_subgraph(sub_g.nodes(data=True),
                                                    sub_g.edges(data=True),
                                                    to_print,
                                                    jaccard,
                                                    return_result=True)
                        res.essence = "%s" % target_name  # populate with essence of question result
                        res.essence_type = target_node_type
                        row_data = []  # initialize the row data
                        row_data.append("%s" % source_node_description)
                        row_data.append("%s" % source_node_ID)
                        row_data.append("%s" % target_name)
                        row_data.append("%s" % other_disease_ID)
                        row_data.append("%f" % jaccard)
                        res.row_data = row_data


#				except:
#					pass
            response.print()
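# A bare-bones sketch of the subgraph extraction used above: gather every node
# lying on any shortest path between two endpoints, then cut that node set out
# of the full graph. The cycle graph and endpoints are illustrative assumptions.
import networkx as nx

g_demo = nx.cycle_graph(6)
rel_nodes_demo = set()
for p in nx.all_shortest_paths(g_demo, 0, 3):
    rel_nodes_demo.update(p)
sub_demo = nx.subgraph(g_demo, rel_nodes_demo)
print(sorted(sub_demo.nodes()))  # both 3-hop arcs of the cycle: [0, 1, 2, 3, 4, 5]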
for item in data:
	route_name = item['name']
	stops = item['stops']
	stop_names = [stop['stop']['name'] for stop in stops]
	if stop_names!=[]:
		#graph.add_star(stop_names,route=route_name)
		graph.add_path(stop_names,route=route_name)
		#stop_names.reverse()
		#graph.add_path(stop_names,route=route_name)
		#graph.add_star(stop_names,route=route_name)

#print nx.shortest_path(graph,'Dilsukhnagar Bus station','Patancheru Bus Stop')
#print nx.shortest_path(graph,'Patancheru Bus Stop','Hayath Nagar Bus Stop')
#for path in nx.all_shortest_paths(graph,'Dilsukhnagar Bus station','Patancheru Bus Stop'):
#	print path
	
#all_paths =  nx.all_pairs_shortest_path(graph)
#path = all_paths['Chaitanyapuri']['Ziaguda']
locator = geopy.geocoders.GoogleV3()
paths = nx.all_shortest_paths(graph,'Chaitanyapuri','Malakpet')
for path in paths:
	for k,v in zip(path,path[1:]):
		print locator.geocode(k)
		print locator.geocode(v)
		edges = graph[k][v]
		routes = []
		for edge in edges.values():
			routes.append(edge['route'])
		#print k,v,routes
	print path
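# A geocoder-free sketch of the leg/route lookup above: for consecutive stops on
# a shortest path through a MultiGraph, collect the 'route' attribute of every
# parallel edge. The stop names and route labels here are assumptions.
import networkx as nx

g_demo = nx.MultiGraph()
g_demo.add_edge("A", "B", route="10H")
g_demo.add_edge("A", "B", route="277")
g_demo.add_edge("B", "C", route="10H")
for demo_path in nx.all_shortest_paths(g_demo, "A", "C"):
    for u, v in zip(demo_path, demo_path[1:]):
        print(u, v, [attrs["route"] for attrs in g_demo[u][v].values()])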
Example #48
0
'''
GRAPH 10
'''
G=nx.DiGraph()

A=[(1,2,{'peso':15}),(2,3,{'peso':5}),(2,4,{'peso':1}),(3,4,{'peso':3}),(4,5,{'peso':9})]
G.add_edges_from(A)
pesos=nx.get_edge_attributes(G,'peso')
pos =nx.spectral_layout(G)

Tiempos=[]
for i in range(30):
    T1=0
    for j in range(300000):
        t1=time.time()
        R = [p for p in nx.all_shortest_paths(G, source=1, target=5, weight='peso')]
        t2=time.time()
        T=t2-t1
        T1=T1+T
    Tiempos.append(T1)  
    print('Total time over the repetitions: ', T1)

media=np.round(np.mean(Tiempos),4)
desviacion=np.round(np.std(Tiempos),4)

Arcos=[(R[0][i],R[0][i+1]) for i in range(len(R[0])-1)]

nx.draw_networkx_nodes(G,pos,nodelist=[1,2,3,4,5],node_color='b', node_size=300)
nx.draw_networkx_edges(G,pos,width=2,edgelist=[(1,2),(2,3),(4,5),(2,4),(3,4)],alpha=1, edge_color='black')
nx.draw_networkx_edge_labels(G, pos,edge_labels=pesos,font_color='b')
nx.draw_networkx_labels(G, pos,font_color='w')
Example #49
0
def odd_even_fault_tolerance_metric(network_size, routing_type):

    turns_health_2d_network = {
        "N2W": False,
        "N2E": False,
        "S2W": False,
        "S2E": False,
        "W2N": False,
        "W2S": False,
        "E2N": False,
        "E2S": False
    }
    Config.ag.topology = '2DMesh'
    Config.ag.x_size = network_size
    Config.ag.y_size = network_size
    Config.ag.z_size = 1
    Config.RotingType = routing_type

    all_odd_evens_file = open(
        'Generated_Files/Turn_Model_Eval/' + str(network_size) + "x" +
        str(network_size) + '_OE_metric_' + Config.RotingType + '.txt', 'w')
    all_odd_evens_file.write("TOPOLOGY::" + str(Config.ag.topology) + "\n")
    all_odd_evens_file.write("X SIZE:" + str(Config.ag.x_size) + "\n")
    all_odd_evens_file.write("Y SIZE:" + str(Config.ag.y_size) + "\n")
    all_odd_evens_file.write("Z SIZE:" + str(Config.ag.z_size) + "\n")
    ag = copy.deepcopy(AG_Functions.generate_ag())
    shmu = SystemHealthMonitoringUnit.SystemHealthMonitoringUnit()
    turns_health = copy.deepcopy(turns_health_2d_network)
    shmu.setup_noc_shm(ag, turns_health, False)
    noc_rg = copy.deepcopy(
        Routing.generate_noc_route_graph(ag, shmu, [], False, False))

    classes_of_doa_ratio = []
    turn_model_class_dict = {}
    tm_counter = 0
    """
    selected_turn_models = []
    for tm in all_odd_even_list:
        if len(tm[0])+len(tm[1]) == 11 or len(tm[0])+len(tm[1]) == 12:
            selected_turn_models.append(all_odd_even_list.index(tm))
    """
    #selected_turn_models = [677, 678, 697, 699, 717, 718, 737, 739, 757, 759, 778, 779, 797, 799, 818, 819,
    #                        679, 698, 719, 738, 758, 777, 798, 817]

    for turn_model in all_odd_even_list:
        #for item in selected_turn_models:
        # print item
        # turn_model = all_odd_even_list[item]

        sys.stdout.write("\rnumber of processed turn models: %i " % tm_counter)
        sys.stdout.flush()
        tm_counter += 1
        link_dict = {}
        turn_model_index = all_odd_even_list.index(turn_model)
        turn_model_odd = turn_model[0]
        turn_model_even = turn_model[1]

        for node in ag.nodes():
            node_x, node_y, node_z = AG_Functions.return_node_location(node)
            if node_x % 2 == 1:
                for turn in turn_model_odd:
                    shmu.restore_broken_turn(node, turn, False)
                    from_port = str(node) + str(turn[0]) + "I"
                    to_port = str(node) + str(turn[2]) + "O"
                    Routing.update_noc_route_graph(noc_rg, from_port, to_port,
                                                   'ADD')
            else:
                for turn in turn_model_even:
                    shmu.restore_broken_turn(node, turn, False)
                    from_port = str(node) + str(turn[0]) + "I"
                    to_port = str(node) + str(turn[2]) + "O"
                    Routing.update_noc_route_graph(noc_rg, from_port, to_port,
                                                   'ADD')

        number_of_pairs = len(ag.nodes()) * (len(ag.nodes()) - 1)

        all_paths_in_graph = []
        for source_node in ag.nodes():
            for destination_node in ag.nodes():
                if source_node != destination_node:
                    if is_destination_reachable_from_source(
                            noc_rg, source_node, destination_node):
                        # print source_node, "--->", destination_node
                        if Config.RotingType == 'MinimalPath':
                            shortest_paths = list(
                                all_shortest_paths(
                                    noc_rg,
                                    str(source_node) + str('L') + str('I'),
                                    str(destination_node) + str('L') +
                                    str('O')))
                            paths = []
                            for path in shortest_paths:
                                minimal_hop_count = manhattan_distance(
                                    source_node, destination_node)
                                if (len(path) / 2) - 1 <= minimal_hop_count:
                                    paths.append(path)
                                    all_paths_in_graph.append(path)
                        else:
                            paths = list(
                                all_simple_paths(
                                    noc_rg,
                                    str(source_node) + str('L') + str('I'),
                                    str(destination_node) + str('L') +
                                    str('O')))
                            all_paths_in_graph += paths
                        link_dict = find_similarity_in_paths(link_dict, paths)

        metric = 0
        for item in link_dict.keys():
            metric += link_dict[item]

        if Config.RotingType == 'MinimalPath':
            doa = degree_of_adaptiveness(ag, noc_rg,
                                         False) / float(number_of_pairs)
            #metric = doa/(float(metric)/len(ag.edges()))
            metric = 1 / (float(metric) / len(ag.edges()))
            metric = float("{:3.3f}".format(metric))
            # print "Turn Model ", '%5s' %turn_model_index, "\tdoa:", "{:3.3f}".format(doa),
            #       "\tmetric:", "{:3.3f}".format(metric)
        else:
            doa_ex = extended_degree_of_adaptiveness(
                ag, noc_rg, False) / float(number_of_pairs)
            #metric = doa_ex/(float(metric)/len(ag.edges()))
            metric = 1 / (float(metric) / len(ag.edges()))
            metric = float("{:3.3f}".format(metric))
            # print "Turn Model ", '%5s' %turn_model_index, "\tdoa:", "{:3.3f}".format(doa_ex),
            #       "\tmetric:", "{:3.3f}".format(metric)

        if metric not in classes_of_doa_ratio:
            classes_of_doa_ratio.append(metric)
        if metric in turn_model_class_dict.keys():
            turn_model_class_dict[metric].append(turn_model_index)
        else:
            turn_model_class_dict[metric] = [turn_model_index]

        # return SHMU and RG back to default
        for node in ag.nodes():
            node_x, node_y, node_z = AG_Functions.return_node_location(node)
            if node_x % 2 == 1:
                for turn in turn_model_odd:
                    shmu.break_turn(node, turn, False)
                    from_port = str(node) + str(turn[0]) + "I"
                    to_port = str(node) + str(turn[2]) + "O"
                    Routing.update_noc_route_graph(noc_rg, from_port, to_port,
                                                   'REMOVE')
            else:
                for turn in turn_model_even:
                    shmu.break_turn(node, turn, False)
                    from_port = str(node) + str(turn[0]) + "I"
                    to_port = str(node) + str(turn[2]) + "O"
                    Routing.update_noc_route_graph(noc_rg, from_port, to_port,
                                                   'REMOVE')

    all_odd_evens_file.write("classes of metric" + str(classes_of_doa_ratio) +
                             "\n")
    all_odd_evens_file.write("----------" * 3 + "\n")
    all_odd_evens_file.write("turn models of class" + "\n")
    # print "classes of metric", classes_of_doa_ratio
    for item in sorted(turn_model_class_dict.keys()):
        # print item, turn_model_class_dict[item]
        all_odd_evens_file.write(
            str(item) + " " + str(turn_model_class_dict[item]) + "\n")

    all_odd_evens_file.write("----------" * 3 + "\n")
    all_odd_evens_file.write("distribution of turn models" + "\n")
    for item in sorted(turn_model_class_dict.keys()):
        temp_list = []
        for tm in turn_model_class_dict[item]:
            turn_model = all_odd_even_list[tm]
            number_of_turns = len(turn_model[0]) + len(turn_model[1])
            temp_list.append(number_of_turns)
        # print item, temp_list.count(8), temp_list.count(9), temp_list.count(10),
        # temp_list.count(11), temp_list.count(12)
        all_odd_evens_file.write(
            str(item) + " " + str(temp_list.count(8)) + " " +
            str(temp_list.count(9)) + " " + str(temp_list.count(10)) + " " +
            str(temp_list.count(11)) + " " + str(temp_list.count(12)) + "\n")
    all_odd_evens_file.close()
    return turn_model_class_dict
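# A simplified sketch of the minimal-path filter above: keep only the shortest
# paths whose hop count stays within the Manhattan distance between source and
# destination. A plain grid graph stands in for the NoC routing graph here,
# which is an assumption for illustration.
import networkx as nx

grid = nx.grid_2d_graph(3, 3)
src, dst = (0, 0), (2, 2)
hop_bound = abs(src[0] - dst[0]) + abs(src[1] - dst[1])   # Manhattan distance
minimal = [p for p in nx.all_shortest_paths(grid, src, dst)
           if len(p) - 1 <= hop_bound]
print(len(minimal))   # all 6 shortest paths on the 3x3 mesh are minimal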
Example #50
0
    def GetSpfPath(self, source: Node, target: Node, demand: int,
                   demand_obj: Demand):
        G = nx.MultiDiGraph()
        # node_list = [node for node in self.get_nodes() if not node._failed]
        node_list = self.get_nodes()
        for node in node_list:
            for interface in node.interfaces:
                if not interface._failed:
                    G.add_edge(node,
                               interface.target,
                               **interface._networkX(),
                               data=interface)
        G.add_nodes_from(node_list)
        all_paths = list(
            nx.all_shortest_paths(G, source, target, weight="metric"))
        self._GetSpfPathList(source, target, demand, demand_obj, G)
        unique_next_hop = self.return_next_hops(
            all_paths, G)  # set([p[1] for p in all_paths])

        demand_next_hop = demand / sum(unique_next_hop.values())
        # print("first all_paths", all_paths)
        # print("first unique all next hops", unique_next_hop)
        # print("first demands per next hop", demand_next_hop)
        temp_list = []
        for nh, values in unique_next_hop.items():
            """
            print(
                f"***{source} will send {demand} to { nh } as {demand_next_hop * values}"
            )
            """
            self._GetSpfPath(source, nh, demand_next_hop * values, G,
                             demand_obj)
            temp_list.append({
                "source": nh,
                "demand": demand_next_hop * values
            })
        while len(temp_list) >= 1:
            for i, entry in enumerate(temp_list):
                # print(entry, target)
                if entry["source"] == target:
                    temp_list.pop(i)
                    continue
                all_paths = list(
                    nx.all_shortest_paths(G,
                                          entry["source"],
                                          target,
                                          weight="metric"))
                unique_next_hop = self.return_next_hops(
                    all_paths, G)  # set([p[1] for p in all_paths])
                demand_next_hop = entry["demand"] / sum(
                    unique_next_hop.values())
                # print("all_paths", all_paths)
                # print("unique all next hops", unique_next_hop)
                # print("demands per next hop", demand_next_hop)
                src = entry["source"]
                for nh, values in unique_next_hop.items():
                    self._GetSpfPath(src, nh, demand_next_hop * values, G,
                                     demand_obj)
                    """
                    for entry2 in temp_list:
                        if entry2["source"] == nh:
                            entry2["demand"] += demand_next_hop
                    
                    print(
                        f"***{src} will send {demand} to { nh } as {demand_next_hop * values}"
                    )
                    """
                    temp_list.append({
                        "source": nh,
                        "demand": demand_next_hop * values
                    })
                temp_list.pop(i)
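# A small sketch of the ECMP next-hop extraction hinted at above: the distinct
# second node of every equal-cost shortest path is a candidate next hop and the
# demand is divided evenly among them. The toy graph and demand are assumptions.
import networkx as nx

g_ecmp = nx.Graph()
g_ecmp.add_weighted_edges_from([("s", "a", 1), ("a", "t", 1),
                                ("s", "b", 1), ("b", "t", 1)], weight="metric")
ecmp_paths2 = list(nx.all_shortest_paths(g_ecmp, "s", "t", weight="metric"))
next_hops = {p[1] for p in ecmp_paths2}
print(next_hops, 100 / len(next_hops))  # {'a', 'b'} with 50.0 per next hop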
 def test_bad_method(self):
     with pytest.raises(ValueError):
         G = nx.path_graph(2)
         list(nx.all_shortest_paths(G, 0, 1, weight='weight',
                                    method='SPAM'))
Example #52
0
	def _packet_in_handler(self, ev):
		"""
		Executes every time a packet arrives at the controller
		"""
		# If you hit this you might want to increase
		# the "miss_send_length" of your switch
		if ev.msg.msg_len < ev.msg.total_len:
			self.logger.debug("packet truncated: only %s of %s bytes",
							  ev.msg.msg_len, ev.msg.total_len)
		msg = ev.msg
		datapath = msg.datapath
		ofproto = datapath.ofproto
		parser = datapath.ofproto_parser
		in_port = msg.match['in_port']

		pkt = packet.Packet(msg.data)
		eth = pkt.get_protocols(ethernet.ethernet)[0]

		if eth.ethertype == ether_types.ETH_TYPE_LLDP:
			# ignore lldp packet
			return

		dst = eth.dst
		src = eth.src

		self.logger.info("Na controller dorazi paket. Predtym prisiel na Switch cislo %s, na port %s. Dst: %s, Src: %s",datapath.id,in_port,dst,src)

		t = pkt.get_protocol(ipv4.ipv4)

		if t:
			print 'source ip: ',t.src
			print 'dest ip: ',t.dst

		ht = pkt.get_protocol(tcp.tcp)
		found_path=0
		
		# If TCP
		if ht:
			print 'source port: ',ht.src_port
			print 'destination port: ',ht.dst_port
			options = ht.option
			# Parse TCP options
			if options and len(options) > 0:
				for opt in options:
					# Parse MPTCP options
					if opt.kind == 30:
						# Parse the MPTCP subtype byte: "00" = MP_CAPABLE, "10"/"11" = MP_JOIN
						hexopt = binascii.hexlify(opt.value)
						subtype = hexopt[:2]
						# MP CAPABLE
						if subtype == "00":
							# MP CAPABLE SYN      
							if ht.bits == 2:
								self.logger.info("MP_CAPABLE SYN")
								
								# Send A->B traffic to controller
								match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.src,ipv4_dst=t.dst,tcp_src=ht.src_port,tcp_dst=ht.dst_port)
								actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
											  ofproto.OFPCML_NO_BUFFER)]
								self.add_flow(datapath, 3, match, actions)

								# Send B->A traffic to controller
								match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.dst,ipv4_dst=t.src,tcp_src=ht.dst_port,tcp_dst=ht.src_port)
								actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
											  ofproto.OFPCML_NO_BUFFER)]
								self.add_flow(datapath, 3, match, actions)

								# Sender's key.
								keya = hexopt[4:]

								# Sender's token is a SHA1 truncated hash of the key. 
								tokena = int(hashlib.sha1(binascii.unhexlify(hexopt[4:])).hexdigest()[:8],16)

								# Store IPs, ports, sender's key and sender's token. 
								values = {'tsrc':t.src,'tdst':t.dst,'keya':keya,'tokena':tokena,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port,'src':src,'dst':dst}
								query = "replace INTO mptcp.conn (ip_src,ip_dst,keya,tokena,tcp_src,tcp_dst,src,dst) values('{tsrc}','{tdst}','{keya}',{tokena},{htsrc_port},{htdst_port},'{src}','{dst}');"
								self.executeInsert(query.format(**values))
							# MP_CAPABLE SYN-ACK
							elif ht.bits == 18:
								self.logger.info("MP_CAPABLE SYN-ACK")

								# Receiver's key.
								keyb = hexopt[4:]

								# Receiver's token is a SHA1 truncated hash of the key.
								tokenb = int(hashlib.sha1(binascii.unhexlify(hexopt[4:])).hexdigest()[:8],16)

								# Store receiver's key and receiver's token to the appropriate connection. 
								values = {'tsrc':t.src,'tdst':t.dst,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port,'keyb':keyb,'tokenb':tokenb}
								query = "UPDATE mptcp.conn SET keyb='{keyb}',tokenb={tokenb} WHERE ip_src='{tdst}' AND ip_dst='{tsrc}' AND tcp_src={htdst_port} AND tcp_dst={htsrc_port};"
								self.executeInsert(query.format(**values))

							# MP_CAPABLE ACK
							elif ht.bits == 16:
								self.logger.info("MP_CAPABLE ACK")

								found_path = 1
								dpid = datapath.id
								paths = list(nx.all_shortest_paths(self.net,src,dst))
#								macs = src+'-'+dst
								path = random.choice(paths)
#								if macs in self.connpaths: # If a path has already been chosen for this src/dst pair
#									self.logger.info("A path is already chosen for this src/dst pair. Using this path:")
#									path = paths[self.connpaths[macs]]
#									print(path)
#								else:
#									self.logger.info("No path chosen yet for this src/dst pair. Using this path:")
#									path_index = randrange(0,len(paths))
#									path = paths[path_index]
#									self.connpaths[macs] = path_index
#									print(path)
#									print(self.connpaths[macs])
#									self.logger.info("The chosen random index is: %d.",path_index)
							
								#path=['08:00:27:5f:ab:7f', 1, 5, 6, '08:00:27:77:27:8c']
								fullpath = path
								tmppath = path[1:-1]
								for s in tmppath:
									match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.src,ipv4_dst=t.dst,tcp_src=ht.src_port,tcp_dst=ht.dst_port)
									next = fullpath[fullpath.index(s)+1]
									out_port = self.net[s][next]['port']
									actions = [parser.OFPActionOutput(out_port)]
									self.logger.info("Instalujem out_port %d pravidlo do switchu %d",out_port,s)
									self.add_flow(get_datapath(self,s),3,match,actions)
									
									match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.dst,ipv4_dst=t.src,tcp_src=ht.dst_port,tcp_dst=ht.src_port)
									prev = fullpath[fullpath.index(s)-1]
									out_port = self.net[s][prev]['port']
									actions = [parser.OFPActionOutput(out_port)]
									self.logger.info("Instalujem out_port %d pravidlo do switchu %d",out_port,s)
									self.add_flow(get_datapath(self,s),3,match,actions)
#								command = 'ovs-ofctl -OOpenFlow13 del-flows s1 "eth_dst='+dst+',tcp,tcp_flags=0x010"'
#								os.system(command)

						# MP_JOIN
						elif subtype == "10" or subtype == "11":
							# MP_JOIN SYN 
							if ht.bits == 2:
								self.logger.info("MP_JOIN SYN")

								# Send A->B traffic to controller
								match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.src,ipv4_dst=t.dst,tcp_src=ht.src_port,tcp_dst=ht.dst_port)
								actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
											  ofproto.OFPCML_NO_BUFFER)]
								self.add_flow(datapath, 3, match, actions)

								# Send B->A traffic to controller
								match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.dst,ipv4_dst=t.src,tcp_src=ht.dst_port,tcp_dst=ht.src_port)
								actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
											  ofproto.OFPCML_NO_BUFFER)]
								self.add_flow(datapath, 3, match, actions)

								# Receiver's token. From the MPTCP connection. 
								tokenb = int(hexopt[4:][:8],16)

								# Sender's nonce. 
								noncea = hexopt[12:]

								# Store IPs, ports, sender's nonce into subflow table.
								values = {'tsrc':t.src,'tdst':t.dst,'tokenb':tokenb,'noncea':noncea,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port}
								query = "replace INTO mptcp.subflow (ip_src,ip_dst,tokenb,noncea,tcp_src,tcp_dst) values('{tsrc}','{tdst}',{tokenb},'{noncea}',{htsrc_port},{htdst_port});"
								self.executeInsert(query.format(**values))

							# MP_JOIN SYN-ACK
							elif ht.bits == 18:
								self.logger.info("MP_JOIN SYN-ACK.")
								
								# Receiver's truncated HASH. 
								trunhash = int(hexopt[4:][:16],16)

								# Receiver's nonce.
								nonceb = hexopt[20:]

								# Store truncated HASH and receiver's nonce into appropriate subflow. 
								values = {'tsrc':t.src,'tdst':t.dst,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port,'trunhash':trunhash,'nonceb':nonceb}
								query = "UPDATE mptcp.subflow SET trunhash={trunhash},nonceb='{nonceb}' WHERE ip_src='{tdst}' AND ip_dst='{tsrc}' AND tcp_src={htdst_port} AND tcp_dst={htsrc_port};"
								self.executeInsert(query.format(**values))

							# MP_JOIN ACK
							elif ht.bits == 16:
								self.logger.info("MP_JOIN ACK.")

								found_path = 1
								dpid = datapath.id
								paths = list(nx.all_shortest_paths(self.net,src,dst))
	#							macs = src+'-'+dst
								path = random.choice(paths)
	#							if macs in self.connpaths: #Ak uz mam zvolenu cestu
	#								self.logger.info("Pre takyto srcdst uz mam zvolenu cestu. Pouzijem tuto cestu:")
	#								path = paths[self.connpaths[macs]]
	#								print(path)
	#							else:
	#								self.logger.info("No path has been chosen for this src-dst pair yet. Using this path:")
	#								path_index = randrange(0,len(paths))
	#								path = paths[path_index]
	#								self.connpaths[macs] = path_index
	#								print(path)
	#								print(self.connpaths[macs])
	#								self.logger.info("Random path index: %d.",path_index)
								
								fullpath = path
								tmppath = path[1:-1]
								for s in tmppath:
									match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.src,ipv4_dst=t.dst,tcp_src=ht.src_port,tcp_dst=ht.dst_port)
									next = fullpath[fullpath.index(s)+1]
									out_port = self.net[s][next]['port']
									actions = [parser.OFPActionOutput(out_port)]
									self.logger.info("Instalujem out_port %d pravidlo do switchu %d",out_port,s)
									self.add_flow(get_datapath(self,s),3,match,actions)
									
									match = parser.OFPMatch(eth_type=0x0800,ip_proto=6,ipv4_src=t.dst,ipv4_dst=t.src,tcp_src=ht.dst_port,tcp_dst=ht.src_port)
									prev = fullpath[fullpath.index(s)-1]
									out_port = self.net[s][prev]['port']
									actions = [parser.OFPActionOutput(out_port)]
									self.logger.info("Instalujem out_port %d pravidlo do switchu %d",out_port,s)
									self.add_flow(get_datapath(self,s),3,match,actions)
								
								# Sender's HASH.
								hmachash = hexopt[4:]

								# Store sender's HASH to appropriate subflow. 
								values = {'tsrc':t.src,'tdst':t.dst,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port,'hmachash':hmachash}
								query = "UPDATE mptcp.subflow SET hash='{hmachash}' WHERE ip_src='{tsrc}' AND ip_dst='{tdst}' AND tcp_src={htsrc_port} AND tcp_dst={htdst_port};"
								self.executeInsert(query.format(**values))

								# Select keys from appropriate connection based on receiver's token. 
								values = {'tsrc':t.src,'tdst':t.dst,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port}
								query = "SELECT keya,keyb from conn where tokenb in (SELECT tokenb from subflow where ip_src='{tsrc}' and ip_dst='{tdst}' and tcp_src={htsrc_port} and tcp_dst={htdst_port});"
								keys = self.executeSelect(query.format(**values))

								# Select nonces for current subflow. 
								values = {'tsrc':t.src,'tdst':t.dst,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port}
								query = "SELECT noncea,nonceb from subflow where ip_src='{tsrc}' AND ip_dst='{tdst}' AND tcp_src={htsrc_port} AND tcp_dst={htdst_port};"
								nonces = self.executeSelect(query.format(**values))

								# Key for generating HMAC is a concatenation of two keys. Message is a concatenation of two nonces. 
								keyhmac = binascii.unhexlify(keys[0]+keys[1])
								message = binascii.unhexlify(nonces[0]+nonces[1])

								# Generate hash.
								vysledok = hmac.new(keyhmac,message, hashlib.sha1).hexdigest()
								print(vysledok)

								# Compare generated HASH to the one from MP_JOIN ACK.
								if vysledok == hmachash:

									# Get connection ID based on tokens. 
									values = {'tsrc':t.src,'tdst':t.dst,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port}
									query = "SELECT id from conn where tokenb in (SELECT tokenb from subflow where ip_src='{tsrc}' and ip_dst='{tdst}' and tcp_src={htsrc_port} and tcp_dst={htdst_port});"
									ids = self.executeSelect(query.format(**values))[0]

									# Insert connection ID to a current subflow. 
									values = {'tsrc':t.src,'tdst':t.dst,'htsrc_port':ht.src_port,'htdst_port':ht.dst_port, 'id':ids}
									query = "update subflow set connid = {id} where ip_src='{tsrc}' and ip_dst='{tdst}' and tcp_src={htsrc_port} and tcp_dst={htdst_port};"
									self.executeInsert(query.format(**values))

									query = "select src,dst from conn join subflow on subflow.connid=conn.id where conn.id=(select connid from subflow where ip_src='{tsrc}' and ip_dst='{tdst}' and tcp_src={htsrc_port} and tcp_dst={htdst_port}) group by src;"

									result = self.executeSelect(query.format(**values))
									srcmac = result[0]
									dstmac = result[1]
									print ('srcmac = %s' % srcmac)
									print ('dstmac = %s' % dstmac)


		# Learn MAC addresses to avoid FLOOD.
		dpid = datapath.id
	#	self.mac_to_port.setdefault(dpid, {})
	#	self.mac_to_port[dpid][src] = in_port

		# Shortest path forwarding
		for f in msg.match.fields:
			if f.header == ofproto_v1_3.OXM_OF_IN_PORT:
				in_port = f.value

	#	if src not in self.net:
	#		self.net.add_node(src)
	#		self.net.add_edge(dpid,src,port=in_port)
	#		self.net.add_edge(src,dpid)
		if dst in self.net:
			print("Som v rozhodovani.")
			if found_path == 1:
				print ("Mam cestu")
			#	print ("Assigning random path.")
			#	path = random_path
			#	next = path[path.index(dpid) + 1]
			#	out_port = self.net[dpid][next]['port']
			else:
				print ("Assining not random path.")
				path = nx.shortest_path(self.net,dpid,dst)
				next = path[path.index(dpid) + 1]
				print (path)
				out_port = self.net[dpid][next]['port']
				print ("out: %d",out_port)
		else:
			out_port = ofproto.OFPP_FLOOD

#		if dst in self.mac_to_port[dpid]:
#			out_port = self.mac_to_port[dpid][dst]
#		else:
#			out_port = ofproto.OFPP_FLOOD

		if found_path==0:
			actions = [parser.OFPActionOutput(out_port)]

			# Install flow to avoid FLOOD next time. 
			if out_port != ofproto.OFPP_FLOOD:
				match = parser.OFPMatch(in_port=in_port, eth_dst=dst)
				if msg.buffer_id != ofproto.OFP_NO_BUFFER:
					self.add_flow(datapath, 1, match, actions, msg.buffer_id)
					return
				else:
					self.add_flow(datapath, 1, match, actions)
			data = None
			if msg.buffer_id == ofproto.OFP_NO_BUFFER:
				data = msg.data

			out = parser.OFPPacketOut(datapath=datapath, buffer_id=msg.buffer_id,
									  in_port=in_port, actions=actions, data=data)
			datapath.send_msg(out)
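# A minimal standalone sketch (not part of the controller above) of the MP_JOIN ACK
# validation performed in the branch handling ht.bits == 16: per RFC 6824 the
# HMAC-SHA1 key is Key-A || Key-B and the message is the two nonces R-A || R-B.
# The argument names are assumptions; they stand for the hex strings that the
# controller stores in the conn and subflow tables.
import binascii
import hashlib
import hmac

def verify_mp_join_ack(keya_hex, keyb_hex, noncea_hex, nonceb_hex, ack_hmac_hex):
    # rebuild the raw key and message bytes from the stored hex strings
    key = binascii.unhexlify(keya_hex + keyb_hex)
    message = binascii.unhexlify(noncea_hex + nonceb_hex)
    # the MP_JOIN ACK carries the sender's full 160-bit HMAC as hex
    return hmac.new(key, message, hashlib.sha1).hexdigest() == ack_hmac_hex.lower()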
Example #53
0
 def all_shortest_path(self, start, end):
     path = nx.all_shortest_paths(self.graph, start, end)
     return path
    def multi_spaths(self,test,host):
        #add switch to the list
        i=0
        break_loop = 0
        switch = []
        while i < len(test):
            j=0
            if len(switch) == 0:
                switch.append(test[i]['dst-switch'])
            else:
                while j < len(switch):
                    if test[i]['dst-switch'] == switch[j]:
                        break_loop+=1
                        break            
                    j+=1
                if break_loop == 0:
                    switch.append(test[i]['dst-switch'])
                j=0
                break_loop = 0
                while j < len(switch):
                    if test[i]['src-switch'] == switch[j]:
                        break_loop+=1
                        break            
                    j+=1
                if break_loop == 0:
                    switch.append(test[i]['src-switch'])
                break_loop = 0
            i+=1

        #Create Graph
        G = nx.Graph()
        G.add_nodes_from(switch)
        print '\n Add Nodes Completed \n'
        print G.nodes()
        print '\n ------------------- \n'

        #Add links
        link_list = []
        i=0
        while i < len(test):
            link_list.append((test[i]['src-switch'],test[i]['dst-switch']))
            i+=1
        G.add_edges_from(link_list)
        print '\n Add Links Completed \n'
        print G.edges()
        print '\n ------------------- \n'

        #Add connected ports between switches
        link_port = []
        i=0
        while i < len(link_list):
            if (test[i]['src-switch'] == link_list[i][0]) and (test[i]['dst-switch'] == link_list[i][1]):
                link_port.append((test[i]['src-switch'],test[i]['src-port'],test[i]['dst-port'],test[i]['dst-switch']))
            i+=1
        print '\n Add Connected Ports Completed \n'
        print link_port
        print '\n ----------------------------- \n'
        
        #check access nodes
        source = []
        host_mac = []
        i=0
        j=0
        k=0
        while i < len(host):
            if host[i]['attachmentPoint'] != []:
                #find host mac
                host_mac.append({'mac':host[i]['mac'],'SW':host[i]['attachmentPoint'][0]['switchDPID'],'port':host[i]['attachmentPoint'][0]['port']})
                while j < len(source):
                    if source[j] == host[i]['attachmentPoint'][0]['switchDPID']:
                        k+=1
                        break
                    j+=1
                if k == 0:
                    source.append(host[i]['attachmentPoint'][0]['switchDPID'])
            k=0
            j=0
            i+=1    
        print '\n Check Access Nodes Completed \n'
        print source
        print '\n ---------------------------- \n'

        print '\n Hosts & Switches Completed \n'
        print host_mac
        print '\n ---------------------------- \n'
        
        #find unique pair of sources and destinations of access switches
        comb = [list(p) for p in itertools.combinations(source,2)]
        print '\n Unique Pairs of Sources and Destinations of Access Switches \n'
        print comb
        print '\n ----------------------------------------------------------- \n'

        #find pair of sources and destinations of hosts
        pair = []
        i=0
        j=0
        k=0
        src_host = []
        dest_host = []
        while i < len(comb):
            src_sw = comb[i][0]
            dest_sw = comb[i][1]
            while j < len(host_mac):
                if host_mac[j]['SW'] == src_sw:
                    src_host.append(host_mac[j]['mac'])
                j+=1
            j=0
            while j < len(host_mac):
                if host_mac[j]['SW'] == dest_sw:
                    dest_host.append(host_mac[j]['mac'])
                j+=1
            j=0
            i+=1
        
        while k<len(dest_host):
            pair.append({'src':src_host[k],'dest':dest_host[k]})
            k+=1
            
        print '\n Unique Pairs of Sources and Destinations of Hosts \n'
        print pair
        print '\n ------------------------------------------------- \n'

        #Find All Shortest paths
        i=0
        j=0
        k=0
        paths = []
        while i < len(comb):
            [paths.append(p) for p in nx.all_shortest_paths(G,comb[i][0],comb[i][1])]
            i+=1
        print '\n Find All Shortest Paths Completed \n'
        print paths
        print '\n --------------------------------- \n'
        return paths
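    # Hedged usage sketch (not in the original source): multi_spaths expects
    # Floodlight-style REST data, 'test' being the topology link list and 'host'
    # the device list.  The literal values below are made up; only the key names
    # mirror what the method actually reads.
    #
    #   links = [{'src-switch': '00:00:00:00:00:00:00:01', 'src-port': 2,
    #             'dst-switch': '00:00:00:00:00:00:00:02', 'dst-port': 3}]
    #   hosts = [{'mac': ['00:00:00:00:00:01'],
    #             'attachmentPoint': [{'switchDPID': '00:00:00:00:00:00:00:01', 'port': 1}]}]
    #   paths = app.multi_spaths(links, hosts)   # 'app' is a hypothetical instance of this class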
Example #55
0
def jellyfish_graph_to_dicts(useEcmp):
    # true if use ECMP, false if use k-shortest-paths
    def host_ip(node):
        return '10.0.0.' + str(node)

    def switch_ip(node):
        return '10.' + str(node) + '.0.0'
    
    data = None
    with open('generated_rrg', 'r') as infile:
        data = json.load(infile)
    graph = nx.readwrite.node_link_graph(data)
    
    calc_path_map = defaultdict(lambda:defaultdict(lambda:defaultdict(lambda:None)))
    link_to_port = defaultdict(lambda:defaultdict(lambda:None))
    ip_to_dpid = defaultdict(lambda:None)
    hosts = []

    host_to_ip = {}
    switch_to_ip = {}

    # set host and switch ips (every switch gets one host)
    for node_orig in graph.nodes():
        node = node_orig + 1
        switch_to_ip[node] = switch_ip(node) 
        host_to_ip[node] = host_ip(node) 
        hosts.append(host_to_ip[node])
    # dpids for switches 
        ip_to_dpid[switch_to_ip[node]] = node
    
    # links to ports
    for node_orig in graph.nodes():
        node = node_orig + 1
        node_host_ip = host_to_ip[node]
        node_switch_ip = switch_to_ip[node]
        link_to_port[node_host_ip][node_switch_ip] = node
        link_to_port[node_switch_ip][node_host_ip] = node
        for neigh_orig in graph.neighbors(node_orig):
            neigh = neigh_orig + 1
            neigh_switch_ip = switch_to_ip[neigh]
            link_to_port[neigh_switch_ip][node_switch_ip] = node
            link_to_port[node_switch_ip][neigh_switch_ip] = neigh

    # set path ips
    for node_i_orig in graph.nodes():
        node_i = node_i_orig + 1
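        # pair each node's host with the next node's host, wrapping back to node 1, so one src/dst pair per node gets precomputed paths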
        node_j = node_i + 1
        if (node_j == len(graph.nodes()) + 1):
            node_j = 1
        if (useEcmp):
            calc_paths = list(islice(nx.all_shortest_paths(graph, node_i_orig, node_j - 1), 7))
        else:
            calc_paths = list(islice(nx.shortest_simple_paths(graph, node_i_orig, node_j - 1), 8))

        calc_ip_paths = []
        calc_ip_rev_paths = []
        for calc_path in calc_paths:
            calc_ip_path = []
            calc_ip_rev_path = []
            print "path" + str(calc_path)
            for i in range(0, len(calc_path)):
                calc_ip_path.append(switch_to_ip[calc_path[i] + 1])
                calc_ip_rev_path.insert(0, switch_to_ip[calc_path[i] + 1])
            # add end host
            calc_ip_path.append(host_to_ip[calc_path[len(calc_path) - 1] + 1])
            calc_ip_rev_path.append(host_to_ip[calc_path[0] + 1])
            calc_ip_paths.append(calc_ip_path)
            calc_ip_rev_paths.append(calc_ip_rev_path)


        src_ip = host_to_ip[node_i]
        dst_ip = host_to_ip[node_j]
        print "source node: " + str(node_i)
        print "dst node: " + str(node_j)
        print "source ip: " + src_ip
        print "dst ip: " + dst_ip
        print "paths: " + str(calc_ip_paths)
        print "reverse paths: " + str(calc_ip_rev_paths)
        print ""

        calc_path_map[src_ip][dst_ip] = calc_ip_paths
        calc_path_map[dst_ip][src_ip] = calc_ip_rev_paths

    #print ecmp_path_map
    return hosts, calc_path_map, link_to_port, ip_to_dpid
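
# Hedged usage sketch: assuming 'generated_rrg' (a node-link JSON dump of the random
# regular graph) is present in the working directory, the returned maps can be read
# like this; the concrete IPs follow the host_ip()/switch_ip() scheme defined above.
#
#   hosts, path_map, link_to_port, ip_to_dpid = jellyfish_graph_to_dicts(useEcmp=True)
#   print path_map['10.0.0.1']['10.0.0.2']      # precomputed paths between hosts 1 and 2
#   print link_to_port['10.1.0.0']['10.2.0.0']  # port entry, if switches 1 and 2 are neighbours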
for node in net.nodes():
    print node.id, node.memory, node.status
    print " "
#sim.reset()

# initialize the temporary routing list and set the error counter to 0
tmpPathList = []
errorResultCounter = 0
# go through every pair of nodes
for node in net.nodes():
    for node2 in net.nodes():
        # if the two nodes are not the same
        if not node == node2:
            # take all shortest paths between the pair of nodes
            tmpPathList = [
                p for p in nx.all_shortest_paths(
                    net, source=node, target=node2, weight='weight')
            ]
            # take the shortest path obtained from the algorithm
            shortestPath = node.memory['routingTable'][node2]
            # remove the source node itself from each of the shortest paths
            for l in tmpPathList:
                l.remove(node)
            # check whether the algorithm found a shortest path; if not, increment the error counter
            if shortestPath not in tmpPathList:
                errorResultCounter += 1

# print the number of errors and report that the script has finished
print "\nERRORS: " + str(errorResultCounter)
print "\nDone script."
Example #57
0
 def test_all_shortest_paths(self):
     G = nx.Graph()
     nx.add_path(G, [0, 1, 2, 3])
     nx.add_path(G, [0, 10, 20, 3])
     assert_equal([[0, 1, 2, 3], [0, 10, 20, 3]],
                  sorted(nx.all_shortest_paths(G, 0, 3)))
 def test_all_shortest_paths_raise(self):
     with pytest.raises(nx.NetworkXNoPath):
         G = nx.path_graph(4)
         G.add_node(4)
         list(nx.all_shortest_paths(G, 0, 4))
Example #59
0
def mincut(G, source_node, destination_node):
    return len(
        list(
            nx.all_shortest_paths(G,
                                  source=source_node,
                                  target=destination_node))[0])
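# Despite its name, mincut() above returns the number of nodes on one shortest path
# between the two endpoints, not the size of a minimum cut; for an actual min cut,
# nx.minimum_edge_cut(G, source_node, destination_node) or
# nx.edge_connectivity(G, source_node, destination_node) would be the networkx calls to use.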
Example #60
0
# else:

with open(ppi_network_path) as nfh:
    G = nx.DiGraph()
    cnt = 0
    start_time = time.time()
    for line in nfh:
        (a, b, score) = line.split("\t")
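        # map the interaction score (assumed to range 0-1000) to an edge weight of 10 - score/100, so higher-confidence links give shorter weighted paths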
        nx.add_path(G, [a.lower(), b.lower()], weight=10 - int(int(score) / 100))
        cnt += 1
        if cnt % 10000 == 0:
            print cnt
    print time.time() - start_time
    # nx.write_gpickle(G,"test.gpickle.gz")
    shorts = [
        p for p in nx.all_shortest_paths(
            G, source=key_from, target=key_to, weight='weight')
    ]
    with open(ppi_shorts_path, 'w') as ph:
        json.dump(shorts, ph)

#
# with open(tf_network_path) as nfh:
#     G = nx.DiGraph()
#     cnt = 0
#     for line in nfh:
#         (a, b, score, pvalue) = line.split("\t")
#         G.add_path([a.lower(), b.lower()])
#         cnt += 1
#         if cnt % 10000 == 0:
#             print cnt
#     shorts = [p for p in nx.all_shortest_paths(G, source=key_from, target=key_to)]