Code example #1
File: test_cuts.py Project: ProgVal/networkx
def test_brandes_erlebach_book():
    # Figure 1 chapter 7: Connectivity
    # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
    G = nx.Graph()
    G.add_edges_from([(1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 6), (3, 4),
                      (3, 6), (4, 6), (4, 7), (5, 7), (6, 8), (6, 9), (7, 8),
                      (7, 10), (8, 11), (9, 10), (9, 11), (10, 11)])
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cutsets
        assert_equal(3, len(nx.minimum_edge_cut(G, 1, 11, **kwargs)),
                     msg=msg.format(flow_func.__name__))
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        # Node 5 has only two edges
        assert_equal(2, len(edge_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts
        assert_equal(set([6, 7]), minimum_st_node_cut(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        assert_equal(set([6, 7]), nx.minimum_node_cut(G, 1, 11, **kwargs),
                     msg=msg.format(flow_func.__name__))
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(2, len(node_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
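These tests assume two module-level fixtures from networkx's test_cuts.py: flow_funcs (the max-flow routines to exercise) and msg (an assertion-message template). A sketch of what they look like; the exact list of flow functions depends on the networkx version:

from networkx.algorithms.flow import (boykov_kolmogorov, dinitz, edmonds_karp,
                                      preflow_push, shortest_augmenting_path)

flow_funcs = [boykov_kolmogorov, dinitz, edmonds_karp, preflow_push,
              shortest_augmenting_path]
msg = "Assertion failed in function: {0}"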
Code example #2
File: MotifExtraction.py Project: imperium9/PyGNA
 def generateNewExtractionPool(self, network):
     # Generate extraction candidates for each key in the extraction map
     import sys
     import time
     totalstart = time.time()
     self.extractionPool = {}

     # Count once how many of the extracted subgraphs are connected
     connected = 0
     for element in self.extractionMap:
         if nx.is_connected(element[0]):
             connected += 1
     print("Total number of extracted subgraphs: " + str(len(self.extractionMap)))
     print("Number of connected subgraphs: " + str(connected))

     for element in self.extractionMap:
         substart = time.time()
         extractionCandidates = self.util.findSubgraphInstances(network, element[0])
         print("Subpool size: " + str(len(extractionCandidates)))
         subelapsed = time.time() - substart
         print("Subpool elapsed time: " + str(subelapsed))
         self.extractionPool[element[0]] = extractionCandidates

     totalelapsed = time.time() - totalstart
     print("Total elapsed pool time: " + str(totalelapsed))
     print("Total size of extraction pool in Bytes: " + str(sys.getsizeof(self.extractionPool)))
Code example #3
File: mySTC.py Project: oilover/LZW
def test_solution(sets, G, new_G):
    for g in sets.values():
        if nx.is_connected(G.subgraph(g)) and \
           not nx.is_connected(new_G.subgraph(g)) and len(g) >= 2:
            print('Disconnect:', g, G.subgraph(g).edges(), new_G.subgraph(g).edges())
            return False
    return True
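A hypothetical invocation, with sets mapping group ids to node lists and new_G a copy of G with an edge removed:

import networkx as nx

G = nx.path_graph(5)        # 0-1-2-3-4
new_G = G.copy()
new_G.remove_edge(1, 2)     # splits the group {0, 1, 2} in new_G
print(test_solution({'a': [0, 1, 2]}, G, new_G))  # prints the Disconnect line, then False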
Code example #4
File: test_cuts.py Project: ProgVal/networkx
def test_white_harary_paper():
    # Figure 1b white and harary (2001)
    # http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF
    # A graph with high adhesion (edge connectivity) and low cohesion
    # (node connectivity)
    G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
    G.remove_node(7)
    for i in range(4, 7):
        G.add_edge(0, i)
    G = nx.disjoint_union(G, nx.complete_graph(4))
    G.remove_node(G.order() - 1)
    for i in range(7, 10):
        G.add_edge(0, i)
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cuts
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        assert_equal(3, len(edge_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(set([0]), node_cut, msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
Code example #5
File: makegraph.py Project: junkawahara/frontier
def number_of_3partition(G):
    edge_list = G.edges()
    count = 0
    for n in powerset(range(len(G.nodes()))):
        if len(n) == 0:
            continue
        H1 = G.subgraph(n)
        if not nx.is_connected(H1):
            continue
        nbar1 = []
        for i in range(0, len(G.nodes())):
            if i not in n:
                nbar1.append(i)
        for n2 in powerset(nbar1):
            if len(n2) == 0:
                continue
            H2 = G.subgraph(n2)
            if not nx.is_connected(H2):
                continue
            nbar = []
            for i in range(0, len(G.nodes())):
                if i not in n and i not in n2:
                    nbar.append(i)
            if len(nbar) == 0:
                continue
            H3 = G.subgraph(nbar)
            if not nx.is_connected(H3):
                continue
            count += 1
    return count // 6  # each ordered triple of parts is counted 3! = 6 times
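Both this function and number_of_partition in code example #20 rely on a powerset helper that is not shown; the standard itertools recipe fits how it is used:

from itertools import chain, combinations

def powerset(iterable):
    # Yield every subset of iterable, including the empty set.
    s = list(iterable)
    return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))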
Code example #6
    def test_vertex_separator(self):
        sep, part1, part2 = nxmetis.vertex_separator(self.G)

        # The two separator nodes must not be present in the
        # two bisected chains
        nose.tools.ok_(sep[0] not in part1)
        nose.tools.ok_(sep[0] not in part2)
        nose.tools.ok_(sep[1] not in part1)
        nose.tools.ok_(sep[1] not in part2)

        # There should be two different separator nodes
        nose.tools.assert_equal(len(sep), 2)
        nose.tools.assert_not_equal(sep[0], sep[1])

        # The lists should be exhaustive with the node list of the Graph
        nose.tools.assert_equal(set(sep) | set(part1) | set(part2),
                                set(self.G))

        # The parts must be disjoint sets
        nose.tools.assert_equal(set(), set(part1) & set(part2))

        # Non-empty set
        nose.tools.assert_not_equal(len(part1), 0)
        nose.tools.assert_not_equal(len(part2), 0)

        # Duplicate-free
        nose.tools.assert_equal(len(part1), len(set(part1)))
        nose.tools.assert_equal(len(part2), len(set(part2)))

        # Connected
        nose.tools.ok_(nx.is_connected(self.G.subgraph(part1)))
        nose.tools.ok_(nx.is_connected(self.G.subgraph(part2)))
Code example #7
def generate_simple_graph(sfunction, N, avg_degree):
    """generate a simple connected random graph with sfunction degree sequence"""

    graphical_deg_seq = False
    is_connected_graph = False
    while not is_connected_graph:
        while not graphical_deg_seq:
            seq = sfunction(N, avg_degree, seqtype="simple_degree")
            graphical_deg_seq = nx.is_valid_degree_sequence(seq)
        G = nx.havel_hakimi_graph(seq)
        G.remove_edges_from(G.selfloop_edges())

        if nx.is_connected(G):
            is_connected_graph = True
            randomize_graph(G)
        else:
            try:
                connect_simple_graph(G)
                is_connected_graph = True
                randomize_graph(G)
            except IndexError:
                # could not stitch the components together; start over
                # with a fresh degree sequence
                is_connected_graph = False
                graphical_deg_seq = False

    return G
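connect_simple_graph and randomize_graph are assumed from the surrounding module. A minimal sketch of the former, under the assumption that its job is to stitch the components of G together in place:

import networkx as nx

def connect_simple_graph(G):
    # Hypothetical helper: join consecutive connected components of G
    # by adding a single edge between them.
    components = list(nx.connected_components(G))
    for first, second in zip(components, components[1:]):
        G.add_edge(next(iter(first)), next(iter(second)))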
Code example #8
def test_edge_cutset_random_graphs():
    for i in range(5):
        G = nx.fast_gnp_random_graph(50,0.2)
        if not nx.is_connected(G):
            ccs = iter(nx.connected_components(G))
            start = next(ccs)[0]
            G.add_edges_from( (start,c[0]) for c in ccs )
        cutset = nx.minimum_edge_cut(G)
        assert_equal(nx.edge_connectivity(G), len(cutset))
        G.remove_edges_from(cutset)
        assert_false(nx.is_connected(G))
Code example #9
File: genMina1.py Project: rmlima/minas
def main():
    LOG = True

    # if len(sys.argv) != 3:
    #     print("ERROR: genRandomGeorml <nodes> <raio>")
    #     sys.exit(1)

    NMAX = int(sys.argv[1])
    RAIO = float(sys.argv[2])
    #NMAX=40
    #RAIO=0.1
    ALCANCE = 250

    G = nx.random_geometric_graph(NMAX, RAIO, 2)

    while not nx.is_connected(G):
        RAIO = RAIO + .005
        G = nx.random_geometric_graph(NMAX, RAIO, 2)
        if LOG: print("Graph is not fully connected")

    pos = nx.get_node_attributes(G, 'pos')
    network(G, pos, 1)

    # Remove neighbours that are too close together
    while nodeNear(G) < 1000:
        G.remove_node(nodeNear(G))

    if nx.is_connected(G):
        pos = nx.get_node_attributes(G, 'pos')
        network(G, pos, 2)

        # Remove the node with the most neighbours; work on a copy so
        # the removal can be discarded if it splits the graph
        T = G.copy()
        if not nodeSolo(T, nodeMaxDegree(T)):
            T.remove_node(nodeMaxDegree(T))
        if nx.is_connected(T):
            G = T

        pos = nx.get_node_attributes(G, 'pos')
        network(G, pos, 3)

        for n in G.neighbors(nodeMaxDegree(G)):
            if nx.degree(G, n) == 2:
                degree = nx.degree(G, n)
                node = n
                print("node=", n)
                if not nodeSolo(G, n):
                    G.remove_node(n)
                break

        pos = nx.get_node_attributes(G, 'pos')
        network(G, pos, 4)
    else:
        if LOG: print("SubGraph is not fully connected")
Code example #10
def check_adj_list_connectivity(adj_graph_file):
    g = read_weighted_adj_graph(adj_graph_file)
    print('finished reading in adjacency list file...')
    edges = list()
    for k, v in g.items():
        for val in v[0]:
            edges.append((k, val))
    print('finished appending edges')
    G = networkx.Graph()
    # print(len(edges))
    G.add_edges_from(edges)
    print(networkx.is_connected(G))
Code example #11
def simple_query(GLearnt, trials):
    i = 1

    global Degree_Node
    global NodeList

    G = nx.Graph(GLearnt)
    G = G.subgraph(nx.connected_components(G)[0])
    print(nx.is_connected(G))
    print(G.number_of_nodes())

    Degree_Node = G.degree()
    NodeList = G.nodes()

    for i in NodeList:
        Degree_Node[i] = [Degree_Node[i], GLearnt.neighbors(i)]

    PlainAdamicFullPaths = []
    TwoWayAdamicFullPaths = []

    djk_time = 0
    TwoWayAdamic_time = 0
    PlainAdamic_time = 0

    count = 0

    for i in range(trials):
        A = random.choice(NodeList)
        B = random.choice(NodeList)
        # for A in NodeList:
        #     for B in NodeList[NodeList.index(A):]:
        if A != B:
            src = A    # raw_input("Enter source name:")
            dstn = B   # raw_input("Enter destination name:")
            start = time.time()
            TwoWayAdamicFullPath = TwoWayAdamicWalk(G, src, dstn)
            finish = time.time()
            TwoWayAdamic_time += (finish - start)

            start = time.time()
            PlainAdamicFullPath = OneWayAdamicWalk(G, src, dstn)
            finish = time.time()
            PlainAdamic_time += (finish - start)

            count += 1
            sys.stdout.write(" " * 20 + "\b" * 50)
            sys.stdout.write("Progress: " + str(float(count) / trials))

    print("\n")
    print("Plain Adamic Time : ", PlainAdamic_time)
    print("Two Way Adamic Time : ", TwoWayAdamic_time)
    return [PlainAdamic_time, TwoWayAdamic_time]
Code example #12
File: test_cuts.py Project: nishnik/networkx
def test_edge_cutset_random_graphs():
    for flow_func in flow_funcs:
        for i in range(3):
            G = nx.fast_gnp_random_graph(50, 0.25)
            if not nx.is_connected(G):
                ccs = iter(nx.connected_components(G))
                start = arbitrary_element(next(ccs))
                G.add_edges_from((start, arbitrary_element(c)) for c in ccs)
            cutset = nx.minimum_edge_cut(G, flow_func=flow_func)
            assert_equal(nx.edge_connectivity(G), len(cutset), msg=msg.format(flow_func.__name__))
            G.remove_edges_from(cutset)
            assert_false(nx.is_connected(G), msg=msg.format(flow_func.__name__))
Code example #13
File: genMina3.py Project: rmlima/minas
def main():
    LOG = True

    if len(sys.argv) != 5:
        print("ERROR: genMina3.py <nodes> <radius> <delta> <maxdegree>")
        sys.exit(1)

    NMAX = int(sys.argv[1])
    RAIO = float(sys.argv[2])
    delta = float(sys.argv[3])
    degree = float(sys.argv[4])
    #NMAX=40
    #RAIO=0.1
    ALCANCE = 250
    c = 0
    run = True
    first = True
    while run:
        c += 1
        G = nx.random_geometric_graph(NMAX, RAIO, 2)

        while not nx.is_connected(G):
            if first:
                RAIO = RAIO + .005
            G = nx.random_geometric_graph(NMAX, RAIO, 2)
            if LOG: print(c, "- Radius: Graph is not fully connected R=", RAIO)
        first = False

        # Remove neighbours that are too close together
        candidate = nodeNear(G, delta)

        while not candidate == 10000:
            G.remove_node(candidate)
            candidate = nodeNear(G, delta)
        if nx.is_connected(G):
            # Remove the node with the most neighbours
            candidate = nodeMaxDegree(G)
            while nx.degree(G, candidate) > degree:
                G.remove_node(candidate)
                candidate = nodeMaxDegree(G)
            if nx.is_connected(G):
                run = False
            else:
                if LOG: print(c, "- MaxDegree: Split Graph")
        else:
            if LOG: print(c, "- nodeNear: Split Graph")

    pos = nx.get_node_attributes(G, 'pos')
    network(G, pos, 5)
    if LOG: print("Raio =", RAIO)
    if LOG: print("NMAX =", NMAX)
    if LOG: print("Nodes =", nx.number_of_nodes(G))
Code example #14
File: test_cuts.py Project: Friedsoap/networkx
def test_octahedral_cutset():
    G = nx.octahedral_graph()
    # edge cuts
    edge_cut = nx.minimum_edge_cut(G)
    assert_equal(4, len(edge_cut))
    H = G.copy()
    H.remove_edges_from(edge_cut)
    assert_false(nx.is_connected(H))
    # node cuts
    node_cut = nx.minimum_node_cut(G)
    assert_equal(4, len(node_cut))
    H = G.copy()
    H.remove_nodes_from(node_cut)
    assert_false(nx.is_connected(H))
Code example #15
File: nets.py Project: mayera/netx
    def undirected_stats(self):
        if nx.is_connected(self.nx_graph):
            conl = nx.connected_components(self.nx_graph)  # needs work-around for unconnected subgraphs
            conl = conl.pop()
        else:
            conl = 'NA - graph is not connected'

        result = {
            'con': nx.is_connected(self.nx_graph),  # returns boolean
            'conn': nx.number_connected_components(self.nx_graph),
            'conl': conl,
            'Conl': self.nx_graph.subgraph(conl)  # `g` in the original; presumably the same graph
            }
        return result
Code example #16
File: nullmodel.py Project: Wuyanan520/nullmodel
def random_1kc(G0, nswap=1, max_tries=100):     # connectivity-preserving 1K null model via random edge rewiring
    """
    Extends random_1k() with a connectivity check: if the rewired network
    does not stay connected, the rewiring operation is reverted.
    Note: G0 must be a connected network.
    """
    if not nx.is_connected(G0):
        raise nx.NetworkXError("Graph not connected")
    if len(G0) < 4:
        raise nx.NetworkXError("Graph has less than four nodes.")
    G = copy.deepcopy(G0)
    n = 0
    swapcount = 0
    keys, degrees = zip(*G.degree().items())
    cdf = nx.utils.cumulative_distribution(degrees)
    while swapcount < nswap:
        swapped = []
        (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
        if ui == xi:
            continue
        u = keys[ui]
        x = keys[xi]
        v = random.choice(list(G[u]))
        y = random.choice(list(G[x]))
        if v == y:
            continue
        if len(set([u, v, x, y])) < 4:
            continue
        if (y not in G.neighbors(u)) and (v not in G.neighbors(x)) and ((u, v) in G.edges()) and ((x, y) in G.edges()):
            G.add_edge(u, y)
            G.add_edge(v, x)
            G.remove_edge(u, v)
            G.remove_edge(x, y)
            swapped.append((u, v, x, y))
            swapcount += 1
        if not nx.is_connected(G):
            while swapped:
                (u, v, x, y) = swapped.pop()
                G.add_edge(u, v)        # undo the edge removals
                G.add_edge(x, y)
                G.remove_edge(u, y)     # undo the newly added edges
                G.remove_edge(v, x)
                swapcount -= 1
        if n >= max_tries:
            e = ('Maximum number of swap attempts (%s) exceeded ' % n +
                 'before desired swaps achieved (%s).' % nswap)
            print(e)
            break
        n += 1
    return G
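A quick usage sketch, checking that rewiring preserved both the degree sequence (the 1K property) and connectivity; this assumes the networkx 1.x API the function itself targets, where G.degree() returns a dict:

import networkx as nx

G0 = nx.connected_watts_strogatz_graph(50, 4, 0.1)
G1 = random_1kc(G0, nswap=100)
assert sorted(G0.degree().values()) == sorted(G1.degree().values())
assert nx.is_connected(G1)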
Code example #17
File: mlst.py Project: calebwang/mlst
def basic_local_search(graph, solution):

    original = solution.copy()
    before = count_leaves(solution)
    best_solution = solution.copy()
    best_leaves = count_leaves(best_solution)
    for i in range(10):
        best = count_leaves(solution)
        candidates = set(get_vertices_with_degree(solution, 2))
        leaves = set(get_vertices_with_degree(solution, 1))
        leaf_neighbors = []
        for leaf in leaves:
            leaf_neighbors.extend(nx.neighbors(solution, leaf))
        leaf_neighbors = set(leaf_neighbors)
        vs = candidates.intersection(leaf_neighbors)
        for v in vs:
            leafs = [l for l in nx.neighbors(solution, v) if l in leaves]
            if leafs:
                leaf = leafs[0]
            else:
                break
            solution.remove_edge(v, leaf)
            neighbors = nx.neighbors(graph, leaf)
            for neighbor in neighbors:
                solution.add_edge(leaf, neighbor)
                new = count_leaves(solution)
                if new > best:
                    best = new
                else:
                    solution.remove_edge(leaf, neighbor)
            if not nx.is_connected(solution):
                solution.add_edge(v, leaf)
        if count_leaves(solution) < best_leaves:
            solution = best_solution.copy()
        elif count_leaves(solution) > best_leaves:
            best_solution = solution.copy()
            best_leaves = count_leaves(best_solution)

    after = count_leaves(solution)
    if before > after:
        solution = original.copy()
    if before != after:
        print('before/after: ', before, after)
    if before > after:
        raise Exception('you dun goofed')
    if not nx.is_connected(solution):
        raise Exception('you dun goofed')
    return solution
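count_leaves and get_vertices_with_degree come from elsewhere in mlst.py; minimal sketches consistent with how they are used here:

def count_leaves(T):
    # Number of degree-1 vertices in the spanning-tree solution.
    return sum(1 for v in T if T.degree(v) == 1)

def get_vertices_with_degree(G, d):
    # All vertices of G with degree exactly d.
    return [v for v in G if G.degree(v) == d]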
Code example #18
File: DU_GraphTools99.py Project: Duke-NSOE/GeoHAT
def sensi_diameter(G):
    """
    Compute graph sensitivity to node removal, in terms of
    the difference in graph diameter on the removal of each
    node in turn.

    This uses local function x_diameter(G), which is modified
    from networkx.diameter(G) to work on XGraphs.

    DL Urban (9 Feb 2007)
    """
    import networkx as nx

    # Starting diameter for full graph:

    if nx.is_connected(G):
        d0 = x_diameter(G)
        nc = 1                                       # a single component to start with
    else:
        G0 = nx.connected_component_subgraphs(G)[0]  # the largest subgraph
        d0 = x_diameter(G0)
        nc = nx.number_connected_components(G)       # how many are there?

    sensi = {}

    for node in G.nodes():
        ex = G.edges(node)              # a set of edges adjacent to node;
        G.delete_edges_from(ex)         # remove all of these,
        G.delete_node(node)             # and then kill the node, too
        if nx.is_connected(G):
            dx = x_diameter(G)
            cuts = 0
        else:
            Gx = nx.connected_component_subgraphs(G)[0]  # the biggest
            ncx = nx.number_connected_components(G)
            if nc == ncx:
                cuts = 0
            else:
                cuts = 1
            dx = x_diameter(Gx)
        delta = d0 - dx
        G.add_node(node)                # put the node and edges back again
        G.add_edges_from(ex)
        sensi[node] = (cuts, delta)

    # each value in sensi is a tuple (cuts, delta)
    return sensi
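x_diameter is described as a rework of networkx.diameter for the old XGraph class; on current Graph objects it would reduce to a sketch like:

import networkx as nx

def x_diameter(G):
    # Hypothetical stand-in: length of the longest shortest path in G.
    return nx.diameter(G)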
Code example #19
File: random_graphs.py Project: 4c656554/networkx
def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None):
    """Returns a connected Watts–Strogatz small-world graph.

    Attempts to generate a connected graph by repeated generation of
    Watts–Strogatz small-world graphs.  An exception is raised if the maximum
    number of tries is exceeded.

    Parameters
    ----------
    n : int
        The number of nodes
    k : int
        Each node is joined with its ``k`` nearest neighbors in a ring
        topology.
    p : float
        The probability of rewiring each edge
    tries : int
        Number of attempts to generate a connected graph.
    seed : int, optional
         The seed for random number generator.

    See Also
    --------
    newman_watts_strogatz_graph()
    watts_strogatz_graph()

    """
    for i in range(tries):
        G = watts_strogatz_graph(n, k, p, seed)
        if nx.is_connected(G):
            return G
    raise nx.NetworkXError('Maximum number of tries exceeded')
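Usage is straightforward; each attempt draws a fresh Watts–Strogatz graph until one comes out connected:

import networkx as nx

G = nx.connected_watts_strogatz_graph(100, 4, 0.1)
assert nx.is_connected(G)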
Code example #20
File: makegraph.py Project: junkawahara/frontier
def number_of_partition(G):
    count = 0
    for n in powerset(range(len(G.nodes()))):
        if len(n) >= 1 and len(n) < G.number_of_nodes():
            H1 = G.subgraph(n)
            if not nx.is_connected(H1):
                continue
            nbar = []
            for i in range(0, len(G.nodes())):
                if i not in n:
                    nbar.append(i)
            H2 = G.subgraph(nbar)
            if not nx.is_connected(H2):
                continue
            count += 1
    return count // 2  # each unordered pair of parts is counted twice
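As a check, the path graph 0-1-2 has exactly two splits into two connected parts, {0} | {1, 2} and {0, 1} | {2} (the subset {0, 2} is rejected because it is disconnected); with the powerset helper sketched under code example #5:

import networkx as nx

print(number_of_partition(nx.path_graph(3)))  # 2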
Code example #21
File: plan.py Project: willunicamp/hvnscripts
def start(G, name):
    # keep only the largest subgraph
    if not nx.is_connected(G):
        G = nx.connected_component_subgraphs(G)[0]

    # tuple of all parallel python servers to connect with
    ppservers = ()
    #ppservers = ("a3.ft.unicamp.br","a9.ft.unicamp.br","a7.ft.unicamp.br","a8.ft.unicamp.br","a10.ft.unicamp.br")
    job_server = pp.Server(ppservers=ppservers)
    job_server.set_ncpus(1)

    job = []
    capacities = []
    damage = []
    ran = 30  # range
    print("server and variables loaded")

    for i in range(1, ran):
        # sweep over `ran` different values of initial network capacity
        capacity = 1.0 + (1.0 / float(ran) * float(i))
        job.append(job_server.submit(Attack, (cp.copy(G), capacity),
                                     (GlobalEfficiency, setCapacity),
                                     ("networkx as nx",)))
        capacities.append(capacity)

    job_server.wait()

    for i in range(len(job)):
        damage.append(job[i]())

    # save the results for the tested strategy
    res = (capacities, damage)
    pickle.dump(res, open("dados/planejada/" + name + ".pickle", "wb"))
    job_server.print_stats()
Code example #22
File: random_graphs.py Project: 666888/networkx
def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None):
    """Return a connected Watts-Strogatz small-world graph.

    Attempt to generate a connected realization by repeated
    generation of Watts-Strogatz small-world graphs.
    An exception is raised if the maximum number of tries is exceeded.

    Parameters
    ----------
    n : int
        The number of nodes
    k : int
        Each node is connected to k nearest neighbors in ring topology
    p : float
        The probability of rewiring each edge
    tries : int
        Number of attempts to generate a connected graph.
    seed : int, optional
         The seed for random number generator.

    See Also
    --------
    newman_watts_strogatz_graph()
    watts_strogatz_graph()

    """
    G = watts_strogatz_graph(n, k, p, seed)
    t = 1
    while not nx.is_connected(G):
        G = watts_strogatz_graph(n, k, p, seed)
        t = t + 1
        if t > tries:
            raise nx.NetworkXError("Maximum number of tries exceeded")
    return G
Code example #23
    def __init__(self,graph,fk,dht_fingers,ident_bits):

        # Known finger nodes parameter:
        self.fk = fk

        # The network graph we are going to use:
        self.graph = graph

        # Assert that the graph is connected:
        assert nx.is_connected(self.graph)

        # Amount of nodes:
        self.num_nodes = self.graph.number_of_nodes()

        # Amount of bits in identity:
        self.ident_bits = ident_bits

        # Maximum size of identity:
        self.max_ident = 2**self.ident_bits

        # Evade the birthday paradox:
        assert (self.num_nodes ** 2.5) <= self.max_ident

        # Load fingers to be used in the Chord DHT:
        self.dht_succ_fingers,self.dht_pred_fingers = dht_fingers

        # Generate nodes and neighbours links:
        self.gen_nodes()
        self.install_neighbours()
Code example #24
File: test_graphs.py Project: ExpHP/pymatgen
    def test_build_unique_fragments(self):
        edges = {(e[0], e[1]): None for e in self.pc_edges}
        mol_graph = MoleculeGraph.with_edges(self.pc, edges)
        unique_fragments = mol_graph.build_unique_fragments()
        self.assertEqual(len(unique_fragments), 295)
        nm = iso.categorical_node_match("specie", "ERROR")
        for ii in range(295):
            # Test that each fragment is unique
            for jj in range(ii + 1, 295):
                self.assertFalse(
                    nx.is_isomorphic(unique_fragments[ii].graph,
                                     unique_fragments[jj].graph,
                                     node_match=nm))

            # Test that each fragment correctly maps between Molecule and graph
            self.assertEqual(len(unique_fragments[ii].molecule),
                             len(unique_fragments[ii].graph.nodes))
            species = nx.get_node_attributes(unique_fragments[ii].graph, "specie")
            coords = nx.get_node_attributes(unique_fragments[ii].graph, "coords")

            mol = unique_fragments[ii].molecule
            for ss, site in enumerate(mol):
                self.assertEqual(str(species[ss]), str(site.specie))
                self.assertEqual(coords[ss][0], site.coords[0])
                self.assertEqual(coords[ss][1], site.coords[1])
                self.assertEqual(coords[ss][2], site.coords[2])

            # Test that each fragment is connected
            self.assertTrue(nx.is_connected(unique_fragments[ii].graph.to_undirected()))
Code example #25
 def networkxTestTopology(self, graph_links, graph_nodes):
     # create a networkx graph object and store the axial links
     try:
         g = nx.Graph()
         g.add_nodes_from(graph_nodes)
         g.add_edges_from(graph_links)
     except:
         return False
     # networkx just accepts all sorts of node ids... no need to fix
     if not nx.is_connected(g):
         start_time = time.time()
         components = sorted(nx.connected_components(g), key=len, reverse=True)
         if len(components) > 1:
             islands = []
             # get vertex ids
             for cluster in components[1:len(components)]:  #excludes the first giant component
                 # identify orphans
                 if len(cluster) == 1:
                     node = cluster.pop()
                     self.axial_errors['orphan'].append(node)
                     self.problem_nodes.append(node)
                 # identify islands
                 elif len(cluster) > 1:
                     nodes = list(cluster)
                     islands.append(nodes)
                     self.problem_nodes.extend(nodes)
             # add results to the list of problems
             if islands:
                 self.axial_errors['island'] = islands
         if is_debug: print "analyse orphans/islands: %s" % str(time.time() - start_time)
     return True
Code example #26
File: test_cuts.py Project: ProgVal/networkx
def test_icosahedral_cutset():
    G = nx.icosahedral_graph()
    for flow_func in flow_funcs:
        kwargs = dict(flow_func=flow_func)
        # edge cuts
        edge_cut = nx.minimum_edge_cut(G, **kwargs)
        assert_equal(5, len(edge_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_edges_from(edge_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
        # node cuts
        node_cut = nx.minimum_node_cut(G, **kwargs)
        assert_equal(5, len(node_cut), msg=msg.format(flow_func.__name__))
        H = G.copy()
        H.remove_nodes_from(node_cut)
        assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
Code example #27
File: stats.py Project: jim-pansn/sypy
    def normalized_conductance(self, subgraph, edge_cover=False):
        """
        Returns the normalized conductance of the graph over the given
        subgraph as described in You Are Who You Know: Inferring User Profiles
        in Online Social Networks, Mislove et al., WSDM, 2010.
        If specified, the implementation also returns the edge cover of the
        subgraph (i.e., the edges in the graph incident to the subgraph).
        """
        if not isinstance(subgraph, sypy.BaseGraph):
            raise Exception("Invalid graph")

        if not nx.is_connected(subgraph.structure):
            raise Exception("Subgraph is disconnected")

        in_edges = subgraph.edges()
        other_edges = list(set(self.graph.edges()) - set(in_edges))

        shared_edges = []
        for (left_node, right_node) in other_edges:
            if left_node in subgraph.structure or right_node in subgraph.structure:
                shared_edges.append((left_node, right_node))

        out_edges = list(set(other_edges) - set(shared_edges))
        inshared_edges = list(set(in_edges) | set(shared_edges))

        subgraph_conductance = self.__compute_subgraph_conductance(len(in_edges), len(shared_edges))
        randgraph_conductance = self.__compute_randgraph_conductance(len(inshared_edges), len(other_edges))
        norm_conductance = subgraph_conductance - randgraph_conductance

        if not edge_cover:
            return norm_conductance

        return (norm_conductance, shared_edges)
Code example #28
File: mlst.py Project: calebwang/mlst
def run(g):
    if not nx.is_connected(g):
        return None
    best = 0
    best_fn = None
    best_sol = None

    fns = [approximate_solution, med_approximate_solution, fast_approximate_solution, 
           fast_approximate_solution_two, nx.minimum_spanning_tree]

    print('--------------------------')
    for f in fns:
        sol = f(g)
        score = count_leaves(sol)

        new_sol = basic_local_search(g, sol.copy())
        if count_leaves(new_sol) > score:
            score = count_leaves(new_sol)
            sol = new_sol

        print(f.__name__, score)
        if score > best:
            best = score
            best_fn = f.__name__
            best_sol = sol

    print('best: ', best_fn, best)

    return best_sol
Code example #29
File: euler.py Project: 4c656554/networkx
def is_eulerian(G):
    """Returns ``True`` if and only if ``G`` is Eulerian.

    A graph is *Eulerian* if it has an Eulerian circuit. An *Eulerian
    circuit* is a closed walk that includes each edge of a graph exactly
    once.

    Parameters
    ----------
    G : NetworkX graph
       A graph, either directed or undirected.

    Examples
    --------
    >>> nx.is_eulerian(nx.DiGraph({0: [3], 1: [2], 2: [3], 3: [0, 1]}))
    True
    >>> nx.is_eulerian(nx.complete_graph(5))
    True
    >>> nx.is_eulerian(nx.petersen_graph())
    False

    Notes
    -----
    If the graph is not connected (or not strongly connected, for
    directed graphs), this function returns ``False``.

    """
    if G.is_directed():
        # Every node must have equal in degree and out degree and the
        # graph must be strongly connected
        return (all(G.in_degree(n) == G.out_degree(n) for n in G)
                and nx.is_strongly_connected(G))
    # An undirected Eulerian graph has no vertices of odd degree and
    # must be connected.
    return all(d % 2 == 0 for v, d in G.degree()) and nx.is_connected(G)
Code example #30
 def add_edge(self, u, v):
     print('[euler_tour_forest] add_edge(%r, %r)' % (u, v))
     if self.has_edge(u, v):
         return
     ru = self.find_root(u)
     rv = self.find_root(v)
     if ru is None:
         self.add_node(u)
         ru = u
     if rv is None:
         self.add_node(v)
         rv = v
     assert ru is not rv, (
         'u=%r, v=%r not disjoint, can only join disjoint edges' % (u, v))
     assert ru in self.trees, 'ru must be a root node'
     assert rv in self.trees, 'rv must be a root node'
     subtree1 = self.trees[ru]
     subtree2 = self.trees[rv]
     del self.trees[rv]
     new_tree = nx.compose(subtree1, subtree2)
     new_tree.add_edge(u, v)
     self.trees[ru] = new_tree
     print(list(new_tree.nodes()))
     assert nx.is_connected(new_tree)
     assert nx.is_tree(new_tree)
Code example #31
                  names=['id'])  #(dir_path+'corpus idx',index_col=0)
idx.columns = ['id']
idx['id'] = idx['id'].str.replace('pub.', '').astype(str).astype(int)
idx = idx['id'].astype(str).values.tolist()

data = data[(data['referring_id'].isin(idx)) |
            (data['cited_id'].isin(idx))]  # mask
sample = data.sample(5000)
# =============================================================================
# Prepare graph
# =============================================================================
graph = nx.Graph()
for i, row in tqdm(data.iterrows(), total=data.shape[0]):
    graph.add_edge(row['referring_id'], row['cited_id'])

print('Graph fully connected:', nx.is_connected(graph))
print('Connected components:', nx.number_connected_components(graph))

# connected_components = list(nx.connected_components(graph))

del data
gc.collect()
# =============================================================================
# Train
# =============================================================================
node2vec = Node2Vec(graph,
                    dimensions=100,
                    walk_length=70,
                    num_walks=20,
                    workers=1,
                    p=1,
Code example #32
File: test_embedding.py Project: seibert/metagraph
def test_line(default_plugin_resolver):

    dpr = default_plugin_resolver

    # Graph Generation Parameters

    layer_sizes = [100, 20, 10, 20, 100, 20, 10, 20, 100]

    # Generate Graph

    nx_graph = nx.Graph()

    all_nodes = list(range(sum(layer_sizes)))

    for node in all_nodes:
        nx_graph.add_node(node)

    layer_index_to_end_indices = np.cumsum(layer_sizes).tolist()
    layer_index_to_start_indices = [0] + layer_index_to_end_indices[:-1]
    layer_index_to_nodes = [
        all_nodes[start:end] for start, end in zip(
            layer_index_to_start_indices, layer_index_to_end_indices)
    ]
    assert layer_sizes == list(map(len, layer_index_to_nodes))

    for layer_index, layer_nodes in enumerate(layer_index_to_nodes[:-1]):
        next_layer_nodes = layer_index_to_nodes[layer_index + 1]
        for layer_node in layer_nodes:
            for next_layer_node in next_layer_nodes:
                nx_graph.add_edge(layer_node, next_layer_node)

    assert nx.is_connected(nx_graph)

    def cmp_func(matrix_node_map_pair):
        matrix, node_map = matrix_node_map_pair

        a_nodes = layer_index_to_nodes[0]
        b_nodes = layer_index_to_nodes[4]
        c_nodes = layer_index_to_nodes[8]

        a_indices = node_map[a_nodes]
        b_indices = node_map[b_nodes]
        c_indices = node_map[c_nodes]

        gmm = GaussianMixture(3)
        predicted_labels = gmm.fit_predict(matrix)
        a_labels = predicted_labels[a_indices]
        b_labels = predicted_labels[b_indices]
        c_labels = predicted_labels[c_indices]

        a_label = int(np.median(a_labels))
        b_label = int(np.median(b_labels))
        c_label = int(np.median(c_labels))

        assert np.sum(a_labels == a_label) / len(a_labels) > 0.95
        assert np.sum(b_labels == b_label) / len(b_labels) > 0.95
        assert np.sum(c_labels == c_label) / len(c_labels) > 0.95

        a_variances = np.sum(gmm.covariances_[a_label] *
                             np.eye(embedding_size),
                             axis=0)
        b_variances = np.sum(gmm.covariances_[b_label] *
                             np.eye(embedding_size),
                             axis=0)
        c_variances = np.sum(gmm.covariances_[c_label] *
                             np.eye(embedding_size),
                             axis=0)

        assert a_variances.max() < 0.15
        assert b_variances.max() < 0.15
        assert c_variances.max() < 0.15

    graph = dpr.wrappers.Graph.NetworkXGraph(nx_graph)
    walks_per_node = 8
    negative_sample_count = 5
    embedding_size = 10
    epochs = 10
    learning_rate = 0.25
    batch_size = 1

    MultiVerify(dpr).compute(
        "embedding.train.line",
        graph,
        walks_per_node,
        negative_sample_count,
        embedding_size,
        epochs,
        learning_rate,
        batch_size,
    ).normalize((dpr.types.Matrix.NumpyMatrixType,
                 dpr.types.NodeMap.NumpyNodeMapType)).custom_compare(cmp_func)
Code example #33
def average_shortest_path_length(G, weight=None, method=None):
    r"""Returns the average shortest path length.

    The average shortest path length is

    .. math::

       a =\sum_{s,t \in V} \frac{d(s, t)}{n(n-1)}

    where `V` is the set of nodes in `G`,
    `d(s, t)` is the shortest path from `s` to `t`,
    and `n` is the number of nodes in `G`.

    Parameters
    ----------
    G : NetworkX graph

    weight : None or string, optional (default = None)
       If None, every edge has weight/distance/cost 1.
       If a string, use this edge attribute as the edge weight.
       Any edge attribute not present defaults to 1.

    method : string, optional (default = 'unweighted' or 'dijkstra')
        The algorithm to use to compute the path lengths.
        Supported options are 'unweighted', 'dijkstra', 'bellman-ford',
        'floyd-warshall' and 'floyd-warshall-numpy'.
        Other method values produce a ValueError.
        The default method is 'unweighted' if `weight` is None,
        otherwise the default method is 'dijkstra'.

    Raises
    ------
    NetworkXPointlessConcept
        If `G` is the null graph (that is, the graph on zero nodes).

    NetworkXError
        If `G` is not connected (or not weakly connected, in the case
        of a directed graph).

    ValueError
        If `method` is not among the supported options.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.average_shortest_path_length(G)
    2.0

    For disconnected graphs, you can compute the average shortest path
    length for each component

    >>> G = nx.Graph([(1, 2), (3, 4)])
    >>> for C in (G.subgraph(c).copy() for c in connected_components(G)):
    ...     print(nx.average_shortest_path_length(C))
    1.0
    1.0

    """
    single_source_methods = ['unweighted', 'dijkstra', 'bellman-ford']
    all_pairs_methods = ['floyd-warshall', 'floyd-warshall-numpy']
    supported_methods = single_source_methods + all_pairs_methods

    if method is None:
        method = 'unweighted' if weight is None else 'dijkstra'
    if method not in supported_methods:
        raise ValueError('method not supported: {}'.format(method))

    n = len(G)
    # For the special case of the null graph, raise an exception, since
    # there are no paths in the null graph.
    if n == 0:
        msg = ('the null graph has no paths, thus there is no average '
               'shortest path length')
        raise nx.NetworkXPointlessConcept(msg)
    # For the special case of the trivial graph, return zero immediately.
    if n == 1:
        return 0
    # Shortest path length is undefined if the graph is disconnected.
    if G.is_directed() and not nx.is_weakly_connected(G):
        raise nx.NetworkXError("Graph is not weakly connected.")
    if not G.is_directed() and not nx.is_connected(G):
        raise nx.NetworkXError("Graph is not connected.")

    # Compute all-pairs shortest paths.
    def path_length(v):
        if method == 'unweighted':
            return nx.single_source_shortest_path_length(G, v)
        elif method == 'dijkstra':
            return nx.single_source_dijkstra_path_length(G, v, weight=weight)
        elif method == 'bellman-ford':
            return nx.single_source_bellman_ford_path_length(G,
                                                             v,
                                                             weight=weight)

    if method in single_source_methods:
        # Sum the distances for each (ordered) pair of source and target node.
        s = sum(l for u in G for l in path_length(u).values())
    else:
        if method == 'floyd-warshall':
            all_pairs = nx.floyd_warshall(G, weight=weight)
            s = sum([sum(t.values()) for t in all_pairs.values()])
        elif method == 'floyd-warshall-numpy':
            s = nx.floyd_warshall_numpy(G, weight=weight).sum()
    return s / (n * (n - 1))
Code example #34
File: utils.py Project: MichaelArbel/OT-sync
def generate_graph(N, completeness):
    done = False
    while not done:
        I, G, C = _generate_graph(N, completeness)
        done = nx.is_connected(C)
    return I, G
Code example #35
 def scale_free(self, nnodes):
     G = nx.scale_free_graph(nnodes).to_undirected()
     while not nx.is_connected(G):
         G = nx.scale_free_graph(nnodes).to_undirected()
     return G
Code example #36
def RandomNetworkAnalysis(someNetwork, name):
    # function message
    print("\t- Running Random Network Analysis ...")
    # variables
    samples = 100
    orderRef = 0
    sizeRef = 0
    randomGNM = nx.Graph()
    # properties to analyze
    diameterMean = 0
    radiusMean = 0
    densityMean = 0
    meanDegreeMean = 0
    maxInDegreeMean = 0
    maxOutDegreeMean = 0
    numMaxInDegreeHubsMean = 0
    numMaxOutDegreeHubsMean = 0
    clustCoeffMean = 0
    modularityMean = 0
    coverageMean = 0
    performanceMean = 0
    numConnCompsMean = 0
    numStroConnCompsMean = 0
    numCyclesMean = 0
    numCommunitiesMean = 0
    auxA = 0
    auxB = 0
    auxC = 0
    auxD = 0
    # obtain order and size
    orderRef = someNetwork.order()
    sizeRef = someNetwork.size()
    # iterate generating random networks and obtaining sum of properties
    for i in range(samples):
        randomGNM = nx.gnm_random_graph(orderRef, sizeRef, directed=True)
        if (nx.is_connected(randomGNM.to_undirected())):
            diameterMean = diameterMean + float(
                nx.diameter(randomGNM.to_undirected()))
            radiusMean = radiusMean + float(
                nx.radius(randomGNM.to_undirected()))
        densityMean = densityMean + float(nx.density(randomGNM))
        meanDegreeMean = meanDegreeMean + float(sizeRef / orderRef)
        clustCoeffMean = clustCoeffMean + (float(
            nx.average_clustering(randomGNM)))
        (auxA, auxB, auxC, auxD) = maxDegreeRandomModule(randomGNM)
        maxInDegreeMean = maxInDegreeMean + float(auxA)
        maxOutDegreeMean = maxOutDegreeMean + float(auxB)
        numMaxInDegreeHubsMean = numMaxInDegreeHubsMean + float(auxC)
        numMaxOutDegreeHubsMean = numMaxOutDegreeHubsMean + float(auxD)
        (auxA, auxB, auxC,
         auxD) = communitiesRandomModule(randomGNM.to_undirected())
        modularityMean = modularityMean + float(auxA)
        coverageMean = coverageMean + float(auxC)
        performanceMean = performanceMean + float(auxD)
        numCommunitiesMean = numCommunitiesMean + float(auxB)
        numConnCompsMean = numConnCompsMean + float(
            len(list(nx.connected_components(randomGNM.to_undirected()))))
        numStroConnCompsMean = numStroConnCompsMean + float(
            len(list(nx.strongly_connected_components(randomGNM))))
        numCyclesMean = numCyclesMean + float(
            len(list(nx.cycle_basis(randomGNM.to_undirected()))))
    # obtain mean value for everything
    diameterMean = diameterMean / samples
    radiusMean = radiusMean / samples
    densityMean = densityMean / samples
    meanDegreeMean = meanDegreeMean / samples
    maxInDegreeMean = maxInDegreeMean / samples
    maxOutDegreeMean = maxOutDegreeMean / samples
    numMaxInDegreeHubsMean = numMaxInDegreeHubsMean / samples
    numMaxOutDegreeHubsMean = numMaxOutDegreeHubsMean / samples
    clustCoeffMean = clustCoeffMean / samples
    modularityMean = modularityMean / samples
    coverageMean = coverageMean / samples
    performanceMean = performanceMean / samples
    numConnCompsMean = numConnCompsMean / samples
    numStroConnCompsMean = numStroConnCompsMean / samples
    numCyclesMean = numCyclesMean / samples
    numCommunitiesMean = numCommunitiesMean / samples
    # print results
    randomAnalysis = open(name + "_random_results.txt", "w")
    randomAnalysis.write(
        "Random Analysis Results, Mean Measures:\t\t\truns(" + str(samples) +
        ")\n" + " - [/] order:\t" + str(orderRef) + "\n" + " - [/] size:\t" +
        str(sizeRef) + "\n" + " - [u] diameter:\t" + str(diameterMean) + "\n" +
        " - [u] radius:\t" + str(radiusMean) + "\n" + " - [d] density:\t" +
        str(densityMean) + "\n" + " - [d] mean degree:\t" +
        str(meanDegreeMean) + "\n" + " - [d] clustering coefficient:\t" +
        str(clustCoeffMean) + "\n" + " - [/] maximum in degree:\t" +
        str(maxInDegreeMean) + "\n" + " - [/] maximum out degree:\t" +
        str(maxOutDegreeMean) + "\n" + " - [d] hubs with max in degree:\t" +
        str(numMaxInDegreeHubsMean) + "\n" +
        " - [d] hubs with max out degree:\t" + str(numMaxOutDegreeHubsMean) +
        "\n" + " - [u] modularity:\t" + str(modularityMean) + "\n" +
        " - [u] coverage:\t" + str(coverageMean) + "\n" +
        " - [u] performance:\t" + str(performanceMean) + "\n" +
        " - [u] number of communities:\t" + str(numCommunitiesMean) + "\n" +
        " - [/] number of connected components:\t" + str(numConnCompsMean) +
        "\n" + " - [d] number of strongly connected components:\t" +
        str(numStroConnCompsMean) + "\n" +
        " - [u] number of cycles in cycle basis:\t" + str(numCyclesMean) +
        "\n" + "\n\n\n" + "[u] undirected associated graph\n" +
        "[d] directed graph\n" + "[/] both undirected and directed\n\n")
    randomAnalysis.close()
    # end of function
    return ()
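maxDegreeRandomModule and communitiesRandomModule are helpers from the same script. A minimal sketch of the first, matching the (auxA, auxB, auxC, auxD) unpacking above:

def maxDegreeRandomModule(D):
    # Hypothetical helper for a directed graph D: returns
    # (max in-degree, max out-degree,
    #  number of nodes at the max in-degree, number at the max out-degree).
    inDegrees = dict(D.in_degree())
    outDegrees = dict(D.out_degree())
    maxIn = max(inDegrees.values())
    maxOut = max(outDegrees.values())
    numInHubs = sum(1 for d in inDegrees.values() if d == maxIn)
    numOutHubs = sum(1 for d in outDegrees.values() if d == maxOut)
    return (maxIn, maxOut, numInHubs, numOutHubs)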
Code example #37
import numpy as np

#QUESTION 3i:

glist = []  #list of percents of connected graphs in each q
qlist = []  #list of q's
num = 0  #num of iterations in each q
connect = 0  #num of connected graphs in each q
for k in range(5, 105, 5):
    q = k / 100
    qlist.insert(num, q)
    for i in range(0, 10):
        G = nx.binomial_graph(20, q)
        #nx.draw(G, with_labels=True, font_weight='bold')
        #plt.show()
        if (nx.is_connected(G) == True):
            connect += 1
    connect = (connect / 10) * 100  #in percents
    glist.insert(num, connect)
    connect = 0
    num += 1
plt.plot(qlist, glist)
plt.show()
print(glist)
print(qlist)

#QUESTION 4i:

tlist = []  #list of percents of triangles in each n iteration
nlist = []  #list of n's
num = 0  #num of iterations in each n
Code example #38
File: kcutsets.py Project: ashray-00/Computer-Vision
def all_node_cuts(G, k=None, flow_func=None):
    r"""Returns all minimum k cutsets of an undirected graph G. 

    This implementation is based on Kanevsky's algorithm [1]_ for finding all
    minimum-size node cut-sets of an undirected graph G; i.e. the sets of
    nodes of cardinality equal to the node connectivity of G, which, if
    removed, break G into two or more connected components.
   
    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    k : Integer
        Node connectivity of the input graph. If k is None, then it is 
        computed. Default value: None.

    flow_func : function
        Function to perform the underlying flow computations. Default value
        edmonds_karp. This function performs better in sparse graphs with
        right tailed degree distributions. shortest_augmenting_path will
        perform better in denser graphs.
        

    Returns
    -------
    cuts : a generator of node cutsets
        Each node cutset has cardinality equal to the node connectivity of
        the input graph.

    Examples
    --------
    >>> # A two-dimensional grid graph has 4 cutsets of cardinality 2
    >>> G = nx.grid_2d_graph(5, 5)
    >>> cutsets = list(nx.all_node_cuts(G))
    >>> len(cutsets)
    4
    >>> all(2 == len(cutset) for cutset in cutsets)
    True
    >>> nx.node_connectivity(G)
    2

    Notes
    -----
    This implementation is based on the sequential algorithm for finding all
    minimum-size separating vertex sets in a graph [1]_. The main idea is to
    compute minimum cuts using local maximum flow computations among a set 
    of nodes of highest degree and all other non-adjacent nodes in the Graph.
    Once we find a minimum cut, we add an edge between the high degree
    node and the target node of the local maximum flow computation to make 
    sure that we will not find that minimum cut again.

    See also
    --------
    node_connectivity
    edmonds_karp
    shortest_augmenting_path

    References
    ----------
    .. [1]  Kanevsky, A. (1993). Finding all minimum-size separating vertex 
            sets in a graph. Networks 23(6), 533--541.
            http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract

    """
    if not nx.is_connected(G):
        raise nx.NetworkXError('Input graph is disconnected.')

    # Address some corner cases first.
    # For cycle graphs
    if G.order() == G.size():
        if all(2 == d for n, d in G.degree()):
            seen = set()
            for u in G:
                for v in nx.non_neighbors(G, u):
                    if (u, v) not in seen and (v, u) not in seen:
                        yield {v, u}
                        seen.add((v, u))
            return
    # For complete Graphs
    if nx.density(G) == 1:
        for cut_set in combinations(G, len(G)-1):
            yield set(cut_set)
        return
    # Initialize data structures.
    # Keep track of the cuts already computed so we do not repeat them.
    seen = []
    # Even-Tarjan reduction is what we call auxiliary digraph 
    # for node connectivity.
    H = build_auxiliary_node_connectivity(G)
    mapping = H.graph['mapping']
    R = build_residual_network(H, 'capacity')
    kwargs = dict(capacity='capacity', residual=R)
    # Define default flow function
    if flow_func is None:
        flow_func = default_flow_func
    if flow_func is shortest_augmenting_path:
        kwargs['two_phase'] = True
    # Begin the actual algorithm
    # step 1: Find node connectivity k of G
    if k is None:
        k = nx.node_connectivity(G, flow_func=flow_func)
    # step 2: 
    # Find k nodes with top degree, call it X:
    X = {n for n, d in sorted(G.degree(), key=itemgetter(1), reverse=True)[:k]}
    # Check if X is a k-node-cutset
    if _is_separating_set(G, X):
        seen.append(X)
        yield X

    for x in X:
        # step 3: Compute local connectivity flow of x with all other
        # non adjacent nodes in G
        non_adjacent = set(G) - X - set(G[x])
        for v in non_adjacent:
            # step 4: compute maximum flow in an Even-Tarjan reduction H of G
            # and step:5 build the associated residual network R
            R = flow_func(H, '%sB' % mapping[x], '%sA' % mapping[v], **kwargs)
            flow_value = R.graph['flow_value']

            if flow_value == k:
                # Remove saturated edges from the residual network
                saturated_edges = [(u, w, d) for (u, w, d) in
                                    R.edges(data=True)
                                    if d['capacity'] == d['flow']]
                R.remove_edges_from(saturated_edges)
                # step 6: shrink the strongly connected components of 
                # residual flow network R and call it L
                L = nx.condensation(R)
                cmap = L.graph['mapping']
                # step 7: Compute antichains of L; they map to closed sets in H
                # Any edge in H that links a closed set is part of a cutset
                for antichain in nx.antichains(L):
                    # Nodes in an antichain of the condensation graph of
                    # the residual network map to a closed set of nodes that
                    # define a node partition of the auxiliary digraph H.
                    S = {n for n, scc in cmap.items() if scc in antichain}
                    # Find the cutset that links the node partition (S,~S) in H
                    cutset = set()
                    for u in S:
                        cutset.update((u, w) for w in H[u] if w not in S)
                    # The edges in H that form the cutset are internal edges
                    # (ie edges that represent a node of the original graph G)
                    node_cut = {H.nodes[n]['id'] for edge in cutset for n in edge}

                    if len(node_cut) == k:
                        if node_cut not in seen:
                            yield node_cut
                            seen.append(node_cut)
                        # Add an edge (x, v) to make sure that we do not
                        # find this cutset again. This is equivalent
                        # of adding the edge in the input graph 
                        # G.add_edge(x, v) and then regenerate H and R:
                        # Add edges to the auxiliary digraph.
                        H.add_edge('%sB' % mapping[x], '%sA' % mapping[v],
                                   capacity=1)
                        H.add_edge('%sB' % mapping[v], '%sA' % mapping[x],
                                   capacity=1)
                        # Add edges to the residual network.
                        R.add_edge('%sB' % mapping[x], '%sA' % mapping[v],
                                   capacity=1)
                        R.add_edge('%sA' % mapping[v], '%sB' % mapping[x],
                                   capacity=1)
                        break
                # Add again the saturated edges to reuse the residual network
                R.add_edges_from(saturated_edges)
Code example #39
def current_flow_closeness_centrality(G,normalized=True):
    """Compute current-flow closeness centrality for nodes.

    A variant of closeness centrality based on effective
    resistance between nodes in a network.  This metric
    is also known as information centrality.

    Parameters
    ----------
    G : graph
      A networkx graph 

    normalized : bool, optional
      If True the values are normalized by 1/(n-1) where n is the 
      number of nodes in G.
       
    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with current flow closeness centrality as the value.
        
    See Also
    --------
    closeness_centrality

    Notes
    -----
    The algorithm is from Brandes [1]_.

    See also [2]_ for the original definition of information centrality.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer,
       Centrality Measures Based on Current Flow. 
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). 
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005. 
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf

    .. [2] Stephenson, K. and Zelen, M.
       Rethinking centrality: Methods and examples.
       Social Networks. Volume 11, Issue 1, March 1989, pp. 1-37
       http://dx.doi.org/10.1016/0378-8733(89)90016-6
    """
    try:
        import numpy as np
    except ImportError:
        raise ImportError("flow_closeness_centrality() requires NumPy: http://scipy.org/ ")
    

    if G.is_directed():
        raise nx.NetworkXError(\
            "current_flow_closeness_centrality() not defined for digraphs.")

    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")

    betweenness=dict.fromkeys(G,0.0) # b[v]=0 for v in G
    n=len(G)
    mapping=dict(zip(G,list(range(n))))  # map nodes to integers
    C=_compute_C(G)
    for v in G:
        vi=mapping[v]
        for w in G:
            wi=mapping[w]
            betweenness[v]+=C[vi,vi]-2*C[wi,vi]
            betweenness[w]+=C[vi,vi]
                
    if normalized:
        nb=len(betweenness)-1.0
    else:
        nb=1.0
    for v in G:
        betweenness[v]=nb/(betweenness[v])
    return betweenness            
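The helper _compute_C is not shown; a minimal sketch under the assumption that it returns a (pseudo)inverse of the graph Laplacian, so that C[v,v] + C[w,w] - 2*C[w,v] is the effective resistance between v and w:

import numpy as np
import networkx as nx

def _compute_C(G):
    # Hypothetical helper: Moore-Penrose pseudoinverse of the Laplacian,
    # with rows/columns ordered as the nodes of G.
    L = nx.laplacian_matrix(G).toarray()
    return np.linalg.pinv(L)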
Code example #40
import json
import sys

import networkx as nx

num_nodes = int(sys.argv[1])
topology = sys.argv[2]

nodes = []
connections = []

if topology == "clique":
    graph = nx.complete_graph(num_nodes)
elif topology == "randreg":
    degree = int(sys.argv[3])
    while True:
        graph = nx.random_regular_graph(degree, num_nodes)
        if nx.is_connected(graph):
            break
else:
    print("Unrecognized topology")
    sys.exit(1)

sys.stderr.write('diameter:' +
                 str(nx.algorithms.distance_measures.diameter(graph)) + '\n')
sys.stderr.write('avg_short_path:' +
                 str(nx.average_shortest_path_length(graph)) + '\n')

for node in graph.nodes():
    name = "node_" + str(node)
    nodes.append(name)
for edge in graph.edges():
    src = "node_" + str(edge[0])
Code example #41
def current_flow_betweenness_centrality_subset(G,
                                               sources,
                                               targets,
                                               normalized=True,
                                               weight=None,
                                               dtype=float,
                                               solver='lu'):
    r"""Compute current-flow betweenness centrality for subsets of nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    sources: list of nodes
      Nodes to use as sources for current

    targets: list of nodes
      Nodes to use as sinks for current

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian.  For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix.  Worse case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality_subset() '
                          'requires NumPy: http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality_subset() '
                          'requires SciPy: http://scipy.org/')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    mapping = dict(zip(ordering, range(n)))
    H = nx.relabel_nodes(G, mapping)
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    for row, (s, t) in flow_matrix_row(H,
                                       weight=weight,
                                       dtype=dtype,
                                       solver=solver):
        for ss in sources:
            i = mapping[ss]
            for tt in targets:
                j = mapping[tt]
                betweenness[s] += 0.5 * np.abs(row[i] - row[j])
                betweenness[t] += 0.5 * np.abs(row[i] - row[j])
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for v in H:
        betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n)
    return dict((ordering[k], v) for k, v in betweenness.items())
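A short usage sketch for the function above (NetworkX exposes it as nx.current_flow_betweenness_centrality_subset; the graph and the source/target subsets are arbitrary choices):

import networkx as nx

G = nx.karate_club_graph()
# Current is injected at nodes 0 and 1 and drained at nodes 32 and 33.
cfb = nx.current_flow_betweenness_centrality_subset(
    G, sources=[0, 1], targets=[32, 33])
print(sorted(cfb, key=cfb.get, reverse=True)[:3])  # top-3 brokers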
Code example #42
    def dist(self, G1, G2, p=2):
        """The resistance perturbation graph distance is the p-norm of the
        difference between two graph resistance matrices.

        The resistance perturbation distance changes if either graph is relabeled
        (it is not invariant under graph isomorphism), so node labels should be
        consistent between the two graphs being compared. The distance is not
        normalized.

        The resistance matrix of a graph $G$ is calculated as
        $R = \text{diag}(L_i) 1^T + 1 \text{diag}(L_i)^T - 2L_i$,
        where $L_i$ is the Moore-Penrose pseudoinverse of the Laplacian of $G$.

        The resistance perturbation graph distance of $G_1$ and $G_2$ is
        calculated as the $p$-norm of the difference in their resistance matrices,
        $d_{r(p)} = ||R^{(1)} - R^{(2)}||_p = [\sum_{i,j \in V} |R^{(1)}_{i,j} - R^{(2)}_{i,j}|^p]^{1/p}$,
        where $R^{(1)}$ and $R^{(2)}$ are the resistance matrices of $G_1$ and $G_2$,
        respectively. When $p = \infty$,
        $d_{r(\infty)} = \max_{i,j \in V} |R^{(1)}_{i,j} - R^{(2)}_{i,j}|$.

        This method assumes that the input graphs are undirected; if directed
        graphs are used, it will coerce them to undirected graphs and emit a
        RuntimeWarning.

        For details, see https://arxiv.org/abs/1605.01091v2

        The results dictionary also stores a 2-tuple of the underlying resistance
        matrices in the key `'resistance_matrices'`.

        Params
        ------
        G1, G2 (nx.Graph): two networkx graphs to be compared.
        p (float or str, optional): $p$-norm to take of the difference between
            the resistance matrices. Specify `np.inf` to take $\infty$-norm.

        Returns
        -------
        dist (float): the distance between G1 and G2.

        """

        # Coerce to undirected, if needed.
        G1 = ensure_undirected(G1)
        G2 = ensure_undirected(G2)

        # Check for connected graphs
        if not nx.is_connected(G1) or not nx.is_connected(G2):
            raise ValueError(
                "Resistance perturbation is undefined for disconnected graphs."
            )

        # Get resistance matrices
        R1 = get_resistance_matrix(G1)
        R2 = get_resistance_matrix(G2)
        self.results['resistance_matrices'] = R1, R2

        # Get resistance perturbation distance
        if not np.isinf(p):
            dist = np.power(np.sum(np.power(np.abs(R1 - R2), p)), 1 / p)
        else:
            dist = np.amax(np.abs(R1 - R2))
        self.results['dist'] = dist

        return dist
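A minimal sketch of the resistance-matrix computation that dist() relies on, matching the docstring formula above; the project's actual get_resistance_matrix helper is not shown here, so treat this as an illustrative stand-in:

import numpy as np
import networkx as nx

def resistance_matrix_sketch(G):
    # R[i, j] = Li[i, i] + Li[j, j] - 2 * Li[i, j], where Li is the
    # Moore-Penrose pseudoinverse of the graph Laplacian.
    L = nx.laplacian_matrix(G).toarray().astype(float)
    Li = np.linalg.pinv(L)
    d = np.diag(Li)
    return d[:, None] + d[None, :] - 2 * Li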
Code example #43
def _compute_ricci_flow(G: nx.Graph,
                        weight="weight",
                        iterations=20,
                        step=1,
                        delta=1e-4,
                        surgery=(lambda G, *args, **kwargs: G, 100),
                        **kwargs):
    """Compute the given Ricci flow metric of each edge of a given connected NetworkX graph.

    Parameters
    ----------
    G : NetworkX graph
        A given directed or undirected NetworkX graph.
    weight : str
        The edge weight used to compute Ricci curvature. (Default value = "weight")
    iterations : int
        Number of iterations of the Ricci flow process. (Default value = 20)
    step : float
        Step size for the gradient descent process. (Default value = 1)
    delta : float
        The process stops when the spread of Ricci curvature falls below delta. (Default value = 1e-4)
    surgery : (function, int)
        A tuple of a user-defined surgery function and the interval (in
        iterations) at which it is executed.
        (Default value = (lambda G, *args, **kwargs: G, 100))
    **kwargs
        Additional keyword arguments passed to `_compute_ricci_curvature`.

    Returns
    -------
    G: NetworkX graph
        A NetworkX graph with ``weight`` as Ricci flow metric.
    """

    if not nx.is_connected(G):
        logger.info(
            "Not connected graph detected, compute on the largest connected component instead."
        )
        G = nx.Graph(G.subgraph(max(nx.connected_components(G), key=len)))

    # Set normalized weight to be the number of edges.
    normalized_weight = float(G.number_of_edges())

    global _apsp

    # Start compute edge Ricci flow
    t0 = time.time()

    if nx.get_edge_attributes(G, "original_RC"):
        logger.info("original_RC detected, continue to refine the ricci flow.")
    else:
        logger.info("No ricciCurvature detected, compute original_RC...")
        _compute_ricci_curvature(G, weight=weight, **kwargs)

        for (v1, v2) in G.edges():
            G[v1][v2]["original_RC"] = G[v1][v2]["ricciCurvature"]

        # clear the APSP since the graph has changed.
        _apsp = {}

    # Start the Ricci flow process
    for i in range(iterations):
        for (v1, v2) in G.edges():
            G[v1][v2][weight] -= step * (
                G[v1][v2]["ricciCurvature"]) * G[v1][v2][weight]

        # Do normalization on all weight to prevent weight expand to infinity
        w = nx.get_edge_attributes(G, weight)
        sumw = sum(w.values())
        for k, v in w.items():
            w[k] = w[k] * (normalized_weight / sumw)
        nx.set_edge_attributes(G, values=w, name=weight)
        logger.info(" === Ricci flow iteration %d === " % i)

        _compute_ricci_curvature(G, weight=weight, **kwargs)

        rc = nx.get_edge_attributes(G, "ricciCurvature")
        diff = max(rc.values()) - min(rc.values())

        logger.trace("Ricci curvature difference: %f" % diff)
        logger.trace("max:%f, min:%f | maxw:%f, minw:%f" % (max(
            rc.values()), min(rc.values()), max(w.values()), min(w.values())))

        if diff < delta:
            logger.trace("Ricci curvature converged, process terminated.")
            break

        # do surgery or any specific evaluation
        surgery_func, do_surgery = surgery
        if i != 0 and i % do_surgery == 0:
            G = surgery_func(G, weight)
            normalized_weight = float(G.number_of_edges())

        for n1, n2 in G.edges():
            logger.debug("%s %s %s" % (n1, n2, G[n1][n2]))

        # clear the APSP since the graph has changed.
        _apsp = {}

    logger.info("%8f secs for Ricci flow computation." % (time.time() - t0))

    return G
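The surgery hook above is invoked as surgery_func(G, weight) every do_surgery iterations. A hedged sketch of such a callback (the threshold value and the policy of keeping the largest component are assumptions, not the library's own surgery):

import networkx as nx

def cut_long_edges(G, weight="weight", threshold=3.0):
    # Drop edges whose flow metric grew past an (assumed) threshold, then
    # keep the largest connected component so the flow stays well defined.
    H = G.copy()
    H.remove_edges_from([(u, v) for u, v, w in H.edges(data=weight, default=0.0)
                         if w > threshold])
    if len(H) and not nx.is_connected(H):
        H = nx.Graph(H.subgraph(max(nx.connected_components(H), key=len)))
    return H

# e.g. _compute_ricci_flow(G, surgery=(cut_long_edges, 10))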
Code example #44
def minimum_node_cut(G, s=None, t=None, flow_func=None):
    r"""Returns a set of nodes of minimum cardinality that disconnects G.

    If source and target nodes are provided, this function returns the
    set of nodes of minimum cardinality that, if removed, would destroy
    all paths among source and target in G. If not, it returns a set
    of nodes of minimum cardinality that disconnects G.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node. Optional. Default value: None.

    t : node
        Target node. Optional. Default value: None.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node, and it has to return a residual
        network that follows NetworkX conventions (see :meth:`maximum_flow`
        for details). If flow_func is None, the default maximum flow
        function (:meth:`edmonds_karp`) is used. See below for details.
        The choice of the default function may change from version to
        version and should not be relied on. Default value: None.

    Returns
    -------
    cutset : set
        Set of nodes that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the nodes that
        if removed, would destroy all paths between source and target.

    Examples
    --------
    >>> # Platonic icosahedral graph has node connectivity 5
    >>> G = nx.icosahedral_graph()
    >>> node_cut = nx.minimum_node_cut(G)
    >>> len(node_cut)
    5

    You can use alternative flow algorithms for the underlying maximum
    flow computation. In dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better
    than the default :meth:`edmonds_karp`, which is faster for
    sparse networks with highly skewed degree distributions. Alternative
    flow functions have to be explicitly imported from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> node_cut == nx.minimum_node_cut(G, flow_func=shortest_augmenting_path)
    True

    If you specify a pair of nodes (source and target) as parameters,
    this function returns a local st node cut.

    >>> len(nx.minimum_node_cut(G, 3, 7))
    5

    If you need to perform several local st cuts among different
    pairs of nodes on the same graph, it is recommended that you reuse
    the data structures used in the maximum flow computations. See
    :meth:`minimum_st_node_cut` for details.

    Notes
    -----
    This is a flow based implementation of minimum node cut. The algorithm
    is based on solving a number of maximum flow computations to determine
    the capacity of the minimum cut on an auxiliary directed network that
    corresponds to the minimum node cut of G. It handles both directed
    and undirected graphs. This implementation is based on algorithm 11
    in [1]_.

    See also
    --------
    :meth:`minimum_st_node_cut`
    :meth:`minimum_cut`
    :meth:`minimum_edge_cut`
    :meth:`stoer_wagner`
    :meth:`node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    if (s is not None and t is None) or (s is None and t is not None):
        raise nx.NetworkXError('Both source and target must be specified.')

    # Local minimum node cut.
    if s is not None and t is not None:
        if s not in G:
            raise nx.NetworkXError(f"node {s} not in graph")
        if t not in G:
            raise nx.NetworkXError(f"node {t} not in graph")
        return minimum_st_node_cut(G, s, t, flow_func=flow_func)

    # Global minimum node cut.
    # Analog to the algorithm 11 for global node connectivity in [1].
    if G.is_directed():
        if not nx.is_weakly_connected(G):
            raise nx.NetworkXError('Input graph is not connected')
        iter_func = itertools.permutations

        def neighbors(v):
            return itertools.chain.from_iterable([G.predecessors(v),
                                                  G.successors(v)])
    else:
        if not nx.is_connected(G):
            raise nx.NetworkXError('Input graph is not connected')
        iter_func = itertools.combinations
        neighbors = G.neighbors

    # Reuse the auxiliary digraph and the residual network.
    H = build_auxiliary_node_connectivity(G)
    R = build_residual_network(H, 'capacity')
    kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)

    # Choose a node with minimum degree.
    v = min(G, key=G.degree)
    # Initial node cutset is all neighbors of the node with minimum degree.
    min_cut = set(G[v])
    # Compute st node cuts between v and all its non-neighbors nodes in G.
    for w in set(G) - set(neighbors(v)) - {v}:
        this_cut = minimum_st_node_cut(G, v, w, **kwargs)
        if len(min_cut) >= len(this_cut):
            min_cut = this_cut
    # Also for non adjacent pairs of neighbors of v.
    for x, y in iter_func(neighbors(v), 2):
        if y in G[x]:
            continue
        this_cut = minimum_st_node_cut(G, x, y, **kwargs)
        if len(min_cut) >= len(this_cut):
            min_cut = this_cut

    return min_cut
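When several st node cuts are needed on the same graph, the auxiliary digraph and residual network can be built once and passed in, as the docstring above recommends. A short sketch of that pattern (the icosahedral graph is an arbitrary choice; these utilities are public in networkx.algorithms.connectivity and networkx.algorithms.flow):

import networkx as nx
from itertools import combinations
from networkx.algorithms.connectivity import (
    build_auxiliary_node_connectivity, minimum_st_node_cut)
from networkx.algorithms.flow import build_residual_network

G = nx.icosahedral_graph()
H = build_auxiliary_node_connectivity(G)
R = build_residual_network(H, 'capacity')
# Reuse H and R for every non-adjacent pair instead of rebuilding them.
for u, v in combinations(G, 2):
    if v not in G[u]:
        cut = minimum_st_node_cut(G, u, v, auxiliary=H, residual=R)
        assert len(cut) == 5  # node connectivity of the icosahedron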
Code example #45
File: cuts.py Project: nickp60/Ragout
def minimum_edge_cut(G, s=None, t=None):
    r"""Returns a set of edges of minimum cardinality that disconnects G.

    If source and target nodes are provided, this function returns the 
    set of edges of minimum cardinality that, if removed, would break 
    all paths among source and target in G. If not, it returns a set of 
    edges of minimum cardinality that disconnects G.
    
    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node. Optional (default=None)

    t : node
        Target node. Optional (default=None)

    Returns
    -------
    cutset : set
        Set of edges that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the edges that,
        if removed, would destroy all paths between source and target.

    Examples
    --------
    >>> # Platonic icosahedral graph has edge connectivity 5
    >>> G = nx.icosahedral_graph()
    >>> len(nx.minimum_edge_cut(G))
    5
    >>> # this is the minimum over any pair of nodes
    >>> from itertools import combinations
    >>> for u,v in combinations(G, 2):
    ...     assert(len(nx.minimum_edge_cut(G,u,v)) == 5)
    ... 

    Notes
    -----
    This is a flow based implementation of minimum edge cut. For
    undirected graphs the algorithm works by finding a 'small' dominating
    set of nodes of G (see algorithm 7 in [1]_) and computing the maximum
    flow between an arbitrary node in the dominating set and the rest of
    nodes in it. This is an implementation of algorithm 6 in [1]_.

    For directed graphs, the algorithm does n calls to the max flow function.
    This is an implementation of algorithm 8 in [1]_. We use the Ford and
    Fulkerson algorithm to compute max flow (see ford_fulkerson).

    See also
    --------
    node_connectivity
    edge_connectivity
    minimum_node_cut
    max_flow
    ford_fulkerson

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    # reuse auxiliary digraph
    H = _aux_digraph_edge_connectivity(G)
    # Local minimum edge cut if s and t are not None
    if s is not None and t is not None:
        if s not in G:
            raise nx.NetworkXError('node %s not in graph' % s)
        if t not in G:
            raise nx.NetworkXError('node %s not in graph' % t)
        return minimum_st_edge_cut(H, s, t)
    # Global minimum edge cut
    # Analog to the algorithm for global edge connectivity
    if G.is_directed():
        # Based on algorithm 8 in [1]
        if not nx.is_weakly_connected(G):
            raise nx.NetworkXError('Input graph is not connected')
        # Initial cutset is all edges of a node with minimum degree
        deg = G.degree()
        min_deg = min(deg.values())
        node = next(n for n, d in deg.items() if d == min_deg)
        min_cut = G.edges(node)
        nodes = G.nodes()
        n = len(nodes)
        for i in range(n):
            try:
                this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i + 1])
                if len(this_cut) <= len(min_cut):
                    min_cut = this_cut
            except IndexError:  # Last node!
                this_cut = minimum_st_edge_cut(H, nodes[i], nodes[0])
                if len(this_cut) <= len(min_cut):
                    min_cut = this_cut
        return min_cut
    else:  # undirected
        # Based on algorithm 6 in [1]
        if not nx.is_connected(G):
            raise nx.NetworkXError('Input graph is not connected')
        # Initial cutset is all edges of a node with minimum degree
        deg = G.degree()
        min_deg = min(deg.values())
        node = next(n for n, d in deg.items() if d == min_deg)
        min_cut = G.edges(node)
        # A dominating set is \lambda-covering
        # We need a dominating set with at least two nodes
        for node in G:
            D = dominating_set(G, start_with=node)
            v = D.pop()
            if D: break
        else:
            # in complete graphs the dominating set will always be of one node
            # thus we return min_cut, which now contains the edges of a node
            # with minimum degree
            return min_cut
        for w in D:
            this_cut = minimum_st_edge_cut(H, v, w)
            if len(this_cut) <= len(min_cut):
                min_cut = this_cut
        return min_cut
Code example #46
def minimum_edge_cut(G, s=None, t=None, flow_func=None):
    r"""Returns a set of edges of minimum cardinality that disconnects G.

    If source and target nodes are provided, this function returns the
    set of edges of minimum cardinality that, if removed, would break
    all paths among source and target in G. If not, it returns a set of
    edges of minimum cardinality that disconnects G.

    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node. Optional. Default value: None.

    t : node
        Target node. Optional. Default value: None.

    flow_func : function
        A function for computing the maximum flow among a pair of nodes.
        The function has to accept at least three parameters: a Digraph,
        a source node, and a target node, and it has to return a residual
        network that follows NetworkX conventions (see :meth:`maximum_flow`
        for details). If flow_func is None, the default maximum flow
        function (:meth:`edmonds_karp`) is used. See below for details.
        The choice of the default function may change from version to
        version and should not be relied on. Default value: None.

    Returns
    -------
    cutset : set
        Set of edges that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the edges that
        if removed, would destroy all paths between source and target.

    Examples
    --------
    >>> # Platonic icosahedral graph has edge connectivity 5
    >>> G = nx.icosahedral_graph()
    >>> len(nx.minimum_edge_cut(G))
    5

    You can use alternative flow algorithms for the underlying
    maximum flow computation. In dense networks the algorithm
    :meth:`shortest_augmenting_path` will usually perform better
    than the default :meth:`edmonds_karp`, which is faster for
    sparse networks with highly skewed degree distributions.
    Alternative flow functions have to be explicitly imported
    from the flow package.

    >>> from networkx.algorithms.flow import shortest_augmenting_path
    >>> len(nx.minimum_edge_cut(G, flow_func=shortest_augmenting_path))
    5

    If you specify a pair of nodes (source and target) as parameters,
    this function returns the value of local edge connectivity.

    >>> nx.edge_connectivity(G, 3, 7)
    5

    If you need to perform several local computations among different
    pairs of nodes on the same graph, it is recommended that you reuse
    the data structures used in the maximum flow computations. See
    :meth:`local_edge_connectivity` for details.

    Notes
    -----
    This is a flow based implementation of minimum edge cut. For
    undirected graphs the algorithm works by finding a 'small' dominating
    set of nodes of G (see algorithm 7 in [1]_) and computing the maximum
    flow between an arbitrary node in the dominating set and the rest of
    nodes in it. This is an implementation of algorithm 6 in [1]_. For
    directed graphs, the algorithm does n calls to the max flow function.
    The function raises an error if the directed graph is not weakly
    connected, and returns an empty set if the graph is weakly but not
    strongly connected (its edge connectivity is then zero).
    It is an implementation of algorithm 8 in [1]_.

    See also
    --------
    :meth:`minimum_st_edge_cut`
    :meth:`minimum_node_cut`
    :meth:`stoer_wagner`
    :meth:`node_connectivity`
    :meth:`edge_connectivity`
    :meth:`maximum_flow`
    :meth:`edmonds_karp`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    if (s is not None and t is None) or (s is None and t is not None):
        raise nx.NetworkXError('Both source and target must be specified.')

    # reuse auxiliary digraph and residual network
    H = build_auxiliary_edge_connectivity(G)
    R = build_residual_network(H, 'capacity')
    kwargs = dict(flow_func=flow_func, residual=R, auxiliary=H)

    # Local minimum edge cut if s and t are not None
    if s is not None and t is not None:
        if s not in G:
            raise nx.NetworkXError(f"node {s} not in graph")
        if t not in G:
            raise nx.NetworkXError(f"node {t} not in graph")
        return minimum_st_edge_cut(H, s, t, **kwargs)

    # Global minimum edge cut
    # Analog to the algorithm for global edge connectivity
    if G.is_directed():
        # Based on algorithm 8 in [1]
        if not nx.is_weakly_connected(G):
            raise nx.NetworkXError('Input graph is not connected')

        # Initial cutset is all edges of a node with minimum degree
        node = min(G, key=G.degree)
        min_cut = set(G.edges(node))
        nodes = list(G)
        n = len(nodes)
        for i in range(n):
            try:
                this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i + 1], **kwargs)
                if len(this_cut) <= len(min_cut):
                    min_cut = this_cut
            except IndexError:  # Last node!
                this_cut = minimum_st_edge_cut(H, nodes[i], nodes[0], **kwargs)
                if len(this_cut) <= len(min_cut):
                    min_cut = this_cut

        return min_cut

    else:  # undirected
        # Based on algorithm 6 in [1]
        if not nx.is_connected(G):
            raise nx.NetworkXError('Input graph is not connected')

        # Initial cutset is all edges of a node with minimum degree
        node = min(G, key=G.degree)
        min_cut = set(G.edges(node))
        # A dominating set is \lambda-covering
        # We need a dominating set with at least two nodes
        for node in G:
            D = nx.dominating_set(G, start_with=node)
            v = D.pop()
            if D:
                break
        else:
            # in complete graphs the dominating set will always be of one node
            # thus we return min_cut, which now contains the edges of a node
            # with minimum degree
            return min_cut
        for w in D:
            this_cut = minimum_st_edge_cut(H, v, w, **kwargs)
            if len(this_cut) <= len(min_cut):
                min_cut = this_cut

        return min_cut
Code example #47
def FiedlerOptions(G, k, subgoal=False):
    no = 0

    X = nx.to_networkx_graph(G)
    if not nx.is_connected(X):
        cs = list(nx.connected_components(X))
        for c_ in cs:
            if len(c_) > 1:
                c = c_
                break
        Xsub = X.subgraph(c)
        A = nx.to_numpy_matrix(Xsub)
        print('connected comp =', c)
    else:
        A = G.copy()

    options = []

    eigenvalues = []
    eigenvectors = []

    while no < k:
        v = ComputeFiedlerVector(nx.to_networkx_graph(A))
        lmd = ComputeConnectivity(A)

        # maxv = np.amax(v)
        # maxs = []
        # for i, val in enumerate(v):
        #     if val > maxv - 0.02:
        #         maxs.append(i)
        #
        # minv = np.argmin(v)
        # mins = []
        # for i, val in enumerate(v):
        #     if val < minv + 0.02:
        #         mins.append(i)
        #
        # print('maxs=', maxs)
        maxs = [np.argmax(v)]
        mins = [np.argmin(v)]
        option = (maxs, mins)

        options.append(option)
        if subgoal:
            B = A.copy()
            B[:, option[1][0]] = 1
            B[option[1][0], :] = 1
        else:
            B = AddEdge(A, option[0][0], option[1][0])
        A = B
        no += 2
        eigenvalues.append(lmd)
        eigenvectors.append(v)

    # TODO: If A is a subgraph of G, convert the acquired eigenvectors to the original size.
    if not nx.is_connected(X):
        evecs = []
        for v in eigenvectors:
            newv = np.zeros(G.shape[0])
            i = 0
            j = 0
            while i < A.shape[0]:
                if j in c:
                    newv[j] = v[i]
                    i += 1
                j += 1
            evecs.append(newv)
    else:
        evecs = eigenvectors

    return A, options, eigenvalues, evecs
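FiedlerOptions depends on helpers (ComputeFiedlerVector, ComputeConnectivity, AddEdge) that are not shown in this snippet. A hedged sketch of the first two via a dense Laplacian eigendecomposition, assuming they behave as their names suggest:

import numpy as np
import networkx as nx

def ComputeFiedlerVector(X):
    # The Fiedler vector is the Laplacian eigenvector belonging to the
    # second-smallest eigenvalue; eigh returns eigenvalues in ascending order.
    L = nx.laplacian_matrix(X).toarray().astype(float)
    vals, vecs = np.linalg.eigh(L)
    return vecs[:, 1]

def ComputeConnectivity(A):
    # Algebraic connectivity (lambda_2) of the graph given as a matrix.
    L = nx.laplacian_matrix(nx.to_networkx_graph(A)).toarray().astype(float)
    return np.linalg.eigvalsh(L)[1]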
Code example #48
def second_order_centrality(G):
    """Compute the second order centrality for nodes of G.

    The second order centrality of a given node is the standard deviation of
    the return times to that node of a perpetual random walk on G.

    Parameters
    ----------
    G : graph
      A NetworkX connected and undirected graph.

    Returns
    -------
    nodes : dictionary
       Dictionary keyed by node with second order centrality as the value.

    Examples
    --------
    >>> G = nx.star_graph(10)
    >>> soc = nx.second_order_centrality(G)
    >>> print(sorted(soc.items(), key=lambda x:x[1])[0][0]) # pick first id
    0

    Raises
    ------
    NetworkXException
        If the graph G is empty, not connected, or has negative weights.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    Lower values of second order centrality indicate higher centrality.

    The algorithm is from Kermarrec, Le Merrer, Sericola and Trédan [1]_.

    This code implements the analytical version of the algorithm, i.e.,
    there is no simulation of a random walk process involved. The random walk
    is here unbiased (corresponding to eq 6 of the paper [1]_), thus the
    centrality values are the standard deviations for random walk return times
    on the transformed input graph G (equal in-degree at each node by adding
    self-loops).

    Complexity of this implementation, made to run locally on a single machine,
    is O(n^3), with n the size of G, which makes it viable only for small
    graphs.

    References
    ----------
    .. [1] Anne-Marie Kermarrec, Erwan Le Merrer, Bruno Sericola, Gilles Trédan
       "Second order centrality: Distributed assessment of nodes criticity in
       complex networks", Elsevier Computer Communications 34(5):619-628, 2011.
    """

    try:
        import numpy as np
    except ImportError:
        raise ImportError('Requires NumPy: http://scipy.org/')

    n = len(G)

    if n == 0:
        raise nx.NetworkXException("Empty graph.")
    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if any(d.get('weight', 0) < 0 for u, v, d in G.edges(data=True)):
        raise nx.NetworkXException("Graph has negative edge weights.")

    # balancing G for Metropolis-Hastings random walks
    G = nx.DiGraph(G)
    in_deg = dict(G.in_degree(weight='weight'))
    d_max = max(in_deg.values())
    for i, deg in in_deg.items():
        if deg < d_max:
            G.add_edge(i, i, weight=d_max-deg)

    P = nx.to_numpy_matrix(G)
    P = P / P.sum(axis=1)  # to transition probability matrix

    def _Qj(P, j):
        P = P.copy()
        P[:, j] = 0
        return P

    M = np.empty([n, n])

    for i in range(n):
        M[:, i] = np.linalg.solve(np.identity(n) - _Qj(P, i),
                                  np.ones([n, 1])[:, 0])  # eq 3

    return dict(zip(G.nodes,
                    [np.sqrt((2*np.sum(M[:, i])-n*(n+1))) for i in range(n)]
                    ))  # eq 6
Code example #49
File: cuts.py Project: nickp60/Ragout
def minimum_node_cut(G, s=None, t=None):
    r"""Returns a set of nodes of minimum cardinality that disconnects G.

    If source and target nodes are provided, this function returns the 
    set of nodes of minimum cardinality that, if removed, would destroy 
    all paths among source and target in G. If not, it returns a set 
    of nodes of minimum cardinality that disconnects G.
    
    Parameters
    ----------
    G : NetworkX graph

    s : node
        Source node. Optional (default=None)

    t : node
        Target node. Optional (default=None)

    Returns
    -------
    cutset : set
        Set of nodes that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the nodes that,
        if removed, would destroy all paths between source and target.

    Examples
    --------
    >>> # Platonic icosahedral graph has node connectivity 5 
    >>> G = nx.icosahedral_graph()
    >>> len(nx.minimum_node_cut(G))
    5
    >>> # this is the minimum over any pair of non adjacent nodes
    >>> from itertools import combinations
    >>> for u,v in combinations(G, 2):
    ...     if v not in G[u]:
    ...         assert(len(nx.minimum_node_cut(G,u,v)) == 5)
    ... 

    Notes
    -----
    This is a flow based implementation of minimum node cut. The algorithm 
    is based in solving a number of max-flow problems (ie local st-node
    connectivity, see local_node_connectivity) to determine the capacity 
    of the minimum cut on an auxiliary directed network that corresponds 
    to the minimum node cut of G. It handles both directed and undirected 
    graphs.

    This implementation is based on algorithm 11 in [1]_. We use the Ford 
    and Fulkerson algorithm to compute max flow (see ford_fulkerson).

    See also
    --------
    node_connectivity
    edge_connectivity
    minimum_edge_cut
    max_flow
    ford_fulkerson 

    References
    ----------
    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. 
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    # Local minimum node cut
    if s is not None and t is not None:
        if s not in G:
            raise nx.NetworkXError('node %s not in graph' % s)
        if t not in G:
            raise nx.NetworkXError('node %s not in graph' % t)
        return minimum_st_node_cut(G, s, t)
    # Global minimum node cut
    # Analog to the algorithm 11 for global node connectivity in [1]
    if G.is_directed():
        if not nx.is_weakly_connected(G):
            raise nx.NetworkXError('Input graph is not connected')
        iter_func = itertools.permutations

        def neighbors(v):
            return itertools.chain.from_iterable(
                [G.predecessors_iter(v),
                 G.successors_iter(v)])
    else:
        if not nx.is_connected(G):
            raise nx.NetworkXError('Input graph is not connected')
        iter_func = itertools.combinations
        neighbors = G.neighbors_iter
    # Choose a node with minimum degree
    deg = G.degree()
    min_deg = min(deg.values())
    v = next(n for n, d in deg.items() if d == min_deg)
    # Initial node cutset is all neighbors of the node with minimum degree
    min_cut = set(G[v])
    # Reuse the auxiliary digraph
    H, mapping = _aux_digraph_node_connectivity(G)
    # compute st node cuts between v and all its non-neighbors nodes in G
    # and store the minimum
    for w in set(G) - set(neighbors(v)) - set([v]):
        this_cut = minimum_st_node_cut(G, v, w, aux_digraph=H, mapping=mapping)
        if len(min_cut) >= len(this_cut):
            min_cut = this_cut
    # Same for non adjacent pairs of neighbors of v
    for x, y in iter_func(neighbors(v), 2):
        if y in G[x]: continue
        this_cut = minimum_st_node_cut(G, x, y, aux_digraph=H, mapping=mapping)
        if len(min_cut) >= len(this_cut):
            min_cut = this_cut
    return min_cut
Code example #50
def graph_connected(elcon):
    G = nx.Graph()
    for item in elcon:
        G.add_edge(item[0], item[1])

    return nx.is_connected(G)
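A tiny usage sketch, assuming elcon is an iterable of two-item connections (for example, element endpoints in a mesh):

elcon = [(0, 1), (1, 2), (2, 3)]
print(graph_connected(elcon))  # True: the edges form a single chain

elcon = [(0, 1), (2, 3)]
print(graph_connected(elcon))  # False: two separate components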
Code example #51
    while True:
        index_i = random.randint(0, graph.number_of_nodes() - 1)
        index_j = random.randint(0, graph.number_of_nodes() - 1)
        if index_i == index_j:
            index_j = (index_j + 1) % graph.number_of_nodes()
        index_i = int(list(graph.nodes())[index_i])
        index_j = int(list(graph.nodes())[index_j])
        if not graph.has_edge(str(index_i), str(index_j)):
            break

    listTrainningTest.append([
        str(index_i),
        str(index_j),
        graph.has_edge(str(index_i), str(index_j))
    ])

graph_connected = nx.is_connected(graph)

for i in range(1, sizeTrainningTest - len(listTrainningTest) + 1):
    degree_i = 0
    degree_j = 0
    index_i = 0
    index_j = 0

    count = 0
    while degree_i <= 1 or degree_j <= 1:
        rand_edge = random.randint(0, graph.number_of_edges() - 1)
        index_i = list(graph.edges())[rand_edge][0]
        index_j = list(graph.edges())[rand_edge][1]
        degree_i = graph.degree(str(index_i))
        degree_j = graph.degree(str(index_j))
        if degree_i > 1 and degree_j > 1:
Code example #52
def network_simplex(G, demand='demand', capacity='capacity', weight='weight'):
    """Find a minimum cost flow satisfying all demands in digraph G.
    
    This is a primal network simplex algorithm that uses the leaving
    arc rule to prevent cycling.

    G is a digraph with edge costs and capacities and in which nodes
    have demand, i.e., they want to send or receive some amount of
    flow. A negative demand means that the node wants to send flow, a
    positive demand means that the node wants to receive flow. A flow on
    the digraph G satisfies all demand if the net flow into each node
    is equal to the demand of that node.

    Parameters
    ----------
    G : NetworkX graph
        DiGraph on which a minimum cost flow satisfying all demands is
        to be found.

    demand: string
        Nodes of the graph G are expected to have an attribute demand
        that indicates how much flow a node wants to send (negative
        demand) or receive (positive demand). Note that the sum of the
        demands should be 0, otherwise the problem is not feasible. If
        this attribute is not present, a node is considered to have 0
        demand. Default value: 'demand'.

    capacity: string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    weight: string
        Edges of the graph G are expected to have an attribute weight
        that indicates the cost incurred by sending one unit of flow on
        that edge. If not present, the weight is considered to be 0.
        Default value: 'weight'.

    Returns
    -------
    flowCost: integer, float
        Cost of a minimum cost flow satisfying all demands.

    flowDict: dictionary
        Dictionary of dictionaries keyed by nodes such that
        flowDict[u][v] is the flow on edge (u, v).

    Raises
    ------
    NetworkXError
        This exception is raised if the input graph is not directed,
        not connected or is a multigraph.

    NetworkXUnfeasible
        This exception is raised in the following situations:
            * The sum of the demands is not zero. Then, there is no
              flow satisfying all demands.
            * There is no flow satisfying all demand.

    NetworkXUnbounded
        This exception is raised if the digraph G has a cycle of
        negative cost and infinite capacity. Then, the cost of a flow
        satisfying all demands is unbounded below.

    Notes
    -----
    This algorithm is not guaranteed to work if edge weights
    are floating point numbers (overflows and roundoff errors can 
    cause problems). 
        
    See also
    --------
    cost_of_flow, max_flow_min_cost, min_cost_flow, min_cost_flow_cost
               
    Examples
    --------
    A simple example of a min cost flow problem.

    >>> import networkx as nx
    >>> G = nx.DiGraph()
    >>> G.add_node('a', demand = -5)
    >>> G.add_node('d', demand = 5)
    >>> G.add_edge('a', 'b', weight = 3, capacity = 4)
    >>> G.add_edge('a', 'c', weight = 6, capacity = 10)
    >>> G.add_edge('b', 'd', weight = 1, capacity = 9)
    >>> G.add_edge('c', 'd', weight = 2, capacity = 5)
    >>> flowCost, flowDict = nx.network_simplex(G)
    >>> flowCost
    24
    >>> flowDict # doctest: +SKIP
    {'a': {'c': 1, 'b': 4}, 'c': {'d': 1}, 'b': {'d': 4}, 'd': {}}

    The mincost flow algorithm can also be used to solve shortest path
    problems. To find the shortest path between two nodes u and v,
    give all edges an infinite capacity, give node u a demand of -1 and
    node v a demand of 1. Then run the network simplex. The value of a
    min cost flow will be the distance between u and v and edges
    carrying positive flow will indicate the path.

    >>> G=nx.DiGraph()
    >>> G.add_weighted_edges_from([('s','u',10), ('s','x',5), 
    ...                            ('u','v',1), ('u','x',2), 
    ...                            ('v','y',1), ('x','u',3), 
    ...                            ('x','v',5), ('x','y',2), 
    ...                            ('y','s',7), ('y','v',6)])
    >>> G.add_node('s', demand = -1)
    >>> G.add_node('v', demand = 1)
    >>> flowCost, flowDict = nx.network_simplex(G)
    >>> flowCost == nx.shortest_path_length(G, 's', 'v', weight = 'weight')
    True
    >>> [(u, v) for u in flowDict for v in flowDict[u] if flowDict[u][v] > 0]
    [('x', 'u'), ('s', 'x'), ('u', 'v')]
    >>> nx.shortest_path(G, 's', 'v', weight = 'weight')
    ['s', 'x', 'u', 'v']

    It is possible to change the name of the attributes used for the
    algorithm.

    >>> G = nx.DiGraph()
    >>> G.add_node('p', spam = -4)
    >>> G.add_node('q', spam = 2)
    >>> G.add_node('a', spam = -2)
    >>> G.add_node('d', spam = -1)
    >>> G.add_node('t', spam = 2)
    >>> G.add_node('w', spam = 3)
    >>> G.add_edge('p', 'q', cost = 7, vacancies = 5)
    >>> G.add_edge('p', 'a', cost = 1, vacancies = 4)
    >>> G.add_edge('q', 'd', cost = 2, vacancies = 3)
    >>> G.add_edge('t', 'q', cost = 1, vacancies = 2)
    >>> G.add_edge('a', 't', cost = 2, vacancies = 4)
    >>> G.add_edge('d', 'w', cost = 3, vacancies = 4)
    >>> G.add_edge('t', 'w', cost = 4, vacancies = 1)
    >>> flowCost, flowDict = nx.network_simplex(G, demand = 'spam',
    ...                                         capacity = 'vacancies',
    ...                                         weight = 'cost')
    >>> flowCost
    37
    >>> flowDict  # doctest: +SKIP
    {'a': {'t': 4}, 'd': {'w': 2}, 'q': {'d': 1}, 'p': {'q': 2, 'a': 2}, 't': {'q': 1, 'w': 1}, 'w': {}}

    References
    ----------
    W. J. Cook, W. H. Cunningham, W. R. Pulleyblank and A. Schrijver.
    Combinatorial Optimization. Wiley-Interscience, 1998.

    """

    if not G.is_directed():
        raise nx.NetworkXError("Undirected graph not supported.")
    if not nx.is_connected(G.to_undirected()):
        raise nx.NetworkXError("Not connected graph not supported.")
    if G.is_multigraph():
        raise nx.NetworkXError("MultiDiGraph not supported.")
    if sum(d[demand] for v, d in G.nodes(data=True) if demand in d) != 0:
        raise nx.NetworkXUnfeasible("Sum of the demands should be 0.")

    # Fix an arbitrarily chosen root node and find an initial tree solution.
    H, T, y, artificialEdges, flowCost, r = \
            _initial_tree_solution(G, demand = demand, capacity = capacity,
                                   weight = weight)

    # Initialize the reduced costs.
    c = {}
    for u, v, d in H.edges_iter(data=True):
        c[(u, v)] = d.get(weight, 0) + y[u] - y[v]

    # Print stuff for debugging.
    # print('-' * 78)
    # nbIter = 0
    # print('Iteration %d' % nbIter)
    # nbIter += 1
    # print('Tree solution: %s' % T.edges())
    # print(' Edge %11s%10s' % ('Flow', 'Red Cost'))
    # for u, v, d in H.edges(data = True):
    #     flag = ''
    #     if (u, v) in artificialEdges:
    #         flag = '*'
    #     print('(%s, %s)%1s%10d%10d' % (u, v, flag, d.get('flow', 0),
    #                                    c[(u, v)]))
    # print('Distances: %s' % y)

    # Main loop.
    while True:
        newEdge = _find_entering_edge(H, c, capacity=capacity)
        if not newEdge:
            break  # Optimal basis found. Main loop is over.
        cycleCost = abs(c[newEdge])

        # Find the cycle created by adding newEdge to T.
        path1 = nx.shortest_path(T.to_undirected(), r, newEdge[0])
        path2 = nx.shortest_path(T.to_undirected(), r, newEdge[1])
        join = r
        for index, node in enumerate(path1[1:]):
            if index + 1 < len(path2) and node == path2[index + 1]:
                join = node
            else:
                break
        path1 = path1[path1.index(join):]
        path2 = path2[path2.index(join):]
        cycle = []
        if H[newEdge[0]][newEdge[1]].get('flow', 0) == 0:
            path2.reverse()
            cycle = path1 + path2
        else:  # newEdge is at capacity
            path1.reverse()
            cycle = path2 + path1

        # Find the leaving edge. Will stop here if cycle is an infinite
        # capacity negative cost cycle.
        leavingEdge, eps = _find_leaving_edge(H,
                                              T,
                                              cycle,
                                              newEdge,
                                              capacity=capacity)

        # Actual augmentation happens here. If eps = 0, don't bother.
        if eps:
            flowCost -= cycleCost * eps
            if len(cycle) == 3:
                u, v = newEdge
                H[u][v]['flow'] -= eps
                H[v][u]['flow'] -= eps
            else:
                for index, u in enumerate(cycle[:-1]):
                    v = cycle[index + 1]
                    if (u, v) in T.edges() + [newEdge]:
                        H[u][v]['flow'] = H[u][v].get('flow', 0) + eps
                    else:  # (v, u) in T.edges():
                        H[v][u]['flow'] -= eps

        # Update tree solution.
        T.add_edge(*newEdge)
        T.remove_edge(*leavingEdge)

        # Update distances and reduced costs.
        if newEdge != leavingEdge:
            forest = nx.DiGraph(T)
            forest.remove_edge(*newEdge)
            R, notR = nx.connected_component_subgraphs(forest.to_undirected())
            if r in notR.nodes():  # make sure r is in R
                R, notR = notR, R
            if newEdge[0] in R.nodes():
                for v in notR.nodes():
                    y[v] += c[newEdge]
            else:
                for v in notR.nodes():
                    y[v] -= c[newEdge]
            for u, v in H.edges():
                if u in notR.nodes() or v in notR.nodes():
                    c[(u, v)] = H[u][v].get(weight, 0) + y[u] - y[v]

        # Print stuff for debugging.
        # print('-' * 78)
        # print('Iteration %d' % nbIter)
        # nbIter += 1
        # print('Tree solution: %s' % T.edges())
        # print('New edge:      (%s, %s)' % (newEdge[0], newEdge[1]))
        # print('Leaving edge:  (%s, %s)' % (leavingEdge[0], leavingEdge[1]))
        # print('Cycle:         %s' % cycle)
        # print('eps:           %d' % eps)
        # print(' Edge %11s%10s' % ('Flow', 'Red Cost'))
        # for u, v, d in H.edges(data = True):
        #     flag = ''
        #     if (u, v) in artificialEdges:
        #         flag = '*'
        #     print('(%s, %s)%1s%10d%10d' % (u, v, flag, d.get('flow', 0),
        #                                    c[(u, v)]))
        # print('Distances: %s' % y)

    # If an artificial edge has positive flow, the initial problem was
    # not feasible.
    for u, v in artificialEdges:
        if H[u][v]['flow'] != 0:
            raise nx.NetworkXUnfeasible("No flow satisfying all demands.")
        H.remove_edge(u, v)

    for u in H.nodes():
        if u not in G:
            H.remove_node(u)

    flowDict = _create_flow_dict(G, H)

    return flowCost, flowDict
Code example #53
    # save figures
#    fig.savefig(os.path.join(figDir, f'{key}_causalimpact_information_impact.eps'))
#    sfig.savefig(os.path.join(figDir, f'{key}_raw_causalimpact_information_impact.eps'))

    aucs[key] = auc # update data
# %%

aucs = {}
fitParams    = dict(maxfev = int(1e6), \
                   bounds = (0, np.inf), p0 = p0,\
                   jac = 'cs')
for k, v in loadedData.items():
#    v = v.squeeze()
#    v = (v - v.min(0)) / (v.max(0) - v.min(0))
    g = nx.node_link_graph(settings[k].graph)
    if v.mean() > 1e-3 and nx.is_connected(g):
        try:
            s = v.shape
            v = v.reshape(*s[:-2], -1)
            
            MIN = np.nanmin(v, axis = -1)[..., None]
            MAX = np.nanmax(v, axis = -1)[..., None]
            v = (v - MIN) / (MAX - MIN)
            
            v = v.reshape(s).squeeze()
            
            t_aucs = np.zeros(v.shape[:-1])
            for idx, i in enumerate(v):
                coeffs, _ = plotz.fit(i, func, params = fitParams)
                for cidx, c in enumerate(coeffs):
                    t_aucs[idx, cidx], err = scipy.integrate.quad(lambda x: func(x, *c),  0, deltas)
Code example #54
G.add_edge(1, 1 + anum)
G.add_edge(2, 2 + anum)
G.add_edge(3, 3 + anum)
G.add_edge(4, 4 + anum)
# G[4][5+anum].update()
G.add_edges_from([(1, 2 + anum), (3, 3 + anum), (4, 3 + anum)])
## convenient way to add several edges at once

# G.add_nodes_from([3,15,16,17], weight=0.4)

print G.nodes()
print G.edges()

# c = nx.connected_components(G)

print 'True if the graph is connected, False otherwise.: ', nx.is_connected(G)
print 'Number of connected components: ', nx.number_connected_components(G)

sub_graphs = nx.connected_component_subgraphs(G)
#for i, sg in enumerate(sub_graphs):
#    print "subgraph {} has {} nodes".format(i, sg.number_of_nodes())
#    print "\tNodes:", sg.nodes(data=True)
#    print "\tEdges:", sg.edges()

for i, sg in enumerate(sub_graphs):
    print "subgraph {} has {} nodes".format(i, sg.number_of_nodes())
    print "\tNodes:", sg.nodes(data=False)  # default=False

for i, sg in enumerate(sub_graphs):
    ggroup = sg.number_of_nodes()
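The snippet above targets NetworkX 1.x / Python 2; connected_component_subgraphs was removed in NetworkX 2.4. The modern idiom builds subgraphs from nx.connected_components instead, sketched here on a toy graph:

import networkx as nx

G = nx.Graph([(1, 2), (2, 3), (15, 16)])
for i, nodes in enumerate(nx.connected_components(G)):
    sg = G.subgraph(nodes)
    print("subgraph {} has {} nodes".format(i, sg.number_of_nodes()))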
Code example #55
    def entityfct(self, AnnotationData, **kwargs):
        """
		The entityfct method for the entity functional classification tool. This
		method allows the partitioning of an entity (e.g. gene or protein) set
		into a set of meaningful sub-class patterns using their semantic
		similarity scores, computed from entity-associated concepts and derived
		from a selected semantic similarity model.
		
		Arguments:
		
			AnnotationData (dict): A dictionary with entity as key and set of concepts
			as value.
			 
			**kwargs can be used to set different parameters needed for processing the
			classification, including measure, mclust and nclust:
			
			measure (str or tuple): The entity semantic similarity measure to be used. 
			Refer to the Supplementary for more details on symbols used for different 
			measures.
			mclust (int 1, 2, 3): Classification model under consideration; this
			method implements three different models (mclust):
		  		- hierarchical clustering (mclust = 1)
		  		- graph spectral clustering or kmeans (mclust = 2)
		  		- community detection model by Thomas Aynaud, 2009 (mclust = 3)
		  	nclust (int): Number of clusters (nclust) applies only for the kmeans model
		  	and it is set to 0 by default. In this case, if nclust is less than 2 then
			the community detection model is applied instead of kmeans!
			
			score (float > 0.0): The threshold score providing the semantic similarity 
			degree at which entities are considered to be semantically close or similar in 
			the ontology structure and it is set to 0.3 by default.
			
			stream (int 0 or 1): An Enum parameter taking values of 0 or 1. It is set to 1
			to output results on the screen, to 0 to output results in a file.
			
			Other parameters which can be required depending on the entity semantic simila-
			rity measure used.

		Usage:
		------
			>>> entityfct(AnnotationData, measure=('bma', 'nunivers','unuversal'), score=0.3, mclust=1, nclust=0)

		Examples:
		---------
		    >>> background = {'Q5H9L2':['GO:0006355','GO:0006351'], 'P03891':['GO:0022904','GO:0044281','GO:0044237','GO:0006120'], 'Prot1':['GO:0006355', 'GO:0022904', 'GO:0044281'], 'Prot2':['GO:0044237', 'GO:0006120']}
			>>> entityfct(background, measure=('bma', 'nunivers','unuversal'), score=0.0)
		"""
        # Clustering process starts here
        measure = kwargs['measure'] if 'measure' in kwargs else None
        if 'mclust' not in kwargs: kwargs['mclust'] = 1  # hierarchical-based
        if 'stream' not in kwargs: kwargs['stream'] = 1
        if 'nclust' not in kwargs: kwargs['nclust'] = 0

        self.entitySim(AnnotationData, measures=measure, **kwargs)
        g = nx.Graph()
        agree = kwargs['score'] if 'score' in kwargs else 0.3
        data = self.fouts[self.measures[-1]]
        for p in data:  # Constructing graph goes here!
            if agree == 0.0 and data[p] > 0.0:
                g.add_edge(p[0], p[1], weight=1.0 - data[p])
            elif 0 < agree < 1.0 and data[p] >= agree:
                g.add_edge(p[0], p[1], weight=1.0 - data[p])
            elif agree == 1.0 and data[p] == 1.0:
                g.add_edge(p[0], p[1], weight=1.0)
        models = [
            'Hierarchical', 'Graph spectral based (kmeans)', 'Model-based'
        ]
        now = time.time()
        if g:
            # Outputting different results
            print("Entity classification based on functional similarity")
            print(
                "# The number of entities and entity pairs detected are %d and %d, respectively."
                % (len(g), g.size()))
            print(
                "The distance is based on functional similarity measure  : %s"
                % self.comments[self.measures[0]])
            print(
                "The clustering model used is                            : %s %s"
                % (models[kwargs['mclust'] - 1], 'approach'))
            if kwargs['stream']:
                print(
                    "\nDifferent clusters are displayed in the table below or in the following figure.\nIf possible, use full screen mode for more convenient visualization:"
                )
            else:
                outputfile = 'ConceptSSFile%d' % (random.randint(0, 100000), )
                if kwargs['mclust']: outputfile += '.png'
                else: outputfile += '.txt'
                print(
                    "Different clusters can be found in the file       : [%s]"
                    % (outputfile, ))
            classes = {}
            if kwargs['mclust'] == 3:
                partition = best_partition(g)
                j = 0
                for i in set(partition.values()):
                    classes[j] = [
                        nodes for nodes in partition.keys()
                        if partition[nodes] == i
                    ]
                    j += 1
            elif kwargs['mclust'] == 1:  # Hierarchical approach
                Index = g.nodes()
                n = len(Index)
                distances = zeros((n, n))
                path_length = nx.all_pairs_dijkstra_path_length(g)
                for u, p in path_length.items():
                    for v, d in p.items():
                        distances[Index.index(u)][Index.index(v)] = d
                        distances[Index.index(v)][Index.index(u)] = d
                sd = distance.squareform(distances)
                hier = hierarchy.average(sd)
                fig = plt.figure()
                plt.clf()
                hierarchy.dendrogram(hier,
                                     orientation='right',
                                     labels=Index[:])
                plt.grid()
                if kwargs['stream']: plt.show()
                else: plt.savefig(outputfile, format="png")
            elif kwargs['mclust'] == 2:  # Graph spectral based (kmeans)
                d = kwargs['nclust']
                classes = {}  # Number of presumed clusters
                if d < 2:
                    partition = best_partition(g)
                    j = 0
                    for i in set(partition.values()):
                        classes[j] = [
                            nodes for nodes in partition.keys()
                            if partition[nodes] == i
                        ]
                        j += 1
                else:
                    W = nx.to_numpy_array(g)  # dense adjacency matrix
                    D = diag(W.sum(axis=1))  # degree matrix
                    L = D - W  # unnormalized graph Laplacian
                    S, V, D = svd(L)
                    N = list(g.nodes())
                    test = True
                    while test:
                        # Retry kmeans2 with fresh random initializations until
                        # every resulting class induces a connected subgraph.
                        res, idx = kmeans2(S[:, -d + 1:], d, minit='random')
                        classes = {}
                        for i in range(len(N)):
                            if idx[i] in classes: classes[idx[i]].append(N[i])
                            else: classes[idx[i]] = [N[i]]

                        k = 0  # Check that nodes in each identified class are connected
                        for iclass in classes.values():
                            if not nx.is_connected(nx.subgraph(g, iclass)):
                                k = 1
                                break
                        if not k: test = False
            if classes:
                outs = []
                for i in sorted(classes.keys()):
                    st = str(classes[i])[1:-1]
                    outs.append(('%d' % (i + 1, ), '%d' % (len(classes[i]), ),
                                 st[:78]))
                    # Wrap long identifier lists at 78 characters per row.
                    for j in range(1, int(ceil(len(st) / 78.0))):
                        outs.append(('', '', st[j * 78:(j + 1) * 78 + 1]))
                    outs.append(('', '', '\n'))
                headers = ['# Cluster', '# of Proteins', 'Protein identifiers']
                if kwargs['stream']:  # Print on the screen
                    print('%s' % tabs(outs[:-1],
                                      headers,
                                      tablefmt='grid',
                                      floatfmt=".5f",
                                      stralign="center"))
                else:
                    try:
                        fp = open(outputfile, 'w')
                        fp.write(
                            "# Clustering proteins based on functional similarity using [%s] approach\n"
                            % (models[kwargs['mclust'] - 1], ))
                        fp.write(
                            "# The number of possible entities and entity pairs detected are %d and %d, respectively.\n"
                            % (len(g), g.size()))
                        fp.write(
                            "# The distance is based on [%s] functional similarity measure\n\n"
                            % (appnames[kwargs['measure']], ))
                        fp.write('%s' % tabs(outs[:-1],
                                             headers,
                                             tablefmt='plain',
                                             floatfmt=".5f",
                                             stralign="left"))
                        fp.close()
                    except IOError:
                        print(
                            "File cannot be opened for writing. Please check the write permission and try again ..."
                        )
                        sys.exit(8)
        else:
            print(
                'The %s clustering approach using the distance inferred from the\n %s functional similarity measure has failed. Please check your input\n list and the options you selected, and try again!'
                % (models[kwargs['mclust'] - 1], appnames[kwargs['measure']]))
            sys.exit(9)
        print("\nProcessing accomplished on %s" %
              str(time.asctime(time.localtime())))
        print("Total time elapsed is approximately %.2f %s" %
              (time.time() - now, 'seconds'))
        print(
            "\n*****************************************************************\n"
        )
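The graph-spectral branch above is the densest part of the snippet, so here is a minimal standalone sketch of the same idea, assuming only networkx, numpy, and scipy: embed the nodes with singular vectors of the unnormalized Laplacian, cluster them with kmeans2, and accept a partition only once every cluster induces a connected subgraph. All names below are illustrative and not taken from the source.

import networkx as nx
import numpy as np
from numpy.linalg import svd
from scipy.cluster.vq import kmeans2


def spectral_connected_clusters(g, d=2, max_tries=50):
    nodes = list(g.nodes())
    W = nx.to_numpy_array(g)        # dense adjacency matrix
    L = np.diag(W.sum(axis=1)) - W  # unnormalized Laplacian
    S, _, _ = svd(L)                # spectral embedding of the nodes
    for _ in range(max_tries):      # retry fresh random kmeans initializations
        _, idx = kmeans2(S[:, -d:], d, minit='random')
        classes = {}
        for node, label in zip(nodes, idx):
            classes.setdefault(label, []).append(node)
        # Accept the partition only if every cluster is internally connected
        if all(nx.is_connected(g.subgraph(c)) for c in classes.values()):
            return list(classes.values())
    return None  # no connected partition found within max_tries


print(spectral_connected_clusters(nx.karate_club_graph(), d=2))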
Code example #56
def Simulator2(params):
    # Simulation parameters
    # srv_num: Number of servers
    # cache_sz: Cache size of each server (expressed in number of files)
    # file_num: Total number of files in the system

    srv_num, cache_sz, file_num, graph_type = params

    if graph_type == 'RGG':  # if the graph is random geometric graph
        rgg_radius = sqrt(5 / 4 * log(srv_num)) / sqrt(srv_num)
        # Generate a random geometric graph.
        #print('------------------------------')
        print('Start generating a random geometric graph with {} nodes...'.
              format(srv_num))
        conctd = False
        while not conctd:
            G = nx.random_geometric_graph(srv_num, rgg_radius)
            conctd = nx.is_connected(G)

        print(
            'Successfully generated a connected random geometric graph with {} nodes...'
            .format(srv_num))
    elif graph_type == 'Lattice':
        print(
            'Start generating a square lattice graph with {} nodes...'.format(
                srv_num))
        G = Gen2DLattice(srv_num)
        print('Successfully generated a square lattice graph with {} nodes...'.
              format(srv_num))
    else:
        print("Error: the graph type is not known!")
        sys.exit()
    # Draw the graph
    #nx.draw(G)
    #plt.show()

    # Find all pairwise shortest-path lengths in G. shortest_path_length
    # returns an iterator of (source, dict) pairs in networkx >= 2.0, so
    # materialize it as a dict for the random access below.
    all_sh_path_len_G = dict(nx.shortest_path_length(G))

    #    all_sh_path_G = nx.shortest_path(G)
    #    all_sh_path_len_G = {}
    #    for src_nd in all_sh_path_G:
    #        in_dict = {}
    #        for dst_nd in all_sh_path_G[src_nd]:
    #            in_dict.update({dst_nd : len(all_sh_path_G[src_nd][dst_nd])-1})
    #        all_sh_path_len_G[src_nd] = in_dict
    #    print(all_sh_path_G)
    #    print(all_sh_path_len_G)
    #quit()

    # Create 'srv_num' servers from the class server
    srvs = [Server(i) for i in range(srv_num)]

    # List of sets of servers containing each file
    file_sets = [[] for i in range(file_num)]

    # Randomly place cache_sz files in each server
    print('Randomly placing {} files in each server'.format(cache_sz))
    # First put one copy of each file in a random subset of servers (assuming file_num <= srv_num)
    lst = np.random.permutation(srv_num)[0:file_num]
    for i, s in enumerate(lst):
        file_sets[i].append(s)
        srvs[s].set_files_list([i])
    # Then fill the remaining empty cache slots
    for s in range(srv_num):
        srvlst = srvs[s].get_files_list()
        extra = np.random.permutation(file_num)[0:cache_sz - len(srvlst)]
        # list.extend returns None, so extend first and then pass the list itself
        srvlst.extend(extra)
        srvs[s].set_files_list(srvlst)
        for j in range(len(extra)):
            if s not in file_sets[extra[j]]:
                file_sets[extra[j]].append(s)

#    chk = False
#    placement_tries = 0
#    while chk == False:
#        placement_tries += 1
#        print('File placement: try {}'.format(placement_tries))
#        file_sets = [[] for i in range(file_num)] # list of sets of servers containing each file
#        for i in range(srv_num):
#            #print(i)
#            list = np.random.permutation(file_num)[0:cache_sz]
#            srvs[i].set_files_list(list)
#            for j in range(len(list)):
#                if i not in file_sets[list[j]]:
#                    file_sets[list[j]].append(i)
#        chk = True
#        for l in file_sets:
#            if len(l) == 0:
#                chk = False
#                break

    print(
        'Done with randomly placing {} files in each server'.format(cache_sz))

    #    print(file_sets)

    # Main loop of the simulator: throw srv_num requests (balls) at the servers.
    # Each request picks a random incoming server and a random file.
    print('The simulator 2 is starting...')
    total_cost = 0  # Measured in number of hops.
    for i in range(srv_num):
        #print(i)
        incoming_srv = np.random.randint(srv_num)  # Random incoming server
        rqstd_file = np.random.randint(file_num)  # Random requested file
        if incoming_srv in file_sets[rqstd_file]:
            srv0 = incoming_srv
        else:
            # Find the nearest server that has the requested file
            dmin = 2 * srv_num  # some large number!
            for nd in file_sets[rqstd_file]:
                #d = nx.shortest_path_length(G, source=incoming_srv, target=nd)
                d = all_sh_path_len_G[incoming_srv][nd]
                if d < dmin:
                    dmin = d
                    srv0 = nd


#            srv0 = file_sets[rqstd_file][np.random.randint(len(file_sets[rqstd_file])+1) - 1]

        srv1 = srv0
        if len(file_sets[rqstd_file]) > 1:
            while srv1 == srv0:
                srv1 = file_sets[rqstd_file][np.random.randint(
                    len(file_sets[rqstd_file]))]

        # Implement power of two choices
        load0 = srvs[srv0].get_load()
        load1 = srvs[srv1].get_load()
        if load0 > load1:
            srvs[srv1].add_load()
            #total_cost += nx.shortest_path_length(G, source=incoming_srv, target=srv1)
            total_cost += all_sh_path_len_G[incoming_srv][srv1]
        elif load0 < load1:
            srvs[srv0].add_load()
            if srv0 != incoming_srv:
                #total_cost += nx.shortest_path_length(G, source=incoming_srv, target=srv0)
                total_cost += all_sh_path_len_G[incoming_srv][srv0]
        elif np.random.randint(2) == 0:
            srvs[srv0].add_load()
            if srv0 != incoming_srv:
                #total_cost += nx.shortest_path_length(G, source=incoming_srv, target=srv0)
                total_cost += all_sh_path_len_G[incoming_srv][srv0]
        else:
            srvs[srv1].add_load()
            #total_cost += nx.shortest_path_length(G, source=incoming_srv, target=srv1)
            total_cost += all_sh_path_len_G[incoming_srv][srv1]

    print('The simulator 2 is done.')
    #    print('------------------------------')

    # At the end of simulation, find maximum load, etc.
    loads = [srvs[i].get_load() for i in range(srv_num)]
    maxload = max(loads)
    avgcost = total_cost / srv_num

    #    print(loads)
    #    print(maxload)
    #    print(total_cost)

    return {'loads': loads, 'maxload': maxload, 'avgcost': avgcost}
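A usage sketch (my addition, not part of the source): Simulator2 unpacks its single argument into (srv_num, cache_sz, file_num, graph_type); the Server class and Gen2DLattice are assumed to be defined earlier in the same module.

params = (100, 4, 50, 'RGG')  # 100 servers, 4-file caches, 50 files, RGG topology
stats = Simulator2(params)
print('max load:', stats['maxload'], 'avg cost (hops):', stats['avgcost'])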
Code example #57
import networkx as nx

from core.net import Net
from core.sfc import SFC
from core.vnf import VNF
import random

# create a new substrate network
substrate_network = Net()

number_of_nodes = 50
probability = 0.2
topology = nx.erdos_renyi_graph(number_of_nodes,
                                probability,
                                seed=None,
                                directed=False)
network_create_counter = 0
while not nx.is_connected(topology):
    if network_create_counter >= 10000:
        break
    network_create_counter += 1
    topology = nx.erdos_renyi_graph(number_of_nodes,
                                    probability,
                                    seed=None,
                                    directed=False)

for edge in topology.edges():
    bw = random.randint(50, 100)
    substrate_network.init_bandwidth_capacity(edge[0], edge[1], bw)
    lt = random.uniform(1, 5)
    substrate_network.init_link_latency(edge[0], edge[1], lt)

for node in topology.nodes():
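    # The source snippet is truncated here, mid-loop. By analogy with the edge
    # initialization above, the body presumably initializes per-node resources,
    # e.g. a hypothetical substrate_network.init_cpu_capacity(node, cpu); that
    # method name is a guess and is not confirmed by the source.
    pass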
Code example #58
def motifs(k) -> [nx.Graph]:
    """
    Return the list of motifs: all connected, pairwise non-isomorphic
    graphs on k vertices.
    :param k: motif size (2 <= k <= 5 is supported)
    :return: list of nx.Graph motifs
    """
    if k == 2:
        # 1
        G = nx.Graph()
        G.add_edges_from([(0, 1)])
        return [G]
    elif k == 3:
        motifs = []
        # 1
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2)])
        motifs.append(G)
        # 2
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (0, 2)])
        motifs.append(G)
        return motifs
    elif k == 4:
        motifs = []
        # 1
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3)])
        motifs.append(G)
        # 2
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (1, 3)])
        motifs.append(G)
        # 3
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 3)])
        motifs.append(G)
        # 4
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (0, 3)])
        motifs.append(G)
        # 5
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (0, 3), (0, 2)])
        motifs.append(G)
        # 6
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (0, 3), (0, 2), (1, 3)])
        motifs.append(G)
        return motifs
    elif k == 5:
        motifs = []
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (1, 4), (2, 3)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (2, 3), (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 2), (1, 2), (2, 3), (2, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (0, 4), (2, 3), (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 4), (1, 2), (1, 3), (1, 4), (2, 3)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 2), (1, 2), (1, 3), (2, 4), (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (1, 2)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (1, 2), (2, 4), (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (0, 4), (1, 3), (2, 3), (2, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (0, 4), (1, 3), (1, 4), (2, 3)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (2, 3)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (2, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 4), (1, 2), (1, 3), (2, 4), (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 4), (1, 2), (1, 3), (1, 4), (2, 3), (2, 4),
                          (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (1, 3), (2, 3),
                          (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (0, 4), (1, 3), (1, 4), (2, 3),
                          (2, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (2, 4),
                          (3, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3),
                          (1, 4), (2, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4),
                          (2, 3), (2, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3),
                          (1, 4), (2, 3), (2, 4)])
        motifs.append(G)
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3),
                          (1, 4), (2, 3), (2, 4), (3, 4)])
        motifs.append(G)
        return motifs

    else:
        raise NotImplementedError("too big")
        # There are 112 connected motifs for k = 6 and 853 for k = 7.
        # To enumerate motifs for a larger k, run the (currently unreachable)
        # code below:
        num_motifs = 112
        motifs = []
        start = time.time()
        while len(motifs) < num_motifs:
            p_min = (k - 1) / (k**2)
            G = nx.erdos_renyi_graph(n=k,
                                     p=random.random() * (1 - p_min) + p_min)
            if nx.is_connected(G):
                is_new = True
                for F in motifs:
                    if nx.is_isomorphic(G, F):
                        is_new = False
                        break
                if is_new:
                    motifs.append(G)
                    print('len:', len(motifs))
        print(u_time.time2str(time.time() - start))

        # sort
        motifs = sorted(motifs, key=lambda G: len(nx.edges(G)))

        for F in motifs:
            print('G = nx.Graph()')
            print('G.add_edges_from(', F.edges, ')')
            print('motifs.append(G)')

        exit()

        return motifs
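A quick sanity check (my addition): the lists above should contain exactly the connected, pairwise non-isomorphic graphs on k vertices, and there are 1, 2, 6, and 21 of them for k = 2, 3, 4, and 5, respectively.

import networkx as nx

for k, expected in [(2, 1), (3, 2), (4, 6), (5, 21)]:
    ms = motifs(k)
    assert len(ms) == expected
    assert all(nx.is_connected(G) for G in ms)
    assert not any(nx.is_isomorphic(ms[a], ms[b])
                   for a in range(len(ms)) for b in range(a + 1, len(ms)))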
Code example #59
def generate_random_graph(N, seed, degree_seq, kmax=0):
    """Generate a connected scale-free network with a given degree sequence
    using the configuration model.

    :param N: number of nodes
    :param seed: pair of ints; seed[1] seeds the random state
    :param degree_seq: target degree sequence of length N
    :param kmax: degree cutoff; kmax == 1 or kmax == N - 2 stops the repair loop after one pass
    :returns: tuple (A, A_interaction, index_i, index_j, cum_index)
    """
    i = 0
    # Re-draw the last degree until the sequence sum is even; k (the degree
    # support) and pk (its probability mass) are assumed to be module-level.
    while np.sum(degree_seq) % 2:
        i += 1
        degree_seq[-1] = np.random.RandomState(seed=seed[1] + N + i).choice(
            k, size=1, p=pk)

    degree_original = degree_seq.copy()

    G = nx.empty_graph(N)
    # Generate the scale-free network using the configuration model
    no_add = 0
    degree_change = 1
    j = 0
    while np.sum(degree_seq) and no_add < 10:

        stublist = nx.generators.degree_seq._to_stublist(degree_seq)
        M = len(stublist) // 2  # the number of edges

        random_state = np.random.RandomState(seed[1] + j)
        random_state.shuffle(stublist)
        out_stublist, in_stublist = stublist[:M], stublist[M:]
        if degree_change == 0:
            no_add += 1
        else:
            no_add = 0
        G.add_edges_from(zip(out_stublist, in_stublist))

        G = nx.Graph(G)  # remove parallel edges
        # remove self-loops (list() avoids mutating the edge view while iterating)
        G.remove_edges_from(list(nx.selfloop_edges(G)))
        if not nx.is_connected(G):
            # keep only the largest connected component
            G = G.subgraph(max(nx.connected_components(G), key=len)).copy()
        degree_alive = np.array(
            [G.degree[i] if i in G.nodes() else 0 for i in range(N)])
        degree_former = np.sum(degree_seq)
        degree_seq = degree_original - degree_alive
        degree_now = np.sum(degree_seq)
        degree_change = degree_now - degree_former
        j += 1
        if kmax == 1 or kmax == N - 2:
            break
    A = nx.to_numpy_array(G)
    A_index = np.where(A > 0)
    A_interaction = A[A_index]
    index_i = A_index[0]
    index_j = A_index[1]
    degree = np.sum(A > 0, 1)
    cum_index = np.hstack((0, np.cumsum(degree)))

    return A, A_interaction, index_i, index_j, cum_index
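A usage sketch (my addition, under stated assumptions): the function relies on module-level arrays k and pk when it re-draws the last degree, so a hypothetical power-law setup might look like this.

import numpy as np

N = 1000
k = np.arange(3, 50)                # degree support (assumed module-level)
pk = k ** -3.0 / np.sum(k ** -3.0)  # power-law degree distribution
degree_seq = np.random.RandomState(0).choice(k, size=N, p=pk)
A, A_interaction, index_i, index_j, cum_index = generate_random_graph(
    N, seed=(0, 1), degree_seq=degree_seq)
print('surviving edges:', int(np.sum(A) // 2))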
Code example #60
def main():
    avg_betweenness = {}
    graph_diameter = {}
    graph_radius = {}
    graph_avg_distance = {}
    avg_closeness = {}
    avg_eccentricity = {}
    avg_degree = {} 
    graph_density = {}
    avg_eigen = {}
    avg_graph_clustering={}
    assortivity_coefficient={}
    no_bridges = {}
    number_of_nodes = {}
    eigen_hubs = {}
    
    
    for i in range(1, len(sys.argv) - 1):
        # Iterate through the input layer files
        filename = sys.argv[i]
        dataFile = filename
        # Set "path" to the directory in which these layer files reside
        path = r"S:\course work\Adv topics in databases\gitrepo\CSE6331_project\Final_Submission\Data\Layers"

        # The last argument is the name of the output file
        outputfile = sys.argv[-1]
        
        Vertices, Edges, number_of_verts = sanityCheck(filename)
        # Generate a graph from the parsed vertices and weighted edges
        G = nx.Graph()
        G.add_nodes_from(Vertices)
        for edge in Edges:
            G.add_edge(edge[0], edge[1], weight=edge[2])

        # Calculate the average betweenness centrality of the current layer
        c_count = 0
        _sum = 0
        cm = CentralityMeasure(edge_input=path + "\\" + dataFile, centrality='Between')
        C_values, time = cm.computeCentrality()
        for key in C_values:
            c_count += 1
            _sum += C_values[key]
        average_betweeness = _sum / c_count

        # Calculate the average closeness centrality of the current layer
        _sum = 0
        c_count = 0
        cm = CentralityMeasure(edge_input=path + "\\" + dataFile, centrality='close')
        C_values, time = cm.computeCentrality()
        for key in C_values:
            c_count += 1
            _sum += C_values[key]
        average_closeness = _sum / c_count
        
        
        # Calculate the average degree centrality of the current layer
        _sum = 0
        c_count = 0
        cm = CentralityMeasure(edge_input=path + "\\" + dataFile, centrality='degree')
        C_values, time = cm.computeCentrality()
        for key in C_values:
            c_count += 1
            _sum += C_values[key]
        average_degree = _sum / c_count
        
        # Calculate the average eigenvector centrality of the current layer
        _sum = 0
        c_count = 0
        cm = CentralityMeasure(edge_input=path + "\\" + dataFile, centrality='eigen')
        C_values, time = cm.computeCentrality()
        eigen_hubs_list, average = cm.get_hubs(C_values)
        for key in C_values:
            c_count += 1
            _sum += C_values[key]
        average_eigen = _sum / c_count
        
        
        # Read the layer name from the first line of the data file
        with open(path + "\\" + dataFile, "r") as f:
            Lines = f.readlines()
        layer_name = Lines[0].strip()

        # Calculate the graph diameter and average distance; if the graph is
        # not connected, take the maximum over all connected components.
        if nx.is_connected(G):
            graph_diameter[layer_name] = nx.diameter(G)
            graph_avg_distance[layer_name] = nx.average_shortest_path_length(G)
        else:
            diams = []
            avg_short_path = []
            components = nx.connected_components(G)
            for c in components:
                diams.append(get_subgraph_diameter(c, Edges))
                avg_short_path.append(get_subgraph_asp(c, Edges))
            graph_diameter[layer_name] = max(diams)
            graph_avg_distance[layer_name] = max(avg_short_path)
            
        
        
        avg_betweenness[layer_name] = average_betweeness              
        avg_closeness[layer_name] = average_closeness        
        avg_degree[layer_name] = average_degree
        graph_density[layer_name] = nx.density(G)
        avg_eigen[layer_name] = average_eigen
        avg_graph_clustering[layer_name] = nx.average_clustering(G)
        assortivity_coefficient[layer_name] = nx.degree_assortativity_coefficient(G)
        number_of_nodes[layer_name] = len(Vertices)
        eigen_hubs[layer_name] = len(eigen_hubs_list)

    #sorting the dictionaries containing centrality values             
    sorted_eigen = dict(sorted(avg_eigen.items(), key=operator.itemgetter(1), reverse=True)) 
    sorted_betweeness = dict(sorted(avg_betweenness.items(), key=operator.itemgetter(1), reverse=True))
    sorted_closeness = dict(sorted(avg_closeness.items(), key=operator.itemgetter(1), reverse=True))
    sorted_degree = dict(sorted(avg_degree.items(), key=operator.itemgetter(1), reverse=True)) 

    # Write the calculated graph characteristics for betweenness centrality
    betweenness_list = []
    for key in sorted_betweeness:
        betweenness_list.append([
            key,
            number_of_nodes[key],
            avg_betweenness[key],
            graph_diameter[key],
            avg_graph_clustering[key],
        ])
    
    betweenness_df = pd.DataFrame(
        betweenness_list,
        columns=["Layer", "No. of nodes", "avg_betweenness", "graph diameter", "clustering coefficient"])
    # Set output_path to the path of the output directory
    output_path = "S:\\course work\\Adv topics in databases\\gitrepo\\CSE6331_project\\Final_Submission\\Data\\Layers\\SingleLayer-TestData\\Outputs\\"
    print("Writing all outputs to " + output_path + outputfile)

    betweenness_df.to_csv(output_path + outputfile + "_between.csv")

    # Write the calculated graph characteristics for closeness centrality
    closeness_list = []
    for key in sorted_closeness:
        closeness_list.append([
            key,
            number_of_nodes[key],
            avg_closeness[key],
            graph_avg_distance[key],
            graph_diameter[key],
            avg_graph_clustering[key],
        ])

    closeness_df = pd.DataFrame(
        closeness_list,
        columns=["Layer", "No. of nodes", "avg_closeness", "graph_avg_distance", "graph diameter", "clustering coefficient"])
    closeness_df.to_csv(output_path + outputfile + "_close.csv")

    # Write the calculated graph characteristics for degree centrality
    degree_list = []
    for key in sorted_degree:
        degree_list.append([
            key,
            number_of_nodes[key],
            avg_degree[key],
            graph_density[key],
            assortivity_coefficient[key],
            graph_diameter[key],
            avg_graph_clustering[key],
        ])

    degree_df = pd.DataFrame(
        degree_list,
        columns=["Layer", "No. of nodes", "avg_degree", "density", "assortivity_coefficient", "graph diameter", "clustering coefficient"])
    degree_df.to_csv(output_path + outputfile + "_degree.csv")

    # Write the calculated graph characteristics for eigenvector centrality
    eigen_list = []
    for key in sorted_eigen:
        eigen_list.append([
            key,
            number_of_nodes[key],
            avg_eigen[key],
            assortivity_coefficient[key],
            eigen_hubs[key],
            avg_graph_clustering[key],
        ])

    eigen_df = pd.DataFrame(
        eigen_list,
        columns=["Layer", "No. of nodes", "avg_eigen", "assortivity_coefficient", "number of hubs", "clustering coefficient"])
    eigen_df.to_csv(output_path + outputfile + "_eigen.csv")
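An invocation sketch (my addition; the script name is hypothetical, and CentralityMeasure, sanityCheck, get_subgraph_diameter, and get_subgraph_asp are assumed to be defined elsewhere in the project):

# python layer_metrics.py layer1.txt layer2.txt layer3.txt all_layers
# -> writes all_layers_between.csv, all_layers_close.csv,
#    all_layers_degree.csv, and all_layers_eigen.csv into output_path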