Example #1
def create_graph(num_clusters,num_nodes,pin,pout,num_graphs,pnew):
    G1 = nx.planted_partition_graph(num_clusters, num_nodes, pin, pout, seed=42)
    A = [np.array( nx.adjacency_matrix(G1).todense()) ]

    for i in range(num_graphs):
        G = nx.planted_partition_graph(num_clusters, num_nodes, pnew, pnew)
        A.append( A[i] + np.array( nx.adjacency_matrix(G).todense()) )
    return A
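A minimal usage sketch for create_graph above (hypothetical parameter values, not from the original project; assumes networkx as nx and numpy as np are imported and the function definition above is in scope):

import networkx as nx
import numpy as np

# Hypothetical parameters: 4 clusters of 25 nodes, dense inside, sparse between,
# plus 3 noise layers with edge probability 0.01 accumulated on top of the backbone.
A_list = create_graph(num_clusters=4, num_nodes=25, pin=0.6, pout=0.05,
                      num_graphs=3, pnew=0.01)
print(len(A_list), A_list[0].shape)  # 4 accumulated adjacency matrices, each (100, 100)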
Example #2
def balanced_stochastic_blockmodel(communities=2, groupsize=3, p_in=1.0, p_out=0.0, seed=None):
    """gives dense adjacency matrix representaiton of randomly generated SBM with balanced community size"""

    G = nx.planted_partition_graph(l=communities, k=groupsize, p_in=p_in, p_out =p_out, seed=seed)
    A = nx.adjacency_matrix(G).todense()
    
    return A
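A short usage sketch (hypothetical values; assumes networkx is imported as nx and the function above is in scope): two communities of three nodes each, fully connected inside and disconnected between, give a block-diagonal 6x6 dense adjacency matrix.

A = balanced_stochastic_blockmodel(communities=2, groupsize=3, p_in=1.0, p_out=0.0, seed=0)
print(A.shape)  # (6, 6)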
Example #3
def generate_Fan(params={
    'w_in': 1.5,
    'l': 4,
    'g': 32,
    'p_in': 0.125,
    'p_out': 0.125
},
                 seed=None):

    if seed is not None:
        params['seed'] = seed

    G = nx.planted_partition_graph(params['l'], params['g'], params['p_in'],
                                   params['p_out'], params.get('seed'))

    for i, j in G.edges:
        if G.nodes[i]['block'] == G.nodes[j]['block']:
            G[i][j]['weight'] = params['w_in']
        else:
            G[i][j]['weight'] = 2 - params['w_in']

    labels_gt = []
    for i in range(params['l']):
        labels_gt = np.append(labels_gt, i * np.ones(params['g']))

    for n in G.nodes:
        G.nodes[n]['block'] = labels_gt[n - 1]

    G = nx.convert_node_labels_to_integers(G, label_attribute='old_label')

    return G, None
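A hedged usage sketch for generate_Fan (hypothetical seed; assumes networkx as nx and numpy as np are imported and the function above is in scope). Intra-community edges carry weight w_in and inter-community edges 2 - w_in, so the default parameters typically produce exactly two edge weights.

G, _ = generate_Fan(seed=1)
weights = {d['weight'] for _, _, d in G.edges(data=True)}
print(weights)  # typically {1.5, 0.5} with the default w_in = 1.5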
Example #4
File: gn.py  Project: liupenggl/hybrid
def main(argv):
    
    # graph_fn="./data/7.txt"
    # G = nx.Graph()  #let's create the graph first
    # buildG(G, graph_fn)
    k=5
    G=nx.planted_partition_graph(k,10,0.8,0.02)
    # G.clear()
    bg(G)
    from test import da
    da(G)

    print(G.nodes())
    print(G.number_of_nodes())
    g=G.copy()
    n = G.number_of_nodes()    #|V|
    A = nx.adjacency_matrix(G)    #adjacency matrix

    m_ = 0.0    #the weighted version for number of edges
    for i in range(0,n):
        for j in range(0,n):
            m_ += A[i,j]
    m_ = m_/2.0
    print "m: %f" % m_

    #calculate the weighted degree for each node
    Orig_deg = {}
    Orig_deg = UpdateDeg(A, G.nodes())

    #run Newman alg
    res=runGirvanNewman(G, Orig_deg, m_)
    print(res)
    shs(g,res)
Example #5
def generatePlantedPartition(num, size, p_in, p_out, fmt="graph"):
    g = nx.planted_partition_graph(num, size, p_in, p_out)
    if fmt == "matrix":
        return nx.to_numpy_matrix(g)
    if fmt == "sparse_matrix":
        return nx.to_scipy_sparse_matrix(g)
    if fmt == "graph":
        return g
Example #6
    def _buildPpiMtx(self) -> Type[sp.csr_matrix]:
        plantedGraph = nx.planted_partition_graph(self.numProteins // 10,
                                                  10,
                                                  0.2,
                                                  0.05,
                                                  seed=42)

        return RelationCsrMatrix(nx.adjacency_matrix(plantedGraph))
Example #7
def generate_pp_graphs(n, v, p_1, multiplier):
    p_2 = multiplier * p_1
    q_2 = 2. * p_1 - p_2 - 2. * (p_1 - p_2) / n
    graphs = []
    for i in range(args.n_graphs):
        G = nx.planted_partition_graph(2, n // 2, p_2, q_2)
        graphs.append(G)

    return graphs
Example #8
def init_graph(num_clust, nodes_clust, p_intra, p_inter):
        G = nx.planted_partition_graph( num_clust, nodes_clust, p_intra, p_inter, seed=42)
        #G = nx.grid_graph(dim=[int(math.sqrt(nodes_clust)), int(math.sqrt(nodes_clust))])
        A = np.array( nx.adjacency_matrix(G).todense() ) 

        ### Embed the initial graph in a matrix that is much larger
        N = A.shape[0]
        Amat = np.zeros([6*N,6*N]); Amat[0:N,0:N] = A
        clust_memb = np.zeros([6*N])
        for j in range(num_clust):
            clust_memb[j*nodes_clust:(j+1)*nodes_clust] = j+1

        return Amat, clust_memb
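A usage sketch for init_graph above (hypothetical parameter values; assumes numpy as np and networkx as nx are imported and the function above is in scope): the planted-partition adjacency matrix fills the top-left block of a six-times-larger zero matrix, and the membership vector marks the padding nodes with 0.

Amat, clust_memb = init_graph(4, 25, 0.5, 0.05)
print(Amat.shape)             # (600, 600); only the top-left 100x100 block is populated
print(np.unique(clust_memb))  # [0. 1. 2. 3. 4.]; 0 labels the empty padding nodes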
Example #9
def get_graph_by_type(graph_type: str, num_nodes: int, **kwargs) -> nx.Graph:
    if graph_type == "E-R":
        return nx.erdos_renyi_graph(n=num_nodes, **kwargs)
    elif graph_type == "Random Tree":
        return nx.random_tree(n=num_nodes, **kwargs)
    elif graph_type == "r-ary":
        return nx.full_rary_tree(n=num_nodes, **kwargs)
    elif graph_type == "Planted Partition":
        return nx.planted_partition_graph(**kwargs)
    elif graph_type == "Line":
        return nx.path_graph(n=num_nodes, **kwargs)
    elif graph_type == "Barabási–Albert":
        return nx.barabasi_albert_graph(n=num_nodes, **kwargs)
Example #10
def test(graph="barbell", algorithm="o_fl", k=-1, v=False, kmin=3, kmax=5):
    """Runs a quic demo
       graph = 'karate', 'barbell', 'women', 'florentine'
         if not a specific graph it will be used as a seed for a random graph
       
       algorithm as
         'fl' = async fluid detection, requires k
         'o_fl' = optimizing fl, requires kmin and kmax
         'gn' = girvan_newman
       
       k = number of communities to look for if applicable
       v = verbose flag
    """
    #generate demo graph
    G = 0
    if graph == "karate":
        G = nx.karate_club_graph()
    elif graph == "barbell":
        G = nx.barbell_graph(5, 1)
    elif graph == "women":
        G = nx.davis_southern_women_graph()
    elif graph == "florentine":
        G = nx.florentine_families_graph()
    else:
        G = nx.planted_partition_graph(5, 10, .85, .1, seed=graph)
    #switch on algorithm
    bestcom = 0

    if algorithm == "fl":
        if k != -1:
            bestcom = async_fluid(G, k)
        else:
            bestcom = async_fluid(G)
    elif algorithm == "o_fl":  #optimized fl
        bestcom = opt_async_fluid(G, kmin, kmax)
    elif algorithm == "gn":
        bestcom = girvan_newman(G, v)

    #Label the data and export in gephi readable format
    comlabel = 1
    for c in bestcom:
        for n in c:
            G.nodes[n]['community'] = str(comlabel)
        comlabel += 1

    rw.write_file(G, "test.gexf")
Example #11
def grab_data(i, null=True):

    G1 = nx.erdos_renyi_graph(n, p)
    if null:
        G2 = nx.erdos_renyi_graph(n, p)
    else:
        G2 = nx.planted_partition_graph(2, n // 2, pp, qq)
    A1, A2 = [nx.adjacency_matrix(G).todense() for G in [G1, G2]]

    adj_distances = pd.Series(
        [distance(dfun, A1, A2) for dfun in distances],
        index=labels,
        name="Adjacency Distances",
    )

    data = pd.concat([adj_distances], axis=1)

    return data
Example #12
def generate_GN(params={'l': 4, 'g': 32, 'p_in': 0.4, 'p_out': 0.2}, seed=0):

    if seed is not None:
        params['seed'] = seed

    G = nx.planted_partition_graph(params['l'],
                                   params['g'],
                                   params['p_in'],
                                   params['p_out'],
                                   seed=params['seed'])

    labels_gt = []
    for i in range(params['l']):
        labels_gt = np.append(labels_gt, i * np.ones(params['g']))

    for n in G.nodes:
        G.nodes[n]['block'] = labels_gt[n - 1]

    return G, None
Example #13
def generate_GN(params={"l": 4, "g": 32, "p_in": 0.4, "p_out": 0.2}, seed=0):

    if seed is not None:
        params["seed"] = seed

    G = nx.planted_partition_graph(params["l"],
                                   params["g"],
                                   params["p_in"],
                                   params["p_out"],
                                   seed=params["seed"])

    labels_gt = []
    for i in range(params["l"]):
        labels_gt = np.append(labels_gt, i * np.ones(params["g"]))

    for n in G.nodes:
        G.nodes[n]["block"] = labels_gt[n - 1]

    return G, None
Example #14
def plot_top_level():
    num_groups = 15
    per_group = 6
    G = nx.planted_partition_graph(l=num_groups,
                                   k=per_group,
                                   p_in=0.99,
                                   p_out=0.01)
    tree, cost = find_approx(G)
    print(f"Min Cost Us: {cost}")
    print(f"Min Tree:")
    tree.show()

    groups = []

    for child in tree.children(tree.root):
        group = []
        for leaf in tree.leaves():
            if tree.is_ancestor(child.identifier, leaf.identifier):
                group.append(leaf.identifier)
        groups.append(group)

    label_dict = {n: i for i, group in enumerate(groups) for n in group}

    nx.set_node_attributes(G, label_dict, "group")
    plt.figure(figsize=(14, 8))

    # coloring
    groups = set(nx.get_node_attributes(G, "group").values())
    mapping = dict(zip(sorted(groups), count()))
    nodes = G.nodes()
    node_colors = [mapping[G.nodes()[n]["group"]] for n in nodes]
    node_sizes = [14 for n in nodes]
    cmap = plt.cm.get_cmap('rainbow')

    nx.draw(G,
            node_color=node_colors,
            width=0.1,
            cmap=cmap,
            labels={i: i
                    for i, _ in enumerate(G.nodes())})

    print(groups)
    plt.show()
Example #15
def _createSBM(N, Nc, k, kin, output='nx', seed=None):

    l = N // Nc
    kout = k - kin
    pin = kin / (Nc - 1)
    pout = kout / (N - Nc)

    G = nx.planted_partition_graph(l, Nc, pin, pout, seed=seed, directed=False)

    if output == 'edgelist':
        return create_edgelist(G)
    if output == 'ig':
        edgelist = create_edgelist(G)
        H = ig.Graph()
        H.add_vertices(N)
        H.add_edges(edgelist)
        return H

    return G
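The function above converts target mean degrees into the edge probabilities expected by nx.planted_partition_graph; a quick numeric check with hypothetical values:

N, Nc, k, kin = 100, 20, 10, 6  # hypothetical: 100 nodes, blocks of 20, mean degree 10, 6 of it inside the block
kout = k - kin                  # 4 expected neighbours outside the block
pin = kin / (Nc - 1)            # 6/19 ~= 0.316 within-block edge probability
pout = kout / (N - Nc)          # 4/80 = 0.05 between-block edge probability
print(pin, pout)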
Example #16
def main():
    N_nodes = 200
    p_out = .1
    monte_carlo = 100
    results = []

    for N_clusters in clusters_generator():
        results.append([])
        print(N_clusters)
        for p_in in np.linspace(0,1,20):
            AVG = []
            for mc in range( monte_carlo ):

                #Random Initial state
                theta0  = np.random.normal( loc = 0, scale = .01, size = (1,N_nodes) )[0] #Random small phase angles
                dtheta0 = np.random.normal( loc = 0, scale = .01, size = (1,N_nodes) )[0] #Random small frequencies
                Y0 = np.append( theta0, dtheta0 )
                Y0 = np.append( Y0, 1 )

                #Random Initial state
                thetaf  = np.random.normal( loc = 0, scale = .01, size = (1,N_nodes) )[0] #Random small phase angles
                dthetaf = np.random.normal( loc = 0, scale = .01, size = (1,N_nodes) )[0] #Random small frequencies
                Yf = np.append( thetaf, dthetaf )
                Yf = np.append( Yf, 1 )

                G = nx.planted_partition_graph(N_clusters, N_nodes // N_clusters, p_in, p_out)
                adja = np.array( nx.to_numpy_matrix( G ) )
                L = control2.get_Laplacian( adja )
                Psource       = generate_random_power_distribution( N_nodes )
                Pmax          = generate_random_capacity_distribution( N_nodes, adja )
                K             =  Pmax  / float( I * Omega )
                A             = control2.build_transition_matrix( Psource, L, Omega, I, K, alpha, Dt )
                A_powers = [ np.identity( A.shape[0] ), A ]
                for k in range( control_time + 1 )[2:]:
                    A_powers.append( np.dot( A, A_powers[-1] ) )
                storage_level = 100 * np.ones((1,N_nodes))[0]

                drivers = control2.rank_submodular_greedy_lazy( A_powers, adja, range( N_nodes ), control_time, storage_level, max_capacity, r, I, Omega, Y0, Yf, True )
                AVG.append( len(drivers) )
            results[-1].append( AVG  )
			
    with open('./results3.json', 'w') as f:
        json.dump(results,f)
Example #17
    def plotgraph_tikz(self, G=None, Gbus=None, G_cols=None):
        """plotgraph with tikz

        Use tikz to plot the graph. This is work in progress, the idea
        is to use tikz to draw a fancy smp_graph with tikz merging the
        matplotlib plots with the graph itself.

        .. warning:: Work in progress

        TODO:
         - def convert_nxgraph_to_igraph
         - subclass TikzGraphDrawer with custom moves
        """
        from tikz_network import plot
        from tikz_network import TikzGraphDrawer

        import networkx as nx
        import igraph as ig
        # G = nx.fast_gnp_random_graph(11, 0.28)
        # G = nx.fast_gnp_random_graph(11, 0.28)
        g_ = nx.planted_partition_graph(5, 5, 0.9, 0.1, seed=3)
        g = G
        logger.debug("nx.to_edgelist(g) = %s", nx.to_edgelist(g))
        logger.debug("zip(*nx.to_edgelist(g)) = %s", zip(*nx.to_edgelist(g)))
        logger.debug("zip(*zip(*nx.to_edgelist(g))[:2])= %s",
                     zip(*zip(*nx.to_edgelist(g))[:2]))

        logger.debug("nx.to_numpy_matrix(g) = %s", nx.to_numpy_matrix(g))
        logger.debug("nx.to_numpy_matrix(g) > 0 = %s",
                     (nx.to_numpy_matrix(g) > 0))

        # g = g_
        # g1 = ig.Graph(len(g), zip(*zip(*nx.to_edgelist(g))[:2]))
        # g1.get_adjacency()
        # plot(g1, 'nxgraph_tikz.tex')

        # convert via adjacency matrix
        g2 = ig.Graph.Adjacency((nx.to_numpy_matrix(g) > 0).tolist())
        g2.get_adjacency()
        plot(g2, 'nxgraph_tikz.tex')
Example #18
def test_planted_partition_graph():
    G = nx.planted_partition_graph(4, 3, 1, 0)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(list(G.edges())), 12)

    G = nx.planted_partition_graph(4, 3, 0, 1)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(list(G.edges())), 54)

    G = nx.planted_partition_graph(10, 4, .5, .1, seed=42)
    C = G.graph['partition']
    assert_equal(len(C), 10)
    assert_equal(len(G), 40)
    # number of edges is random, so can't be tested for exact value?
    # assert_equal(len(list(G.edges())),108)

    G = nx.planted_partition_graph(4, 3, 1, 0, directed=True)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(list(G.edges())), 24)

    G = nx.planted_partition_graph(4, 3, 0, 1, directed=True)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(list(G.edges())), 108)

    G = nx.planted_partition_graph(10, 4, .5, .1, seed=42, directed=True)
    C = G.graph['partition']
    assert_equal(len(C), 10)
    assert_equal(len(G), 40)
    assert_equal(len(list(G.edges())), 218)

    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 1.1, 0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, -0.1,
                  0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1, 1.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1,
                  -0.1)
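The exact edge counts asserted in the deterministic cases above follow from simple counting; a short check (assumes Python 3.8+ for math.comb):

from math import comb

l, k = 4, 3                     # 4 groups of 3 nodes
intra = l * comb(k, 2)          # p_in=1, p_out=0 -> 12 within-group edges
inter = comb(l * k, 2) - intra  # p_in=0, p_out=1 -> 66 - 12 = 54 between-group edges
print(intra, inter)             # the directed variants double these to 24 and 108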
Example #19
def community_graphs():
    print("Community graphs for social networks")
    print("Caveman graph")
    G = nx.caveman_graph(2, 13)
    draw_graph(G)
    print(" Connected Caveman graph")
    G = nx.connected_caveman_graph(2, 3)
    draw_graph(G)
    print("Relaxed caveman")
    G = nx.relaxed_caveman_graph(2, 5, 0.2)
    draw_graph(G)
    print("Random partition graph")
    G = nx.random_partition_graph([10, 10, 10], .25, .01)
    draw_graph(G)
    print(len(G))
    partition = G.graph['partition']
    print(len(partition))
    print("Planted partition graph")
    G = nx.planted_partition_graph(4, 3, 0.5, 0.1, seed=42)
    draw_graph(G)
    print("Gaussian random partition graph")
    G = nx.gaussian_random_partition_graph(40, 10, 10, .25, .1)
    print(len(G))
    draw_graph(G)
Example #20
def test_planted_partition_graph():
    G = nx.planted_partition_graph(4, 3, 1, 0)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(G.edges()), 12)

    G = nx.planted_partition_graph(4, 3, 0, 1)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(G.edges()), 54)

    G = nx.planted_partition_graph(10, 4, .5, .1, seed=42)
    C = G.graph['partition']
    assert_equal(len(C), 10)
    assert_equal(len(G), 40)
    assert_equal(len(G.edges()), 108)

    G = nx.planted_partition_graph(4, 3, 1, 0, directed=True)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(G.edges()), 24)

    G = nx.planted_partition_graph(4, 3, 0, 1, directed=True)
    C = G.graph['partition']
    assert_equal(len(C), 4)
    assert_equal(len(G), 12)
    assert_equal(len(G.edges()), 108)

    G = nx.planted_partition_graph(10, 4, .5, .1, seed=42, directed=True)
    C = G.graph['partition']
    assert_equal(len(C), 10)
    assert_equal(len(G), 40)
    assert_equal(len(G.edges()), 197)

    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 1.1, 0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, -0.1,
                  0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1, 1.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1,
                  -0.1)
Example #21
def test_planted_partition_graph():
    G = nx.planted_partition_graph(4,3,1,0)
    C = G.graph['partition']
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(list(G.edges())),12)

    G = nx.planted_partition_graph(4,3,0,1)
    C = G.graph['partition']
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(list(G.edges())),54)

    G = nx.planted_partition_graph(10,4,.5,.1,seed=42)
    C = G.graph['partition']
    assert_equal(len(C),10)
    assert_equal(len(G),40)
    # number of edges is random, so can't be tested for exact value?
    # assert_equal(len(list(G.edges())),108)

    G = nx.planted_partition_graph(4,3,1,0,directed=True)
    C = G.graph['partition']
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(list(G.edges())),24)

    G = nx.planted_partition_graph(4,3,0,1,directed=True)
    C = G.graph['partition']
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(list(G.edges())),108)

    G = nx.planted_partition_graph(10,4,.5,.1,seed=42,directed=True)
    C = G.graph['partition']
    assert_equal(len(C),10)
    assert_equal(len(G),40)
    assert_equal(len(list(G.edges())),218)

    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 1.1, 0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3,-0.1, 0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1, 1.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1,-0.1)
Example #22
def test_planted_partition_graph():
    G = nx.planted_partition_graph(4,3,1,0)
    C = G.graph['partition'] 
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(G.edges()),12)

    G = nx.planted_partition_graph(4,3,0,1)
    C = G.graph['partition'] 
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(G.edges()),54)

    G = nx.planted_partition_graph(10,4,.5,.1,seed=42)
    C = G.graph['partition']
    assert_equal(len(C),10)
    assert_equal(len(G),40)
    assert_equal(len(G.edges()),108)

    G = nx.planted_partition_graph(4,3,1,0,directed=True)
    C = G.graph['partition'] 
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(G.edges()),24)

    G = nx.planted_partition_graph(4,3,0,1,directed=True)
    C = G.graph['partition'] 
    assert_equal(len(C),4)
    assert_equal(len(G),12)
    assert_equal(len(G.edges()),108)

    G = nx.planted_partition_graph(10,4,.5,.1,seed=42,directed=True)
    C = G.graph['partition'] 
    assert_equal(len(C),10)
    assert_equal(len(G),40)
    assert_equal(len(G.edges()),218)

    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 1.1, 0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3,-0.1, 0.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1, 1.1)
    assert_raises(nx.NetworkXError, nx.planted_partition_graph, 3, 3, 0.1,-0.1)
Example #23
def test_planted_partition_graph():
    G = nx.planted_partition_graph(4, 3, 1, 0, seed=42)
    C = G.graph['partition']
    assert len(C) == 4
    assert len(G) == 12
    assert len(list(G.edges())) == 12

    G = nx.planted_partition_graph(4, 3, 0, 1)
    C = G.graph['partition']
    assert len(C) == 4
    assert len(G) == 12
    assert len(list(G.edges())) == 54

    G = nx.planted_partition_graph(10, 4, .5, .1, seed=42)
    C = G.graph['partition']
    assert len(C) == 10
    assert len(G) == 40

    G = nx.planted_partition_graph(4, 3, 1, 0, directed=True)
    C = G.graph['partition']
    assert len(C) == 4
    assert len(G) == 12
    assert len(list(G.edges())) == 24

    G = nx.planted_partition_graph(4, 3, 0, 1, directed=True)
    C = G.graph['partition']
    assert len(C) == 4
    assert len(G) == 12
    assert len(list(G.edges())) == 108

    G = nx.planted_partition_graph(10, 4, .5, .1, seed=42, directed=True)
    C = G.graph['partition']
    assert len(C) == 10
    assert len(G) == 40

    ppg = nx.planted_partition_graph
    pytest.raises(nx.NetworkXError, ppg, 3, 3, 1.1, 0.1)
    pytest.raises(nx.NetworkXError, ppg, 3, 3, -0.1, 0.1)
    pytest.raises(nx.NetworkXError, ppg, 3, 3, 0.1, 1.1)
    pytest.raises(nx.NetworkXError, ppg, 3, 3, 0.1, -0.1)
Example #24
def main():
    C = 8
    E = 0
    N = 200
    L = 2
    PMAX = 0.3
    PMIN = 0.1
    PSTEP = .001
    ITERATIONSONP = 30  # number of iterations for each p value
    ITERATIONSPERGRAPH = 30  #iterations on each graph
    Emin = 12
    Emax = 17

    size = int((PMAX - PMIN) / PSTEP) + 1
    epidemicSize = np.zeros(size)
    epidemicLength = np.zeros(size)
    pValues = np.zeros(size)

    E = Emin
    while (E < Emax):
        c = float(C)
        k = int(N / L)  #k=vertices per group
        c_in = 2 * C + E
        c_out = 2 * C - E
        p_in = (.5 * c_in) / N
        p_out = (.5 * c_out) / N
        p = PMIN
        counter = 0
        while (p < PMAX):  #for all p = [0,1]
            print(p)
            start = time.time()
            pValues[counter] = p  #use this p on multiple generated graphs (multiple times)
            sizeArray = np.zeros(
                (ITERATIONSONP
                 ))  #store size results for runs on multiple graphs
            lengthArray = np.zeros(
                (ITERATIONSONP
                 ))  #store length results for runs on multiple graphs
            for index in range(0, ITERATIONSONP):  #iterate on a p value
                graphInfectionSize = np.zeros(
                    (ITERATIONSPERGRAPH)
                )  #store size results for multiple infections on one graph
                graphInfectionLength = np.zeros(
                    (ITERATIONSPERGRAPH)
                )  #store length results for multiple infections on one graph
                graph = nx.planted_partition_graph(
                    L, k, p_in,
                    p_out)  #run simulation on this graph multiple times
                for node in graph:
                    graph.nodes[node]['C'] = 1  #no contagious nodes at start, all susceptible
                for index2 in range(0,
                                    ITERATIONSPERGRAPH):  #iterate on a graph
                    tempGraph = graph.copy(
                    )  #make a copy, or iterating on the same graph does nothing after first time...
                    graphInfectionSize[index2], graphInfectionLength[
                        index2] = infectThePopulation(tempGraph,
                                                      p)  #run scenario on copy
                sizeArray[index] = np.sum(
                    graphInfectionSize
                ) / ITERATIONSPERGRAPH  #average size of infection from the given graph
                lengthArray[index] = (
                    np.sum(graphInfectionLength)
                ) / ITERATIONSPERGRAPH  #average length from given graph
            epidemicLength[counter] = (
                np.sum(lengthArray)
            ) / ITERATIONSONP  #average infection length from multiple graphs for given value of p
            epidemicSize[counter] = np.sum(
                sizeArray
            ) / ITERATIONSONP  #average infection size from multiple graphs for given value of p
            p += PSTEP
            counter += 1
            print(time.time() - start)
        np.savetxt('./eggHunt/{}length.txt'.format(E),
                   epidemicLength,
                   fmt='%d')
        np.savetxt('./eggHunt/{}size.txt'.format(E), epidemicSize, fmt='%d')
        plotResults(epidemicSize, epidemicLength, pValues, N, E, C)
        E += .1
Example #25
import matplotlib.pyplot as plt
import networkx as nx

G0 = nx.planted_partition_graph(3, 20, 0.9, 0.01)
G1 = nx.path_graph(3)

# Compute degree assortativity of graph.
r0 = nx.degree_assortativity_coefficient(G0)
r1 = nx.degree_assortativity_coefficient(G1)

print(nx.degree_histogram(G0))

print(nx.average_clustering(G0))
Example #26
File: fastq1.py  Project: dguelde/python
def main():
	#important variables
	ITERATIONSONP=250 # number of iterations for each p value
	ITERATIONSPERGRAPH=250 #iterations on each graph
	Emin=15.0 #range of epsilon values to consider
	Emax=16.0
	ESTEP=.1
	PMIN=0
	PMAX=1#range of p values to consider
	PSTEP=.1
	C=8
	N=200
	L=2
	size=((PMAX-PMIN)/PSTEP)+1
	epidemicSize=np.zeros((size)) #hold results from outer loop
	epidemicLength=np.zeros((size))
	pValues=np.zeros((size))
	E=Emin
	while (E < Emax): #iterate on a range of epsilon values
		c=float(C)
		k=int(N/L) #k=vertices per group
		c_in=2*C+E
		c_out=2*C-E
		p_in=(.5*c_in)/N
		p_out=(.5*c_out)/N
		p=PMIN
		counter=0
		while (p<PMAX): #next inner loop, over p values
			start = time.time()
			pValues[counter]=p #use this p on multiple generated graphs (multiple times)
			sizeArray=np.zeros((ITERATIONSONP)) #store size results for runs on multiple graphs
			lengthArray=np.zeros((ITERATIONSONP)) #store length results for runs on multiple graphs
			for index in range(0,ITERATIONSONP): 
				#store size results for multiple infections on one graph
				graphInfectionSize=np.zeros((ITERATIONSPERGRAPH)) 
				#store length results for multiple infections on one graph
				graphInfectionLength=np.zeros((ITERATIONSPERGRAPH)) 
				g = nx.planted_partition_graph(L,k,p_in,p_out) #generate planted partition graph
				AssociationMatrix = nx.to_numpy_matrix(g)
				neighbors=[]
				counter2=0
				for item in g:
					neighbors.append(list(nx.neighbors(g,counter2)))
					counter2+=1
				neighborMatrix = np.asarray(neighbors)
				for index2 in range(0,ITERATIONSPERGRAPH): #iterate on a graph
					graphInfectionSize[index2],graphInfectionLength[index2] = \
						infectThePopulation(neighborMatrix,p,N) #run scenario on graph
				sizeArray[index]=np.sum(graphInfectionSize)/ITERATIONSPERGRAPH #average size of infection 
																				#from the given graph
				#average length from given graph
				lengthArray[index]=(np.sum(graphInfectionLength))/ITERATIONSPERGRAPH 
			#average infection length from multiple graphs for given value of p
			epidemicLength[counter]=(np.sum(lengthArray))/ITERATIONSONP 
			#average infection size from multiple graphs for given value of p
			epidemicSize[counter]=np.sum(sizeArray)/ITERATIONSONP	
			p+=PSTEP #end of p-loop
			counter+=1
			print "E",E,"p",p,time.time()-start
		np.savetxt('./q1c/E{}length.txt'.format(E),epidemicLength)
		np.savetxt('./q1c/E{}size.txt'.format(E),epidemicSize)
		plotResults(epidemicSize,epidemicLength,pValues,N,E,C)
		E+=ESTEP #end of e-loop
Example #27
###########################################################
#
# Load and preprocess data (this is a dummy toy example!)
#
###########################################################

####
# These are random and very small networks - expect poor performance
# as these random networks do not have any interesting structure
# The purpose of main.py is to show how to use the code!
####

n_genes = 500
n_drugs = 400
n_drugdrug_rel_types = 10
gene_net = nx.planted_partition_graph(50, 10, 0.2, 0.05, seed=42)

gene_adj = nx.adjacency_matrix(gene_net)
gene_degrees = np.array(gene_adj.sum(axis=0)).squeeze()

gene_drug_adj = sp.csr_matrix((np.random.rand(n_genes, n_drugs) > 0.7).astype(int))
drug_gene_adj = gene_drug_adj.transpose(copy=True)

drug_drug_adj_list = [
    nx.adjacency_matrix(nx.planted_partition_graph(20, 20, 0.2, 0.05))
    for _ in range(n_drugdrug_rel_types)]
drug_degrees_list = [np.array(drug_adj.sum(axis=0)).squeeze() for drug_adj in drug_drug_adj_list]


# data representation
adj_mats_orig = {
Example #28
def generateWithNX():
    G = nx.planted_partition_graph(5, 50, 0.5, 0.001)
    return G
Example #29
def run(seed, l_, g_, k_in, k_out):
    graph = nx.planted_partition_graph(l_,
                                       g_,
                                       k_in / g_,
                                       k_out / g_,
                                       seed=seed)
    largest_cc = max(nx.connected_components(graph), key=len)
    graph = graph.subgraph(largest_cc)
    graph = nx.convert_node_labels_to_integers(graph)

    laplacian = cv._construct_laplacian(graph, use_spectral_gap=False)
    w, v = sp.linalg.eigs(laplacian, k=100, which='SM')
    #w, v = np.linalg.eig(laplacian.toarray()) #, k=50, which='SM')

    C_1 = [node for node in graph.nodes if graph.nodes[node]["block"] == 0]
    C_2 = [node for node in graph.nodes if graph.nodes[node]["block"] == 1]
    edges = np.array(graph.edges)
    _e = edges[np.isin(edges[:, 0], C_1)]
    out_edges = _e[np.isin(_e[:, 1], C_2)]

    gc = np.array([graph.nodes[node]["block"] for node in graph.nodes])
    gc[gc == 0] = -1
    diffs = []
    corr_int = []
    corr = []
    for s in range(len(w)):
        diffs.append(
            abs(np.sum([v[edge[0], s] - v[edge[1], s] for edge in out_edges])))
        v_c_int = np.array(v[:, s])
        v_c_int[v_c_int < 0] = -1
        v_c_int[v_c_int > 0] = 1
        corr_int.append(abs(np.dot(gc, v_c_int)) / len(gc))
        corr.append(abs(np.dot(gc, v[:, s]) / len(gc)))

    #w = w[1:]
    #v = v[:, 1:]
    #corr = corr[1:]
    #corr_int = corr_int[1:]
    #diffs = diffs[1:]

    v_c = v[:, np.argmax(diffs)]
    v_c_best = v[:, np.argmax(corr_int)]
    best_diff, best_corr = np.argmax(diffs), np.argmax(corr_int)

    v_c_int = np.array(v_c)
    v_c_int[v_c < 0] = -1
    v_c_int[v_c > 0] = 1
    v_c_int /= g_
    v_c_best_int = np.array(v_c_best)
    v_c_best_int[v_c_best < 0] = -1
    v_c_best_int[v_c_best > 0] = 1
    plot = True
    if plot:
        plt.figure(figsize=(4, 3))
        norm_corr = np.array(corr_int) / np.max(corr_int)
        plt.scatter(w,
                    np.array(diffs) / max(diffs),
                    s=10 + 20 * norm_corr,
                    c=cm.turbo(norm_corr))  #'k')#, marker=".")
        plt.savefig('diff_vs_eig.pdf', bbox_inches='tight')

        plt.figure(figsize=(4, 3))
        plt.plot(w, corr_int, ".", c='k')
        plt.xlabel(r'$|\sum_{ij} (\phi_s(i) - \phi_s(j))\,\delta(C_i,C_j)|$')
        plt.ylabel(r'$corr$')

        plt.figure(figsize=(4, 3))
        plt.scatter(diffs, corr_int, c=cm.turbo(norm_corr))
        # plt.scatter(diffs[best_diff], corr_int[best_diff], label="diff")
        plt.legend()
        plt.savefig('example_plot_1.pdf', bbox_inches='tight')

        plt.figure(figsize=(4, 3))
        plt.plot(-v_c, ".", label="diff", c='k', ms=0.5)
        plt.axis([0, 10000, -.02, .02])
        #plt.plot(v_c_best, ".", label="corr")
        plt.xlabel('node id')
        plt.ylabel(r'$\phi_\mathrm{best}(i)$')
        plt.legend()
        # ax2 = plt.twinx()
        # ax2.set_xlim(0, 10000)
        #ax2.plot(v_c_int, ".", label="diff_int")
        #ax2.plot(0.8 * v_c_best_int, ".", label="corr_int")
        plt.plot(gc / np.sqrt(2 * g_), c="r", lw=0.5)
        plt.savefig('best_eig.pdf', bbox_inches='tight')

        plt.figure(figsize=(4, 3))
        plt.plot(v[:, 1], ".", label="diff", c='k', ms=0.5)
        plt.axis([0, 10000, -.02, .02])
        #plt.plot(v_c_best, ".", label="corr")
        plt.xlabel('node id')
        plt.ylabel(r'$\phi_2(i)$')
        plt.legend()
        # ax2 = plt.twinx()
        # ax2.set_xlim(0, 10000)
        #ax2.plot(v_c_int, ".", label="diff_int")
        #ax2.plot(0.8 * v_c_best_int, ".", label="corr_int")
        plt.plot(gc / np.sqrt(2 * g_), c="r", lw=0.5)
        plt.savefig('second_eig.pdf')
    return corr_int[best_diff], corr_int[best_corr], corr_int[0]
Example #30
def get_dd_planted_partition(n, q, c, epsilon):
    # generate graphs
    p_in = (1 / n) * (c + epsilon[0] / 2)
    p_out = (1 / n) * (c - epsilon[0] / 2)
    return nx.planted_partition_graph(int(q), int(n / q), p_in, p_out, seed=42)
Example #31
        if str(sys.argv[3]) == 'BA':
            G = nx.barabasi_albert_graph(N, m)

        #Create random graph with a power-law degree distribution
        # G = nx.expected_degree_graph(create_power_law_degree_sequence(
        #     N,kmin,kmax,exponent), selfloops=False)

        #Create the Watts-Strogatz random graph
        if str(sys.argv[3]) == 'WS':
            G = nx.watts_strogatz_graph(N, k_neigh, p_WS, seed=None)
        #print(len(G.edges()))
        #Create the planted partition random graph: number of groups, number of vertices in each group, prob. of connecting vertices within a group, prob. of connecting vertices between groups
        if str(sys.argv[3]) == 'PP':
            G = nx.planted_partition_graph(N_modules,
                                           N_in_G,
                                           p_in,
                                           p_out,
                                           seed=95)

        if str(sys.argv[3]) == 'STAR':
            G = nx.star_graph(N)

        if str(sys.argv[3]) == 'EXTREME':
            N = 1000
            degree_seq = [3 for i in range(N)]
            #put 5 nodes with very large degree
            for i in range(5):
                degree_seq[i] = N - 1
            #Create random graph with a power-law degree distribution
            G = nx.expected_degree_graph(degree_seq, selfloops=False)
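A standalone sketch of the planted-partition parameters described in the comment inside the example above, with hypothetical values:

import networkx as nx

G = nx.planted_partition_graph(4, 25, 0.6, 0.02, seed=95)  # 4 groups, 25 vertices each
print(len(G), len(G.graph['partition']))                   # 100 nodes, 4 planted groups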
Example #32
def generate_network(topology):
    # Define parameter options dictionaries
    topology_models = pd.DataFrame()
    topology_models['Description'] = pd.Series({
        'ER_n_p':
        'Erdős–Rényi model with a fixed number of nodes and link probability',
        'ER_n_m':
        'Erdős–Rényi model with a fixed number of nodes and links',
        'ER_n_in':
        'Erdős–Rényi model with a fixed number of nodes and expected in-degree',
        'WS':
        'Watts–Strogatz model',
        'BA':
        'Barabási–Albert model',
        'planted_partition':
        'Planted partition model',
        'ring':
        'Ring model',
        'lattice':
        'Regular lattice model',
        'star':
        'Star model',
        'complete':
        'Complete graph model',
        'explicit':
        'Explicit topology provided by the user'
    })
    topology_models['Required parameters'] = pd.Series({
        'ER_n_p': ['nodes_n', 'ER_p'],
        'ER_n_m': ['nodes_n', 'ER_m'],
        'ER_n_in': ['nodes_n', 'in_degree_expected'],
        'WS': ['nodes_n', 'WS_k', 'WS_p'],
        'BA': ['nodes_n', 'BA_m'],
        'planted_partition': ['nodes_n', 'partitions_n', 'p_in', 'p_out'],
    })
    try:
        # Ensure that a topology model has been specified
        if 'model' not in topology:
            raise ParameterMissing('model')
        # Ensure that the provided model is implemented
        if topology.model not in topology_models.index:
            raise ParameterValue(topology.model)
        # Ensure that all the parameters required by the model
        # have been provided
        par_required = topology_models['Required parameters'][topology.model]
        for par in par_required:
            if par not in topology:
                raise ParameterMissing(par)
    except ParameterMissing as e:
        print(e.msg, e.par_names)
        raise
    except ParameterValue as e:
        print(e.msg, e.par_value)
        raise
    else:
        model = topology.model
        if model == 'ER_n_p':
            # Read required parameters
            nodes_n = topology.nodes_n
            ER_p = topology.ER_p
            # Generate network
            return nx.gnp_random_graph(nodes_n, ER_p, seed=None, directed=True)
        elif model == 'ER_n_m':
            # Read required parameters
            nodes_n = topology.nodes_n
            ER_m = topology.ER_m
            # Generate network
            return nx.gnm_random_graph(nodes_n, ER_m, seed=None, directed=True)
        elif model == 'ER_n_in':
            # Read required parameters
            nodes_n = topology.nodes_n
            in_degree_expected = topology.in_degree_expected
            # Generate network
            ER_p = in_degree_expected / (nodes_n - 1)
            return nx.gnp_random_graph(nodes_n, ER_p, seed=None, directed=True)
        elif model == 'WS':
            # Read required parameters
            nodes_n = topology.nodes_n
            WS_k = topology.WS_k
            WS_p = topology.WS_p
            # Generate network
            #return nx.connected_watts_strogatz_graph(
            return connected_watts_strogatz_graph(nodes_n,
                                                  WS_k,
                                                  WS_p,
                                                  tries=200)
        elif model == 'BA':
            # Read required parameters
            nodes_n = topology.nodes_n
            BA_m = topology.BA_m
            # Generate network
            #return directed_barabasi_albert_graph(
            return nx.barabasi_albert_graph(nodes_n, BA_m)
        elif model == 'planted_partition':
            # Read required parameters
            nodes_n = topology.nodes_n
            partitions_n = topology.partitions_n
            p_in = topology.p_in
            p_out = topology.p_out
            # Generate network
            return nx.planted_partition_graph(partitions_n,
                                              int(nodes_n / partitions_n),
                                              p_in,
                                              p_out,
                                              directed=True)
        else:
            raise ParameterValue(model,
                                 msg='Topology model not yet implemented')
Example #33
def run(seed, l_, g_, k_in, k_out):
    graph = nx.planted_partition_graph(l_,
                                       g_,
                                       k_in / g_,
                                       k_out / g_,
                                       seed=seed)
    largest_cc = max(nx.connected_components(graph), key=len)
    graph = graph.subgraph(largest_cc)
    graph = nx.convert_node_labels_to_integers(graph)

    laplacian = cv._construct_laplacian(graph, use_spectral_gap=False)
    w, v = sp.linalg.eigs(laplacian, k=50, which='SM')

    C_1 = [node for node in graph.nodes if graph.nodes[node]["block"] == 0]
    C_2 = [node for node in graph.nodes if graph.nodes[node]["block"] == 1]
    edges = np.array(graph.edges)
    _e = edges[np.isin(edges[:, 0], C_1)]
    out_edges = _e[np.isin(_e[:, 1], C_2)]

    gc = np.array([graph.nodes[node]["block"] for node in graph.nodes])
    gc[gc == 0] = -1
    diffs = []
    corr_int = []
    corr = []
    for s in range(len(w)):
        diffs.append(
            abs(np.mean([v[edge[0], s] - v[edge[1], s]
                         for edge in out_edges])))
        v_c_int = np.array(v[:, s])
        v_c_int[v_c_int < 0] = -1
        v_c_int[v_c_int > 0] = 1
        corr_int.append(abs(np.dot(gc, v_c_int)) / len(gc))
        corr.append(abs(np.dot(gc, v[:, s]) / len(gc)))

    w = w[1:]
    v = v[:, 1:]
    corr = corr[1:]
    corr_int = corr_int[1:]
    diffs = diffs[1:]

    v_c = v[:, np.argmax(diffs)]
    v_c_best = v[:, np.argmax(corr_int)]
    best_diff, best_corr = np.argmax(diffs), np.argmax(corr_int)

    v_c_int = np.array(v_c)
    v_c_int[v_c < 0] = -1
    v_c_int[v_c > 0] = 1
    v_c_best_int = np.array(v_c_best)
    v_c_best_int[v_c_best < 0] = -1
    v_c_best_int[v_c_best > 0] = 1
    plot = False
    if plot:
        plt.figure()
        plt.plot(w, diffs, ".")

        plt.figure()
        # plt.plot(w, corr, '.')
        plt.plot(w, corr_int, ".")

        plt.figure()
        # plt.plot(diffs, corr, 'b.')
        plt.plot(diffs, corr_int, ".")
        plt.plot(diffs[best_corr], corr_int[best_corr], "ro", label="corr")
        plt.plot(diffs[best_diff], corr_int[best_diff], "go", label="diff")
        plt.legend()

        plt.figure()
        plt.plot(v_c, ".", label="diff")
        plt.plot(v_c_best, ".", label="corr")
        plt.legend()
        ax2 = plt.twinx()
        ax2.plot(v_c_int, ".", label="diff_int")
        ax2.plot(0.8 * v_c_best_int, ".", label="corr_int")
        ax2.plot(gc, c="r")
    return corr_int[best_diff], corr_int[best_corr], corr_int[0]
Example #34
####
# The following code uses artificially generated and very small networks.
# Expect less than excellent performance as these random networks do not have any interesting structure.
# The purpose of main.py is to show how to use the code!
#
# All preprocessed datasets used in the drug combination study are at: http://snap.stanford.edu/decagon:
# (1) Download datasets from http://snap.stanford.edu/decagon to your local machine.
# (2) Replace dummy toy datasets used here with the actual datasets you just downloaded.
# (3) Train & test the model.
####

val_test_size = 0.05
n_genes = 500
n_drugs = 400
n_drugdrug_rel_types = 3
gene_net = nx.planted_partition_graph(50, 10, 0.2, 0.05, seed=42)
# Graph bipartitioning, an NP-hard problem, splits an undirected graph into two equal-sized groups while minimizing the number of edges crossing between them.
# More generally, the l-partition problem splits an undirected graph into l equal-sized groups so that the number of inter-group edges is minimized.
# Here, I take this to be a randomly generated graph with planted communities:
# 50 groups of 10 vertices each, with edge probability 0.2 within a group and 0.05 between groups.

gene_adj = nx.adjacency_matrix(gene_net)  # 500*500 adjacency matrix
gene_degrees = np.array(
    gene_adj.sum(axis=0)).squeeze()  # sum over the gene axis and squeeze out one dimension: a 500-dimensional degree vector

gene_drug_adj = sp.csr_matrix(
    (10 * np.random.randn(n_genes, n_drugs) > 15).astype(int))  # 500*400
# csr_matrix stores a sparse np.array in compressed form
drug_gene_adj = gene_drug_adj.transpose(copy=True)  # 400*500 transposed drug-gene sparse matrix

drug_drug_adj_list = []  #400*400*3
Example #35
def main():

    N_nodes = 200
    p_out = 0.05
    monte_carlo = 100
    results = []

    p_values = [0.1, 0.3, 0.5, 0.7, 0.9]
    c_values = [2, 4, 5, 8, 10, 20, 25, 40, 50]
    val = 0

    with progress.Bar(label="Progress: ", expected_size=len(p_values) * len(c_values) * monte_carlo) as bar:
        for p_in in [0.1, 0.3, 0.5, 0.7, 0.9]:
            results.append([])

            for N_clusters in [2, 4, 5, 8, 10, 20, 25, 40, 50]:
                AVG = []

                for mc in range(monte_carlo):

                    # Random Initial state
                    theta0 = np.random.normal(loc=0, scale=0.01, size=(1, N_nodes))[0]  # Random small phase angles
                    dtheta0 = np.random.normal(loc=0, scale=0.01, size=(1, N_nodes))[0]  # Random small frequencies
                    Y0 = np.append(theta0, dtheta0)
                    Y0 = np.append(Y0, 1)

                    # Random Initial state
                    thetaf = np.random.normal(loc=0, scale=0.01, size=(1, N_nodes))[0]  # Random small phase angles
                    dthetaf = np.random.normal(loc=0, scale=0.01, size=(1, N_nodes))[0]  # Random small frequencies
                    Yf = np.append(thetaf, dthetaf)
                    Yf = np.append(Yf, 1)

                    G = nx.planted_partition_graph(N_clusters, N_nodes // N_clusters, p_in, p_out)
                    adja = np.array(nx.to_numpy_matrix(G))
                    L = control2.get_Laplacian(adja)
                    Psource = generate_random_power_distribution(N_nodes)
                    Pmax = generate_random_capacity_distribution(N_nodes, adja)
                    K = Pmax / float(I * Omega)
                    A = control2.build_transition_matrix(Psource, L, Omega, I, K, alpha, Dt)
                    A_powers = [np.identity(A.shape[0]), A]
                    for k in range(control_time + 1)[2:]:
                        A_powers.append(np.dot(A, A_powers[-1]))
                    storage_level = 100 * np.ones((1, N_nodes))[0]

                    drivers = control2.rank_submodular_greedy_lazy(
                        A_powers,
                        adja,
                        range(N_nodes),
                        control_time,
                        storage_level,
                        max_capacity,
                        r,
                        I,
                        Omega,
                        Y0,
                        Yf,
                        True,
                    )
                    AVG.append(len(drivers))
                    val += 1
                    bar.show(val)

                results[-1].append(np.mean(AVG))

    with open("./results.json", "w") as f:
        json.dump(results, f)
Example #36
    
N=300
m=[20]
cluster_node_num=30
p_in=0.35
p_out=0.1
for N in [300, 500, 1000, 1200, 1500, 1700,2000,2500, 3000, 3500, 4000, 4500, 5000, 5500, 6000]: #, 2500, 3000, 3500, 4000, 4500, 5000, 5500, 6000
    for mm in m:
        mm=N//cluster_node_num
        for i in range(0,1):
            root="F:/workspace/git/S2GraphMP/data/DenseGraph/raw/rawData"+str(N)
            adpmRoot="F:/workspace/git/S2GraphMP/data/DenseGraph/Dense_APDM/cluster"+str(N)
            if not os.path.exists(root):
                os.makedirs(root)
            if not os.path.exists(adpmRoot):
                os.makedirs(adpmRoot)
            root="F:/workspace/git/S2GraphMP/data/DenseGraph/raw/rawGAMer"
            rawFile=open(root+"/"+str(N)+"_Cluster_"+str(mm)+"_in_"+str(p_in)+"_out_"+str(p_out)+"_15_case_"+str(i)+".txt","w")
            G = nx.planted_partition_graph(mm,cluster_node_num,p_in,p_out)
            print "Edge Num: ",len(G.edges())
            print "Node Num: ",len(G.nodes())
            for edge in G.edges():
                rawFile.write(str(edge[0])+" "+str(edge[1])+"\n")
            partition = G.graph['partition']
            for p in partition:            
                rawFile.write(" ".join(map(str, list(p)))+"\n")
            rawFile.close()
        
# nx.draw(G)
# plt.draw()
# plt.show()
Example #37
    pos=nx.spring_layout(g)
    nx.draw_networkx_edges(g,pos,alpha=0.4)
    nx.draw_networkx_nodes(g,pos,nodelist=list(colList.keys()),
                           node_color=list(colList.values()),
                           cmap=plt.cm.rainbow)
    nx.draw_networkx_labels(g,pos,font_size=10,font_family='sans-serif')
    plt.axis('off')
    plt.title(g.name)
    import time
    #plt.savefig("ssss.png")
    plt.show()


if __name__=="__main__":
    k=5
    g=nx.planted_partition_graph(k,10,0.8,0.02)

    result=spectral_clustering(g,k)
    shs(g,result)

    #k=8
    #g=nx.Graph()
    #filepath=r'D:\data\prandom\polbooks.txt'
    #read_file_txt(g,path=filepath)
    #result=spectral_clustering(g,k)
    #shs(g,result)


#g=nx.karate_club_graph()

#nodeNum=len(g.nodes())