def rand(self):
    er = nx.dense_gnm_random_graph(24, 27)
    for e in er.edges():
        f, t = e
        self.add_edge(f, t)
    er = nx.dense_gnm_random_graph(24, 7)
    for e in er.edges():
        f, t = e
        self.add_edge(t, f)
    return self
def generate_input(n, m):
    randinput = open('rinput', 'w')
    randinput.truncate(0)
    randinput.write("{0} {1}\n".format(n, m))
    G = nx.dense_gnm_random_graph(n, m)
    while not nx.is_connected(G):
        G = nx.dense_gnm_random_graph(n, m)
    for (u, v) in G.edges():
        # G.edge[u][v] is the NetworkX 1.x API; G[u][v] works in both 1.x and 2.x
        G[u][v]['weight'] = random.randint(1, 100)
        w = G[u][v]['weight']
        randinput.write("{0} {1} {2}\n".format(u + 1, v + 1, w))
    randinput.write("0 \n")
    randinput.close()
def generate_random_edge_graph(num_nodes, num_edges):
    """
    Generates a networkx graph and removes any isolated nodes in the graph.

    :param num_nodes: number of nodes in the generated graph
    :param num_edges: number of edges in the generated graph
    :return: Graph
    """
    graph = nx.dense_gnm_random_graph(num_nodes, num_edges)
    graph.remove_nodes_from(list(nx.isolates(graph)))
    while not nx.is_connected(graph):
        graph = nx.dense_gnm_random_graph(num_nodes, num_edges)
        graph.remove_nodes_from(list(nx.isolates(graph)))
    return graph
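# Minimal usage sketch (not from the original source; parameter values are
# illustrative). Assumes `import networkx as nx` and the function above, and
# that num_edges is large enough for a connected graph to be drawn eventually.
g = generate_random_edge_graph(20, 40)
assert nx.is_connected(g)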
def __init__(self, N, m, seed=None, nodetype=SimpleNode, L=5.):
    positions = np.random.uniform(-L, L, size=(N, 2))
    graph = nx.dense_gnm_random_graph(N, m, seed=seed)
    adj = nx.adjacency_matrix(graph).todense()
    super().__init__(adj, positions, node_type=nodetype)
def GenerateGraphs(size, base):
    dic = GenerateDic(size, base)
    for i in range(1, 5):
        for j in range(10):
            G = AddEdges(nx.wheel_graph(size))
            for k in range(5):
                nodes = RandNodes(size - 1)
                for l in range(5):
                    dic["Edmond" + str(size) + "wheel"].append(Edmond(G, nodes[0], nodes[1]))
                    dic["Din" + str(size) + "wheel"].append(Din(G, nodes[0], nodes[1]))
                    dic["Boyk" + str(size) + "wheel"].append(Boyk(G, nodes[0], nodes[1]))
            G = AddEdges(nx.circular_ladder_graph(size))
            for k in range(5):
                nodes = RandNodes(size - 1)
                for l in range(5):
                    dic["Edmond" + str(size) + "circ"].append(Edmond(G, nodes[0], nodes[1]))
                    dic["Din" + str(size) + "circ"].append(Din(G, nodes[0], nodes[1]))
                    dic["Boyk" + str(size) + "circ"].append(Boyk(G, nodes[0], nodes[1]))
            G = AddEdges(nx.dense_gnm_random_graph(size, int(size * size * 0.2)))
            for k in range(5):
                nodes = RandNodes(size - 1)
                for l in range(5):
                    dic["Edmond" + str(size) + "comp"].append(Edmond(G, nodes[0], nodes[1]))
                    dic["Din" + str(size) + "comp"].append(Din(G, nodes[0], nodes[1]))
                    dic["Boyk" + str(size) + "comp"].append(Boyk(G, nodes[0], nodes[1]))
        size *= base
    df = pd.DataFrame(dic)
    df.to_csv("matrix.csv")
def test035_dense_gnm_random_graph(self):
    """
    Larger random graph picked from the set of all graphs with n nodes and m edges.
    """
    g = nx.dense_gnm_random_graph(300, 10000)
    mate1 = mv.max_cardinality_matching(g)
    mate2 = nx.max_weight_matching(g, True)
    self.assertEqual(len(mate1), len(mate2))
def get_qubit_op(num_qubits, seed=23412341234):
    G = nx.Graph()  # """ HAVE DATA -- VIGO
    n = num_qubits
    SEED = seed
    np.random.seed(SEED % (2 ** 31 - 1))
    G = nx.dense_gnm_random_graph(n, n ** 2, seed=SEED)
    for (u, v, w) in G.edges(data=True):
        w['weight'] = np.random.rand() * 0.5  # 0.1
    np.random.seed(int(time.time()))
    w = np.zeros([n, n])
    for i in range(n):
        for j in range(n):
            temp = G.get_edge_data(i, j, default=0)
            if temp != 0:
                w[i, j] = temp['weight']
    # print(w)
    qubitOp, offset = max_cut.get_max_cut_qubitops(w)
    algo_input = EnergyInput(qubitOp)
    pos = nx.spring_layout(G)
    exact = ExactEigensolver(qubitOp).run()
    exact_energy = exact['energy']
    return qubitOp, exact_energy
def computeBasicIndex(graph):
    print('---------------------------------------------------')
    print('Graph name =', graph.name)
    nodes = len(graph.nodes())
    edges = len(graph.edges())
    print('|V|=', nodes)
    print('|E|=', edges)
    print('<k>=', edges * 1.0 / nodes)
    randomGraph = nx.dense_gnm_random_graph(nodes, edges)
    print('average_shortest_path_length', nx.average_shortest_path_length(graph))
    print('average_shortest_path_length of random graph with same node and edge count',
          nx.average_shortest_path_length(randomGraph))
    if nx.is_directed(graph):
        un_digraph = graph.to_undirected()
        print('average clustering:', nx.average_clustering(un_digraph))
        un_randomGraph = randomGraph.to_undirected()
        print('average clustering of random graph with same node and edge count:',
              nx.average_clustering(un_randomGraph))
    else:
        print('average clustering:', nx.average_clustering(graph))
        print('average clustering of random graph with same node and edge count:',
              nx.average_clustering(randomGraph))
    print('degree_assortativity_coefficient:', nx.degree_assortativity_coefficient(graph))
    # print('average_neighbor_degree:', nx.average_neighbor_degree(graph))
    print('---------------------------------------------------')
def generate_graph(size, graph_type):
    if graph_type == 'random':
        G = nx.dense_gnm_random_graph(size, size * 5, seed=SEED)
    elif graph_type == 'small_world':
        G = nx.watts_strogatz_graph(size, 8, 0.25, seed=SEED)
    elif graph_type == 'small_world_sparse':
        G = nx.watts_strogatz_graph(size, size // 8, 0.25, seed=SEED)  # k must be an int
    elif graph_type == 'scale_free':
        # regular expts
        G = nx.barabasi_albert_graph(size, 8, seed=SEED)
        # implementation, celer expts - 10 node graph
        # G = nx.barabasi_albert_graph(size, 5, seed=12)
    elif graph_type == 'scale_free_sparse':
        G = nx.barabasi_albert_graph(size, size // 8, seed=SEED)  # m must be an int
    elif graph_type == 'tree':
        G = nx.random_tree(size, seed=SEED)
    # remove self loops and parallel edges
    G.remove_edges_from(nx.selfloop_edges(G))
    G = nx.Graph(G)
    print('Generated a ', graph_type, ' graph')
    print('number of nodes: ', G.number_of_nodes())
    print('Number of Edges: ', G.number_of_edges())
    print('Number of connected components: ', nx.number_connected_components(G))
    return G
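# Minimal usage sketch (not from the original source): SEED is assumed to be a
# module-level constant in the original code; here we simply pick a value.
SEED = 42
G = generate_graph(64, 'small_world')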
def random_graph(n=100, m=2500):
    """Random graph.

    :param n: node num
    :param m: edge num
    :return: dense_gnm_random_graph instance
    """
    return nx.dense_gnm_random_graph(n, m)
def substrate_gen(snet_type='random', rand_topo_info = (10, 16)): ''' generate substrate network ''' # create 10 nodes, with 14 edges, a fixed topology if snet_type == 'random': num_nodes, num_edges = rand_topo_info G = nx.dense_gnm_random_graph(num_nodes, num_edges, 3) G.remove_edge(0,7) G.remove_edge(6,9) # if snet_type == 'I2': # G = nx.Graph() # G.add_edges_from(topo_type.I2_topo.keys()) # if snet_type == 'GEANT': # geant_ip = topo_type.geant_ip # geant_mapping, geant_geo = \ # topo_type.relabel_nodeinfo(topo_type.geant_node_mapping, # topo_type.geant_node_geoinfo, # geant_ip) # geant_link = topo_type.relabel_linkinfo(geant_ip, geant_mapping) # G = nx.Graph() # G.add_edges_from(geant_link.keys()) if snet_type != 'random': xml_file = TOPO_PATH + snet_type + ".xml" print xml_file node_dict, link_dict, node_dict_new = topo_xml.run(xml_file) G = nx.Graph() G.add_edges_from(link_dict.values()) return G
def get_random_circuit(n, type='general', min_r=1, max_r=1000, min_v=1, max_v=1000, density=0.5):
    G = nx.Graph()
    if type == 'erdos':
        G = nx.erdos_renyi_graph(n, density)
        while not nx.is_connected(G):
            G = nx.erdos_renyi_graph(n, density)
    elif type[:7] == 'regular':
        d = int(type[7:])
        G = nx.random_regular_graph(d, n)
    elif type == 'general':
        G = nx.dense_gnm_random_graph(n, n**2 - n)
    elif type == 'bridge':
        A = nx.erdos_renyi_graph(n // 2, density)
        while not nx.is_connected(A):
            A = nx.erdos_renyi_graph(n // 2, density)
        B = nx.erdos_renyi_graph(n // 2, density)
        while not nx.is_connected(B):
            B = nx.erdos_renyi_graph(n // 2, density)
        an = A.number_of_nodes()
        A.add_nodes_from([an + i for i in B.nodes()])
        A.add_edges_from([(an + x, an + y) for x, y in B.edges()])
        A.add_edge(1, an + 1)
        G = A
    V = len(G.nodes())
    L = []
    for i in G.nodes():
        for k, v in G[i].items():
            L.append((i, k, randint(min_r, max_r)))
    return Circuit(V, L, 0, V - 1, randint(min_v, max_v))
def ErdosRenyi(n, m, d, display=None, seed=None):
    """
    n: the number of nodes
    m: the number of edges
    """
    # naive way to generate a connected graph with bounded maximum degree
    while True:
        if m <= 30:
            G = nx.gnm_random_graph(n, m, seed=None, directed=False)
        else:
            G = nx.dense_gnm_random_graph(n, m, seed=None)
        # dict(G.degree()) works with both the NetworkX 1.x dict and 2.x DegreeView
        maxDegree = max(dict(G.degree()).values())
        if nx.is_connected(G) and maxDegree <= d:
            break
    # avgDegree = 2 * nx.number_of_edges(G) / nx.number_of_nodes(G)
    if display:
        nx.draw(G)
        img_name = 'erdos_renyi_%i_%i.png' % (n, m)
        plt.savefig(img_name)
        sh.open(img_name)
    adjacencyMatrix = generateAdjacencyMatrix(G)
    return adjacencyMatrix
def random_connected_graph(n):
    """Generates an Erdos-Renyi G_{n,m} random graph that is connected."""
    # start from a deliberately disconnected seed graph so the loop runs at least once
    g = nx.Graph([(0, 1), (2, 3)])
    while not nx.is_connected(g):
        edges = randint(n - 1, n * (n - 1) // 2)
        g = nx.dense_gnm_random_graph(n, edges)
    return g
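# Quick check sketch (not from the original source): the helper keeps drawing
# G(n, m) graphs until a connected one appears, so the result is always connected.
g = random_connected_graph(8)
assert nx.is_connected(g)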
def test_is_bipartite():
    for i in range(1, 10):
        for j in range(1, i * i):
            x = nx.dense_gnm_random_graph(i, j)
            y = nqs.graph.CustomGraph(x.edges)
            # if len(x) == len(set((i for (i, j) in x.edges)) | set((j for (i, j) in x.edges))):
            assert y.is_bipartite == nx.is_bipartite(x)
def generate_random_graphs(numberOfNodes, outputFile, graphType, degree=None):
    sparseResult = open(outputFile, "w")
    # first writing the number of nodes
    if graphType == "degreeBased":
        G = nx.random_regular_graph(degree, numberOfNodes,
                                    numberOfNodes * int(math.sqrt(numberOfNodes)))
    if graphType == "completeChaos":
        G = nx.gnm_random_graph(numberOfNodes,
                                numberOfNodes * int(math.sqrt(numberOfNodes)))
    if graphType == "dense":
        G = nx.dense_gnm_random_graph(numberOfNodes,
                                      numberOfNodes * int(math.sqrt(numberOfNodes)))
    sparseResult.write(str(numberOfNodes) + " " + str(nx.number_of_edges(G)) + "\n")
    semiSparseRep = nx.to_dict_of_lists(G)
    # print semiSparseRep
    for element in semiSparseRep:
        if len(semiSparseRep[element]) == 0:
            return 0
        for j in semiSparseRep[element]:
            sparseResult.write(str(j + 1) + " ")
        sparseResult.write("\n")
    return 1
def random_expected_modularity_frac(num_nodes, num_edges):
    # assumes: from networkx import dense_gnm_random_graph
    #          from community import best_partition, modularity  (python-louvain)
    gnx = dense_gnm_random_graph(num_nodes, num_edges).to_undirected()
    partition = best_partition(gnx)
    in_partition = len([1 for u, v in gnx.edges()
                        if partition[u] == partition[v]]) / len(gnx.edges)
    return in_partition - modularity(partition, gnx)
def ranDAG(self, n_nodes, n_edges):
    er = nx.dense_gnm_random_graph(n_nodes, n_edges)
    for e in er.edges():
        f, t = e
        self.add_edge(f, t)
    if 0 not in self.nodes():
        self.add_edge(0, 1)
    return self
def get_graph_triangles(args):
    N_nodes, rnd = args
    N_edges = int((rnd.rand() + 1) * N_nodes)
    G = nx.dense_gnm_random_graph(N_nodes, N_edges, seed=None)
    A = nx.to_numpy_array(G)
    A_cube = A.dot(A).dot(A)
    label = int(np.trace(A_cube) / 6.)  # number of triangles
    return A.astype(bool), label, N_edges, G
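# Quick sanity check of the trace(A^3)/6 triangle count used above (not from
# the original source): the complete graph K4 contains exactly 4 triangles.
import networkx as nx
import numpy as np

A = nx.to_numpy_array(nx.complete_graph(4))
assert int(np.trace(A @ A @ A) / 6) == 4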
def getGraph(type="given"): if type == "given": g = nx.read_edgelist("pagerank.txt") elif type == "erdos_renyi": g = nx.dense_gnm_random_graph(40, 200) elif type == "poisson": g = poissongraph(40, 5) return g
def random_graphs(): print("Random graphs") print("fast GNP random graph") G = nx.fast_gnp_random_graph(n=9, p=0.4) draw_graph(G) print("GNP random graph") G = nx.gnp_random_graph(n=9, p=0.1) draw_graph(G) print("Dense GNM random graph") G = nx.dense_gnm_random_graph(n=19, m=28) draw_graph(G) print("GNM random graph") G = nx.gnm_random_graph(n=11, m=14) draw_graph(G) print("Erdős Rényi graph") G = nx.erdos_renyi_graph(n=11, p=0.4) draw_graph(G) print("Binomial graph") G = nx.binomial_graph(n=45, p=0.4) draw_graph(G) print("Newman Watts Strogatz") G = nx.newman_watts_strogatz_graph(n=9, k=5, p=0.4) draw_graph(G) print("Watts Strogatz") G = nx.watts_strogatz_graph(n=9, k=2, p=0.4) draw_graph(G) print("Watts Strogatz") G = nx.watts_strogatz_graph(n=9, k=2, p=0.4) draw_graph(G) print("Connected Watts Strogatz") G = nx.connected_watts_strogatz_graph(n=8, k=2, p=0.1) draw_graph(G) print("Random Regular Graph") G = nx.random_regular_graph(d=2, n=9) draw_graph(G) print("Barabasi Albert Graph") G = nx.barabasi_albert_graph(n=10, m=2) draw_graph(G) print("Powerlow Cluster Graph") G = nx.powerlaw_cluster_graph(n=10, m=2, p=0.2) draw_graph(G) print("Duplication Divergence Graph") G = nx.duplication_divergence_graph(n=10, p=0.2) draw_graph(G) print("Random lobster Graph") G = nx.random_lobster(n=10, p1=0.2, p2=0.8) draw_graph(G) print("Random shell Graph") constructor = [(10, 20, 0.8), (20, 40, 0.8)] G = nx.random_shell_graph(constructor) draw_graph(G) print("Random Powerlow Tree") G = nx.random_powerlaw_tree(n=24, gamma=3) draw_graph(G) print("Random Powerlow Tree Sequence") G = nx.random_powerlaw_tree(n=13, gamma=3) draw_graph(G)
def pure_random_graph(num_nodes, num_edges, num_labels, vocab_size, num_words, attr_noise):
    G = nx.dense_gnm_random_graph(num_nodes, num_edges)
    attributes = []
    for u in list(G.nodes):
        label = rd.randint(0, num_labels - 1)
        attributes.append(get_attributes(label, num_labels, vocab_size, num_words, attr_noise))
    attributes = np.array(attributes)
    attributes = sp.coo_matrix(attributes)
    return G, attributes
def test_is_bipartite():
    for i in range(1, 10):
        for j in range(1, i * i):
            x = nx.dense_gnm_random_graph(i, j)
            y = Graph.from_networkx(x)
            if len(x) == len(
                set(i for (i, j) in x.edges()) | set(j for (i, j) in x.edges())
            ):
                assert y.is_bipartite() == nx.is_bipartite(x)
def test_is_bipartite():
    for i in range(1, 10):
        for j in range(1, i * i):
            x = nx.dense_gnm_random_graph(i, j)
            y = nk.graph.Graph(nodes=list(x.nodes()), edges=list(x.edges()))
            if len(x) == len(
                set(i for (i, j) in x.edges()) | set(j for (i, j) in x.edges())
            ):
                assert y.is_bipartite() == nx.is_bipartite(x)
def RandomGNM(n, m, dense=False, seed=None): """ Returns a graph randomly picked out of all graphs on n vertices with m edges. INPUT: - ``n`` - number of vertices. - ``m`` - number of edges. - ``dense`` - whether to use NetworkX's dense_gnm_random_graph or gnm_random_graph EXAMPLES: We show the edge list of a random graph on 5 nodes with 10 edges. :: sage: graphs.RandomGNM(5, 10).edges(labels=False) [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] We plot a random graph on 12 nodes with m = 12. :: sage: gnm = graphs.RandomGNM(12, 12) sage: gnm.show() # long time We view many random graphs using a graphics array:: sage: g = [] sage: j = [] sage: for i in range(9): ... k = graphs.RandomGNM(i+3, i^2-i) ... g.append(k) ... sage: for i in range(3): ... n = [] ... for m in range(3): ... n.append(g[3*i + m].plot(vertex_size=50, vertex_labels=False)) ... j.append(n) ... sage: G = sage.plot.graphics.GraphicsArray(j) sage: G.show() # long time """ if seed is None: seed = current_randstate().long_seed() import networkx if dense: return graph.Graph(networkx.dense_gnm_random_graph(n, m, seed=seed)) else: return graph.Graph(networkx.gnm_random_graph(n, m, seed=seed))
def main(args): print('--- Prediction by existing algorithm ---') print('Index\t|V|\t|E|\ttw(G)\ttime\tevaltw\tprunenum\tfunccallnum') output_file = "exist.dat" result = ["Index\t|V|\t|E|\ttw\ttime\tevaltw\tfunccallnum"] # make some graphs graphs = [] for idx in range(0, args.data_num): n = np.random.randint(5, 10) # [5, 10] |V| m = np.random.randint(n - 1, (n * (n - 1)) // 2) # |E| g = nx.dense_gnm_random_graph(n, m) while not nx.is_connected(g): g = nx.dense_gnm_random_graph(n, m) graphs.append(g) for idx, eval_graph in enumerate(graphs): # exact computation tw = dnx.treewidth_branch_and_bound(eval_graph)[0] for calc_type in ["upper", "lower"]: graphstat = "{3}\t{0}\t{1}\t{2}".format( eval_graph.number_of_nodes(), eval_graph.number_of_edges(), tw, str(idx).rjust(5)) print('{0}\t{1}\t{2}\t{3}'.format( str(idx).rjust(5), eval_graph.number_of_nodes(), eval_graph.number_of_edges(), tw), end='\t') try: tm, evtw, fcn = linear_search(eval_graph, calc_type) res = "{0}\t{1}\t{2}".format(tm, evtw, fcn) except TimeoutError: res = "TimeOut" print(res) assert evtw == tw result.append(graphstat + "\t" + res) # write results to a file if args.out_to_file: with open("./{}/".format(args.out) + output_file, "w") as f: f.write('\n'.join(result))
def test_is_connected():
    for i in range(5, 10):
        for j in range(i + 1, i * i):
            x = nx.dense_gnm_random_graph(i, j)
            y = nqs.graph.CustomGraph(x.edges)
            if len(x) == len(
                set(i for (i, j) in x.edges) | set(j for (i, j) in x.edges)
            ):
                assert y.is_connected == nx.is_connected(x)
            else:
                assert not nx.is_connected(x)
def generate_graph(nodes, edges):
    random_graph = nx.dense_gnm_random_graph(nodes, edges)
    possible_weight = [-1, 1]
    for (u, v, w) in random_graph.edges(data=True):
        w['weight'] = random.choice(possible_weight)
    for (u, v, w) in random_graph.edges(data=True):
        print(u, v, w)
    return random_graph
def generate_random_graph(n, e):
    G = nx.dense_gnm_random_graph(n, e)
    for (u, v) in G.edges():
        # G.edge[u][v] is the NetworkX 1.x API; G[u][v] works in both 1.x and 2.x
        G[u][v]['weight'] = random.randint(1, 10)
    pos = nx.circular_layout(G)
    nx.draw_networkx(G, pos, node_size=700)
    labels = nx.get_edge_attributes(G, 'weight')
    nx.draw_networkx_edge_labels(G, pos, edge_labels=labels)
    plt.savefig('graph.png')
    plt.show()
    return G
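# Equivalent weight assignment using the NetworkX 2.x edge view (a small
# sketch, not from the original source):
import random
import networkx as nx

G = nx.dense_gnm_random_graph(10, 20)
for u, v in G.edges():
    G.edges[u, v]['weight'] = random.randint(1, 10)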
def __init__(self, T, n_user, D, c, graph_density):
    self.T = T
    self.n_user = n_user
    self.D = D
    self.c = c
    self.V = nx.dense_gnm_random_graph(n_user, graph_density)
    # nx.connected_component_subgraphs was removed in NetworkX 2.4;
    # counting the components directly gives the same number
    self.m = nx.number_connected_components(self.V)
    self.U = self.init_user()
    self.list_i = []
    self.list_m = []
    self.list_C = []
def test_is_connected():
    for i in range(5, 10):
        for j in range(i + 1, i * i):
            x = nx.dense_gnm_random_graph(i, j)
            y = nk.graph.Graph(nodes=list(x.nodes()), edges=list(x.edges()))
            if len(x) == len(
                set(i for (i, j) in x.edges) | set(j for (i, j) in x.edges)
            ):
                assert y.is_connected() == nx.is_connected(x)
            else:
                assert not nx.is_connected(x)
def generateGraph(n, m, filename='', pw=.75, maxw=5):
    G = nx.dense_gnm_random_graph(n, m)
    for e in G.edges():
        if random.random() < pw:
            G[e[0]][e[1]]['weight'] = 1
        else:
            G[e[0]][e[1]]['weight'] = random.randint(2, maxw)
    if filename:
        with open(filename, 'w+') as f:
            f.write('%s %s%s' % (len(G.nodes()), len(G.edges()), os.linesep))
            for v1, v2, edata in G.edges(data=True):
                for it in range(edata['weight']):
                    f.write('%s %s%s' % (v1, v2, os.linesep))
def random_weighted_graph(size):
    """Creates a random weighted graph.

    :size: int: number of nodes in graph
    :returns: nx.Graph object
    """
    nodes = size
    edges = nodes * (nodes // 3)  # edge count must be an integer
    MAX_WEIGHT = 100
    G = nx.dense_gnm_random_graph(n=nodes, m=edges)
    # give our graph some random weights
    for edge in G.edges():
        G.add_edge(*edge, weight=rr(MAX_WEIGHT))
    assert nx.is_connected(G)  # make sure it's connected
    return G
def probability_reliability():
    from itertools import combinations
    import networkx as nx
    import random
    G = nx.dense_gnm_random_graph(5, 10)
    total_Edges = list(G.edges())
    probability = 0
    while probability < 1.02:
        reliability = 0
        for i in range(0, 11):
            for subset in combinations(total_Edges, i):
                count = 0
                for j in subset:
                    if len(subset) != 0:
                        count = count + 1
                        G.remove_edge(j[0], j[1])
                if len(list(nx.all_simple_paths(G, 0, 2))) > 0:
                    reliability += (pow(1 - probability, count)) * (pow(probability, (10 - count)))
                for k in subset:
                    G.add_edge(k[0], k[1])
        print(probability, reliability)
        probability += 0.02
def __init__(self, num_switches=None): super(DenseTopology, self).__init__() if not num_switches: num_switches = 8 num_hosts=num_switches*4 # build graphman graph graph = nx.dense_gnm_random_graph(num_switches, min(num_switches - 1,10)* num_switches) # Add switches for s in graph: log.debug("Added switch %x" % s) self.add_switch(s+1) # Add edges for s1, s2 in graph.edges(): self.add_link(s1+1, s2+1) # Add hosts hostoffset = num_switches+2 for h in range(1,num_hosts+1): # Add host host = h + hostoffset self.add_host(host) # Connect to a "random" switch s = graph.nodes()[h % len(graph)] self.add_link(host,s+1) # Globally connected host # self.add_host(999) # for switch in graph: # self.add_link(999, switch+1) self.finalize()
random.seed(seed) print("RandomSeed=" + str(seed)) size =6 filename="/tmp/test.gnf" #Monosat().init("-decide-graph-bv -no-decide-theories -no-decide-graph-rnd -lazy-maxflow-decisions -conflict-min-cut -conflict-min-cut-maxflow -reach-underapprox-cnf ") Monosat().setOutputFile(open(filename,'w')) print("Writing file to " + filename) graphs = [nx.diamond_graph(), nx.complete_graph(2), nx.complete_graph(3), nx.complete_graph(4), nx.cycle_graph(5), nx.grid_2d_graph(4,4),nx.grid_2d_graph(4,4,True), nx.dense_gnm_random_graph(4,8,123), nx.dense_gnm_random_graph(4,8,124), nx.dense_gnm_random_graph(4,16,125) ] #Directed graphs: graphs+=[nx.gnp_random_graph(4,8,123,True),nx.gnp_random_graph(4,8,124,True),nx.gnp_random_graph(4,8,125,True),nx.gnp_random_graph(4,8,126,True), nx.gnp_random_graph(4,16,127,True), nx.gnp_random_graph(4,32,128,True), nx.gnp_random_graph(9,16,127,True), ] for i,nxg in enumerate(graphs): nxg = convert_node_labels_to_integers(nxg) g = Graph() print("Graph %d"%(i))
time = [] source = [] dst = [] tt1 = np.zeros((7, 9)) paramaters = [] l1 = [] num_packets = 7 overlap = [] path = [] reduced_transmission = [] #temp = 0 path_in_order = {6: 24} fill_num = 25 G = nx.dense_gnm_random_graph(10, 50) pos = nx.spring_layout(G) H = G.number_of_edges() colors = range(H) # nx.draw(G,pos,node_color='#A0CBE2',edge_color=colors,width=4,edge_cmap=plt.cm.Reds,with_labels=True) nx.draw(G, pos, node_color='#A0CBE2', width=0.4, edge_cmap=plt.cm.Reds, with_labels=True) # plt.savefig("edge_colormap.png", dpi=300) # save as png # plt.show() # separate the string and packets. def copy_source_dst_time(x): source_c = [] dst_c = []
#!/usr/bin/env python
import networkx as nx

N = 100
E = int(round((N**2) / 1.5))

dense = nx.dense_gnm_random_graph(N, E)
nx.write_edgelist(dense, 'dense.dat', data=False)

karate = nx.karate_club_graph()
nx.write_edgelist(karate, 'karate.dat', data=False)

plaw = nx.powerlaw_cluster_graph(N * 10, N // 10, 0.3)  # m must be an int
nx.write_edgelist(plaw, 'plaw.dat', data=False)
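# The edge lists written above can be read back into NetworkX (a small usage
# sketch, not part of the original script):
g = nx.read_edgelist('dense.dat', nodetype=int)
print(g.number_of_nodes(), g.number_of_edges())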
def SEN_dense_gnm_random_graph(n1,m1,n2,m2,n3,m3,pa,pb): """Creates a SEN model with an actor network of n1 elements, with a neighbourhood connectivity of p1, a user network of n2 elements, with a neighbourhood connectivity of p2, and a ecological network of n3 elements with a neighbourhood connectivity of p3. The probability of connections between the actor network and the user netwrok, and between the user network and the ecological network, are respectively passed through pa and pb""" # Actor network A = nx.dense_gnm_random_graph(n1, m1) A.graph['Network']='Actors' # Adding id for i in range(len(A)): A.node[i]['num'] = i+1 # Subnetwork for i in range(len(A)): A.node[i]['subnetwork'] = 1 # Adding random class (office dwellers/field people) p_class_bureau = 0.75 for i in range(len(A)): if random.random() <= p_class_bureau: A.node[i]['group'] = 0 else: A.node[i]['group'] = 0 # Adding randow weight # for n,nbrs in A.adjacency_iter(): # for nbr,eattr in nbrs.items(): # A[n][nbr]['weight'] = int(random.random()*8) # User network U = nx.dense_gnm_random_graph(n2, m2) U.graph['Network']='Actors' # Adding id for i in range(len(U)): U.node[i]['num'] = i+1001 # Subnetwork for i in range(len(U)): U.node[i]['subnetwork'] = 2 # Adding random class (office dwellers/field people) for i in range(len(U)): rnd=random.random() if rnd <= .2: U.node[i]['group'] = 1 if rnd > .2 and rnd <= .4: U.node[i]['group'] = 1 if rnd > .4 and rnd <= .6: U.node[i]['group'] = 1 if rnd > .6 and rnd <= .8: U.node[i]['group'] = 1 if rnd > .8: U.node[i]['group'] = 1 # Adding randow weight # for n,nbrs in U.adjacency_iter(): # for nbr,eattr in nbrs.items(): # U[n][nbr]['weight'] = int(random.random()*8) # Ecological network E = nx.dense_gnm_random_graph(n3, m3) E.graph['Network']='Actors' # Adding id for i in range(len(E)): E.node[i]['num'] = i+10001 # Subnetwork for i in range(len(E)): E.node[i]['subnetwork'] = 3 # Adding class for i in range(len(E)): E.node[i]['group'] = 2 # Adding weight # for n,nbrs in E.adjacency_iter(): # for nbr,eattr in nbrs.items(): # E[n][nbr]['weight'] = 5 # joint the three subnetworks G = nx.disjoint_union_all([A,U,E]) # link some actors to some users for i in range(0, len(G)): for j in range(0, len(G)): if i != j: if G.node[i]['subnetwork'] == 1 and G.node[j]['subnetwork'] == 2: #if G.node[i]['group'] == 1 and G.node[j]['group'] == 2: if random.random() < pa: G.add_edge(i,j) # link some users to some patches for i in range(0, len(G)): G.node[i]['degreeold'] = G.degree(i) for i in range(0, len(G)): for j in range(0, len(G)): if i != j: #print str(j) + " " + str(G.degree(j)) if G.node[i]['subnetwork'] == 2 and G.node[j]['subnetwork'] == 3 and G.degree(j) - G.node[j]['degreeold'] < 4: if G.degree(j) - G.node[j]['degreeold'] < 4: if random.random() < pb: G.add_edge(i,j) #remove lone nodes for i in range(0, len(G)): if G.degree(i) == 0: G.remove_node(i) # write json formatted data d = json_graph.node_link_data(G) json.dump(d, open('/Users/Rodolphe/Dropbox/Public/d3/examples/force/force.json','w')) nx.write_graphml(G, "graph.graphml") return G
[ic, "itertive_compression"], # [rn, "randomized_opt"], # disabled because too slow [mif, "maximum_induced_forest"]] graph = nx.erdos_renyi_graph(9, 0.5) test_algorithms(algorithms, graph) graph = nx.erdos_renyi_graph(10, 0.7) test_algorithms(algorithms, graph) graph = nx.erdos_renyi_graph(11, 0.6) test_algorithms(algorithms, graph) graph = nx.erdos_renyi_graph(12, 0.3) test_algorithms(algorithms, graph) graph = nx.erdos_renyi_graph(13, 0.5) test_algorithms(algorithms, graph) graph = nx.erdos_renyi_graph(15, 0.8) test_algorithms(algorithms, graph) graph = nx.erdos_renyi_graph(16, 0.8) test_algorithms(algorithms, graph) graph = nx.dense_gnm_random_graph(10, 25) test_algorithms(algorithms, graph) graph = nx.newman_watts_strogatz_graph(11, 5, 0.2) test_algorithms(algorithms, graph)
#!/usr/bin/env python
import networkx as nx
import random
import graph
from annealing import Annealing
from exact import is_graph_secure

if __name__ == '__main__':
    g = nx.dense_gnm_random_graph(50, 200)
    nodes = list(g.nodes())
    ss = set()
    for i in range(20):
        ss.add(random.choice(nodes))
    graph = graph.GraphWithSecureSet(g, ss)
    a = Annealing(graph)
    # TODO: 3/p times
    print(a.security())
    print(is_graph_secure(graph))
def grow_graph(reverserandom = False, outgoingrandom = False, incomingrandom = False, totalrandom = False, usenx= True, force_connected = True, sparse = True, plot = False, directed = True, randomgrowth=False, wholegrowth=False,growthfactor=100, num_measurements = 10, verbose = True, connected = True, plotx = 'nodegrowth', ploty = 'maxclique', ploty2 = 'modval',drawgraph = 'triangulated', draw= True, drawspectral = True, getgraph = True): random = False #set as false, gets made to truth later if the person passes a type of random graph they want #make sure user does not want to draw and plot at the same time. if plot == True: draw = False if draw == True: plot = False #you cannot plot and draw at the same time # initialize database graph_db = database() #get the pickled dictionary of nodes and node names (for the database) that is made while the csv file is loaded. try: nodes = open('nodes.p','r') nodes = pickle.load(nodes) except: print 'Your graph is empty. Please load a graph into the database.' 1/0 data = [] if graph_db.size < 2: print 'Your graph is empty. Please load a graph into the database.' 1/0 # this will grow the whole graph. Takes a while for big ones! if wholegrowth == True: growthfactor = len(nodes) #make a list of all the nodes in the database to randomly choose from, but only if force connected graph option is false. if force_connected == False: possiblenodes = nodes print 'Graph will not be fully connected, use "force_connected = True" if you want it to be fully connected' # here we figure out at what points to measure if the user wants a spare measumement or to measure every time a node is added. This speeds up big graph growths a lot! if sparse == True: sparsemeasurements = [10,15,growthfactor] measurements = np.logspace(1, (int(len(str(growthfactor))))-1, num_measurements) for x in measurements: sparsemeasurements.append(int(x)) else: sparsemeasurements = range(2,growthfactor) # this will actually only work for directed graph because we are moralizing, but I want to leave the option for later. Perhaps I can just skip moralization for undirected graphs. if directed: graph = nx.DiGraph() else: graph = nx.graph() # pick an initial node to start from initial_node = np.random.choice(nodes.keys()) #randomly get a node to add from dictionary if verbose: print 'Starting from ' + initial_node initial_used = 0 #keep track of how many times the initial node was used nodegrowth = graph.number_of_nodes() edgegrowth = graph.number_of_edges() while nodegrowth < growthfactor: # make sure we aren't above how many nodes we want to measure in the graph #start off finding a node to add to the graph. if force_connected == True: try: possiblenodes = graph.nodes() # get all nodes in graph. fromnode = Random.choice(possiblenodes) #pick random node in graph to grow from(add one of its nieghbors). This uses the random module, not np.random if verbose: print 'Using ' + str(fromnode) + ' to find new node' except: #this is because you can't do random from one node at the start. fromnode = initial_node graph.add_node(fromnode) if verbose: print 'using initial node' initial_used = initial_used+1 if initial_used > 5: 1/0 fromnode = nodes[fromnode] #get DB version of the node. #get all relationships in graph DB for that node so we can pick new_node_rels = list(graph_db.match(end_node = fromnode, bidirectional=True)) new_rel = Random.choice(new_node_rels) #randomly pick one of them, thus picking a node to add to graph. 
This uses the random module, not np.random # is the new node a parent or child of the node we are growing from? if new_rel.end_node == fromnode: new_node = new_rel.start_node if new_rel.start_node == fromnode: new_node = new_rel.end_node assert new_node != fromnode if force_connected == False: # if not connected, we can just pick from the pickled dictionary of nodes in the database new_node = np.random.choice(possiblenodes.values()) print 'adding' + str(new_node) #add the nodes to the graph, connecting it to nodes in the graph that it is connected to. # go through the list of edges that have the new node as a part of it, and only add the edge if they are between the new node and a node in the graph already. rels = list(graph_db.match(start_node=new_node)) #query graph for edges from that node for edge in rels: #get the string name of the node newnodename = edge.start_node.get_properties() newnodename = newnodename.get('name') newnodename = newnodename.encode() endnodename = edge.end_node.get_properties() endnodename = endnodename.get('name') endnodename = endnodename.encode() if newnodename not in graph: #check to see if new node is in graph graph.add_node(newnodename) # add if not if verbose: print 'added ' + str(newnodename) if endnodename in graph: #check to see if end node is in graph graph.add_edge(newnodename, endnodename) #add it if it is if verbose: print 'connected ' + newnodename +' to '+ endnodename rels = list(graph_db.match(end_node=new_node)) #query graph for edges to that node for edge in rels: newnodename = edge.end_node.get_properties() newnodename = newnodename.get('name') newnodename = newnodename.encode() startnodename = edge.start_node.get_properties() startnodename = startnodename.get('name') startnodename = startnodename.encode() if newnodename not in graph: #check to see if new node is in graph graph.add_node(newnodename) # add if not if verbose: print 'added ' + str(newnodename) if startnodename in graph: #check to see if end node is in graph graph.add_edge(startnodename, newnodename) #add it if it is if verbose: print 'connected ' + startnodename +' to '+ newnodename #measure the nodegrowth and edge growth nodegrowth = len(graph.nodes()) edgegrowth = len(graph.edges()) if verbose: print 'Graph has ' + str(nodegrowth) + ' nodes.' if verbose: print 'Graph has ' + str(edgegrowth) + ' edges.' #here I make a few random graphs to compare the growth of the real graph. They are all the same size and same edges, but different controls. # Reverse the direction of the edges. The in-degree distribution in this graph is power-law, but the out-degree is exponential tailed. So this is just a check that degree distribution is irrelevant. if reverserandom == True: random_graph = graph.reverse() random = True # Keep the number of outgoing links for each node the same, but randomly allocating their destinations. This should break modularity, put preserves out-degree. if outgoingrandom == True: random_graph = graph.copy() for edge in random_graph.edges(): parent = edge[0] child = edge[1] random_graph.remove_edge(parent,child) newchild = parent while newchild == parent: #so that we don't get a self loop. newchild = np.random.choice(graph.nodes()) random_graph.add_edge(parent,newchild) random = True # Same thing, but this time fixing the number of incoming links and randomly allocating their origins. Likewise, but preserves in-degree. 
if incomingrandom ==True: random_graph = graph.copy() for edge in random_graph.edges(): parent = edge[0] child = edge[1] random_graph.remove_edge(parent,child) newparent = child while newparent == child: newparent = np.random.choice(graph.nodes()) random_graph.add_edge(newparent,child) random = True #gives a graph picked randomly out of the set of all graphs with n nodes and m edges. This preserves overall degree, and number of nodes/edges, but is completeley random to outdegree/indegree. if totalrandom == True: numrandomedges = graph.number_of_edges() numrandomnodes = graph.number_of_nodes() random_graph = nx.dense_gnm_random_graph(numrandomnodes, numrandomedges) random_graph = random_graph.to_directed() random = True #here is where we measure everything about the graph if nodegrowth in sparsemeasurements: #we only measure every now and then in sparse. start_time = time() if nodegrowth > 5: modgraph = nx.Graph(graph) #make it into a undirected networkx graph. This measures moduilarity on undirected version of graph! partition = best_partition(modgraph) #find the partition with maximal modularity modval = modularity(partition,modgraph) #calculate modularity with that partition sleep(2) if random == True: random_modgraph = random_graph.to_undirected() #make it into a undirected networkx graph. This measures moduilarity on undirected version of graph! random_partition = best_partition(random_modgraph) #find the partition with maximal modularity random_modval = modularity(random_partition,random_modgraph) #calculate modularity with that partition #option to use python nx to moralize and triangulate if usenx == True: moralized = moral_graph(graph) if random == True: random_moralized = moral_graph(random_graph) else: #use Octave to run Matlab Bayes Net Toolbox, NOT SET UP FOR RANDOM GRAPHS YET. moralized = nx.to_numpy_matrix(graph) # make nx graph into a simple matrix ismoral = False tries = 0 while ismoral == False and tries < 5: #sometimes oct2py takes too long to return I think try: moralized, moral_edges = octave.moralize(moralized) ismoral = True except: ismoral = False print 'Octave Error, trying again!' tries = tries + 1 order = range(len(moralized)+1) # BNT needs order of nodes to triangulate order = order[1:] random_order = range(len(random_moralized)+1) # BNT needs order of nodes to triangulate random_order = random_order[1:] # I think you have to shift the space because you are going from 0 index to 1 idndex istriangulated = False tries = 0 while istriangulated == False and tries < 5: #sometimes oct2py takes too long to return I think try: moralized = nx.to_numpy_matrix(moralized) # have to make it into matrix. triangulated, cliques, fill_ins = octave.triangulate(moralized,order) if random == True: random_moralized = nx.to_numpy_matrix(random_moralized) # have to make it into matrix. random_triangulated, random_cliques, random_fill_ins = octave.triangulate(random_moralized, random_order) istriangulated = True tries = 5 except: istriangulated = False print 'Octave Error, trying again!' 
tries = tries + 1 #loop through cliques and get the size of them cliquesizes = [] #empty array to keep clique sizes in for x in cliques: size = len(x) cliquesizes.append(size) maxclique = max(cliquesizes) #get the biggest clique avgclique = np.mean(cliquesizes) # get the mean of the clique sizes #do the same for random graph cliques if random == True: random_cliquesizes = [] #empty array to keep clique sizes in #loop through cliques and get the size of them for x in random_cliques: size = len(x) random_cliquesizes.append(size) random_maxclique = max(random_cliquesizes) #get the biggest clique random_avgclique = np.mean(random_cliquesizes) # get the mean of the clique sizes end_time = time() #get end time run_time = end_time - start_time #time how long all the took #store the data! if random == True: data.append([nodegrowth, edgegrowth, modval, random_modval, maxclique, random_maxclique, avgclique, random_avgclique, run_time]) #store results if random == False: data.append([nodegrowth, edgegrowth, modval, maxclique,avgclique,run_time]) #store results sparsemeasurements.remove(nodegrowth) #so we don't calculate clique size more than once! if verbose: print 'took ' + str(run_time) + ' to run last computation' #this will always print, basic status update everytime clique size is measured. if random == True: print str(nodegrowth) + ' nodes, ', str(edgegrowth) + ' edges; ' + 'Modularity: ' + str(modval) + ', Random Modularity: ' +str(random_modval) + ', Largest Clique: ' + str(maxclique) + ', Largest Random Clique: ' + str(random_maxclique) if random == False: print str(nodegrowth) + ' nodes, ', str(edgegrowth) + ' edges; ' + 'Modularity: ' + str(modval) + ', Largest Clique: ' + str(maxclique) #this will redraw the plot everytime a computation is done. if plot == True: if random == False: df = pd.DataFrame(data, columns= ('nodegrowth','edgegrowth', 'modularity','maxclique','avgclique','run_time')) plt.close() fig = plt.figure(figsize=(24,16)) ax = fig.add_subplot(1,1,1) ax2 = ax.twinx() y1 = df[ploty] #user input, default to clique size y2 = df[ploty2] #user input, default to modularity x = df[plotx] #user input, default to nodes in graph. 
ax.set_xlabel('%s in Graph' %(plotx),fontsize=20) line1, = ax.plot(x, y1, label = ploty) line2, = ax2.plot(x, y2, label = ploty2, color = 'red') ax.set_ylabel(ploty,fontsize=20) ax2.set_ylabel(ploty2, fontsize=20) plt.suptitle('Graph Growth', fontsize=30) plt.legend((line1,line2),(ploty , ploty2), loc='upper center', frameon=False, fontsize=20) plt.show() if random == True: df = pd.DataFrame(data, columns= ('nodegrowth', 'edgegrowth', 'modval','random_modval', 'maxclique', 'random_maxclique', 'avgclique', 'random_avgclique', 'run_time')) plt.close() fig = plt.figure(figsize=(18,10)) ax = fig.add_subplot(1,1,1) ax2 = ax.twinx() ax3 = ax.twinx() y1 = df[ploty] y2 = df[ploty2] y3def = str('random_'+ploty) # i just add random to whatever the user inputs as the y stuff they want to plot y3 = df[y3def] y4def = str('random_'+ploty2) y4 = df[y4def] x = df[plotx] ax.set_xlabel('%s in Graph' %(plotx),fontsize=20) line1, = ax.plot(x, y1, label = ploty, color = 'blue') line2, = ax2.plot(x, y2, label = ploty2, color = 'green') line3, = ax.plot(x,y3, label = y3def, color = 'red') line4, = ax3.plot(x,y4, label = y4def, color = 'cyan') ax.set_ylabel(ploty,fontsize=20) ax2.set_ylabel(ploty2, fontsize=20) plt.suptitle('Graph Growth', fontsize=30) plt.legend((line1,line2,line3,line4), (ploty,ploty2,y3def,y4def),loc='upper center', frameon=False, fontsize=20)# plt.show() #draw the graph if draw == True: if drawgraph == 'triangulated': G = nx.from_numpy_matrix(triangulated) elif drawgraph == 'moralized': G = nx.from_numpy_matrix(moralized) elif drawgraph == 'directed': G = graph plt.close() if drawspectral == True: nx.draw_random(G, prog='neato') else: pos = nx.random_layout(G) # find node near center (0.5,0.5) dmin=1 ncenter=0 for n in pos: x,y=pos[n] d=(x-0.5)**2+(y-0.5)**2 if d<dmin: ncenter=n dmin=d # color by path length from node near center p=nx.single_source_shortest_path_length(G,ncenter) plt.figure(figsize=(15,15)) plt.suptitle(drawgraph + ' graph') nx.draw_networkx_edges(G,pos,nodelist=[ncenter],alpha=0.2) nx.draw_networkx_nodes(G,pos,nodelist=p.keys(), node_size=20, node_color=p.values(), cmap=plt.cm.Reds_r) plt.xlim(-0.05,1.05) plt.ylim(-0.05,1.05) plt.axis('off') plt.show() if random == True: df = pd.DataFrame(data, columns= ('nodegrowth', 'edgegrowth', 'modval','random_modval', 'maxclique', 'random_maxclique', 'avgclique', 'random_avgclique', 'run_time')) if random == False: df = pd.DataFrame(data, columns= ('nodegrowth','edgegrowth', 'modval','maxclique','avgclique','run_time')) if getgraph == True: return (graph, df) else: return(df)
from itertools import combinations
import networkx as nx
from random import choice

G = nx.dense_gnm_random_graph(5, 10)
t = list(G.edges())
p = 0.9
a = 0
r = 0
s = 'nothing'
num = []

def main():
    for i in range(0, 11):
        for subset in combinations(t, i):
            count = 0
            for j in subset:
                if len(subset) != 0:
                    count = count + 1
                    G.remove_edge(j[0], j[1])
            if len(list(nx.all_simple_paths(G, 0, 2))) > 0:
                s = 'up'
            else:
                s = 'down'
            r = (pow(1 - p, count)) * (pow(p, (10 - count)))
            for k in subset:
                G.add_edge(k[0], k[1])
            num.append([r, s])
    # print num
    for i in range(2, 3):
        r1 = 0