def test_random_partition_graph():
    G = nx.random_partition_graph([3, 3, 3], 1, 0, seed=42)
    C = G.graph['partition']
    assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}]
    assert len(G) == 9
    assert len(list(G.edges())) == 9

    G = nx.random_partition_graph([3, 3, 3], 0, 1)
    C = G.graph['partition']
    assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}]
    assert len(G) == 9
    assert len(list(G.edges())) == 27

    G = nx.random_partition_graph([3, 3, 3], 1, 0, directed=True)
    C = G.graph['partition']
    assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}]
    assert len(G) == 9
    assert len(list(G.edges())) == 18

    G = nx.random_partition_graph([3, 3, 3], 0, 1, directed=True)
    C = G.graph['partition']
    assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}]
    assert len(G) == 9
    assert len(list(G.edges())) == 54

    G = nx.random_partition_graph([1, 2, 3, 4, 5], 0.5, 0.1)
    C = G.graph['partition']
    assert C == [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}, {10, 11, 12, 13, 14}]
    assert len(G) == 15

    rpg = nx.random_partition_graph
    pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 1.1, 0.1)
    pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], -0.1, 0.1)
    pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 0.1, 1.1)
    pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 0.1, -0.1)

def test_random_partition_graph():
    G = nx.random_partition_graph([3, 3, 3], 1, 0)
    C = G.graph['partition']
    assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])])
    assert_equal(len(G), 9)
    assert_equal(len(list(G.edges())), 9)

    G = nx.random_partition_graph([3, 3, 3], 0, 1)
    C = G.graph['partition']
    assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])])
    assert_equal(len(G), 9)
    assert_equal(len(list(G.edges())), 27)

    G = nx.random_partition_graph([3, 3, 3], 1, 0, directed=True)
    C = G.graph['partition']
    assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])])
    assert_equal(len(G), 9)
    assert_equal(len(list(G.edges())), 18)

    G = nx.random_partition_graph([3, 3, 3], 0, 1, directed=True)
    C = G.graph['partition']
    assert_equal(C, [set([0, 1, 2]), set([3, 4, 5]), set([6, 7, 8])])
    assert_equal(len(G), 9)
    assert_equal(len(list(G.edges())), 54)

    G = nx.random_partition_graph([1, 2, 3, 4, 5], 0.5, 0.1)
    C = G.graph['partition']
    assert_equal(C, [set([0]), set([1, 2]), set([3, 4, 5]),
                     set([6, 7, 8, 9]), set([10, 11, 12, 13, 14])])
    assert_equal(len(G), 15)

    assert_raises(nx.NetworkXError, nx.random_partition_graph,
                  [1, 2, 3], 1.1, 0.1)
    assert_raises(nx.NetworkXError, nx.random_partition_graph,
                  [1, 2, 3], -0.1, 0.1)
    assert_raises(nx.NetworkXError, nx.random_partition_graph,
                  [1, 2, 3], 0.1, 1.1)
    assert_raises(nx.NetworkXError, nx.random_partition_graph,
                  [1, 2, 3], 0.1, -0.1)

def generateProbabilisticGraph():
    #G = nx.random_partition_graph([30,50,80],0.4,0.05)
    G = nx.random_partition_graph([30, 50, 120], 0.2, 0.02)
    #G = nx.gaussian_random_partition_graph(150,50,2,0.4,0.05)
    # assign each edge a random weight in (0, 1]
    for nodeA, nodeB in G.edges():
        G[nodeA][nodeB]['weight'] = -np.random.uniform() + 1
    return G

def test_graph_generator():
    n = 400
    m = 5
    seed = 0
    # G = nx.barabasi_albert_graph(n, m, seed)
    G = nx.random_partition_graph([100, 100, 200], .25, .01)
    sizes = [10, 90, 300]
    probs = [[0.25, 0.05, 0.02],
             [0.05, 0.35, 0.07],
             [0.02, 0.07, 0.40]]
    G = nx.stochastic_block_model(sizes, probs, seed=0)
    G = nx.newman_watts_strogatz_graph(400, 5, 0.5)
    A = nx.to_numpy_array(G)
    print(A)
    plt.pcolormesh(A)
    plt.show()
    s = sorted(G.degree, key=lambda x: x[1], reverse=True)
    # newmap = {s[i][0]: i for i in range(len(s))}
    # H = nx.relabel_nodes(G, newmap)
    # newmap = generate_node_mapping(G, type='community')
    # H = networkx_reorder_nodes(G, newmap)
    H = networkx_reorder_nodes(G, 'community')
    # B = nx.to_numpy_array(H)
    # plt.pcolormesh(B)
    # plt.imshow(B)
    # plt.show()
    visualize_graph_matrix(H)

def generate_dsjc(path, seed, num_vertices, k):
    """
    Cooked graphs from "Optimization by simulated annealing: an experimental
    evaluation; part II", DS Johnson, 1991:
    1) split vertices into k color classes
    2) add edges between pairs from different colors with p = k/(2(k-1))
       => avg degree n/2 (max edges for one vertex: n*(k-1)/k,
       wanted = p * max => p = wanted / max)
    3) select a representative of each color & add one k-clique
    """
    coloring = np.random.randint(0, k, num_vertices)
    _, partition = np.unique(coloring, return_counts=True)
    used_k = len(partition)
    graph = nx.random_partition_graph(partition, p_in=0,
                                      p_out=1. * used_k / (2 * (used_k - 1)),
                                      seed=seed)
    representatives = np.cumsum(partition) - 1
    clique = nx.complete_graph(used_k)
    nx.relabel_nodes(clique, dict(zip(range(used_k), representatives)), copy=False)
    graph.add_edges_from(clique.edges)
    save_networkx_graph_to_in(graph, path)
    store_parameters(path, method=generate_dsjc.__name__, seed=seed,
                     num_vertices=num_vertices, k=k,
                     other=dict(partition=partition.tolist()))

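# A minimal standalone sketch (not from the original source) checking the DSJC
# density claim above: with p_in = 0 and p_out = k/(2(k-1)), each vertex has
# about n*(k-1)/k eligible endpoints, so the expected average degree is ~n/2.
import networkx as nx
import numpy as np

np.random.seed(0)
n, k = 300, 5
coloring = np.random.randint(0, k, n)
_, sizes = np.unique(coloring, return_counts=True)
p_out = k / (2 * (k - 1))
G = nx.random_partition_graph(sizes.tolist(), 0, p_out, seed=0)
avg_degree = 2 * G.number_of_edges() / n
print(avg_degree, n / 2)  # the two values should be close
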
def partition_graph(n, samples, p_in, p_out, directed=False, seed=None,
                    per_population=False):
    if p_in == p_out:
        return nx.erdos_renyi_graph(n, p_in)
    sizes = partition_sizes(n, samples, per_population=per_population)
    if (p_in, p_out) == (1, 0):
        return build_nx_graph(sizes)
    return nx.random_partition_graph(sizes, p_in, p_out, seed=seed,
                                     directed=directed)

def generateProbabilisticGraph():
    G = nx.random_partition_graph([30, 50, 120], 0.2, 0.02)
    for nodeA, nodeB in G.edges():
        G[nodeA][nodeB]['weight'] = -np.random.uniform() + 1
        G[nodeA][nodeB]['length'] = np.random.uniform(1, 3)
        #G[nodeA][nodeB]['length'] = 1
    return G

def build_graph(self):
    # make a partite network
    self.graph = nx.random_partition_graph(self.nodes, 0, 0)
    if self.p == 0:
        return
    lp = math.log(self.p)
    for p1, p2 in pairwise(self.graph.graph['partition']):
        p1 = sorted(p1)
        p2 = sorted(p2)
        for i in p1:
            for j in p2:
                if int(self.p) == 1:
                    self.graph.add_edge(i, j)
                    continue
                # log(1 - U) / log(p) >= 1 holds with probability p
                lr = math.log(1.0 - random.random())
                if lr / lp >= 1:
                    self.graph.add_edge(i, j)
    # prune unconnected nodes (copy the node list so removal is safe)
    if self.prune:
        for node in list(self.graph.nodes()):
            if nx.is_isolate(self.graph, node):
                self.graph.remove_node(node)
    self.node_labels_to_ints()
    self.colour()

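# A small standalone check (my addition, hedged) of the log-ratio test used
# above: log(1-U)/log(p) >= 1 is equivalent to U >= 1 - p for 0 < p < 1, so
# the edge is added with probability exactly p.
import math
import random

random.seed(1)
p = 0.3
lp = math.log(p)
trials = 100_000
hits = sum(math.log(1.0 - random.random()) / lp >= 1 for _ in range(trials))
print(hits / trials)  # should be close to 0.3
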
def gen_sim_graph(g, iters=1):
    graphs = []
    for _ in range(iters):
        szs, p_in, p_btw = get_comm_stats(g, verbose=False)
        gen_gr = nx.random_partition_graph(szs, p_in, p_btw)
        graphs.append(gen_gr)
    return graphs

def generate_ssbm_graphs(num_nodes: int,
                         num_communities: int,
                         num_graphs: int,
                         avg_degree_inside: float,
                         avg_degree_between: float,
                         seed: int = 0):
    """
    Generates graphs using the Symmetric Stochastic Block Model (SSBM)
    with the specified parameters.

    num_nodes - number of nodes
    num_communities - number of communities (should be k >= 5)
    num_graphs - number of graphs to generate
    avg_degree_inside - average node degree inside communities
    avg_degree_between - average node degree between communities
    seed - seed used to generate graphs
    """
    community_size = num_nodes // num_communities
    probability_inside_community = avg_degree_inside / num_nodes
    probability_between_communities = avg_degree_between / num_nodes
    for i in range(num_graphs):
        graph = nx.random_partition_graph(
            [community_size for _ in range(num_communities)],
            probability_inside_community,
            probability_between_communities,
            seed=seed + i)
        yield graph

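# Hedged usage sketch for the generator above: draw three SSBM graphs and
# print their sizes. The parameter values are illustrative, not from the source.
for g in generate_ssbm_graphs(num_nodes=500, num_communities=5, num_graphs=3,
                              avg_degree_inside=20.0, avg_degree_between=2.0):
    print(g.number_of_nodes(), g.number_of_edges())
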
def generate_sbm_partition(name: str):
    sizes, p_in, p_out, seed = parse.parse(
        'SBM_sizes_{}_p_in_{}_p_out_{}_seed_{}', name)
    return nx.random_partition_graph(
        [int(x) for x in sizes.split('_')],
        float(p_in),
        float(p_out),
        int(seed)
    ).graph['partition']

def generate_sbm(sizes, p_in: float, p_out: float, seed, weighted=False):
    G = nx.random_partition_graph(sizes, p_in, p_out, seed)
    if weighted:
        for edge in G.edges():
            G[edge[0]][edge[1]]['weight'] = 1
    return GraphInfo(G, 'SBM_sizes_{}_p_in_{}_p_out_{}_seed_{}'.format(
        '_'.join(str(x) for x in sizes), p_in, p_out, seed
    ))

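# Hedged round-trip sketch (my addition): the name built by generate_sbm
# matches the pattern that generate_sbm_partition parses, so the partition can
# be rebuilt from the string alone (same sizes, probabilities and seed give
# the same graph).
name = 'SBM_sizes_{}_p_in_{}_p_out_{}_seed_{}'.format(
    '_'.join(['50', '50']), 0.2, 0.01, 7)
partition = generate_sbm_partition(name)
print([len(c) for c in partition])  # [50, 50]
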
def social_graph(groups, inner, outer, plot):
    # create graph
    G = nx.random_partition_graph(groups, inner, outer)
    # plot graph
    if plot:
        nx.draw_networkx(G)
        plt.show()
    # return numpy adjacency matrix of the social graph:
    # ones mean an edge between nodes, zeros otherwise
    c = nx.to_numpy_array(G)
    return c

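# Hedged usage sketch: two groups of five, dense inside and sparse between;
# the returned adjacency matrix is symmetric with a zero diagonal.
adj = social_graph([5, 5], inner=0.8, outer=0.05, plot=False)
print(adj.shape)  # (10, 10)
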
def get_edge_list():
    du = GC.d_or_u == 'd'
    cn = random_partition_graph(GC.rpg_sizes, GC.rpg_p_in, GC.rpg_p_out,
                                directed=du, seed=GC.random_number_seed)
    if GC.random_number_seed is not None:
        GC.random_number_seed += 1
    out = GC.nx2favites(cn, GC.d_or_u)
    f = gopen(expanduser("%s/contact_network.txt.gz" % GC.out_dir), 'wb', 9)
    f.write('\n'.join(out).encode())
    f.write(b'\n')
    f.close()
    f = gopen(expanduser("%s/contact_network_partitions.txt.gz" % GC.out_dir), 'wb', 9)
    f.write(str(cn.graph['partition']).encode())
    f.write(b'\n')
    f.close()
    GC.cn_communities = [{str(n) for n in c} for c in cn.graph['partition']]
    return out

def generateRandomGraph(clusterSize, clusterNums, pIntra, pInter):
    # resample until the generated graph is connected
    while True:
        clusters = [clusterSize] * clusterNums
        G = nx.random_partition_graph(clusters, pIntra, pInter)
        if nx.is_connected(G):
            break
    partition = G.graph['partition']
    idx = 0
    for cluster in partition:
        cluster = list(cluster)
        for node in cluster:
            G.nodes[node]['x'] = (idx + 1)
        idx += 1
    return G

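# Hedged usage sketch: three clusters of 20 nodes; the retry loop above keeps
# resampling until the graph is connected, so sparse pInter values still work.
G = generateRandomGraph(clusterSize=20, clusterNums=3, pIntra=0.5, pInter=0.02)
print(nx.is_connected(G), G.nodes[0]['x'])
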
def get_random_partition_graph(left, right, p_in=0.9, p_out=0.2, seed=42, **kwargs):
    right = int(right)
    # kwargs are ignored
    print('Generating random partition graph with first partition of size {} '
          'and second of size {}'.format(left, right))
    sizes = [left, right]
    G = nx.random_partition_graph(sizes, p_in, p_out, seed=seed)
    solution_bitstring = [-1] * sizes[0] + [1] * sizes[1]
    return G, solution_bitstring

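# Hedged usage sketch: a 10+15 node graph; the bitstring labels the planted
# bipartition with -1 for the first block and +1 for the second.
G, solution = get_random_partition_graph(10, 15)
print(G.number_of_nodes(), solution.count(-1), solution.count(1))
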
def load_SBM(block_sizes, p_in, p_out, seed):
    nb_nodes = np.sum(block_sizes)
    A = np.zeros((nb_nodes, nb_nodes), dtype=bool)
    ys = np.zeros(nb_nodes, dtype=int)
    G = nx.random_partition_graph(block_sizes, p_in, p_out, seed=seed)
    # dense boolean adjacency matrix
    for node, ad in G.adjacency():
        A[node, list(ad.keys())] = True
    # block index of each node as its label
    for cls, points in enumerate(G.graph["partition"]):
        ys[list(points)] = cls
    return A, ys, G

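# Hedged usage sketch: two blocks of 5 nodes, complete inside, empty between.
A, ys, G = load_SBM([5, 5], p_in=1.0, p_out=0.0, seed=0)
print(A.shape)          # (10, 10)
print(ys)               # [0 0 0 0 0 1 1 1 1 1]
print(A[:5, 5:].any())  # False: no edges between the two blocks
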
def my_gaussian_random_partition_graph(n, s, v, p_in, p_out):
    if s > n:
        raise nx.NetworkXError("s must be <= n")
    assigned = 0
    sizes = []
    while True:
        size = int(random.normalvariate(s, v))
        if size < 1:
            # how to handle 0 or negative sizes?
            continue
        if assigned + size >= n:
            sizes.append(n - assigned)
            break
        assigned += size
        sizes.append(size)
    # pass seed/directed as keywords: positionally, False would land on seed
    # and None on directed, which is not what is intended
    return nx.random_partition_graph(sizes, p_in, p_out, seed=None, directed=False)

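# Hedged usage sketch: community sizes are drawn from N(s, v); the final
# community absorbs the remainder so the sizes always sum to n.
G = my_gaussian_random_partition_graph(n=100, s=20, v=4, p_in=0.3, p_out=0.01)
print(G.number_of_nodes(), [len(c) for c in G.graph['partition']])
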
def random_networks():
    RANDOM_SEED = 0

    # Dictionary to store all nx graphs
    nx_graphs = {}

    # Small graphs
    # N_SMALL = 200
    # nx_graphs['er-small'] = nx.erdos_renyi_graph(n=N_SMALL, p=.03, seed=RANDOM_SEED)  # Erdos-Renyi
    # nx_graphs['ws-small'] = nx.watts_strogatz_graph(n=N_SMALL, k=11, p=.1, seed=RANDOM_SEED)  # Watts-Strogatz
    # nx_graphs['ba-small'] = nx.barabasi_albert_graph(n=N_SMALL, m=6, seed=RANDOM_SEED)  # Barabasi-Albert
    # nx_graphs['pc-small'] = nx.powerlaw_cluster_graph(n=N_SMALL, m=6, p=.02, seed=RANDOM_SEED)  # Powerlaw Cluster
    # nx_graphs['sbm-small'] = nx.random_partition_graph(sizes=[N_SMALL // 10] * 10, p_in=.1, p_out=.01,
    #                                                    seed=RANDOM_SEED)  # Stochastic Block Model

    # Larger graphs
    N_LARGE = 2000
    # nx_graphs['er-large'] = nx.erdos_renyi_graph(n=N_LARGE, p=.03, seed=RANDOM_SEED)  # Erdos-Renyi
    # nx_graphs['ws-large'] = nx.watts_strogatz_graph(n=N_LARGE, k=11, p=.1, seed=RANDOM_SEED)  # Watts-Strogatz
    nx_graphs['ba-large'] = nx.barabasi_albert_graph(
        n=N_LARGE, m=6, seed=RANDOM_SEED)  # Barabasi-Albert
    nx_graphs['pc-large'] = nx.powerlaw_cluster_graph(
        n=N_LARGE, m=6, p=.02, seed=RANDOM_SEED)  # Powerlaw Cluster
    nx_graphs['sbm-large'] = nx.random_partition_graph(
        sizes=[N_LARGE // 10] * 10, p_in=.05, p_out=.005,
        seed=RANDOM_SEED)  # Stochastic Block Model

    # Remove isolates from random graphs
    # (materialize the generator once; iterating it twice would yield nothing)
    for g_name, nx_g in nx_graphs.items():
        isolates = list(nx.isolates(nx_g))
        if len(isolates) > 0:
            for isolate_node in isolates:
                nx_graphs[g_name].remove_node(isolate_node)

    for name, g in nx_graphs.items():
        if nx.number_connected_components(g) > 1:
            print('Unconnected graph: ', name)
        visualization_file_name = './visualizations/{0}-visualization.png'.format(name)
        statistics_file_name_pkl = './network-statistics/{0}-statistics.pkl'.format(name)
        statistics_file_name_json = './network-statistics/{0}-statistics.json'.format(name)
        title = "Random NetworkX Graph: " + name
        save_visualization(g, visualization_file_name, title)

def household_scalefree(hhsizes, lam, kcrit):
    """
    Build a scale-free network with cutoff, based on fully connected cliques
    (households), where every node has k out-of-household contacts and k is
    Poisson distributed with parameter lam. The out-of-household contacts are
    grown sequentially, with an adapted Barabasi-Albert rule with exponential
    cut-off with parameter kcrit.

    :param hhsizes: list of ints, sizes of the fully-connected households
    :param lam: expected value of the Poisson distribution for the number of
        out-of-household contacts
    :param kcrit: critical number of contacts for cut-off
    :return: the household graph
    """
    G = nx.random_partition_graph(hhsizes, 1, 0)
    hhsizescumsum = np.array(hhsizes).cumsum()
    nx.set_node_attributes(
        G, {node: hhsizes[bisect(hhsizescumsum, node)] for node in G},
        name='householdsize')
    n = G.number_of_nodes()
    rng = npr.default_rng()
    ks = rng.poisson(lam=lam, size=n)
    weights = np.ones(n, dtype=float)
    nodes = np.arange(n)
    for i, k in enumerate(ks):
        while k > 0:
            target = rn.choices(nodes, weights=weights)[0]
            # redraw while the edge exists or is a self-loop; the else branch
            # runs only if the redraw loop exits without the degree cap break
            while G.has_edge(i, target) or i == target:
                if G.degree(i) >= n - 1:
                    break
                target = rn.choices(nodes, weights=weights)[0]
            else:
                G.add_edge(i, target)
            hhsize = G.nodes[i]['householdsize']
            keff = G.degree(i) + 2 - hhsize
            weights[i] = keff * exp(-keff / kcrit)
            hhsize = G.nodes[target]['householdsize']
            keff = G.degree(target) + 2 - hhsize
            weights[target] = keff * exp(-keff / kcrit)
            k -= 1
    return G

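# Hedged usage sketch (assumes the imports the function relies on: numpy.random
# as npr, random as rn, bisect, and exp from math): five households of size 4
# with on average two outside contacts per node.
G = household_scalefree([4, 4, 4, 4, 4], lam=2.0, kcrit=10.0)
degrees = [d for _, d in G.degree()]
print(G.number_of_nodes(), sum(degrees) / len(degrees))
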
def get_initial_node(params):
    # get input values from the dictionary
    comm_size = int(params['comm_size'])
    num_comms = int(params['num_comms'])
    p_in = params['p_in']
    p_out = params['p_out']
    latent_days_threshold = params['latent_days_threshold']
    infected_days_threshold = params['infected_days_threshold']
    symp_prob_threshold = params['symp_prob_threshold']

    # initialise the social network
    G = nx.random_partition_graph([comm_size] * num_comms, p_in, p_out)

    # initialise network attributes, assigning initial values to every node
    nx.set_node_attributes(G, 'S', 'state')  # disease state
    nx.set_node_attributes(G, None, 't_I')  # time of infection
    nx.set_node_attributes(G, None, 's_I')  # source of infection
    nx.set_node_attributes(G, 0, 'latent_period')  # latent time
    nx.set_node_attributes(G, 0, 'infected_period')  # infected time
    nx.set_node_attributes(G, None, 'symptomatic')  # whether symptomatic
    nx.set_node_attributes(G, 0, 'c_observed')  # the color of observed infected time
    nx.set_node_attributes(G, 0, 'c_true')  # the color of true infected time
    nx.set_node_attributes(G, 0, 't_observed')  # observed infected time

    # choose a single node as the initial infected node and assign it state I
    seed_node = np.random.randint(num_comms * comm_size)
    G.nodes[seed_node]['state'] = 'I'
    G.nodes[seed_node]['t_I'] = 1  # observation starts at its beginning
    G.nodes[seed_node]['s_I'] = seed_node
    # randomly select an infected period
    N = np.random.randint(infected_days_threshold)
    # 0 means asymptomatic, 1 means symptomatic
    G.nodes[seed_node]['symptomatic'] = 1
    # randomly select a latent period
    G.nodes[seed_node]['latent_period'] = np.random.randint(latent_days_threshold)
    G.nodes[seed_node]['infected_period'] = N
    # next_state holds the state for the next time stamp
    nx.set_node_attributes(G, nx.get_node_attributes(G, 'state'), 'next_state')
    return G, seed_node

def download(self):
    if os.path.isfile(os.path.join(self.root, self.raw_file_names[2])):
        return
    sizes = [self.nodes_per_class for _ in range(self.n_classes)]
    G = nx.random_partition_graph(sizes, self.p_in, self.p_out, directed=False)
    # read the block label of each node via the public accessor
    y = [G.nodes[n]["block"] for n in range(len(G.nodes))]
    y_dict = {n: G.nodes[n]["block"] for n in range(len(G.nodes))}
    nx.set_node_attributes(G, y_dict, "y")
    for n in range(len(G.nodes)):
        del G.nodes[n]["block"]
    adj_dict = {u: list(v_dict.keys()) for u, v_dict in G.adj.items()}
    pickle.dump(adj_dict, open(self.raw_paths[0], "wb"))
    y_one_hot = np.eye(self.n_classes)[y]
    np.save(self.raw_paths[2], y_one_hot)
    make_x(path=self.raw_dir, name=self.name, y_one_hot=y_one_hot, save=True)

def generateRandomGraphGeometric(clusterNums, minSize=5, pIntra=0.7, pInter=0.01, g=0.08):
    # resample until the generated graph is connected
    while True:
        clusters = (np.random.geometric(g, size=clusterNums) + minSize).tolist()
        G = nx.random_partition_graph(clusters, pIntra, pInter)
        if nx.is_connected(G):
            break
    partition = G.graph['partition']
    idx = 0
    for cluster in partition:
        cluster = list(cluster)
        for node in cluster:
            G.nodes[node]['x'] = (idx + 1)
        idx += 1
    return G

def main(args):
    (sizes, p_in, p_out, generator_type) = (
        args["sizes"], args["p_in"], args["p_out"], args["generator_type"])
    visualize, out_path = args["visualize"], args["out_path"]
    cull_disconnected = args['cull_disconnected']
    connect_disconnected = args['connect_disconnected']
    shuffle_labels = args['shuffle_labels']

    appm = nx.random_partition_graph(sizes, p_in, p_out)
    signal_generator = SIGNAL_GENERATORS[generator_type]
    signal = signal_generator(len(args["sizes"]))
    add_signal_to_graph(appm, signal)

    if cull_disconnected:
        cull_disconnected_nodes(appm)
        appm = nx.relabel.convert_node_labels_to_integers(appm, 0)
    if connect_disconnected:
        connect_disconnected_nodes(appm)
        appm = nx.relabel.convert_node_labels_to_integers(appm, 0)
    if shuffle_labels:
        random_labels = list(range(appm.number_of_nodes()))
        np.random.shuffle(random_labels)
        mapping = {node: label for node, label in zip(appm.nodes(), random_labels)}
        appm = nx.relabel_nodes(appm, mapping, copy=True)
    if visualize:
        draw_partitioned_graph(appm)
    if out_path is not None:
        if out_path.strip(".").strip("/").split("/")[0] == "data":
            pathlib.Path('./data').mkdir(parents=True, exist_ok=True)
        dump_graph(appm, out_path)
    else:
        return appm

def community_graphs():
    print("Community graphs for social networks")
    print("Caveman graph")
    G = nx.caveman_graph(2, 13)
    draw_graph(G)
    print("Connected Caveman graph")
    G = nx.connected_caveman_graph(2, 3)
    draw_graph(G)
    print("Relaxed caveman")
    G = nx.relaxed_caveman_graph(2, 5, 0.2)
    draw_graph(G)
    print("Random partition graph")
    G = nx.random_partition_graph([10, 10, 10], .25, .01)
    draw_graph(G)
    print(len(G))
    partition = G.graph['partition']
    print(len(partition))
    print("Planted partition graph")
    G = nx.planted_partition_graph(4, 3, 0.5, 0.1, seed=42)
    draw_graph(G)
    print("Gaussian random partition graph")
    G = nx.gaussian_random_partition_graph(40, 10, 10, .25, .1)
    print(len(G))
    draw_graph(G)

def model(contacts_per_host, mu, sigma, beta, r, tao, gamma, n_loci, n_nodes,
          z_out, n_com, randomness, n_seeds, n_steps, community_type):
    # Attributes used for network
    P = randomness
    num_people = n_nodes
    MAX_TIME_STEPS = n_steps  # max time steps
    gamma = gamma  # degree of cross-immunity
    beta = beta  # probability of infection
    mu = mu  # probability of the host losing infection at a time step (used for testing at the moment)
    sigma = sigma  # probability of host losing immunity at a time step
    tao = tao  # probability of mutation
    R = r  # probability of recombination
    div_sum = 0
    N = n_loci  # number of loci

    # network setup
    n_communities = n_com
    n_out = z_out
    n_in = 10 - n_out
    p_out = n_out / (num_people / n_communities)
    p_in = n_in / (num_people / n_communities)
    partitions = []
    comm_size = int(num_people / n_communities)
    sizes = []
    for i in range(n_communities):
        sizes.append(comm_size)

    # community type: 1 == random, 0 == linear
    if community_type == 1:
        G = nx.random_partition_graph(sizes, p_in, p_out)
    else:
        G = linear_community_network(n_communities, num_people, n_in, n_out)
    for i in range(n_communities):
        G1 = G.graph['partition'][i]
        partitions.append(G1)

    # adding attributes to nodes
    for i in range(n_communities):
        G1 = partitions[i]
        for n, v in G.nodes.items():
            v['current_infected'] = set()
            v['current_immune'] = set()
            v['newly_infected'] = set()
            v['newly_recovered'] = set()
            v['next_immune'] = set()
            if n in G1:
                v['index'] = i

    strain_space = generate_ss(N)
    communities_strain_space = []
    for i in range(n_communities):
        temp = []
        for strain in strain_space:
            temp.append(tuple([i, strain]))
        communities_strain_space.append(temp)

    # Seed infection
    nodes_per_community = int(n_seeds / n_communities)
    # RANDOM SEEDING (sample from a list: sampling from a set is an error
    # in recent Python versions)
    for i in range(n_communities):
        G1 = partitions[i]
        seed_nodes = random.sample(list(G1), nodes_per_community)
        for node in seed_nodes:
            strain = choice(communities_strain_space[i])
            G.nodes[node]['current_infected'].add(strain)

    # Record per-community diversity (and discordance if needed)
    communities_diversity = []
    for i in range(n_communities):
        communities_diversity.append(0)

    # Start blank record of host immunity per community
    host_immune_com = []
    for i in range(n_communities):
        host_immune = {}
        for n in range(len(communities_strain_space)):
            for strain in communities_strain_space[n]:
                host_immune[strain] = []
        host_immune_com.append(host_immune)

    # Start blank record of hosts infected per community
    host_infected_com = []
    for i in range(n_communities):
        host_infected = {}
        for n in range(len(communities_strain_space)):
            for strain in communities_strain_space[n]:
                host_infected[strain] = []
        host_infected_com.append(host_infected)

    # Start blank record of total hosts infected in the network
    total_infected = {}
    for n in range(len(communities_strain_space)):
        for strain in communities_strain_space[n]:
            total_infected[strain] = []

    # Start blank record of total hosts immune in the network
    total_immune = {}
    for n in range(len(communities_strain_space)):
        for strain in communities_strain_space[n]:
            total_immune[strain] = []

    record_total_infected(G, total_infected, num_people)
    record_total_immune(G, total_immune, num_people)
    for i in range(n_communities):
        G1 = partitions[i]
        host_immune_temp = copy.deepcopy(host_immune_com[i])
        host_infected_temp = copy.deepcopy(host_infected_com[i])
        record_com_immune(G, host_immune_temp, num_people, G1)
        record_com_infected(G, host_infected_temp, num_people, G1)
        host_immune_com[i] = host_immune_temp
        host_infected_com[i] = host_infected_temp

    for t in range(1, MAX_TIME_STEPS):
        for n, v in G.nodes.items():
            if v['current_immune']:
                # check immunity lost
                v['next_immune'] = check_immune(v['current_immune'], n, sigma)
            if v['current_infected']:
                # attempt to infect neighbours
                for j in G[n]:
                    infections = v['current_infected'].copy()
                    s = infections.pop()
                    attempt_infection(s, n, j, G, N, gamma, beta, tao, R)
                # attempt to recover from infection
                v['newly_recovered'] = attempt_recovery(
                    v['current_infected'], n, mu)

        # update the network attributes to their next values
        for n, v in G.nodes.items():
            v['current_immune'] = v['next_immune'] | v['newly_recovered']
            v['current_infected'] = v['current_infected'] - v['newly_recovered']
            v['current_infected'] = v['current_infected'] | v['newly_infected']
            # reset temporary hold fields
            v['next_immune'] = set()
            v['newly_recovered'] = set()
            v['newly_infected'] = set()

        record_total_infected(G, total_infected, num_people)
        record_total_immune(G, total_immune, num_people)
        for i in range(n_communities):
            G1 = partitions[i]
            host_immune_temp = copy.deepcopy(host_immune_com[i])
            host_infected_temp = copy.deepcopy(host_infected_com[i])
            record_com_immune(G, host_immune_temp, num_people, G1)
            record_com_infected(G, host_infected_temp, num_people, G1)
            host_immune_com[i] = host_immune_temp
            host_infected_com[i] = host_infected_temp

        # Record diversity within communities
        for i in range(n_communities):
            diversity = calc_diversity(host_infected_com[i])
            communities_diversity[i] += diversity
        diversity = calc_diversity(total_infected)
        div_sum += diversity

    mean_div = calc_div_mean(div_sum, MAX_TIME_STEPS)
    for i in range(len(communities_diversity)):
        div = communities_diversity[i]
        mean_div_c = calc_div_mean(div, MAX_TIME_STEPS)
        communities_diversity[i] = mean_div_c
    return host_immune_com, host_infected_com, total_immune, communities_diversity, mean_div

import networkx as nx
import matplotlib.pyplot as plt
import codecs
import time
import datetime
import random
import sys
import io
import math
from itertools import groupby
from operator import itemgetter
import numpy as np

# ([sizes of the groups], probability of connecting within a group,
#  probability of connecting between groups)
G = nx.random_partition_graph([800, 200], .1, .0125)
adjacencydict = nx.to_dict_of_dicts(G, nodelist=None, edge_data=None)

# create dict of states and infect one node
infectedstates = {}
for n in range(len(adjacencydict)):
    infectedstates.update({n: "S"})
# randrange avoids the off-by-one of randint(0, len(...)), which can pick
# a node index that does not exist
startnode = random.randrange(len(adjacencydict))
infectedstates.update({startnode: "I"})


def flipstateI(node):
    num = random.randint(0, 999)  # random number 0-999
    if num < 10:  # i.e. with probability 1%
        return True
    return False

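# Hedged sketch (my addition) of one synchronous SI step over the structures
# above: each susceptible neighbour of an infected node flips with the 1%
# chance encoded in flipstateI.
def si_step(adjacencydict, infectedstates):
    newly_infected = []
    for node, state in infectedstates.items():
        if state == "I":
            for neighbour in adjacencydict[node]:
                if infectedstates[neighbour] == "S" and flipstateI(neighbour):
                    newly_infected.append(neighbour)
    for node in newly_infected:
        infectedstates[node] = "I"
    return infectedstates
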
import os
import json
from pprint import pprint
import networkx as nx
import matplotlib.pyplot as plt
import codecs
import time
import datetime
import random
import sys
import io

# Generate the graph over which to run the simulation:
# ([group sizes], P(connecting within group), P(connecting between groups))
G = nx.random_partition_graph([70, 30], .1, .02)

# Create adjacency dict documenting network connections
adjacencydict = nx.to_dict_of_dicts(G, nodelist=None, edge_data=None)

## Make a visualization of the above graph - the output file can be imported
## into gephi to see the visualization
# outputdir = "/Users/brookeistvan/Documents/Thesis/seniorthesis"
# nx.write_gexf(G, outputdir + "SIgraph.gexf")

# Create dict of states, with each node starting as susceptible
infectedstates = {}
for n in range(len(adjacencydict)):
    infectedstates.update({n: "S"})

# Randomly choose one node to start infected (randrange avoids picking a
# nonexistent index, unlike randint(0, len(...)))
startnode = random.randrange(len(adjacencydict))
infectedstates.update({startnode: "I"})

def buildRandomPartitionGraph(sizes, p_in, p_out):
    # create a random partition graph
    G = nx.random_partition_graph(sizes, p_in, p_out)
    return G

import csv
import random
from collections import defaultdict

import matplotlib.pyplot as plt
import networkx
import numpy as np

import util

if __name__ == "__main__":
    graphs_with_ground_truth = {}
    min_cluster_size = 10
    for p_out in np.arange(0.2, 0.5, 0.1):
        for p_in in np.arange(0.5, 0.9, 0.1):
            for k in range(3, 15):
                for max_cluster_size in range(min_cluster_size, 101, 10):
                    sizes = [random.randint(min_cluster_size, max_cluster_size)
                             for i in range(k)]
                    graph = networkx.random_partition_graph(sizes, p_in, p_out)
                    ground_truth = [i for i, partition in enumerate(graph.graph['partition'])
                                    for node in partition]
                    graph_name = "rp_{:.1f}_{:.1f}_{}_{}_{}".format(
                        p_out, p_in, k, min_cluster_size, max_cluster_size)
                    # util.plot_results(graph, util.get_clustering_results(graph, k),
                    #                   (graph_name, ground_truth))
                    graphs_with_ground_truth[graph_name] = (graph, ground_truth, k)

    accuracy = defaultdict(list)
    with open('graphs_info.csv', 'w', newline='') as f:
        writer = csv.writer(f)
        headers = ['graph_name', 'nodes', 'edges', 'radius', 'diameter', 'density',
                   'avg_cl_coeff', 'avg_degree', 'best_score', 'best_algorithm']
        writer.writerow(headers)
        for graph_name, (graph, ground_truth, k) in graphs_with_ground_truth.items():

    adj, features = pickle.load(f)
    fb_graphs['combined'] = (adj, features)

### ---------- Create Random NetworkX Graphs ---------- ###
# Dictionary to store all nx graphs
nx_graphs = {}

# Small graphs (sizes must be integers, hence the floor division)
N_SMALL = 200
nx_graphs['er-small'] = nx.erdos_renyi_graph(n=N_SMALL, p=.02, seed=RANDOM_SEED)  # Erdos-Renyi
nx_graphs['ws-small'] = nx.watts_strogatz_graph(n=N_SMALL, k=5, p=.1, seed=RANDOM_SEED)  # Watts-Strogatz
nx_graphs['ba-small'] = nx.barabasi_albert_graph(n=N_SMALL, m=2, seed=RANDOM_SEED)  # Barabasi-Albert
nx_graphs['pc-small'] = nx.powerlaw_cluster_graph(n=N_SMALL, m=2, p=.02, seed=RANDOM_SEED)  # Powerlaw Cluster
nx_graphs['sbm-small'] = nx.random_partition_graph(sizes=[N_SMALL // 10] * 10, p_in=.1, p_out=.01,
                                                   seed=RANDOM_SEED)  # Stochastic Block Model

# Larger graphs
N_LARGE = 1000
nx_graphs['er-large'] = nx.erdos_renyi_graph(n=N_LARGE, p=.01, seed=RANDOM_SEED)  # Erdos-Renyi
nx_graphs['ws-large'] = nx.watts_strogatz_graph(n=N_LARGE, k=3, p=.1, seed=RANDOM_SEED)  # Watts-Strogatz
nx_graphs['ba-large'] = nx.barabasi_albert_graph(n=N_LARGE, m=2, seed=RANDOM_SEED)  # Barabasi-Albert
nx_graphs['pc-large'] = nx.powerlaw_cluster_graph(n=N_LARGE, m=2, p=.02, seed=RANDOM_SEED)  # Powerlaw Cluster
nx_graphs['sbm-large'] = nx.random_partition_graph(sizes=[N_LARGE // 10] * 10, p_in=.05, p_out=.005,
                                                   seed=RANDOM_SEED)  # Stochastic Block Model

# Remove isolates from random graphs
# (materialize the generator once; nx.isolates returns a generator)
for g_name, nx_g in nx_graphs.items():
    isolates = list(nx.isolates(nx_g))
    if len(isolates) > 0:
        for isolate_node in isolates:
            nx_graphs[g_name].remove_node(isolate_node)

import networkx as nx
import sys

# Simple graph generator for tests
# Usage: python ./generator.py [filename]
#
# random_partition_graph(partitions, edges_in, edges_out)
#   partitions - array of numbers of nodes in groups
#   edges_in   - probability of edges inside each group
#   edges_out  - probability of edges between groups
G = nx.random_partition_graph([10000, 4000, 6000], .3, 0.001)

# create the file if it doesn't exist, fail if it already exists
fh = open("./tests/generated/" + sys.argv[1], 'xb')

# save edges to the given file without any additional data
nx.write_edgelist(G, fh, data=False)

def generate_synthetic_graph(n_node_par_com, n_com=2, p_in=0.1, p_out=0.001):
    sizes = [n_node_par_com] * n_com
    g = nx.random_partition_graph(sizes, p_in, p_out)
    edges = g.edges()
    return edges

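# Hedged usage sketch: two communities of 100 nodes each; the function returns
# an edge view, so wrap it in list() if an explicit edge list is needed.
edges = list(generate_synthetic_graph(100, n_com=2, p_in=0.1, p_out=0.001))
print(len(edges))
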
def build_graph(self):
    self.graph = nx.random_partition_graph([10, 10], 0, 1)
    self.node_labels_to_ints()