def distributed_random_topology(x, graph, proportion=0.5, *args):
    """Same as dynamics but randomly rewires the graph edge connections.

    Performs ``proportion * |V|`` connected double edge swaps on ``graph``
    IN PLACE (degree- and connectivity-preserving), then returns ``-L @ x``
    where ``L`` is the Laplacian of the rewired graph.

    Parameters
    ----------
    x : array-like
        Current state vector of the nodes.
    graph : networkx.Graph
        Connected graph; mutated in place by the rewiring.
    proportion : float, optional
        Fraction of the node count used as the swap budget.
    *args
        Accepted for interface compatibility with other dynamics
        functions and ignored.
    """
    # BUG FIX: np.floor returns a float; the swap count should be an
    # integer, so cast explicitly before handing it to NetworkX.
    nswap = int(np.floor(proportion * len(graph.nodes())))
    nx.connected_double_edge_swap(graph, nswap)
    L = nx.laplacian_matrix(graph)
    # Keep .todense() (np.matrix) so the return type matches the original
    # callers' expectations.
    L = L.todense()
    return -np.dot(L, x)
def permute_network( G, Q, numEdges, outputFile ):
    """Permute a copy of ``G`` with ``Q * numEdges`` connected double edge
    swaps, write the result to ``outputFile`` as an edge list, and return
    the number of swaps actually performed."""
    permuted = G.copy()
    requested = Q*numEdges
    performed = nx.connected_double_edge_swap(permuted, nswap=requested)
    nx.write_edgelist(permuted, outputFile)
    return performed
def random_once(g, kmax, Q=100):
    """Randomize ``g`` once (degree-preserving) and return the per-layer
    mean/std common-neighbour statistics computed by compute_link_property.

    Returns a pair of dicts keyed by layer number as a string ('1'..kmax).
    """
    #rg = nx.DiGraph(nx.directed_configuration_model(list(d for n, d in g.in_degree()), list(d for n, d in g.out_degree()), create_using = nx.DiGraph()))
    randomized = g.copy()
    # Rewiring needs more than two edges to be meaningful.
    if g.number_of_edges() > 2:
        nx.connected_double_edge_swap(randomized, (Q * g.number_of_edges()))
    randomized = compute_link_property(randomized, kmax)
    #rgs.append(rg)
    meas = {}
    stds = {}
    for layer in range(1, kmax + 1):
        key = str(layer)
        meas[key] = randomized.graph[GRAPH_KEY_AVG_COMMON_NODES + key]
        stds[key] = randomized.graph[GRAPH_KEY_STD_COMMON_NODES + key]
    return meas, stds
def test_connected_double_edge_swap_star_low_window_threshold():
    """Exercise the ui==xi branch of connected_double_edge_swap when the
    window threshold is forced to zero; connectivity and the degree
    sequence must be preserved."""
    star = nx.star_graph(40)
    before = sorted(deg for _, deg in star.degree())
    G = nx.connected_double_edge_swap(star, 1, _window_threshold=0, seed=4)
    assert nx.is_connected(star)
    assert before == sorted(deg for _, deg in star.degree())
def test_connected_double_edge_swap_star():
    """Exercise the ui==xi branch of connected_double_edge_swap on a star;
    connectivity and the degree sequence must be preserved."""
    star = nx.star_graph(40)
    before = sorted(deg for _, deg in star.degree())
    G = nx.connected_double_edge_swap(star, 1, seed=4)
    assert nx.is_connected(star)
    assert before == sorted(deg for _, deg in star.degree())
def run(args):
    """Permute a network by degree-preserving edge swaps.

    Loads the edge list named by ``args.edge_list_file``, performs at least
    ``args.Q * |E|`` double edge swaps (connected swaps when
    ``args.connected`` is set), and saves the permuted edge list to
    ``args.permuted_edge_list_file``.
    """
    # Load unpermuted network.
    edge_list = load_edge_list(args.edge_list_file, unweighted=True)

    # Permute network.
    G = nx.Graph()
    G.add_edges_from(edge_list)

    if args.seed is not None:
        random.seed(args.seed)

    minimum_swaps = int(math.ceil(args.Q*G.number_of_edges()))
    if not args.connected:
        G = nx.double_edge_swap(G, minimum_swaps, 2**30)
    else:
        # If G is not connected, then we perform the connected double edge
        # swap algorithm on a largest connected component of G.
        # BUG FIX: nx.connected_component_subgraphs was removed in
        # NetworkX 2.4; build the subgraph from the component's node set
        # instead (works on both 1.x and 2.x+).
        if not nx.is_connected(G):
            largest_cc = max(nx.connected_components(G), key=len)
            G = G.subgraph(largest_cc).copy()
        # The current connected double edge swap algorithm does not guarantee
        # a minimum number of successful edge swaps, so we enforce it.
        current_swaps = 0
        while current_swaps < minimum_swaps:
            # Request at least 100 swaps per round so each call makes
            # measurable progress even near the end.
            remaining_swaps = max(minimum_swaps-current_swaps, 100)
            additional_swaps = nx.connected_double_edge_swap(G, remaining_swaps)
            current_swaps += additional_swaps

    permuted_edge_list = G.edges()

    # Save permuted_network.
    save_edge_list(args.permuted_edge_list_file, permuted_edge_list)
def permute_network( G, Q, numEdges, outputFile ):
    # Permutes network by swapping edges Q * numEdges times
    # Writes the permuted copy to outputFile and returns the swap count.
    H = G.copy()
    nswap = Q*numEdges
    # NOTE(review): `node_swap` is not defined in this block -- presumably a
    # module-level callable (or a falsy placeholder) selecting a faster swap
    # strategy; if it is undefined at call time this raises NameError.
    # TODO confirm against the module that provides it.
    if node_swap:
        #print("fast node swap")
        H = node_swap(H, nswap=nswap) # fix me
        # The node_swap path does not report how many swaps succeeded, so
        # the requested count is returned instead.
        swaps = nswap
    else:
        swaps = nx.connected_double_edge_swap(H, nswap=nswap)
    nx.write_edgelist(H, outputFile)
    return swaps
def random_noise_replicate(original, params=None):
    """Return a copy of ``original`` with ~Binomial(|E|, epsilon) edges
    rewired as noise.

    params keys: 'epsilon' (required rewiring probability),
    'preserve_degree' (default False), 'preserve_connected' (default: the
    connectivity of the input). Self-loops created by rewiring are removed.
    """
    epsilon = params['epsilon']
    preserve_degree = params.get('preserve_degree', False)
    preserve_connected = params.get('preserve_connected', nx.is_connected(original))
    #potentially we could even repeat the edge rewiring multiple times
    G = original.copy()
    ne = original.number_of_edges()
    if ne == 0:
        return G  # nothing to rewire
    if preserve_degree and not preserve_connected:
        # Degree-preserving rewiring: pair up sampled edges and cross their
        # endpoints. random.sample needs a sequence and NetworkX 2.x edge
        # views are not indexable, so materialize the edges first.
        edited_edges = random.sample(list(G.edges()), npr.binomial(ne, epsilon))
        random.shuffle(edited_edges)
        # BUG FIX: "/" yields a float under Python 3, which breaks range();
        # use integer division.
        num_edits = len(edited_edges) // 2
        for idx in range(num_edits):
            edgeA = edited_edges[idx]
            edgeB = edited_edges[idx + num_edits]
            newA = (edgeA[0], edgeB[0])
            newB = (edgeA[1], edgeB[1])
            G.remove_edges_from([edgeA, edgeB])
            G.add_edges_from([newA, newB])
    elif preserve_degree and preserve_connected:
        # Degree- and connectivity-preserving rewiring.
        nswap = epsilon / 2. * ne
        nx.connected_double_edge_swap(G, nswap=nswap)  #modified in place
    else:
        # Unconstrained rewiring: delete each sampled edge and add a random one.
        nodes = list(G.nodes())
        edited_edges = random.sample(list(G.edges()), npr.binomial(ne, epsilon))
        for edge in edited_edges:
            G.remove_edge(*edge)
            pair = random.sample(nodes, 2)
            G.add_edge(pair[0], pair[1])
    # NOTE(review): Graph.selfloop_edges() was removed in NetworkX 2.4; on
    # newer versions this should be nx.selfloop_edges(G). TODO confirm the
    # pinned NetworkX version.
    G.remove_edges_from(G.selfloop_edges())
    return G
def opt(G, n):
    # Hill-climbing search: repeatedly apply n connected double edge swaps
    # to a copy of the current best graph and accept the rewired graph when
    # it improves the objective (smaller diameter, then smaller average
    # shortest path length at equal diameter).
    # NOTE(review): this loop never terminates on its own -- presumably it
    # is meant to run until interrupted by the user. TODO confirm.
    H = G.copy()
    # diam_aspl is defined elsewhere in this project; it returns
    # (diameter, average shortest path length).
    diam, aspl = diam_aspl(H)
    print(diam, aspl)
    print("")
    while True:
        tmp = H.copy()
        cnt_swapped = nx.connected_double_edge_swap(tmp, nswap=n)
        tmp_diam, tmp_aspl = diam_aspl(tmp)
        print(tmp_diam, tmp_aspl)
        # Accept strictly better diameter, or equal diameter with lower ASPL.
        if tmp_diam < diam or (tmp_diam == diam and tmp_aspl < aspl):
            H, diam, aspl = tmp, tmp_diam, tmp_aspl
            print("GOOD")
            print(cnt_swapped)
            print(diam, aspl)
def random_noise_replicate(original, params=None):
    # Python 2 variant (uses xrange) of random_noise_replicate: returns a
    # copy of `original` with ~Binomial(|E|, epsilon) edges rewired.
    # params keys: 'epsilon' (required), 'preserve_degree' (default False),
    # 'preserve_connected' (default: connectivity of the input).
    epsilon = params['epsilon']
    preserve_degree = params.get('preserve_degree', False)
    preserve_connected = params.get('preserve_connected', nx.is_connected(original))
    #potentially we could even repeat the edge rewiring multiple times
    G = original.copy()
    ne = original.number_of_edges()
    if ne == 0:
        return G  # nothing to rewire
    if preserve_degree and not preserve_connected:
        # Degree-preserving rewiring: pair sampled edges and cross endpoints.
        edited_edges = random.sample(G.edges(), npr.binomial(ne, epsilon))
        random.shuffle(edited_edges)
        num_edits = len(edited_edges) / 2  # Python 2: "/" on ints truncates
        for idx in xrange(num_edits):
            edgeA = edited_edges[idx]
            edgeB = edited_edges[idx + num_edits]
            newA = (edgeA[0],edgeB[0])
            newB = (edgeA[1],edgeB[1])
            G.remove_edges_from([edgeA,edgeB])
            G.add_edges_from([newA,newB])
    elif preserve_degree and preserve_connected:
        # Degree- and connectivity-preserving rewiring via double edge swaps.
        nswap = epsilon/2. * ne
        nx.connected_double_edge_swap(G, nswap=nswap) #modified in place
    else:
        # Unconstrained rewiring: delete sampled edges, add random ones.
        nodes = G.nodes()
        edited_edges = random.sample(G.edges(), npr.binomial(ne, epsilon))
        for edge in edited_edges:
            G.remove_edge(*edge)
            pair = random.sample(nodes, 2)
            G.add_edge(pair[0],pair[1])
    # Drop any self-loops introduced by the rewiring.
    G.remove_edges_from(G.selfloop_edges())
    return G
def run_sim_switch(self, p=1, proportion=0.5, save_data=False):
    # Integrate the coupled velocity/position dynamics with timestep
    # self.dt until velocity_angle_agreement() holds. Each step, with
    # probability p, the velocity graph is rewired in place by
    # degree-preserving connected double edge swaps.
    # When save_data is True, wall-clock times and state snapshots are
    # accumulated on self.T_sim / self.V_sim / self.R_sim / self.VG.
    t = 0
    if (save_data):
        self.T_sim = []
        self.V_sim = []
        self.R_sim = []
        self.VG = []
    start = time.time()
    while self.velocity_angle_agreement(self.v) == False:
        # Keep the pre-update velocities for the position update below.
        u = self.v.copy()
        self.v = self.v + self.dt * self.v_dot(
            self.v, self.r, self.v_graph, self.s_graph, *self.v_dot_arg)
        self.r = self.r + self.dt * self.r_dot(
            u, self.r, self.v_graph, self.s_graph, *self.v_dot_arg)
        # Recompute the spatial graph from the new positions.
        self.s_graph = self.S_graph_pos(self.r, self.R)
        if (np.random.rand(1) < p):
            # Rewire ~proportion*size edge pairs of the velocity graph.
            nx.connected_double_edge_swap(self.v_graph,
                                          np.floor(proportion * self.size))
        t = t + self.dt
        if (save_data):
            self.T_sim.append(time.time() - start)
            self.V_sim.append(self.v)
            self.R_sim.append(self.r)
            self.VG.append(self.v_graph)
def get_graph_features(h): if h.is_directed(): g = nx.Graph(h) else: g = h def runf(func): try: print "Running", str(func) val = func(g) print "Value:", str(val) if val == np.nan: return -1 sys.stdout.flush() return val except: return -1 a = 1 if h.is_directed() else 0 b = nx.number_of_nodes(h) c = nx.number_of_edges(h) d = -1 e = 1 if not g.is_directed() and runf(nx.is_chordal) else 0 f = runf(nx.graph_clique_number) h = runf(nx.graph_number_of_cliques) i = runf(nx.transitivity) j = runf(nx.average_clustering) k = runf(nx.average_node_connectivity) if c < 100 else -1 l = runf(nx.edge_connectivity) if c < 100 else -1 m = runf(nx.node_connectivity) if c < 100 else -1 n = runf(nx.diameter) if c < 100 else -1 try: o = len(nx.periphery(g)) if b < 100 else -1 except: o = -1 p = 1 if runf(nx.is_eulerian) else 0 q = runf(nx.average_shortest_path_length) if b < 300 else -1 try: r = nx.connected_double_edge_swap( g, nx.number_of_edges(g)) if b < 500 else -1 except: r = -1 s = 1 if runf(nx.is_tree) else 0 t = runf(nx.density) return (a, b, c, d, e, f, h, i, j, k, l, m, n, o, p, q, r, s, t)
def test_connected_double_edge_swap():
    """Swaps on a BA tree must preserve connectivity and the degree
    sequence."""
    g = nx.barabasi_albert_graph(200, 1)
    before = sorted(deg for _, deg in g.degree())
    G = nx.connected_double_edge_swap(g, 40, seed=1)
    assert nx.is_connected(g)
    assert before == sorted(deg for _, deg in g.degree())
def gen_func(graph):
    """Return a degree-preserving randomization of ``graph`` produced by
    ``n_edge_rewirings`` connected double edge swaps (module-level count)."""
    shuffled = graph.copy()
    nx.connected_double_edge_swap(shuffled, nswap=n_edge_rewirings)
    return shuffled
def link_analysis():
    """Classify, against an ensemble of random networks, the topological
    link property of every edge of the target network:
    BOND / sink / local bridge of layer #n / global bridge.

    Reads the Pajek file named by the global ``path``; writes Pajek,
    Excel, PNG, and shelve outputs next to it. Returns ``(g, rgs)`` for
    the analysed component and its random networks, or -1 when ``path``
    does not name an existing file.
    """
    global path, times, quick, separation
    # Split path into directory `head`, base name `tail`, and extension
    # `ext` (via `root`, the path without extension).
    root, ext = os.path.splitext(path)
    head, tail = os.path.split(root)
    # If the target network file exists, load it and use its average
    # shortest path as the basis for the later computation.
    if os.path.exists(path) & os.path.isfile(path):
        debugmsg('read and analyse the target network...')
        # Open the Pajek file and convert it to an undirected graph.
        G = nx.Graph(nx.read_pajek(path))
        # Number the first component graph 0.
        compNo = 0
        for g in nx.connected_component_subgraphs(G):
            # Skip trivial components (single node or single edge).
            #if len(g.edges()) == 0: continue
            if g.order() == 1 or g.size() == 1: continue
            # Compute the average shortest path length of this component.
            g.graph[GRAPH_KEY_SHORTEST_PATH] = nx.average_shortest_path_length(
                g)
            # The component's name equals its number.
            g.name = compNo
            compNo += 1
            # Decide how many layers each node looks outward, which
            # determines strong vs. weak links.
            layers = max(
                1,
                int(min((g.graph[GRAPH_KEY_SHORTEST_PATH] / 2.0), separation))
                if quick else int(g.graph[GRAPH_KEY_SHORTEST_PATH] / 2.0))
            # Compute, for every connected node pair, the normalized ratio
            # of common neighbours at each layer.
            compute_link_property(g, layers)
            t_start = time.time()
            t_ttl = 0
            rgs = []
            # Generate `times` random networks to compare against.
            for c in xrange(times):
                rg_shelve_path = root + '_' + str(compNo) + '_' + str(
                    c) + '_shelve.obj'
                rg_path = root + '_' + str(compNo) + '_' + str(c) + ext
                rg_name = 'random_network_' + str(g.name) + '_' + str(c)
                # If random network #c was generated before, read it back;
                # otherwise build a fresh one.
                if os.path.exists(rg_shelve_path) & os.path.isfile(
                        rg_shelve_path):
                    debugmsg('read and analyse the random network #' + str(c) +
                             ' from shelve file ' + rg_shelve_path + '...')
                    sf = shelve.open(rg_shelve_path)
                    rg = sf[rg_name]
                else:
                    # NOTE(review): the second operand repeats rg_path --
                    # presumably os.path.isfile(rg_path) was intended.
                    if os.path.exists(rg_path) & os.path.exists(rg_path):
                        debugmsg('read and analyse the random network #' +
                                 str(c) + ' from pajek file ' + rg_path +
                                 '...')
                        rg = compute_link_property(
                            nx.connected_component_subgraphs(
                                nx.Graph(nx.read_pajek(rg_path)))[0], layers)
                    else:
                        debugmsg(
                            'create, analyse and write the random network #' +
                            str(c) + ' to pajek file ' + rg_path + '...')
                        rg = g.copy()
                        rg.graph['name'] = rg_name
                        # Degree-preserving randomization of the component.
                        if g.number_of_edges() > 2:
                            nx.connected_double_edge_swap(
                                rg, g.number_of_edges())
                        compute_link_property(rg, layers)
                        nx.write_pajek(rg, rg_path)
                        # Only the graph-level statistics are needed later;
                        # drop the nodes to keep the shelve file small.
                        rg.remove_nodes_from(rg.nodes())
                    sf = shelve.open(rg_shelve_path)
                    sf[rg_name] = rg
                sf.close()
                rgs.append(rg)
                t_inc = time.time() - t_start
                t_ttl += t_inc
                debugmsg('+--- * Time spent (increment, total): (%f, %f)' %
                         (t_inc, t_ttl))
                t_start = time.time()
            times = len(rgs)
            debugmsg('generate a threshold for BOND/bridge link analysis...')
            # R1 threshold per layer: mean of random-network averages plus
            # twice the mean of their standard deviations, capped at 1.
            for i in xrange(layers):
                l = str(i + 1)
                g.graph[GRAPH_KEY_AVG_LIST + l] = []
                g.graph[GRAPH_KEY_STD_LIST + l] = []
                for j in xrange(times):
                    g.graph[GRAPH_KEY_AVG_LIST + l].append(
                        rgs[j].graph[GRAPH_KEY_AVG_COMMON_NODES + l])
                    g.graph[GRAPH_KEY_STD_LIST + l].append(
                        rgs[j].graph[GRAPH_KEY_STD_COMMON_NODES + l])
                g.graph[GRAPH_KEY_THRESHOLD_R1 + l] = scipy.mean(
                    g.graph[GRAPH_KEY_AVG_LIST + l]) + 2 * scipy.mean(
                        g.graph[GRAPH_KEY_STD_LIST + l])
                if g.graph[GRAPH_KEY_THRESHOLD_R1 + l] > 1:
                    g.graph[GRAPH_KEY_THRESHOLD_R1 + l] = 1.0
            debugmsg(
                'assess the link property of every edge of the target network...'
            )
            # phase 1: identify the sink links (edges touching degree-1 nodes)
            g.graph[SINK] = 0
            g.graph[BOND] = 0
            g.graph[LOCAL_BRIDGE] = 0
            g.graph[GLOBAL_BRIDGE] = 0
            for s, t in g.edges_iter(data=False):
                if (g.degree(s) == 1) | (g.degree(t) == 1):
                    g[s][t][EDGE_KEY_LAYER + '0'] = SINK
                    g[s][t][EDGE_KEY_NEXT_STEP] = STOP
                    g[s][t][EDGE_KEY_WIDTH] = SINK_BASIC_WIDTH
                    g[s][t][EDGE_KEY_COLOR] = SINK_COLOR
                    g.graph[SINK] += 1
                else:
                    g[s][t][EDGE_KEY_NEXT_STEP] = PASS
            # phase 2: identify the BOND/local bridge links on every layer
            for i in xrange(layers):
                # Negative key -l holds this layer's common-neighbour ratio.
                l = -(i + 1)
                n = str(i + 1)
                g.graph[GRAPH_KEY_PASS_TO_NEXT_LAYER + n] = []
                for s, t in g.edges_iter(data=False):
                    if g[s][t][EDGE_KEY_NEXT_STEP] == STOP:
                        # Already classified: carry the previous layer's label.
                        g[s][t][EDGE_KEY_LAYER + n] = g[s][t][EDGE_KEY_LAYER +
                                                              str(i)]
                    elif g[s][t][l] >= g.graph[GRAPH_KEY_THRESHOLD_R1 + n]:
                        g[s][t][EDGE_KEY_LAYER + n] = BOND
                        g[s][t][EDGE_KEY_NEXT_STEP] = STOP
                        g[s][t][EDGE_KEY_WIDTH] = (layers - i +
                                                   1) * BOND_BASIC_WIDTH
                        g[s][t][EDGE_KEY_COLOR] = BOND_COLOR
                        g.graph[BOND] += 1
                    else:
                        g[s][t][EDGE_KEY_LAYER +
                                n] = LOCAL_BRIDGE + ' of layer ' + n
                        g[s][t][EDGE_KEY_WIDTH] = (layers - i +
                                                   1) * BRIDGE_BASIC_WIDTH
                        g[s][t][EDGE_KEY_COLOR] = LOCAL_BRIDGE_COLOR
                        g.graph[GRAPH_KEY_PASS_TO_NEXT_LAYER + n].append(
                            g[s][t][l])
                # R2 threshold: mean minus std of the ratios that passed on,
                # clamped to be non-negative.
                if len(g.graph[GRAPH_KEY_PASS_TO_NEXT_LAYER + n]) == 0:
                    g.graph[GRAPH_KEY_THRESHOLD_R2 + n] = 0
                else:
                    g.graph[GRAPH_KEY_THRESHOLD_R2 + n] = scipy.mean(
                        g.graph[GRAPH_KEY_PASS_TO_NEXT_LAYER + n]) - scipy.std(
                            g.graph[GRAPH_KEY_PASS_TO_NEXT_LAYER + n])
                if g.graph[GRAPH_KEY_THRESHOLD_R2 + n] < 0:
                    g.graph[GRAPH_KEY_THRESHOLD_R2 + n] = 0.0
                for s, t in g.edges_iter(data=False):
                    if g[s][t][EDGE_KEY_NEXT_STEP] == PASS:
                        if g[s][t][l] > g.graph[GRAPH_KEY_THRESHOLD_R2 + n]:
                            g[s][t][EDGE_KEY_NEXT_STEP] = STOP
                            g.graph[LOCAL_BRIDGE] += 1
            # phase 3: identify the global links (everything still PASSing)
            for s, t in g.edges_iter(data=False):
                if g[s][t][EDGE_KEY_NEXT_STEP] == PASS:
                    g[s][t][EDGE_KEY_LAYER + n] = GLOBAL_BRIDGE
                    g[s][t][EDGE_KEY_WIDTH] = BRIDGE_BASIC_WIDTH
                    g[s][t][EDGE_KEY_COLOR] = GLOBAL_BRIDGE_COLOR
                    g.graph[GLOBAL_BRIDGE] += 1
            # extra phase 4: identify the node entropy
            ns = []
            nc = []
            g.graph[GRAPH_KEY_EDGE_CLASS] = {
                BOND: g.graph[BOND],
                LOCAL_BRIDGE: g.graph[LOCAL_BRIDGE],
                GLOBAL_BRIDGE: g.graph[GLOBAL_BRIDGE]
            }
            g.graph[GRAPH_KEY_ENTROPY] = entropy(
                g.graph[GRAPH_KEY_EDGE_CLASS].values())
            for s in g.nodes_iter(data=False):
                # Per-node class counts: the global counts minus this node's
                # incident edges of each class; information gain is the drop
                # in entropy when the node is removed.
                g.node[s][NODE_KEY_EDGE_CLASS] = g.graph[
                    GRAPH_KEY_EDGE_CLASS].copy()
                for t in nx.neighbors(g, s):
                    for key in g.node[s][NODE_KEY_EDGE_CLASS].keys():
                        if g[s][t][EDGE_KEY_LAYER +
                                   str(layers)].startswith(key):
                            g.node[s][NODE_KEY_EDGE_CLASS][key] -= 1
                g.node[s][NODE_KEY_NEW_ENTROPY] = entropy(
                    g.node[s][NODE_KEY_EDGE_CLASS].values())
                g.node[s][NODE_KEY_INFORMATION_GAIN] = max(
                    0,
                    g.graph[GRAPH_KEY_ENTROPY] - g.node[s][NODE_KEY_NEW_ENTROPY])
                ns.append(g.node[s][NODE_KEY_INFORMATION_GAIN])
                nc.append([
                    REGULAR_NODE_COLOR, IMPORTANT_NODE_COLOR, SUPER_NODE_COLOR
                ][max(0,
                      int(math.ceil(g.node[s][NODE_KEY_INFORMATION_GAIN])))])
            ns_avg = scipy.mean(ns)
            if not ns_avg == 0:
                # Scale node sizes relative to the mean information gain.
                ns = [
                    NODE_SIZE_BASE + NODE_SIZE * (value / ns_avg)
                    for value in ns
                ]
            # extra phase 5: save the network fingerprint into a file
            nfp_shelve_path = 'network_fingerprints.obj'
            if os.path.exists(nfp_shelve_path) & os.path.isfile(
                    nfp_shelve_path):
                sf = shelve.open(nfp_shelve_path)
                finger_prints = sf['finger_prints']
            else:
                sf = shelve.open(nfp_shelve_path)
                finger_prints = {}
            # Fingerprint = class fractions (BOND, local, global, sink).
            d = float(g.graph[BOND] + g.graph[LOCAL_BRIDGE] +
                      g.graph[GLOBAL_BRIDGE] + g.graph[SINK])
            network_name = root + '_' + str(compNo)
            finger_prints[network_name] = {
                0: round(g.graph[BOND] / d, 4),
                1: round(g.graph[LOCAL_BRIDGE] / d, 4),
                2: round(g.graph[GLOBAL_BRIDGE] / d, 4),
                3: round(g.graph[SINK] / d, 4)
            }
            # Pairwise correlations between all stored fingerprints.
            corr_table = {}
            for net_name1, net_series1 in finger_prints.items():
                corr_table[net_name1] = {}
                for net_name2, net_series2 in finger_prints.items():
                    corr_table[net_name1][net_name2] = numpy.corrcoef(
                        net_series1.values(), net_series2.values())[0, 1]
            sf['corr_table'] = corr_table
            sf['finger_prints'] = finger_prints
            sf.close()
            debugmsg('write the analysis results to a pajek file...')
            ng = nx.Graph()
            ng.add_nodes_from(g.nodes())
            ng.add_edges_from(g.edges(data=True))
            ng.graph['name'] = root + '_' + str(compNo) + '_result' + ext
            nx.write_pajek(ng, root + '_' + str(compNo) + '_result' + ext)
            debugmsg('write the analysis results to a excel file...')
            # Phase 1: write texphe analysis results of the target network to the sheet1
            book = xlwt.Workbook()
            s1 = book.add_sheet('target network')
            s2 = book.add_sheet(str(times) + ' random networks')
            s3 = book.add_sheet('node information')
            si = xlwt.Style.easyxf(
                'font: name Arial, colour dark_red, bold True; alignment: horizontal left;'
            )
            st = xlwt.Style.easyxf(
                'font: name Arial, colour dark_red, bold True; alignment: horizontal center;'
            )
            sb = xlwt.Style.easyxf('font: name Arial, colour dark_blue;')
            # phase 1.1: create the heading data of the analysis report
            row = 5
            col = 7
            s1.write(0, 0, 'number of nodes = ' + str(g.number_of_nodes()), si)
            s1.write(1, 0, 'number of edges = ' + str(g.number_of_edges()), si)
            s1.write(
                2, 0, 'average degree = ' +
                str(g.number_of_edges() * 2.0 / g.number_of_nodes()), si)
            s1.write(3, 0, 'diameter = ' + str(nx.diameter(g)), si)
            s1.write(
                4, 0, 'average shortest path = ' +
                str(round(g.graph[GRAPH_KEY_SHORTEST_PATH], 4)), si)
            s1.write(
                5, 0, 'average clustering coefficient = ' +
                str(round(nx.average_clustering(g), 4)), si)
            s1.write(
                6, 0, 'degree assortativity coefficient = ' +
                str(round(nx.degree_assortativity_coefficient(g), 4)), si)
            s1.write(
                7, 0, 'BOND = ' + str(g.graph[BOND]) + " (" +
                str(100 * round(float(g.graph[BOND]) / g.size(), 4)) + '%)',
                si)
            s1.write(
                8, 0, 'sink = ' + str(g.graph[SINK]) + " (" +
                str(100 * round(float(g.graph[SINK]) / g.size(), 4)) + '%)',
                si)
            s1.write(
                9, 0, 'local bridge = ' + str(g.graph[LOCAL_BRIDGE]) + " (" +
                str(100 * round(float(g.graph[LOCAL_BRIDGE]) / g.size(), 4)) +
                '%)', si)
            s1.write(
                10, 0, 'global bridge = ' + str(g.graph[GLOBAL_BRIDGE]) +
                " (" +
                str(100 * round(float(g.graph[GLOBAL_BRIDGE]) / g.size(), 4)) +
                '%)', si)
            s1.write(11, 0, 'entropy = ' + str(g.graph[GRAPH_KEY_ENTROPY]), si)
            s1.write(row - 1, col - 6, 'st.sp', st)
            s1.write(row - 1, col - 5, 'avg.sp', st)
            s1.write(row - 1, col - 4, 's.cc', st)
            s1.write(row - 1, col - 3, 't.cc', st)
            s1.write(row - 1, col - 2, 'source', st)
            s1.write(row - 1, col - 1, 'target', st)
            for i in xrange(layers):
                s1.write(
                    row - 3, col + (i * 2), 'R1 = ' +
                    str(round(g.graph[GRAPH_KEY_THRESHOLD_R1 + str(i + 1)],
                              4)), si)
                s1.write(
                    row - 2, col + (i * 2), 'R2 = ' +
                    str(round(g.graph[GRAPH_KEY_THRESHOLD_R2 + str(i + 1)],
                              4)), si)
                s1.write(row - 1, col + (i * 2), 'intersection weight', st)
                s1.write(row - 1, col + (i * 2) + 1, 'layer ' + str(i + 1), st)
            # phase 1.2: create the body data of the analysis report
            for s, t in g.edges_iter(data=False):
                s1.write(row, col - 4, round(nx.clustering(g, s), 2), sb)
                s1.write(row, col - 3, round(nx.clustering(g, t), 2), sb)
                s1.write(row, col - 2, s, sb)
                s1.write(row, col - 1, t, sb)
                for i in xrange(layers):
                    s1.write(row, col + (i * 2), round(g[s][t][-(i + 1)], 4),
                             sb)
                    # Print the layer label only when it changes ('...' when
                    # repeated from the previous layer).
                    if (i == 0):
                        s1.write(row, col + (i * 2) + 1,
                                 g[s][t][EDGE_KEY_LAYER + str(i + 1)], sb)
                    elif (g[s][t][EDGE_KEY_LAYER + str(i + 1)] !=
                          g[s][t][EDGE_KEY_LAYER + str(i)]):
                        s1.write(row, col + (i * 2) + 1,
                                 g[s][t][EDGE_KEY_LAYER + str(i + 1)], sb)
                    else:
                        s1.write(row, col + (i * 2) + 1, '...', sb)
                row += 1
            # phase 2: write the analysis results of the random networks to the sheet2
            row = 5
            col = 3
            for i in xrange(layers):
                l = str(i + 1)
                s2.write(row - 2, col + (i * 4), 'layer ' + l, st)
                s2.write(row - 1, col + (i * 4), 'AVG', st)
                s2.write(row - 1, col + (i * 4) + 1, 'STD', st)
                for j in xrange(times):
                    s2.write(row + j, col + (i * 4),
                             rgs[j].graph[GRAPH_KEY_AVG_COMMON_NODES + l], sb)
                    s2.write(row + j, col + (i * 4) + 1,
                             rgs[j].graph[GRAPH_KEY_STD_COMMON_NODES + l], sb)
            # extra phase 3: write the analysis results of node entropy
            row = 1
            col = 1
            now = 1
            s3.write(row, col + 0, 'node', st)
            s3.write(row - 1, col + 1, 'o.entropy = ', st)
            s3.write(row, col + 1, 'n.entropy', st)
            s3.write(row - 1, col + 2, g.graph[GRAPH_KEY_ENTROPY], sb)
            s3.write(row, col + 2, 'gain', st)
            s3.write(row - 1, col + 3, g.graph[GRAPH_KEY_EDGE_CLASS][BOND], sb)
            s3.write(row, col + 3, 'BOND', st)
            s3.write(row - 1, col + 4,
                     g.graph[GRAPH_KEY_EDGE_CLASS][LOCAL_BRIDGE], sb)
            s3.write(row, col + 4, 'local bridge', st)
            s3.write(row - 1, col + 5,
                     g.graph[GRAPH_KEY_EDGE_CLASS][GLOBAL_BRIDGE], sb)
            s3.write(row, col + 5, 'global bridge', st)
            s3.write(row, col + 6, 'avg', st)
            s3.write(row + 1, col + 6, ns_avg, sb)
            for s in g.nodes_iter(data=False):
                s3.write(row + now, col + 0, s, sb)
                s3.write(row + now, col + 1, g.node[s][NODE_KEY_NEW_ENTROPY],
                         sb)
                s3.write(row + now, col + 2,
                         g.node[s][NODE_KEY_INFORMATION_GAIN], sb)
                s3.write(row + now, col + 3, g.node[s][NODE_KEY_EDGE_CLASS][BOND],
                         sb)
                s3.write(row + now, col + 4,
                         g.node[s][NODE_KEY_EDGE_CLASS][LOCAL_BRIDGE], sb)
                s3.write(row + now, col + 5,
                         g.node[s][NODE_KEY_EDGE_CLASS][GLOBAL_BRIDGE], sb)
                now += 1
            book.save(root + '_' + str(compNo) + '_result.xls')
            debugmsg('draw the analysis results of the target network...')
            # phase 1: draw the target network
            if path in SPECIAL_NETWORKS:
                # Use the stored coordinates for known (geographic) networks.
                pos = {
                    seq_no: (float(g.node[seq_no]['posx']),
                             float(g.node[seq_no]['posy']))
                    for seq_no in g.nodes_iter()
                }
            else:
                pos = nx.spring_layout(g, pos=nx.circular_layout(g))
            if show_the_major_result == True:
                fig_no = 10
                net_fig1 = plot.figure(
                    fig_no,
                    figsize=(4, 6) if path in SPECIAL_NETWORKS else (6, 6),
                    dpi=200,
                    facecolor='white')
                if path in SPECIAL_NETWORKS:
                    bb_width = [
                        0.1 if g[s][t][EDGE_KEY_LAYER + n] == BOND else 0.5
                        for (s, t) in g.edges_iter(data=False)
                    ]
                    ns = [0.5 for n in g.nodes_iter(data=False)]
                else:
                    bb_width = [
                        g[s][t][EDGE_KEY_WIDTH]
                        for (s, t) in g.edges_iter(data=False)
                    ]
                    plot.title('target network = ' + tail)
                bb_color = [
                    g[s][t][EDGE_KEY_COLOR]
                    for (s, t) in g.edges_iter(data=False)
                ]
                plot.axis('off')
                plot.xticks(())
                plot.yticks(())
                net_fig1.set_tight_layout(True)
                if path in SPECIAL_NETWORKS:
                    nx.draw_networkx(g,
                                     with_labels=False,
                                     pos=pos,
                                     node_size=ns,
                                     linewidths=0.5,
                                     edge_color=bb_color,
                                     width=bb_width)
                else:
                    nx.draw_networkx(g,
                                     pos=pos,
                                     linewidths=0,
                                     width=bb_width,
                                     node_size=ns,
                                     node_color=nc,
                                     font_size=8,
                                     edge_color=bb_color)
                plot.savefig(root + '_' + str(compNo) + '_result.png', dpi=600)
            # phase 1.1: draw the detail analysis result of the target network
            if show_the_detailed_result == True:
                for i in xrange(layers):
                    l = i + 1
                    sub_edge_label = dict()
                    for s, t in g.edges_iter(data=False):
                        sub_edge_label[(s, t)] = round(g[s][t][-l], 3)
                    net_sub_fig = plot.figure(fig_no + l,
                                              figsize=(12, 8),
                                              facecolor='white')
                    plot.title(
                        'target network = ' + tail + ' (layer ' + str(l) +
                        ', R1 = ' + str(
                            round(g.graph[GRAPH_KEY_THRESHOLD_R1 + str(i + 1)],
                                  4)) + ', R2 = ' + str(
                                      round(
                                          g.graph[GRAPH_KEY_THRESHOLD_R2 +
                                                  str(i + 1)], 4)) + ')')
                    plot.xticks(())
                    plot.yticks(())
                    net_sub_fig.set_tight_layout(True)
                    nx.draw_networkx(g,
                                     pos=pos,
                                     linewidths=0,
                                     width=bb_width,
                                     node_size=ns,
                                     node_color=nc,
                                     font_size=8,
                                     edge_color=bb_color)
                    nx.draw_networkx_edge_labels(g,
                                                 pos=pos,
                                                 edge_labels=sub_edge_label,
                                                 font_size=6)
                    plot.savefig(root + '_' + str(compNo) + '_result_layer_' +
                                 str(l) + '.png')
            # phase 2: show betweenness centrality for edges
            if show_the_betweenness_result == True:
                eb = nx.edge_betweenness_centrality(g)
                for s, t in eb:
                    eb[(s, t)] = round(eb[(s, t)], 3)
                bn_width = [
                    0.5 + ((eb[(s, t)] - min(eb.values())) /
                           scipy.std(eb.values()))
                    for (s, t) in g.edges_iter(data=False)
                ]
                net_fig2 = plot.figure(20, figsize=(12, 8), facecolor='white')
                plot.title('Target network = ' + tail +
                           ' (betweenness centrality for edges)')
                plot.xticks(())
                plot.yticks(())
                net_fig2.set_tight_layout(True)
                nx.draw_networkx(g,
                                 pos=pos,
                                 linewidths=0,
                                 width=bn_width,
                                 node_size=ns,
                                 node_color=nc,
                                 font_size=8)
                nx.draw_networkx_edge_labels(g,
                                             pos=pos,
                                             edge_labels=eb,
                                             font_size=6)
                plot.savefig(root + '_' + str(compNo) +
                             '_result (edge betweenness).png')
            # phase 3: show pagerank-based weighting for edges
            if show_the_pagerank_result == True:
                # Build the line graph by hand: nodes are edges of g,
                # connected when they share an endpoint.
                pg = nx.Graph()
                pg.add_nodes_from(g.edges())
                for pair in pg.nodes():
                    for vertex in pair:
                        for node in g.neighbors(vertex):
                            if (vertex, node) in g.edges():
                                if not pair == (vertex, node):
                                    pg.add_edge(pair, (vertex, node))
                            else:
                                if not pair == (node, vertex):
                                    pg.add_edge(pair, (node, vertex))
                pr = nx.pagerank(pg, max_iter=2000)
                for s, t in pr:
                    pr[(s, t)] = round(pr[(s, t)], 4)
                pg_width = [
                    (pr[(s, t)] - min(pr.values())) / scipy.std(pr.values())
                    for (s, t) in g.edges_iter(data=False)
                ]
                net_fig3 = plot.figure(30, figsize=(12, 8), facecolor='white')
                plot.title('Target network = ' + tail +
                           ' (pagerank-based weighting for edges)')
                plot.xticks(())
                plot.yticks(())
                net_fig3.set_tight_layout(True)
                nx.draw_networkx(g,
                                 pos=pos,
                                 linewidths=0,
                                 width=pg_width,
                                 node_size=ns,
                                 node_color=nc,
                                 font_size=8)
                nx.draw_networkx_edge_labels(g,
                                             pos=pos,
                                             edge_labels=pr,
                                             font_size=6)
                plot.savefig(root + '_' + str(compNo) +
                             '_result (pagerank-based).png')
            # phase 4: show the result of network clustering
            if show_the_major_clustering_result == True:
                fig_no = 40
                sg = network_clustering(g, layers)
                # Map group numbers to consecutive colour indices.
                ncc_map = {}
                color_count = 1
                for v in g.nodes_iter(data=False):
                    if not g.node[v][NODE_KEY_GROUP_NUMBER] in ncc_map:
                        ncc_map[g.node[v][NODE_KEY_GROUP_NUMBER]] = color_count
                        color_count += 1
                ncc = [
                    ncc_map[g.node[v][NODE_KEY_GROUP_NUMBER]]
                    for v in g.nodes_iter(data=False)
                ]
                net_fig4 = plot.figure(fig_no,
                                       figsize=(12, 8),
                                       facecolor='white')
                plot.title('Target network = ' + tail + ' (clustering result)')
                plot.xticks(())
                plot.yticks(())
                net_fig4.set_tight_layout(True)
                nx.draw_networkx(g,
                                 pos=pos,
                                 linewidths=0,
                                 width=bb_width,
                                 node_color=ncc,
                                 vmin=min(ncc),
                                 vmax=max(ncc),
                                 cmap=plot.cm.Dark2,
                                 font_size=8,
                                 edge_color=bb_color)
                plot.savefig(root + '_' + str(compNo) +
                             '_result (network clustering).png')
                if show_the_detailed_clustering_result == True:
                    for key, sub_g in sg.items():
                        # A list entry holds several sub-components; merge
                        # them into one graph for drawing.
                        if type(sub_g) == list:
                            show_g = nx.Graph()
                            for sub_c in sub_g:
                                show_g.add_nodes_from(sub_c.nodes())
                                show_g.add_edges_from(sub_c.edges())
                        else:
                            show_g = sub_g
                        fig_no += 1
                        net_sub_fig = plot.figure(fig_no,
                                                  figsize=(12, 8),
                                                  facecolor='white')
                        plot.title('Target network = ' + tail + ' (' + key +
                                   ')')
                        plot.xticks(())
                        plot.yticks(())
                        net_sub_fig.set_tight_layout(True)
                        nx.draw_networkx(show_g,
                                         pos=pos,
                                         linewidths=0,
                                         font_size=8)
                        plot.savefig(root + '_' + str(compNo) + '_result (' +
                                     key + ').png')
            plot.show()
            plot.close('all')
        # NOTE(review): returns the last processed component and its random
        # networks only.
        return (g, rgs)
    else:
        return -1
def test_connected_double_edge_swap_not_connected():
    """A disconnected input graph must raise NetworkXError."""
    G = nx.path_graph(3)
    nx.add_path(G, [10, 11, 12])
    with pytest.raises(nx.NetworkXError):
        G = nx.connected_double_edge_swap(G)
def test_connected_double_edge_swap_small():
    """A graph too small to swap must raise NetworkXError."""
    tiny = nx.path_graph(3)
    with pytest.raises(nx.NetworkXError):
        G = nx.connected_double_edge_swap(tiny)
# Script: degree-preserving rewiring of the FB4000 network for RD.f90.
# NOTE(review): the input path is machine-specific (hard-coded user home).
G = nx.read_adjlist('/Users/qinglingzhang/network_FB4000.csv', comments='#',
                    create_using=nx.Graph(), delimiter=',', nodetype=int,
                    encoding='utf-8')
# Node 0 is dropped before analysis -- presumably an artifact of the input
# file; TODO confirm.
G.remove_node(0)
G_assort = nx.degree_assortativity_coefficient(G)
N = len(G)          # number of nodes
L = G.size()        # number of edges
# NetworkX 1.x API: degree() returns a dict here.
g_degrees = list(G.degree().values())
kmax = max(g_degrees)
# Swap budget: 100 swaps per edge.
rew = L * 100
#rew=L/2.0*math.log(1000000)
G1 = G.copy()  #graph to be degree_preserving rewired
# NOTE(review): _window_threshold is a private parameter of
# connected_double_edge_swap; relies on NetworkX internals.
nx.connected_double_edge_swap(G1, nswap=rew, _window_threshold=3)
nx.write_adjlist(G1, "FB4000_rew1.csv")


#delete the first column so that RD.f90 could use the network directly.
def del_cvs_col(fname, newfname, idxs, delimiter=' '):
    # Copy fname to newfname, dropping the columns whose indices are in
    # idxs; input is split on `delimiter`, output is comma-separated.
    with open(fname) as csvin, open(newfname, 'w') as csvout:
        reader = csv.reader(csvin, delimiter=delimiter)
        writer = csv.writer(csvout, delimiter=',')
        rows = (tuple(item for idx, item in enumerate(row) if idx not in idxs)
                for row in reader)
        writer.writerows(rows)


del_cvs_col('FB4000_rew1.csv', 'FB4000_rew1f.csv', [0])
#generate corelist
data = nx.core_number(G)