def edge_centrality(net):
    """Collect four edge-centrality measures for every edge of *net*.

    For each edge (as keyed by edge betweenness) the list
    [betweenness, current-flow betweenness, load, communicability] is stored.
    Pairwise correlations of the four series are written to ``bl.csv`` via the
    module-level ``corr`` helper, one row of correlations per series.

    Parameters
    ----------
    net : networkx graph

    Returns
    -------
    dict mapping edge -> [betweenness, flow, load, communicability]
    """
    values = {}
    # NOTE: nx.edge_betweenness / nx.edge_load are legacy aliases; newer
    # networkx releases expose them as *_centrality -- confirm the pinned version.
    bet = nx.edge_betweenness(net, normalized=True)
    flow = nx.edge_current_flow_betweenness_centrality(net, normalized=True)
    load = nx.edge_load(net)
    com = nx.communicability(net)
    bet_list = []
    flow_list = []
    load_list = []
    com_list = []
    # Python 3: dict.iteritems() no longer exists -- use items().
    for edge, value in bet.items():
        origin, end = edge
        # flow may key the edge in the opposite orientation
        value_flow = max(flow.get(edge), flow.get((end, origin)))
        values[edge] = [value, value_flow, load.get(edge),
                        com.get(origin).get(end)]
        bet_list.append(value)
        flow_list.append(value_flow)
        load_list.append(load.get(edge))
        com_list.append(com.get(origin).get(end))
    series = [bet_list, load_list, flow_list, com_list]
    # with-statement guarantees the file is closed even if corr() raises;
    # the original also printed the (None) return of write(), which was noise.
    with open("bl.csv", 'w') as file3:
        for xt in series:
            for yt in series:
                corr(xt, yt, file3)
            file3.write("\n")
    return values
def edge_centrality(net):
    # Python 2 variant (iteritems / print statement): gathers betweenness,
    # current-flow betweenness, load and communicability per edge, and writes
    # pairwise correlations of the four series to bl.csv via corr().
    values = {}
    bet = nx.edge_betweenness(net, normalized=True)
    flow = nx.edge_current_flow_betweenness_centrality(net, normalized=True)
    load = nx.edge_load(net)
    com = nx.communicability(net)
    bet_list = []
    flow_list = []
    load_list = []
    com_list = []
    for edge, value in bet.iteritems():
        origin, end = edge
        # flow may key the edge in the reverse orientation; take whichever exists
        value_flow = max(flow.get(edge), flow.get((end, origin)))
        values[edge] = [
            value, value_flow,
            load.get(edge),
            com.get(origin).get(end)
        ]
        bet_list.append(value)
        flow_list.append(value_flow)
        load_list.append(load.get(edge))
        com_list.append(com.get(origin).get(end))
    file3 = open("bl.csv", 'w')
    for xt in [bet_list, load_list, flow_list, com_list]:
        for yt in [bet_list, load_list, flow_list, com_list]:
            corr(xt, yt, file3)
        # NOTE(review): prints the None returned by write() -- looks accidental
        print file3.write("\n")
    file3.close()
    return values
def edge_current_flow_betweenness_centrality(f, ft, gnx):
    """Compute edge current-flow betweenness of *gnx* under the project timer.

    Each "(u, v),score" pair is appended to file-like *f*, one per line, and
    the full score dict is returned.
    """
    start = timer.start(ft, 'Edge Current Flow Betweenness Centrality')
    scores = nx.edge_current_flow_betweenness_centrality(gnx)
    timer.stop(ft, start)
    for edge, score in scores.items():
        f.writelines('%s,%s\n' % (edge, score))
    return scores
def centralize_graph(graph, epb='lgth', efb='capa', ndg='capa', nec='capa', npr='capa'):
    """Compute edge centralities.

    Parameters
    ----------
    graph : original graph
    epb : edge property used for computation of edge path betweenness
    efb : edge property used for edge flow betweenness
    ndg : edge property used for degree centrality
    nec : edge property used for eigenvector centrality
    npr : edge property used for page rank

    Returns
    -------
    Copy of *graph* with 'epb', 'efb', 'ndg', 'nec', 'npr' set on every edge.
    """
    def pick(table, edge):
        # Centrality dicts may key an edge in either orientation.
        return table[edge] if edge in table else table[edge[::-1]]

    centralized = graph.copy()
    edge_data = centralized.edges(data=True)

    # Normalise capacities to sum to one; lengths are inverse capacities.
    capa = 1.0 * np.array([attrs['capa'] for _, _, attrs in edge_data])
    capa /= capa.sum()
    lgth = 1.0 / capa
    for idx, (_, _, attrs) in enumerate(edge_data):
        attrs['capa'] = capa[idx]
        attrs['lgth'] = lgth[idx]

    path_bet = nx.edge_betweenness_centrality(centralized, weight=epb)
    flow_bet = nx.edge_current_flow_betweenness_centrality(centralized, weight=efb)

    # Node centralities of the line graph double as edge centralities here.
    lg = nx.line_graph(centralized)
    node_degree = centralized.degree(weight=ndg)
    for u, v, attrs in lg.edges(data=True):
        # A line-graph edge joins two original edges sharing exactly one node.
        shared = list(set(u).intersection(v))[0]
        attrs[ndg] = node_degree[shared]
    eig = nx.eigenvector_centrality_numpy(lg, weight=ndg)
    rank = nx.pagerank(lg, weight=ndg)
    deg = dict(lg.degree(weight=ndg))

    for u, v, attrs in edge_data:
        edge = (u, v)
        attrs['epb'] = pick(path_bet, edge)
        attrs['efb'] = pick(flow_bet, edge)
        attrs['ndg'] = pick(deg, edge)
        attrs['nec'] = pick(eig, edge)
        attrs['npr'] = pick(rank, edge)
    return centralized
def save_weights_rewire(self, epoch):
    # Snapshot the current layer weights and, for the evolutionary variant,
    # rewire the sparse connectivity masks -- optionally guided by the edge
    # current-flow betweenness centrality of the weight graph.
    # NOTE(review): indentation reconstructed from a collapsed source; the
    # nesting of the rewire section under the "evolutionary" branch is the
    # most plausible reading -- confirm against the original file.
    if (self.typemlp == "dense"):
        self.w1 = self.model.get_layer("dense_1").get_weights()
        self.w2 = self.model.get_layer("dense_2").get_weights()
        self.w3 = self.model.get_layer("dense_3").get_weights()
        self.w4 = self.model.get_layer("dense_4").get_weights()
    else:
        # sparse variants share the final dense output layer
        self.w1 = self.model.get_layer("sparse_1").get_weights()
        self.w2 = self.model.get_layer("sparse_2").get_weights()
        self.w3 = self.model.get_layer("sparse_3").get_weights()
        self.w4 = self.model.get_layer("dense_4").get_weights()
    self.wSRelu1 = self.model.get_layer("srelu1").get_weights()
    self.wSRelu2 = self.model.get_layer("srelu2").get_weights()
    self.wSRelu3 = self.model.get_layer("srelu3").get_weights()
    if self.typemlp == "evolutionary":
        centrality = None
        if self.use_centralities:
            # Build one weighted graph out of all four weight matrices and
            # score every connection by current-flow betweenness.
            weight_masks = np.array(
                [self.w1[0], self.w2[0], self.w3[0], self.w4[0]])
            print("Started creating graph at",
                  datetime.datetime.today().strftime('%Y%m%d_%H%M%S'))
            graph = weighted_graph_from_weight_masks(weight_masks)
            print("Graph created, start calculating centrality",
                  datetime.datetime.today().strftime('%Y%m%d_%H%M%S'))
            centrality = nx.edge_current_flow_betweenness_centrality(graph)
            print("Centrality calculated",
                  datetime.datetime.today().strftime('%Y%m%d_%H%M%S'))
            # Diagnostic histogram of the centrality distribution.
            plt.figure()
            plt.hist(centrality.values(), bins=100)
            plt.title('Centrality distribution')
            plt.xlabel("value")
            plt.ylabel("Frequency")
            plt.savefig(result_folder + "centralities.png")
            plt.close()
        # Rewire the three sparse layers (centrality may be None when
        # use_centralities is off -- rewireMask handles both).
        [self.wm1, self.wm1Core] = self.rewireMask(self.w1[0], 0, centrality,
                                                   self.similarities[0],
                                                   self.noPar1, epoch)
        [self.wm2, self.wm2Core] = self.rewireMask(self.w2[0], 1, centrality,
                                                   self.similarities[1],
                                                   self.noPar2, epoch)
        [self.wm3, self.wm3Core] = self.rewireMask(self.w3[0], 2, centrality,
                                                   self.similarities[2],
                                                   self.noPar3, epoch)
        print(sum(sum(connections) for connections in self.wm1))
        # Zero out the weights of the connections that were dropped.
        self.w1[0] = self.w1[0] * self.wm1Core
        self.w2[0] = self.w2[0] * self.wm2Core
        self.w3[0] = self.w3[0] * self.wm3Core
def f31(self):
    """Feature 31: sum of edge current-flow betweenness values (disabled)."""
    # Feature is switched off: everything below the return is unreachable.
    return "ND"
    start = 0
    try:
        values = nx.edge_current_flow_betweenness_centrality(self.G).values()
        res = sum(values)
    except nx.NetworkXError:
        res = "ND"
    stop = 0
    # self.feature_time.append(stop - start)
    return res
def f31(self):
    # Feature f31: sum of all edge current-flow betweenness centralities.
    # NOTE(review): the unconditional return below disables the computation;
    # presumably the feature is deliberately switched off -- confirm.
    return "ND"
    start = 0  # timing scaffold, unused while disabled
    try:
        c_vals = nx.edge_current_flow_betweenness_centrality(
            self.G).values()
        res = sum(c_vals)
    except nx.NetworkXError:
        # e.g. disconnected graph -- report "not defined"
        res = "ND"
    stop = 0
    # self.feature_time.append(stop - start)
    return res
def induce_weight(G, weight='net_flow', efficiency='speed', verbose=False):
    """Attach the centrality named by *weight* as an edge attribute of G.

    Supported weights: 'net_flow' (project helper, parameterised by
    *efficiency*), 'betweenness' and 'current_flow'.  When *verbose* and the
    attribute already exists, the user is warned and may abort.
    """
    if verbose and nx.get_edge_attributes(G, weight):
        exit_status = existing_edge_attribute_warning(weight)
        if exit_status:
            return exit_status
    if weight == 'net_flow':
        values = net_flow(G, efficiency=efficiency)
    elif weight == 'betweenness':
        values = nx.edge_betweenness_centrality(G)
    elif weight == 'current_flow':
        values = nx.edge_current_flow_betweenness_centrality(G)
    else:
        raise Exception(f"Weight '{weight}' unknown!")
    nx.set_edge_attributes(G, values=values, name=weight)
def cal_edge_current_flow_betweenness_centrality(fn1, fn2):
    """Edge current-flow betweenness for the edges listed in *fn2*.

    Builds a graph from the full edge list in *fn1*, computes normalized
    current-flow betweenness per connected component (the measure is only
    defined on connected graphs), and returns a dict mapping each edge of
    *fn2* to its score (0. for edges never scored).
    """
    edges = prep.read_edges(fn2)
    sth = {edge: 0. for edge in edges}
    G = nx.Graph()
    edges_all = prep.read_edges(fn1)
    G.add_edges_from(edges_all)
    # nx.connected_component_subgraphs() was removed in networkx 2.4;
    # construct the component subgraphs explicitly instead.
    graphs = [G.subgraph(c) for c in nx.connected_components(G)]
    for g in graphs:
        edge_flow = nx.edge_current_flow_betweenness_centrality(
            g, normalized=True, weight=None, dtype=np.float32)
        # Scores are plain floats, never None, so copy them over directly.
        for edge, score in edge_flow.items():
            sth[edge] = score
    return sth
def __by_centrality__(G, w_max=True, attribute="weight", type="betweenness"):
    """ helper function to find the most valuable edge
    returns the edge with the highest/ lowest score

    Input
    G         networkx graph
    w_max     if true the edge with the highest score is returned,
              otherwise the edge with the smallest score
    attribute str of the edge attribute name used as weight
    type      centrality measure to use:
              betweenness              edge betweenness centrality
              current_flow_betweenness current-flow betweenness centrality
              load                     load centrality
    """
    if type == "betweenness":
        scores = nx.edge_betweenness_centrality(G, weight=attribute)
    elif type == "current_flow_betweenness":
        scores = nx.edge_current_flow_betweenness_centrality(
            G, weight=attribute)
    elif type == "load":
        scores = nx.algorithms.centrality.edge_load_centrality(G)
    else:
        print("method not implemented, please define your own function")
        return None
    # Select the extremum edge by its centrality score.
    extremum = max if w_max else min
    return extremum(scores, key=scores.get)
def edgeCentralities(df, G):
    # Builds one wide DataFrame per edge-centrality measure (columns = edges)
    # and returns them bundled in a pandas Panel keyed by measure name.
    # NOTE(review): pd.Panel was removed in pandas 1.0 and nx.edge_load is a
    # legacy API -- this function needs the older library versions it was
    # written against.  The *df* parameter is unused here.
    DF2 = pd.DataFrame(nx.edge_betweenness_centrality(G),
                       index=['edge_betweenness_centrality'])
    DF2 = DF2.T.unstack(level=0)
    DF2.columns = DF2.columns.get_level_values(1)
    DF3 = pd.DataFrame(nx.edge_current_flow_betweenness_centrality(G, normalized=True),
                       index=['edge_current_flow_betweenness_centrality'])
    DF3 = DF3.T.unstack(level=0)
    DF3.columns = DF3.columns.get_level_values(1)
    DF4 = pd.DataFrame(nx.edge_load(G), index=['edge_load'])
    DF4 = DF4.T.unstack(level=0)
    DF4.columns = DF4.columns.get_level_values(1)
    # DFB is computed but never returned
    DFB = pd.concat([DF2, DF3, DF4], axis=0)  #buggy
    return pd.Panel({'edge_betweenness_centrality': DF2,
                     'edge_current_flow_betweenness_centrality': DF3,
                     'edge_load': DF4})
def centrality(self):
    # Compute a battery of node/edge centrality measures for self.graph and
    # dump them to <self.DIR>/centrality.json.  Tuple edge keys are converted
    # to strings because JSON object keys must be strings.
    # NOTE(review): communicability_centrality* were renamed in newer
    # networkx (subgraph_centrality*) -- this needs the older release.
    result = {}
    result['degree_centrality'] = nx.degree_centrality(self.graph)
    if self.directed == 'directed':
        result['in_degree_centrality'] = nx.in_degree_centrality(
            self.graph)
        result['out_degree_centrality'] = nx.out_degree_centrality(
            self.graph)
    result['closeness_centrality'] = nx.closeness_centrality(self.graph)
    result['betweenness_centrality'] = nx.betweenness_centrality(
        self.graph)
    # fix the tuple cant decode into json problem
    stringify_temp = {}
    temp = nx.edge_betweenness_centrality(self.graph)
    for key in temp.keys():
        stringify_temp[str(key)] = temp[key]
    result['edge_betweenness_centrality'] = stringify_temp
    if self.directed == 'undirected':
        # current-flow measures are only defined for undirected graphs
        result[
            'current_flow_closeness_centrality'] = nx.current_flow_closeness_centrality(
                self.graph)
        result[
            'current_flow_betweenness_centrality'] = nx.current_flow_betweenness_centrality(
                self.graph)
        stringify_temp = {}
        temp = nx.edge_current_flow_betweenness_centrality(self.graph)
        for key in temp.keys():
            stringify_temp[str(key)] = temp[key]
        result['edge_current_flow_betweenness_centrality'] = stringify_temp
        result[
            'approximate_current_flow_betweenness_centrality'] = nx.approximate_current_flow_betweenness_centrality(
                self.graph)
    result['eigenvector_centrality'] = nx.eigenvector_centrality(
        self.graph)
    result[
        'eigenvector_centrality_numpy'] = nx.eigenvector_centrality_numpy(
            self.graph)
    result['katz_centrality'] = nx.katz_centrality(self.graph)
    result['katz_centrality_numpy'] = nx.katz_centrality_numpy(
        self.graph)
    result['communicability'] = nx.communicability(self.graph)
    result['communicability_exp'] = nx.communicability_exp(self.graph)
    result[
        'communicability_centrality'] = nx.communicability_centrality(
            self.graph)
    result[
        'communicability_centrality_exp'] = nx.communicability_centrality_exp(
            self.graph)
    result[
        'communicability_betweenness_centrality'] = nx.communicability_betweenness_centrality(
            self.graph)
    result['estrada_index'] = nx.estrada_index(self.graph)
    result['load_centrality'] = nx.load_centrality(self.graph)
    # stringify edge-keyed load results for JSON as well
    stringify_temp = {}
    temp = nx.edge_load(self.graph)
    for key in temp.keys():
        stringify_temp[str(key)] = temp[key]
    result['edge_load'] = stringify_temp
    result['dispersion'] = nx.dispersion(self.graph)
    fname_centra = self.DIR + '/centrality.json'
    with open(fname_centra, "w") as f:
        json.dump(result, f, cls=SetEncoder, indent=2)
    print(fname_centra)
def set_edge_weight(self, edge_weight_method='weight'):
    # Overwrite each edge's 'weight' attribute of self.G according to the
    # chosen centrality or link-prediction method; 'weight' keeps existing
    # weights untouched.  Returns 1 after rewriting (None for early exit).
    # NOTE(review): for an unrecognised method name C is never bound and the
    # final loop raises NameError -- confirm intended behaviour.
    if edge_weight_method == 'weight':
        return
    # Centrality based methods
    elif edge_weight_method == 'edge_betweenness_centrality':
        print("comptuing edge_betweenness_centrality..")
        C = nx.edge_betweenness_centrality(self.G, weight='weight')
        print("done!")
    elif edge_weight_method == 'edge_betweenness_centrality_subset':
        print("comptuing edge_betweenness_centrality_subset..")
        # NOTE(review): despite its name this branch calls the current-flow
        # (non-subset) variant -- looks like a copy/paste slip; confirm.
        C = nx.edge_current_flow_betweenness_centrality(self.G,
                                                        weight='weight')
        print('done')
    elif edge_weight_method == 'edge_current_flow_betweenness_centrality_subset':
        print(
            "comptuing edge_current_flow_betweenness_centrality_subset..")
        C = nx.edge_current_flow_betweenness_centrality_subset(
            self.G, weight='weight')
        print('done')
    elif edge_weight_method == 'edge_load_centrality':
        print("comptuing edge_load_centrality..")
        C = nx.edge_load_centrality(self.G)
        print('done!')
    # Link Prediction based methods (these yield (u, v, score) triples)
    elif edge_weight_method == 'adamic_adar_index':
        print("comptuing adamic_adar_index ..")
        preds = nx.adamic_adar_index(self.G, self.G.edges())
        C = {}
        for u, v, p in preds:
            C[(u, v)] = p
    elif edge_weight_method == 'ra_index_soundarajan_hopcroft':
        print("comptuing ra_index_soundarajan_hopcroft ..")
        preds = nx.ra_index_soundarajan_hopcroft(self.G, self.G.edges())
        C = {}
        for u, v, p in preds:
            C[(u, v)] = p
    elif edge_weight_method == 'preferential_attachment':
        print("comptuing preferential_attachment ..")
        preds = nx.preferential_attachment(self.G, self.G.edges())
        C = {}
        for u, v, p in preds:
            C[(u, v)] = p
    #elif edge_weight_method=='cn_soundarajan_hopcroft':
    #    print("comptuing cn_soundarajan_hopcroft ..")
    #    preds=nx.cn_soundarajan_hopcroft(self.G,self.G.edges())
    #    C={}
    #    for u, v, p in preds:
    #        C[(u,v)]=p
    elif edge_weight_method == 'within_inter_cluster':
        print("comptuing within_inter_cluster ..")
        preds = nx.within_inter_cluster(self.G, self.G.edges())
        C = {}
        for u, v, p in preds:
            C[(u, v)] = p
    elif edge_weight_method == 'resource_allocation_index':
        print("comptuing resource allocation index ..")
        preds = nx.resource_allocation_index(self.G, self.G.edges())
        C = {}
        for u, v, p in preds:
            C[(u, v)] = p
    elif edge_weight_method == 'jaccard_coefficient':
        print("comptuing jaccard_coefficient..")
        preds = nx.jaccard_coefficient(self.G, self.G.edges())
        C = {}
        for u, v, p in preds:
            C[(u, v)] = p
        print('done!')
    # Rewrite every edge weight from the computed score table.
    for u, v, d in self.G.edges(data=True):
        if edge_weight_method == None:
            d['weight'] = 1
        else:
            d['weight'] = C[(u, v)]
    return 1
def _calculate(self, include: set):
    """Populate self._features with edge current-flow betweenness scores."""
    scores = nx.edge_current_flow_betweenness_centrality(self._gnx)
    self._features = scores
def generate_graph_features(glycan, libr=None):
    """compute graph features of glycan\n
    | Arguments:
    | :-
    | glycan (string): glycan in IUPAC-condensed format
    | libr (list): library of monosaccharides; if you have one use it, otherwise a comprehensive lib will be used\n
    | Returns:
    | :-
    | Returns a pandas dataframe with different graph features as columns and glycan as row
    """
    if libr is None:
        libr = lib
    g = glycan_to_nxGraph(glycan, libr=libr)
    #nbr of different node features:
    nbr_node_types = len(set(nx.get_node_attributes(g, "labels")))
    #adjacency matrix:
    # NOTE(review): nx.to_numpy_matrix was removed in networkx 3.0
    # (to_numpy_array) -- requires the older networkx release.
    A = nx.to_numpy_matrix(g)
    N = A.shape[0]
    diameter = nx.algorithms.distance_measures.diameter(g)
    # degree-based statistics derived from adjacency row sums
    deg = np.array([np.sum(A[i, :]) for i in range(N)])
    dens = np.sum(deg) / 2
    avgDeg = np.mean(deg)
    varDeg = np.var(deg)
    maxDeg = np.max(deg)
    nbrDeg4 = np.sum(deg > 3)
    branching = np.sum(deg > 2)   # nodes with more than two neighbours
    nbrLeaves = np.sum(deg == 1)  # terminal monosaccharides
    deg_to_leaves = np.array([np.sum(A[:, deg == 1]) for i in range(N)])
    max_deg_leaves = np.max(deg_to_leaves)
    mean_deg_leaves = np.mean(deg_to_leaves)
    deg_assort = nx.degree_assortativity_coefficient(g)
    # centrality families: betweenness, Katz, closeness, current-flow,
    # edge current-flow, load, harmonic, second-order
    betweeness_centr = np.array(
        pd.DataFrame(nx.betweenness_centrality(g), index=[0]).iloc[0, :])
    betweeness = np.mean(betweeness_centr)
    betwVar = np.var(betweeness_centr)
    betwMax = np.max(betweeness_centr)
    betwMin = np.min(betweeness_centr)
    eigen = np.array(
        pd.DataFrame(nx.katz_centrality_numpy(g), index=[0]).iloc[0, :])
    eigenMax = np.max(eigen)
    eigenMin = np.min(eigen)
    eigenAvg = np.mean(eigen)
    eigenVar = np.var(eigen)
    close = np.array(
        pd.DataFrame(nx.closeness_centrality(g), index=[0]).iloc[0, :])
    closeMax = np.max(close)
    closeMin = np.min(close)
    closeAvg = np.mean(close)
    closeVar = np.var(close)
    flow = np.array(
        pd.DataFrame(nx.current_flow_betweenness_centrality(g),
                     index=[0]).iloc[0, :])
    flowMax = np.max(flow)
    flowMin = np.min(flow)
    flowAvg = np.mean(flow)
    flowVar = np.var(flow)
    flow_edge = np.array(
        pd.DataFrame(nx.edge_current_flow_betweenness_centrality(g),
                     index=[0]).iloc[0, :])
    flow_edgeMax = np.max(flow_edge)
    flow_edgeMin = np.min(flow_edge)
    flow_edgeAvg = np.mean(flow_edge)
    flow_edgeVar = np.var(flow_edge)
    load = np.array(pd.DataFrame(nx.load_centrality(g), index=[0]).iloc[0, :])
    loadMax = np.max(load)
    loadMin = np.min(load)
    loadAvg = np.mean(load)
    loadVar = np.var(load)
    harm = np.array(
        pd.DataFrame(nx.harmonic_centrality(g), index=[0]).iloc[0, :])
    harmMax = np.max(harm)
    harmMin = np.min(harm)
    harmAvg = np.mean(harm)
    harmVar = np.var(harm)
    secorder = np.array(
        pd.DataFrame(nx.second_order_centrality(g), index=[0]).iloc[0, :])
    secorderMax = np.max(secorder)
    secorderMin = np.min(secorder)
    secorderAvg = np.mean(secorder)
    secorderVar = np.var(secorder)
    # largest non-empty k-corona / k-core and the k at which they occur
    x = np.array([len(nx.k_corona(g, k).nodes()) for k in range(N)])
    size_corona = x[x > 0][-1]
    k_corona = np.where(x == x[x > 0][-1])[0][-1]
    x = np.array([len(nx.k_core(g, k).nodes()) for k in range(N)])
    size_core = x[x > 0][-1]
    k_core = np.where(x == x[x > 0][-1])[0][-1]
    # spectral gap and stationary-distribution entropy of the lazy random walk
    M = ((A + np.diag(np.ones(N))).T / (deg + 1)).T
    eigval, vec = eigsh(M, 2, which='LM')
    egap = 1 - eigval[0]
    distr = np.abs(vec[:, -1])
    distr = distr / sum(distr)
    entropyStation = np.sum(distr * np.log(distr))
    # NOTE(review): betwMin, flowMin, loadMin, k_corona and k_core are
    # computed but deliberately absent from the feature vector -- confirm.
    features = np.array([
        diameter, branching, nbrLeaves, avgDeg, varDeg, maxDeg, nbrDeg4,
        max_deg_leaves, mean_deg_leaves, deg_assort, betweeness, betwVar,
        betwMax, eigenMax, eigenMin, eigenAvg, eigenVar, closeMax, closeMin,
        closeAvg, closeVar, flowMax, flowAvg, flowVar, flow_edgeMax,
        flow_edgeMin, flow_edgeAvg, flow_edgeVar, loadMax, loadAvg, loadVar,
        harmMax, harmMin, harmAvg, harmVar, secorderMax, secorderMin,
        secorderAvg, secorderVar, size_corona, size_core, nbr_node_types,
        egap, entropyStation, N, dens
    ])
    col_names = [
        'diameter', 'branching', 'nbrLeaves', 'avgDeg', 'varDeg', 'maxDeg',
        'nbrDeg4', 'max_deg_leaves', 'mean_deg_leaves', 'deg_assort',
        'betweeness', 'betwVar', 'betwMax', 'eigenMax', 'eigenMin',
        'eigenAvg', 'eigenVar', 'closeMax', 'closeMin', 'closeAvg',
        'closeVar', 'flowMax', 'flowAvg', 'flowVar', 'flow_edgeMax',
        'flow_edgeMin', 'flow_edgeAvg', 'flow_edgeVar', 'loadMax', 'loadAvg',
        'loadVar', 'harmMax', 'harmMin', 'harmAvg', 'harmVar', 'secorderMax',
        'secorderMin', 'secorderAvg', 'secorderVar', 'size_corona',
        'size_core', 'nbr_node_types', 'egap', 'entropyStation', 'N', 'dens'
    ]
    feat_dic = {col_names[k]: features[k] for k in range(len(features))}
    return pd.DataFrame(feat_dic, index=[glycan])
def edgeflow(net):
    """Distribution of normalized edge current-flow betweenness values."""
    centrality = nx.edge_current_flow_betweenness_centrality(net,
                                                             normalized=True)
    return distri(centrality.values(), 'edge_information_centrality')
def graph_all(inp):
    """Compute a battery of weighted-graph statistics.

    Parameters
    ----------
    inp : sequence (t, e, i, r, G, pos) where G is a networkx graph with
        'capa' (capacity) and 'lgth' (length) edge attributes, and pos maps
        each node to a coordinate sequence (first two entries used).

    Returns
    -------
    (t, e, i, r, data, label) -- the pass-through identifiers plus parallel
    lists of statistic values and their names.
    """
    [t, e, i, r, G, pos] = inp
    data = []
    label = []
    N = G.number_of_nodes()
    ################################################# degree
    deg = [val for node, val in G.degree(weight='capa')]
    label.append('mean[degree]')
    data.append(np.nanmean(deg))
    label.append('sd[degree]')
    data.append(np.nanstd(deg))
    label.append('skewness[degree]')
    data.append(sp.stats.skew(deg))
    ################################################# structure
    label.append('clustering coefficient')
    data.append(nx.average_clustering(G, weight='capa'))
    label.append('assortativity')
    data.append(nx.degree_pearson_correlation_coefficient(G, weight='capa'))
    ################################################# distances
    dists = nx.all_pairs_dijkstra_path_length(G, weight='lgth')
    dist = [[valv for keyv, valv in valu.items()] for keyu, valu in dists]
    # eccentricities: longest shortest path out of each node
    ecce = np.nanmax(np.array(dist), axis=0)
    label.append('mean[distance]')
    data.append(np.nanmean(dist))
    label.append('sd[distance]')
    data.append(np.nanstd(dist))
    label.append('skewness[distance]')
    data.append(sp.stats.skew(np.reshape(dist, -1)))
    label.append('radius')
    data.append(ecce.min())
    label.append('diameter')
    data.append(ecce.max())
    ################################################# eigenvalues
    try:
        spec = np.sort(nx.laplacian_spectrum(G, weight='capa'))
    except Exception:
        # was a bare except: keep the best-effort fallback but stop
        # swallowing SystemExit/KeyboardInterrupt
        warnings.warn(
            'Computation of Laplacian spectrum failed, effective resistance and algebraic connectivity are not reliable.'
        )
        spec = np.ones(N)
    label.append('effective resistance')
    data.append(1.0 / np.nansum(np.divide(1.0 * N, spec[1:N - 1])))
    label.append('algebraic connectivity')
    data.append(spec[1])
    ################################################# betweenness
    # materialize the dict view: on Python 3 numpy cannot reduce over
    # dict_values directly (np.nanmean(dict.values()) raises)
    flow = list(
        nx.edge_current_flow_betweenness_centrality(
            G, weight='capa', normalized=True).values())
    label.append('mean[betweenness]')
    data.append(np.nanmean(flow))
    label.append('sd[betweenness]')
    data.append(np.nanstd(flow))
    label.append('skewness[betweenness]')
    data.append(sp.stats.skew(flow))
    ################################################# angles
    angle_angle = []
    angle_weight = []
    for u, v, d in G.edges(data=True):
        angle_angle.append(help_angle(np.subtract(pos[u][0:2], pos[v][0:2])))
        angle_weight.append(d['capa'])
    # fold angles onto [0, 180)
    angle_angle = np.mod(angle_angle, 180)
    # mean capacity of the edges lying at each canonical lattice angle
    for ang in (0, 45, 60, 90, 120, 135):
        label.append('angle %03d' % ang)
        data.append(
            np.nansum(np.where(angle_angle == ang, angle_weight, 0)) /
            np.nansum(np.where(angle_angle == ang, 1, 0)))
    label.append('angle ratio 00-90')
    data.append(data[-6] / data[-3])  # angle 000 over angle 090
    return t, e, i, r, data, label
def add_current_flow_betweenness_edge(graf):
    """Annotate every edge of *graf* with its current-flow betweenness.

    The scores are stored under the edge attribute name 'cfb'.
    """
    # print() works identically in Python 2 and 3 for a single argument
    print("Adding current flow betweenness to edges")
    cfb_dict = nx.edge_current_flow_betweenness_centrality(graf)
    # networkx >= 2.0 expects values/name keywords; the old positional order
    # (G, name, values) used here previously is no longer valid.
    nx.set_edge_attributes(graf, values=cfb_dict, name='cfb')
# Mark links that belong to the minimum spanning tree (column 5), then
# correlate MST membership with the observed link weights (column 2).
min_edge = Minimum.edges()
for i in range(n):
    if (str(int(t_val[i, 0])), str(int(t_val[i, 1]))) in min_edge:
        t_val[i, 5] = 1
sp_min = stats.spearmanr(t_val[:, 2], t_val[:, 5])
print("unweighted links selected from minimum spanning tree", sp_min)

# In[95]:

#########task 7 - Current-flow betweenness centrality uses an electrical
#current model for information spreading in contrast to betweenness centrality which uses shortest paths.
edge_cu = nx.edge_current_flow_betweenness_centrality(net)
for i in range(n):
    # centrality dicts key each edge in one orientation only -- check both
    if (str(int(t_val[i, 0])), str(int(t_val[i, 1]))) in edge_cu.keys():
        t_val[i, 6] = edge_cu[(str(int(t_val[i, 0])), str(int(t_val[i, 1])))]
    if (str(int(t_val[i, 1])), str(int(t_val[i, 0]))) in edge_cu.keys():
        t_val[i, 6] = edge_cu[(str(int(t_val[i, 1])), str(int(t_val[i, 0])))]
sp_cu = stats.spearmanr(t_val[:, 2], t_val[:, 6])
print("edge_current_flow_betweenness_centrality", sp_cu)

# In[100]:

#########task 7 - edge_load_centrality
import operator as op
import numpy as np
import time

#start_time = time.time()

# This contains the edges with the volumes and the costs
graph2 = nx.read_edgelist(
    "Transportation-Networks-Data/Anaheim/Anaheim-flow-3.txt",
    create_using=nx.MultiGraph(),
    nodetype=int,
    data=[('volume', float), ('cost', float)])

# Compute the betweenness centrality for weighted graph
# (cost-weighted current-flow betweenness, exact 'full' solver)
bc_flow_w = nx.edge_current_flow_betweenness_centrality(graph2,
                                                        normalized=True,
                                                        weight='cost',
                                                        solver='full')
# rank edges from highest to lowest centrality
sorted_bc_flow_w = sorted(bc_flow_w.items(),
                          key=op.itemgetter(1),
                          reverse=True)

# Enumerate every ordered node pair (i, j) with i != j.
all_pairs_2 = []
for i in graph2.nodes():
    for j in graph2.nodes():
        if (i != j):
            all_pairs_2.append((i, j))

# Compute and store the sum of all dijkstra shortest paths
# divided by the number of node pairs
shortest_paths_w = {}
key=op.itemgetter(1), reverse=True) #for i in graph1.edges(): # print(i) # #for i in graph2.edges(): # print(i) #for i in graph1.nodes(): # for j in graph2.nodes(): # if (i != j): # a = nx.get_edge_data(i,j) # print(a) edge_flow_mini = nx.edge_current_flow_betweenness_centrality(graph2, normalized=True, weight='weight', solver='full') flow_mini = sorted(edge_flow_mini.items(), key=op.itemgetter(1), reverse=True) edge_flow_mini_2 = nx.edge_current_flow_betweenness_centrality(graph2, normalized=True, weight=None, solver='full') flow_mini_2 = sorted(edge_flow_mini_2.items(), key=op.itemgetter(1), reverse=True)
def edgeflow(net):
    """Feed normalized edge current-flow betweenness values into distri()."""
    values = nx.edge_current_flow_betweenness_centrality(
        net, normalized=True).values()
    return distri(values, 'edge_information_centrality')
def graph_all(G, pos):
    # Python 2 / networkx 1.x variant: computes degree, structure, distance,
    # spectral, betweenness and edge-angle statistics of G (edge attrs 'capa'
    # and 'lgth'; pos maps nodes to coordinates), printing each value as it
    # goes and returning parallel (data, label) lists.
    data = []
    label = []
    N = G.number_of_nodes()
    ################################################# degree
    deg = G.degree(weight='capa').values()
    label.append('mean[degree]')
    data.append(numpy.mean(deg))
    print "mean[degree] =%f" % (numpy.mean(deg))
    # median is printed for inspection only, not stored in data
    print "median[degree] =%f" % (numpy.median(deg))
    label.append('sd[degree]')
    data.append(numpy.std(deg))
    print "sd[degree] =%f" % (numpy.std(deg))
    label.append('skewness[degree]')
    data.append(scipy.stats.skew(deg))
    print "skewness[degree] =%f" % (scipy.stats.skew(deg))
    ################################################# structure
    label.append('clustering coefficient')
    data.append(networkx.average_clustering(G, weight='capa'))
    print "clustering coefficient=%f" % (networkx.average_clustering(G, weight='capa'))
    label.append('assortativity')
    data.append(networkx.degree_pearson_correlation_coefficient(G, weight='capa'))
    print "assortativity =%f" % (networkx.degree_pearson_correlation_coefficient(G, weight='capa'))
    ################################################# distances
    dists = networkx.all_pairs_dijkstra_path_length(G, weight='lgth')
    dist = [[v for v in u.itervalues()] for u in dists.itervalues()]
    # per-node eccentricities from the all-pairs distance matrix
    ecce = numpy.array(dist).max(0)
    label.append('mean[distance]')
    data.append(numpy.mean(dist))
    print "mean[distance]=%f" % (numpy.mean(dist))
    label.append('sd[distance]')
    data.append(numpy.std(dist))
    print "sd[distance]=%f" % (numpy.std(dist))
    label.append('skewness[distance]')
    data.append(scipy.stats.skew(numpy.reshape(dist, -1)))
    print "skewness[distance]=%f" % (scipy.stats.skew(numpy.reshape(dist, -1)))
    label.append('radius')
    data.append(ecce.min())
    print "radius = %f" % (ecce.min())
    label.append('diameter')
    data.append(ecce.max())
    print "diameter = %f" % (ecce.max())
    ################################################# eigenvalues
    spec = numpy.sort(networkx.laplacian_spectrum(G, weight='capa'))
    label.append('effective resistance')
    data.append(1.0 / numpy.sum(numpy.divide(1.0 * N, spec[1:N - 1])))
    print "effective resistance =%f" % (1.0 / numpy.sum(numpy.divide(1.0 * N, spec[1:N - 1])))
    label.append('algebraic connectivity')
    # second-smallest Laplacian eigenvalue
    data.append(spec[1])
    print "algebraic connectivity= %f" % (spec[1])
    ################################################# betweenness
    flow = networkx.edge_current_flow_betweenness_centrality(G, weight='capa', normalized=1).values()
    label.append('mean[betweenness]')
    data.append(numpy.mean(flow))
    print "mean[betweenness]=%f" % (numpy.mean(flow))
    label.append('sd[betweenness]')
    data.append(numpy.std(flow))
    print "sd[betweenness]=%f" % (numpy.std(flow))
    label.append('skewness[betweenness]')
    data.append(scipy.stats.skew(flow))
    print "skewness[betweenness]=%f" % (scipy.stats.skew(flow))
    ################################################# angles
    # mean capacity of the edges lying at each canonical lattice angle
    angle_angle = []
    angle_weight = []
    for u, v, d in G.edges_iter(data=1):
        angle_angle.append(help_angle(numpy.subtract(pos[u][0:2], pos[v][0:2])))
        angle_weight.append(d['capa'])
    angle_angle = numpy.mod(angle_angle, 180)
    label.append('angle 000')
    a1 = numpy.sum(numpy.where(angle_angle == 0, angle_weight, 0)) / numpy.sum(numpy.where(angle_angle == 0, 1, 0))
    data.append(a1)
    print "angle 000 = %f" % a1
    label.append('angle 045')
    a2 = numpy.sum(numpy.where(angle_angle == 45, angle_weight, 0)) / numpy.sum(numpy.where(angle_angle == 45, 1, 0))
    data.append(a2)
    print "angle 045 = %f" % a2
    label.append('angle 060')
    a3 = numpy.sum(numpy.where(angle_angle == 60, angle_weight, 0)) / numpy.sum(numpy.where(angle_angle == 60, 1, 0))
    data.append(a3)
    print "angle 060 = %f" % a3
    label.append('angle 090')
    a4 = numpy.sum(numpy.where(angle_angle == 90, angle_weight, 0)) / numpy.sum(numpy.where(angle_angle == 90, 1, 0))
    data.append(a4)
    print "angle 090 = %f" % a4
    label.append('angle 120')
    a5 = numpy.sum(numpy.where(angle_angle == 120, angle_weight, 0)) / numpy.sum(numpy.where(angle_angle == 120, 1, 0))
    data.append(a5)
    print "angle 120 = %f" % a5
    label.append('angle 135')
    a6 = numpy.sum(numpy.where(angle_angle == 135, angle_weight, 0)) / numpy.sum(numpy.where(angle_angle == 135, 1, 0))
    data.append(a6)
    print "angle 135 = %f" % a6
    label.append('angle ratio 00-90')
    # ratio of the angle-000 mean to the angle-090 mean
    a7 = data[-6] / data[-3]
    data.append(a7)
    print "angle ratio 00-90 = %f" % a7
    return data, label