def minimax(G, V, t, d, isMax):
    # Terminal test is delegated to an external module `f`.
    if f.Done(G, V, t):
        return len(V) / len(G)
    if isMax:
        bestValue = 0
        # Walk outward from t, ring by ring, out to t's eccentricity.
        for i in range(nx.eccentricity(G, t)):
            for g in G.nodes():
                if nx.shortest_path_length(G, g, t) == (i + 1):
                    V_new = V.copy()
                    V_new.append(g)
                    value = minimax(G, V_new, g, d + 1, False)
                    if value >= bestValue:
                        bestValue = value
    else:
        bestValue = 1
        for i in range(nx.eccentricity(G, t)):
            for g in G.nodes():
                if nx.shortest_path_length(G, g, t) == (i + 1):
                    V_new = V.copy()
                    V_new.append(g)
                    # Alternate back to the maximizing player here; the
                    # original passed False on both branches, so the
                    # minimizer would move forever.
                    value = minimax(G, V_new, g, d + 1, True)
                    if value <= bestValue:
                        bestValue = value
    return bestValue
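Both branches above enumerate candidate moves ring by ring, calling nx.shortest_path_length once per (node, ring) pair. A minimal sketch of the same enumeration done with a single BFS; the helper name rings is hypothetical, not part of the original:

import networkx as nx

# Illustrative helper: yield, for each hop distance 1..eccentricity(t),
# the nodes of G exactly that far from t. One BFS replaces the per-pair
# shortest_path_length calls; max(dist.values()) equals eccentricity(t)
# on a connected graph.
def rings(G, t):
    dist = nx.single_source_shortest_path_length(G, t)
    for i in range(1, max(dist.values()) + 1):
        yield i, [g for g, d in dist.items() if d == i]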
def findBigEccentricity1(h, node, subinfectG, infectionG, TurerumorSource2, sourceh):
    # (Translated draft, kept commented out in the original.) What is the
    # minimum eccentricity in this infected graph? Stuck on that for now, so
    # fake it first: take the eccentricity of the bad node, then scan its
    # neighbors for one with smaller eccentricity (or one closer to the
    # center), and return the source node if none is smaller.
    # Simpler idea: just pick a neighbor that works under the current h.
    # Sometimes no neighbor works, and then the whole infected subgraph has
    # to be searched.
    newnodes = 5000  # sentinel meaning "no feasible node found yet"
    for neighbour in list(subinfectG.neighbors(node)):  # only searches one ring of nodes
        print('Trying neighbor ' + str(neighbour) + ' of node ' + str(node))
        if isReceived(neighbour, h, subinfectG, infectionG):
            print('Feasible node found among the neighbors: ' + str(neighbour))
            newnodes = neighbour
            break
    allnodelist = list(nx.bfs_tree(subinfectG, source=TurerumorSource2, depth_limit=sourceh).nodes)
    if newnodes == 5000:
        print('No neighbor satisfies h=' + str(h) + '; searching the infected region of the second infection point')
        for i in list(allnodelist):  # only searches one ring of nodes
            print('Candidate from the infected-region subgraph of the second infection point: ' + str(i))
            if isReceived(i, h, subinfectG, infectionG):
                print('Feasible node found in that subgraph: ' + str(i))
                newnodes = i
                break
        if newnodes == 5000:
            print('Nothing found in the second infection region either; returning the original node')
            return node
        else:
            print('Node found in the second infection region subgraph: ' + str(newnodes))
            print('Eccentricity of the newly found node: ' + str(nx.eccentricity(subinfectG, newnodes)))
            return newnodes
    else:
        print('Found a neighbor satisfying h=' + str(h))
        print('Eccentricity of the newly found node: ' + str(nx.eccentricity(subinfectG, newnodes)))
        return newnodes
def test_eccentricity(self): assert_equal(networkx.eccentricity(self.G, 1), 6) e = networkx.eccentricity(self.G) assert_equal(e[1], 6) sp = dict(networkx.shortest_path_length(self.G)) e = networkx.eccentricity(self.G, sp=sp) assert_equal(e[1], 6) e = networkx.eccentricity(self.G, v=1) assert_equal(e, 6) # This behavior changed in version 1.8 (ticket #739) e = networkx.eccentricity(self.G, v=[1, 1]) assert_equal(e[1], 6) e = networkx.eccentricity(self.G, v=[1, 2]) assert_equal(e[1], 6) # test against graph with one node G = networkx.path_graph(1) e = networkx.eccentricity(G) assert_equal(e[0], 0) e = networkx.eccentricity(G, v=0) assert_equal(e, 0) assert_raises(networkx.NetworkXError, networkx.eccentricity, G, 1) # test against empty graph G = networkx.empty_graph() e = networkx.eccentricity(G) assert_equal(e, {})
def test_eccentricity(self): assert nx.eccentricity(self.G, 1) == 6 e = nx.eccentricity(self.G) assert e[1] == 6 sp = dict(nx.shortest_path_length(self.G)) e = nx.eccentricity(self.G, sp=sp) assert e[1] == 6 e = nx.eccentricity(self.G, v=1) assert e == 6 # This behavior changed in version 1.8 (ticket #739) e = nx.eccentricity(self.G, v=[1, 1]) assert e[1] == 6 e = nx.eccentricity(self.G, v=[1, 2]) assert e[1] == 6 # test against graph with one node G = nx.path_graph(1) e = nx.eccentricity(G) assert e[0] == 0 e = nx.eccentricity(G, v=0) assert e == 0 pytest.raises(nx.NetworkXError, nx.eccentricity, G, 1) # test against empty graph G = nx.empty_graph() e = nx.eccentricity(G) assert e == {}
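The test variants here assume a fixture graph self.G whose node 1 has eccentricity 6. A self-contained analogue, assuming networkx 2.x and pytest, that exercises the same calls on an explicit path graph:

import networkx as nx
import pytest

def test_eccentricity_path_graph():
    # In a 7-node path, endpoint 0 is 6 hops from the far end.
    G = nx.path_graph(7)
    assert nx.eccentricity(G, 0) == 6
    # Precomputed shortest path lengths can be passed in via sp=.
    sp = dict(nx.shortest_path_length(G))
    assert nx.eccentricity(G, sp=sp)[0] == 6
    # Disconnected graphs raise NetworkXError (infinite path length).
    with pytest.raises(nx.NetworkXError):
        nx.eccentricity(nx.Graph([(1, 2), (3, 4)]))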
def test_eccentricity(self): assert_equal(networkx.eccentricity(self.G, 1), 6) e = networkx.eccentricity(self.G) assert_equal(e[1], 6) sp = networkx.shortest_path_length(self.G) e = networkx.eccentricity(self.G, sp=sp) assert_equal(e[1], 6) e = networkx.eccentricity(self.G, v=1) assert_equal(e, 6) e = networkx.eccentricity( self.G, v=[1, 1]) #This behavior changed in version 1.8 (ticket #739) assert_equal(e[1], 6) e = networkx.eccentricity(self.G, v=[1, 2]) assert_equal(e[1], 6) # test against graph with one node G = networkx.path_graph(1) e = networkx.eccentricity(G) assert_equal(e[0], 0) e = networkx.eccentricity(G, v=0) assert_equal(e, 0) assert_raises(networkx.NetworkXError, networkx.eccentricity, G, 1) # test against empty graph G = networkx.empty_graph() e = networkx.eccentricity(G) assert_equal(e, {})
def test_eccentricity(self): assert_equal(networkx.eccentricity(self.G, 1), 6) e = networkx.eccentricity(self.G) assert_equal(e[1], 6) sp = networkx.shortest_path_length(self.G) e = networkx.eccentricity(self.G, sp=sp) assert_equal(e[1], 6) e = networkx.eccentricity(self.G, v=1) assert_equal(e, 6) e = networkx.eccentricity(self.G, v=[1, 1]) assert_equal(e, 6)
def search20q(G, ranking): seen = [] seen.append("Emotion") Q = deque() s = bfs_successors(G, "Emotion") #aggregateRanking[q #questions = sorted(s["Emotion"], key=ranking.get) #questions = sorted(s["Emotion"], key=G.out_degree) questions = sorted(s["Emotion"], key=lambda x: nx.eccentricity(U, x)) print questions Q.extend(questions) seen.extend(questions) history = [] count = 0 while (Q): count = count + 1 print count #Q = sorted(Q, key=ranking.get) #Q = deque(Q) print Q t = Q.pop() if isinstance(t, tuple): qgloss = t[0] else: qgloss = t print "history" print history if history: tmp1 = zip(*history) if qgloss in tmp1[0]: continue ans = ask(qgloss) history.append((qgloss, ans)) if not isinstance(t, tuple): # we are dealing w/ guess as opposed to # question if ans == "yes": #found it print "awesome!" return t else: pass # no inference/action for wrong guess elif isinstance(t, tuple): # we were asking a question, as opp to guess try: successors = bfs_successors(G, (qgloss, ans)) #questions = sorted(s[t], key=ranking.get) questions = sorted(s[t], key=lambda x: nx.eccentricity(U, x)) for q in questions: if not q in seen: seen.append(q) #if(ans == "yes"): Q.append(q) #else: # Q.appendleft(q) except KeyError: successors = []
def get_eccentricity(graph):
    try:
        # The original called nx.eccentricity(type(graph)()), which measures
        # a brand-new empty graph instead of `graph`.
        ecc = nx.eccentricity(graph)
        return ecc
    except nx.exception.NetworkXError:
        # Found infinite path length because the graph is not strongly
        # connected, so fall back to per-component statistics.
        # (strongly_connected_component_subgraphs was removed in networkx
        # 2.4; see the sketch below.)
        eccentricity = []
        scc = nx.strongly_connected_component_subgraphs(graph)
        for n_id, sub in enumerate(scc):
            if len(sub) > 1:
                # The original passed the generator `scc` (and shadowed
                # `graph`) here instead of the component subgraph.
                ecc = nx.eccentricity(sub)
                eccentricity.append((nx.center(sub, ecc), nx.periphery(sub, ecc),
                                     nx.radius(sub, ecc), nx.diameter(sub, ecc)))
        return eccentricity
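Since strongly_connected_component_subgraphs() was removed in networkx 2.4, here is a sketch of the same per-component fallback on current networkx; the function name is illustrative:

import networkx as nx

def per_component_eccentricity(graph):
    # Build each strongly connected component as a subgraph view and
    # compute the eccentricity-based measures inside it.
    results = []
    for nodes in nx.strongly_connected_components(graph):
        if len(nodes) > 1:
            sub = graph.subgraph(nodes)
            ecc = nx.eccentricity(sub)
            results.append((nx.center(sub, e=ecc), nx.periphery(sub, e=ecc),
                            nx.radius(sub, e=ecc), nx.diameter(sub, e=ecc)))
    return results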
def graph_stats(G): result = {} try: # https://networkx.github.io/documentation/stable/reference/algorithms/generated/networkx.algorithms.flow.min_cost_flow.html#networkx.algorithms.flow.min_cost_flow # demand maybe from strongly_connected_components # TODO: revisit this # calculate_demand(G) # result["min_cost_flow"] = nx.min_cost_flow(G, capacity="inverse_weight", weight="weight") result["pagerank"] = nx.pagerank(G) result["betweenness_centrality"] = nx.betweenness_centrality(G) result["degree_centrality"] = nx.degree_centrality(G) result["eccentricity"] = nx.eccentricity(G) result["average_node_connectivity"] = nx.average_node_connectivity(G) result["dominating_set"] = nx.dominating_set(G) result["strongly_connected_components"] = list(nx.strongly_connected_components(G)) except Exception: pass return result
def jarden_cente_networkx(self, infectG, subiG, source_true):
    # `ecc` was named `dict` in the original, shadowing the builtin.
    ecc = nx.eccentricity(subiG)
    print('eccentricities', ecc)
    sort_dict = sorted(ecc.items(), key=lambda x: x[1])
    print('sorted by eccentricity', sort_dict)
    print('the source is', [sort_dict[0][0]])
    return [sort_dict[0][0]]
def CalculoTiemp(G):
    dic = {"Grafo": [], "Fuente": [], "Sumidero": [], "Media": [], "Mediana": [],
           "Std": [], "FlujoMax": [], "Grado": [], "Coef Agrup": [],
           "Centralidad Ce": [], "Centralidad Car": [], "Excentricidad": [],
           "PageRank": []}
    Nodes = G.nodes
    for i in Nodes:
        for j in Nodes:
            if i != j:
                t = []
                for k in range(5):
                    t.append(Edmond(G, i, j))
                dic["Grafo"].append(nombre)  # `nombre` is a global defined elsewhere
                dic["Fuente"].append(i)
                dic["Sumidero"].append(j)
                dic["Media"].append(np.mean(t))
                dic["Mediana"].append(np.median(t))
                dic["Std"].append(np.std(t))  # standard deviation
                dic["FlujoMax"].append(nx.maximum_flow_value(G, i, j, capacity='weight'))
                dic["Grado"].append(G.degree(i))
                dic["Coef Agrup"].append(nx.clustering(G, i))
                dic["Centralidad Ce"].append(nx.closeness_centrality(G, i))
                dic["Centralidad Car"].append(nx.load_centrality(G, i))
                dic["Excentricidad"].append(nx.eccentricity(G, i))
                PageR = nx.pagerank(G, weight="capacity")
                # The original assigned a scalar to dic["PageRank"], which
                # breaks the DataFrame of equal-length columns below.
                dic["PageRank"].append(PageR[i])
    df = pd.DataFrame(dic)
    df.to_csv("times.csv", index=None, mode='a')
def main():
    header_list = ["a", "b", "w"]
    E = pd.read_csv('mammalia-voles-bhp-trapping-55.edges', sep=' ', header=None, names=header_list)
    G = nx.from_pandas_edgelist(E, "a", "b", ["w"])
    if not nx.is_weighted(G):
        G = addWeight1(G)
    ### Draw Graph ###
    pos = nx.spring_layout(G)
    nx.draw(G, pos, with_labels=True)
    plt.figure(figsize=(12, 8))
    plt.show()
    ##################
    q_a = noNodesEdges(G)
    q_b = averageDegree(G)
    q_c = density(G)
    if nx.is_connected(G):
        q_d = diameter(G)
    else:
        print("Since the graph isn't connected, the diameter of the graph is:", inf)  # `inf` imported elsewhere
    ### LIBRARY DIAMETER CALCULATION FOR A WEIGHTED GRAPH ###
    shortest1 = nx.shortest_path_length(G, weight="w")
    shortest2 = dict(shortest1)
    ecc = nx.eccentricity(G, sp=shortest2)
    diam = nx.diameter(G, e=ecc)
    print("From the library, diameter:", diam)
    ##########################################################
    q_e = clusteringCoefficient(G)
    print("Average Clustering Coefficient:", round(statistics.mean(q_e), 6))
def hierarchical_analyze(network, source_node): max_repost_distance = nx.eccentricity(network, source_node) length = nx.single_source_shortest_path_length(network, source_node) hier = {} for i in range(1, max_repost_distance + 1): hier[i] = len([k for k, v in length.items() if v == i]) return hier
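A quick usage sketch for hierarchical_analyze above; the path graph and the expected output are illustrative, not from the original:

import networkx as nx

# In a 5-node path 0-1-2-3-4 rooted at 0, exactly one node sits at each
# repost distance 1..4 (4 being the eccentricity of the source).
network = nx.path_graph(5)
print(hierarchical_analyze(network, 0))  # expected: {1: 1, 2: 1, 3: 1, 4: 1}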
def eccentricity_list(net): if 'eccentricity' in net.graph['already_computed']: return net.graph['already_computed']['eccentricity'] else: net.graph['already_computed']['eccentricity'] = nx.eccentricity( giant_component(net), sp=distances_matrix(net)) return net.graph['already_computed']['eccentricity']
def eccentricityAttributes(graph):
    return_values = []
    # Average effective eccentricity
    eccVals = []
    for n in graph.nodes():
        try:
            eccVals.append(nx.eccentricity(graph, v=n))
        except nx.NetworkXError:
            eccVals.append(0)
    eccSum = 0
    center_nodes = 0
    phobic = 0
    diameter = max(eccVals)
    radius = min(eccVals)
    for i in range(len(eccVals)):
        if eccVals[i] == radius:
            center_nodes += 1
            if graph.nodes[i]['hydro'] == 'phobic':  # graph.node[] was removed in networkx 2.4
                phobic += 1
        eccSum += eccVals[i]
    return_values.append(eccSum / float(nx.number_of_nodes(graph)))
    # Effective diameter
    return_values.append(diameter)
    # Effective radius
    return_values.append(radius)
    # Percentage central nodes
    return_values.append(center_nodes / float(nx.number_of_nodes(graph)))
    # Percentage central nodes that are hydrophobic
    return_values.append(phobic / float(center_nodes))
    return return_values
def get_graph_property(fn, i): doc = [ 'Order', 'Index', 'Mean degree', 'Number of connected components', 'Variance of degree', 'Min degree', 'Max degree', 'Radius', 'Diameter', 'Density', 'Connectivity', 'Mean eccentricity', 'Variance of eccentricity', 'Min eccentricity', 'Max eccentricity', 'Mean clustering', 'Variance of clustering', 'Min clustering', 'Max clustering' ] with open('graphproperties_%d.csv' % i, 'ab') as fout: fout.write('\t'.join(doc) + '\n') if 'sw' in fn: return if '10000' not in fn: return print 'Processing', fn with gzip.open('../graphs/' + fn) as f: jdata = json.load(f) index = jdata['index'] del jdata['index'] order = jdata['order'] del jdata['order'] mean_degree = jdata['mean_degree'] del jdata['mean_degree'] var_degree = jdata['var_degree'] del jdata['var_degree'] del jdata['meanSharedNeighbors'] G = json_graph.node_link_graph(jdata) min_degree = np.min(G.degree().values()) max_degree = np.max(G.degree().values()) ecc = nx.eccentricity(G) min_ecc = np.min(ecc.values()) max_ecc = np.max(ecc.values()) mean_ecc = np.mean(ecc.values()) var_ecc = np.var(ecc.values()) radius = min_ecc diameter = max_ecc #center = nx.center(G) #periphery = nx.periphery(G) density = nx.density(G) connectivity = nx.node_connectivity(G) clustering = nx.clustering(G) min_clust = np.min(clustering.values()) max_clust = np.max(clustering.values()) mean_clust = np.mean(clustering.values()) var_clust = np.var(clustering.values()) num_cc = nx.number_connected_components(G) row = [ order, index, mean_degree, num_cc, var_degree, min_degree, max_degree, radius, diameter, density, connectivity, mean_ecc, var_ecc, min_ecc, max_ecc, mean_clust, var_clust, min_clust, max_clust ] fout.write('\t'.join(map(str, row)) + '\n')
def basic_stats(self): #not decided on what level to deal with this yet: #either return error un not dealing with unconnected files, #or making it deal with unconnected files: the latter. #How about with dealing with each independently. # if not nx.is_connected(g): # conl= nx.connected_components(g) # for n in conl: # turn n into graph if it isnt # calculate ec, per, cnt # how and when to visualise the subgraphs? # iterate to next n if nx.is_connected(self.nx_graph): ec = nx.eccentricity(self.nx_graph) else: ec = 'NA - graph is not connected' per = nx.periphery(self.nx_graph) cnt = nx.center(self.nx_graph) result = { #"""fast betweenness algorithm""" 'bbc': nx.brandes_betweenness_centrality(self.nx_graph), 'tn': nx.triangles(self.nx_graph), # number of triangles 'ec': ec, 'per': per, 'cnt': cnt, 'Per': self.nx_graph.subgraph(per), 'Cnt': self.nx_graph.subgraph(cnt) } return result
def whole_graph_metrics(graph, weighted=False):
    graph_metrics = {}

    # Average shortest path length. networkx expects an edge-attribute name
    # or None here; the original passed the boolean flag itself.
    weight = 'weight' if weighted else None
    graph_metrics['avg_shortest_path'] = \
        nx.average_shortest_path_length(graph, weight=weight)

    # Average eccentricity
    ecc_dict = nx.eccentricity(graph)
    graph_metrics['avg_eccentricity'] = np.mean(np.array(list(ecc_dict.values())))

    # Average clustering coefficient
    # NOTE: Option to include or exclude zeros
    graph_metrics['avg_ccoeff'] = \
        nx.average_clustering(graph, weight=weight, count_zeros=True)

    # Average node betweenness
    avg_node_btwn_dict = nx.betweenness_centrality(graph, normalized=True)
    graph_metrics['avg_node_btwn'] = \
        np.mean(np.array(list(avg_node_btwn_dict.values())))

    # Average edge betweenness
    avg_edge_btwn_dict = nx.edge_betweenness_centrality(graph, normalized=True)
    graph_metrics['avg_edge_btwn'] = \
        np.mean(np.array(list(avg_edge_btwn_dict.values())))

    # Number of isolates (list() needed since nx.isolates returns an iterator)
    graph_metrics['isolates'] = len(list(nx.isolates(graph)))

    return graph_metrics
def get_node_descriptors(self):
    """
    Gets the node descriptors for each node in the graph
    :return: A dictionary of dictionaries where the first key is the node name and the second key the descriptor name
    """
    # First run the algorithms that must analyze the full graph
    # (no sense in running them for a single node).
    betweenness = nx.betweenness_centrality(self.graph)
    eigenvector_centrality = nx.eigenvector_centrality(nx.Graph(self.graph))
    pagerank = nx.pagerank(nx.Graph(self.graph))
    # Build the outer dictionary, keyed by node name.
    node_descriptors = {}
    for node in self.graph.nodes:
        # Build the inner dictionary, keyed by descriptor name.
        node_descriptors[node] = {}
        # Degree of the node
        node_descriptors[node]['Degree'] = self.graph.degree[node]
        # Strength of the node (the sum of its edge weights)
        node_descriptors[node]['Strength'] = np.sum([weight for _, _, weight in self.graph.edges(node, 'weight')])
        # Clustering coefficient
        node_descriptors[node]['Clustering coefficient'] = nx.clustering(nx.Graph(self.graph), node)
        # Average path length (mean of the shortest path lengths from the node to the rest of the graph)
        node_descriptors[node]['Average path length'] = np.mean(list(nx.shortest_path_length(self.graph, node).values()))
        # Maximum path length (same as the eccentricity of the node)
        node_descriptors[node]['Maximum path length'] = nx.eccentricity(self.graph, v=node)
        # For the precomputed whole-graph measures, take the current node's value.
        node_descriptors[node]['Betweenness'] = betweenness[node]
        node_descriptors[node]['Eigenvector centrality'] = eigenvector_centrality[node]
        node_descriptors[node]['PageRank'] = pagerank[node]
    # Return the dictionary of dictionaries
    return node_descriptors
def extract_simple_features(self, graph):
    res = {}
    try:
        print('diameter: ', nx.diameter(graph))
        print('eccentricity: ', nx.eccentricity(graph))
        print('center: ', nx.center(graph))
        print('periphery: ', nx.periphery(graph))
        res['connected'] = True
    except Exception as e:
        print('Graph not connected')
        res['connected'] = False
    res['density'] = '{:.6f}'.format(nx.density(graph))
    res['Avg_degree'] = '{:.6f}'.format(
        sum([i[1] for i in nx.degree(graph)]) / len(nx.degree(graph)))
    res['Avg_weight'] = '{:.6f}'.format(
        sum([graph[edge[0]][edge[1]]['weight'] for edge in graph.edges]) /
        len([graph[edge[0]][edge[1]]['weight'] for edge in graph.edges]))
    res['edges'] = len(graph.edges)
    res['nodes'] = len(graph.nodes)
    res['self_loops'] = len(list(nx.nodes_with_selfloops(graph)))
    # The original divided nodes by edges, i.e. the node-to-edge ratio.
    res['edge_to_node_ratio'] = '{:.6f}'.format(
        len(graph.edges) / len(graph.nodes))
    res['negative_edges'] = nx.is_negatively_weighted(graph)
    return res
def FindBestMove(G, V, t, M, dist=0):
    if M:
        bestDist = 0
        bestValue = 0
        # Maximizing player: try every node, ring by ring, out to t's eccentricity.
        for i in range(nx.eccentricity(G, t)):
            for g in G.nodes():
                if nx.shortest_path_length(G, g, t) == (i + 1):
                    V_new = V.copy()
                    V_new.append(g)
                    value = minimax(G, V_new, g, 0, False)
                    if value >= bestValue:
                        bestValue = value
                        bestDist = i
        return bestDist, bestValue
    else:
        bestVert = None
        bestValue = 1
        # Minimizing player: consider nodes exactly `dist` away and keep the
        # smallest value (the original compared with >=, so bestVert could
        # never be updated from None).
        for g in G.nodes():
            if nx.shortest_path_length(G, g, t) == dist:
                V_new = V.copy()
                V_new.append(g)
                value = minimax(G, V_new, g, 0, True)
                if value <= bestValue:
                    bestValue = value
                    bestVert = g
        return bestVert, bestValue
def make_graph(grammar, terminals, non_terminals):
    G = nx.DiGraph()
    tab_list = []
    node_length = []
    for i in terminals:
        for j in terminals:
            ith = "f" + str(i)
            jth = "g" + str(j)
            if (i == "x" and j == "x") or (i == "$" and j == "$"):
                print("nothing")
            elif (i == "x" and j != "x") or (i != "$" and j == "$"):
                tab_list.append(tuple((ith, jth)))
            elif (i != "x" and j == "x") or (i == "$" and j != "$"):
                tab_list.append(tuple((jth, ith)))
            elif i == j:
                tab_list.append(findgrp(i, grammar))
            elif i != j:
                for x, item in enumerate(terminals[:-1]):
                    for opr in range(len(terminals) - 1):
                        if x + opr < len(terminals):
                            if i == item and j == terminals[x + opr]:
                                tab_list.append(tuple((jth, ith)))
                            elif i == terminals[x + opr] and j == item:
                                tab_list.append(tuple((ith, jth)))
    G.add_edges_from(tab_list)
    plt.figure(figsize=(9, 9))
    nx.draw_networkx(G, with_labels=True, node_color='green')  # keyword was misspelled with_label=
    print("The longest node of the graph is",
          nx.algorithms.dag.dag_longest_path(G, weight='weight'))
    for i in G:
        node_length.append(nx.eccentricity(G, v=i))
    print("Each node length is ", node_length)
def fight(k):
    n = 2**k
    A = np.zeros((n, n))
    M = np.zeros((n, n))
    for r in range(k):
        round(A, M, r)  # a project-local helper; it shadows the builtin round()
    print("")
    print("RESULT")
    print(M)
    print(M.sum(axis=0))
    print(M.sum(axis=0).max())
    print("ADJACENCY MATRIX")
    print(A)
    print("DEGREES")
    print(A.sum(axis=0))
    G = nx.from_numpy_matrix(A)
    print("ECCENTRICITY")
    try:
        ec = nx.eccentricity(G)
        print(ec)
        np_ec = np.array(list(ec.values()))
        print("average:", np.average(np_ec))
        print("variance:", np.var(np_ec))
        print("DIAMETER:", nx.diameter(G))
    except nx.exception.NetworkXError:
        print("diameter is infinite")
def connected_components(self):
    """
    Returns basic statistics about the connected components of the
    graph. This includes their number, order, size, diameter, radius,
    average clustering coefficient, transitivity, in addition to basic
    info about the largest and smallest connected components.
    """
    cc_stats = {}
    cc = nx.connected_components(self.graph.structure)
    for index, component in enumerate(cc):
        cc_stats[index] = {}
        this_cc = cc_stats[index]
        this_cc["order"] = len(component)
        this_cc["size"] = len(self.graph.structure.edges(component))
        subgraph = self.graph.structure.subgraph(component)
        this_cc["avg_cluster"] = nx.average_clustering(subgraph)
        this_cc["transitivity"] = nx.transitivity(subgraph)
        eccentricity = nx.eccentricity(subgraph)
        ecc_values = eccentricity.values()
        this_cc["diameter"] = max(ecc_values)
        this_cc["radius"] = min(ecc_values)
    return cc_stats
def eccentricity(self): self._require_spl() self._eccentricity = nx.eccentricity(self.G, sp=self._spl_dict) print('Eccentricity computed') return self._eccentricity
def _calculate_dep(self, include: set):
    # The original comment claimed the per-node calls handle disconnected
    # graphs, but nx.eccentricity raises NetworkXError for any graph with
    # more than one connected component, whether called per node or for the
    # whole graph; see the per-component sketch below.
    self._features = {node: nx.eccentricity(self._gnx, node) for node in self._gnx}
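One hedged workaround for disconnected graphs is to compute eccentricity within each connected component, where it is well defined; a sketch for undirected graphs, with an illustrative function name:

import networkx as nx

def component_eccentricity(G):
    # Merge the per-component eccentricity dicts into one mapping.
    ecc = {}
    for nodes in nx.connected_components(G):
        ecc.update(nx.eccentricity(G.subgraph(nodes)))
    return ecc

# e.g. on nx.Graph([(1, 2), (3, 4)]) this returns {1: 1, 2: 1, 3: 1, 4: 1}.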
def _create_node_feature_matrix(self, graph): """ Calculating the node features. Arg types: * **graph** *(NetworkX graph)* - The graph of interest. Return types: * **X** *(NumPy array)* - The node features. """ log_degree = np.array([ math.log(graph.degree(node) + 1) for node in range(graph.number_of_nodes()) ]).reshape(-1, 1) eccentricity = np.array([ nx.eccentricity(graph, node) for node in range(graph.number_of_nodes()) ]).reshape(-1, 1) clustering_coefficient = np.array([ nx.clustering(graph, node) for node in range(graph.number_of_nodes()) ]).reshape(-1, 1) X = np.concatenate([log_degree, eccentricity, clustering_coefficient], axis=1) return X
def analyze(self, save=True, wordkey='word'):
    net = self.g
    print('>> analyzing graph of', net.order(), 'nodes and', net.size(), 'edges ...')
    statd = {}
    statd['centrality_degree'] = nx.degree_centrality(net)  # the original assigned this key twice
    statd['centrality_information'] = nx.current_flow_closeness_centrality(net)
    statd['centrality_closeness'] = nx.closeness_centrality(net)
    statd['centrality_betweenness'] = nx.betweenness_centrality(net)
    statd['centrality_betweenness_weighted'] = nx.betweenness_centrality(net, weight='weight')
    statd['centrality_eigenvector'] = nx.eigenvector_centrality(net)
    statd['clustering_coefficient'] = nx.clustering(net)
    statd['eccentricity'] = nx.eccentricity(net)
    print('>> done with analysis.')
    old = []
    for node in net.nodes():
        dx = {
            'model': self.model.name,
            wordkey: node,
            'neighbors': ', '.join(sorted(list(nx.all_neighbors(net, node))))
        }
        # for k, v in self.node2d.get(node, {}).items(): dx[k] = v
        for statname, node2stat in list(statd.items()):
            dx[statname] = node2stat[node]
        old += [dx]
    if save:
        pytxt.write2(self.fn.replace('.graphml', '.analysis2.txt'), old)
    return old
def checkMaxDistance(self, subgraph): """ Check to see if the graph has paths from all compounds to all other compounds within the specified limit Parameters --------- subgraph : NetworkX subgraph obj the subgraph to check for the max distance between nodes Returns ------- withinMaxDistance : bool True if the subgraph has all the nodes within the specified max distance """ withinMaxDistance = True for node in subgraph: eccentricity = nx.eccentricity(subgraph, node) if eccentricity > self.maxPathLength: withinMaxDistance = False return withinMaxDistance
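Since every eccentricity being within the limit is equivalent to the diameter (the maximum eccentricity) being within it, the check above can be collapsed; a sketch under the same connectedness assumption, not the original method:

import networkx as nx

def within_max_distance(subgraph, max_path_length):
    # diameter == max eccentricity, so this one call replaces the
    # per-node loop when the subgraph is connected.
    return nx.diameter(subgraph) <= max_path_length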
def CSVdata(eon, demands, id=None): try: lengths = list(nx.get_edge_attributes(eon, 'length').values()) degrees = nx.degree(eon) demands_report = fromDemands(demands) ecc_by_length = nx.eccentricity( eon, sp=dict(nx.all_pairs_dijkstra_path_length(eon, weight='length'))) data = { '': id, 'mean_degree': meanDegree(eon, degrees=degrees), 'degree_variance': degreeVariance(eon, degrees=degrees), 'density': nx.density(eon), 'radius_by_hops': nx.radius(eon), 'diameter_by_hops': nx.diameter(eon), 'min_length': min(lengths), 'max_length': max(lengths), 'radius_by_length': nx.radius(eon, e=ecc_by_length), 'diameter_by_length': nx.diameter(eon, e=ecc_by_length), 'total_data_rate': demands_report['total_data_rate'], 'blocking_coefficient': demands_report['blocking_coefficient'] } return data except: return None
def get_measurments(DG, extended=False):
    """
    Get network and node-based measurements for a network as a networkx.DiGraph
    :param DG networkx.DiGraph: directed graph from networkX
    :param extended boolean: False by default. Get extra measures (diameter, avg closeness centrality, avg eccentricity, avg eigenvector centrality)
    :return: measures with values: number of nodes, edges, network density, avg betweenness centrality and avg degree centrality
    :rtype: dict
    """
    import networkx as nx
    import numpy as np
    measure = {}
    measure['nNodes'] = nx.number_of_nodes(DG)
    measure['nEdges'] = nx.number_of_edges(DG)
    measure['density'] = nx.density(DG)
    bc = nx.betweenness_centrality(DG)
    measure['avg betweenness centrality'] = np.mean(list(bc.values()))
    dc = nx.degree_centrality(DG)
    measure['avg degree centrality'] = np.mean(list(dc.values()))
    if extended:
        measure['diameter'] = nx.diameter(DG)
        cc = nx.closeness_centrality(DG)
        measure['avg closeness centrality'] = np.mean(list(cc.values()))
        ec = nx.eccentricity(DG)
        measure['avg eccentricity'] = np.mean(list(ec.values()))
        ev = nx.eigenvector_centrality(DG)
        measure['avg eigenvector centrality'] = np.mean(list(ev.values()))
    return measure
def print_graph_features(self, graph):
    res = {}
    try:
        print('diameter: ', nx.diameter(graph))
        print('eccentricity: ', nx.eccentricity(graph))
        print('center: ', nx.center(graph))
        print('periphery: ', nx.periphery(graph))
        res['connected'] = True
    except Exception as e:
        print('Graph not connected')
        res['connected'] = False
    res['density'] = '{:.6f}'.format(nx.density(graph))
    res['Avg_degree'] = '{:.6f}'.format(sum([i[1] for i in nx.degree(graph)]) / len(nx.degree(graph)))
    res['Avg_weight'] = '{:.6f}'.format(sum([graph[edge[0]][edge[1]]['weight'] for edge in graph.edges]) / len(
        [graph[edge[0]][edge[1]]['weight'] for edge in graph.edges]))
    res['edges'] = len(graph.edges)
    res['nodes'] = len(graph.nodes)
    res['self_loops'] = len(list(nx.nodes_with_selfloops(graph)))
    # The original divided nodes by edges, i.e. the node-to-edge ratio.
    res['edge_to_node_ratio'] = '{:.6f}'.format(len(graph.edges) / len(graph.nodes))
    res['negative_edges'] = nx.is_negatively_weighted(graph)
    print(algo.max_clique(graph))
    # print('density: ', res['density'])
    # print('Average degree: ', res['Avg_degree'])
    # print('Average weight: ', res['Avg_weight'])
    # print('edges: ', len(graph.edges))
    # print('Nodes: ', len(graph.nodes))
    # print('self loops: ', res['self_loops'])
    # print('edges to nodes ratio: ', res['edge_to_node_ratio'])
    # print('negative edges: ', res['negative_edges'])
    # nodes = [node for node in graph.nodes]
    # print(nodes)
    return res
def print_graph_info(graph): e = nx.eccentricity(graph) print 'graph with %u nodes, %u edges' % (len(graph.nodes()), len(graph.edges())) print 'radius: %s' % nx.radius(graph, e) # min e print 'diameter: %s' % nx.diameter(graph, e) # max e print 'len(center): %s' % len(nx.center(graph, e)) # e == radius print 'len(periphery): %s' % len(nx.periphery(graph, e)) # e == diameter
def __calcCentrality(self, G, cnt):
    '''For calculating graph centrality measures'''
    cntV = list()
    if cnt == 'deg':
        cntV = list(dict(G.degree).values())
    elif cnt == 'ei':
        cntV = list(nx.eigenvector_centrality_numpy(G).values())
    elif cnt == 'sh':
        cntV = list(nx.constraint(G).values())
    elif cnt == 'pr':
        cntV = list(nx.pagerank_numpy(G).values())
    elif cnt == 'bw':
        cntV = list(nx.betweenness_centrality(G).values())
    elif cnt == 'cl':
        cntV = list(nx.clustering(G).values())
    elif cnt == 'cc':
        cntV = list(nx.closeness_centrality(G).values())
    elif cnt == 'ec':
        cntV = list(nx.eccentricity(G).values())
    else:
        raise ValueError('__calcCentrality: unknown cnt value or not implemented yet')
    return cntV
def nodes_by_eccentricity(graph): if len(graph) == 1: return graph.nodes() # need to crop the global shortest paths otherwise get #NetworkXError: Graph not connected: infinite path length eccentricities = nx.eccentricity(graph) return sorted(eccentricities.keys(), key = lambda n: eccentricities[n])
def properties(g):
    """
    Computes simple and classic graph metrics.

    Parameters
    ----------
    g : graph
       A networkx graph
    """
    # networkx short summary of information for the graph g
    print(nx.info(g))

    # Draw the degree distribution. A power-law distribution is typical of
    # real (complex) networks.
    plt.figure(num=None)
    fig = plt.figure(1)
    degree_sequence = [d for n, d in g.degree()]  # degree sequence
    # print("Degree sequence %s" % degree_sequence)
    plt.hist(degree_sequence, bins='auto')
    plt.title("power-law degree distribution")
    plt.ylabel("# nodes")
    plt.xlabel("degree")
    # plt.show()
    pylab.close()
    del fig

    precomputed_eccentricity = nx.eccentricity(g)  # costly step, we save time here!
    print("Graph density %f" % nx.density(g))
    print("Diameter (maximum eccentricity): %d" % nx.diameter(g, precomputed_eccentricity))
    print("Radius (minimum eccentricity): %d" % nx.radius(g, precomputed_eccentricity))
    print("Mean eccentricity (eccentricity(v) = the maximum distance from v to all other nodes): %s"
          % np.mean(list(precomputed_eccentricity.values())))
    print("Center is composed of %d nodes (nodes with eccentricity equal to radius)"
          % len(nx.center(g, precomputed_eccentricity)))
    print("Periphery is composed of %d nodes (nodes with eccentricity equal to the diameter)"
          % len(nx.periphery(g, precomputed_eccentricity)))
    print("Mean clustering coefficient %f" % np.mean(list(nx.clustering(g).values())))
    total_triangles = sum(nx.triangles(g).values()) / 3
    print("Total number of triangles in graph: %d" % total_triangles)
def calculate_max_ecc(graph, nodes): max_ecc = 0 for node in nodes: ecc = nx.eccentricity(graph, node) if ecc > max_ecc: max_ecc = ecc return max_ecc
def get_nations_network_by_year(year): cursor = get_db().cursor() cursor.execute("""SELECT reporting, reporting_slug, partner, partner_slug, Flow, expimp, reporting_continent, partner_continent,reporting_type,partner_type FROM flow_joined WHERE reporting NOT LIKE "Worl%%" AND partner NOT LIKE "Worl%%" AND Flow != "null" AND year = %s """%(year) ) table = [list(r) for r in cursor] json_sql_response=[] for row in table: json_sql_response.append({ "reporting": row[0], "reporting_id": row[1], "partner": row[2], "partner_id": row[3], "flow": row[4], "expimp": row[5], "reporting_continent": row[6], "partner_continent": row[7], "reporting_type": row[8], "partner_type": row[9] }) # Create a graph instance G=nx.Graph() nodes = [] for row in table: nodes.append(row[1]) nodes.append(row[3]) # add edge to the graph G.add_edge(row[1], row[3]) nodes = set(nodes) # add nodes to graph G.add_nodes_from(nodes) if len(G.nodes())>0: stats = { "average_clustering": nx.average_clustering(G), "center": nx.center(G), "diameter": nx.diameter(G), "eccentricity": nx.eccentricity(G) } else: stats=[] json_response = {} json_response["stats"] = stats json_response["network"] = json_sql_response return json.dumps(json_response,encoding="UTF8")
def graph_radius(graph):
    # In networkx 2.x shortest_path_length returns an iterator, so wrap it
    # in a dict before handing it to eccentricity as sp=.
    sp = dict(nx.shortest_path_length(graph, weight='weight'))
    ecc = nx.eccentricity(graph, sp=sp)
    if ecc:
        rad = nx.radius(graph, e=ecc)
    else:
        rad = 0
    return rad
def graph_diameter(graph):
    # Same dict() wrapping as graph_radius above for networkx 2.x.
    sp = dict(nx.shortest_path_length(graph, weight='weight'))
    ecc = nx.eccentricity(graph, sp=sp)
    if ecc:
        dia = nx.diameter(graph, e=ecc)
    else:
        dia = 0
    return dia
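graph_radius and graph_diameter above recompute the same weighted all-pairs shortest paths; a combined sketch, assuming networkx 2.x, that does the expensive work once (the function name is illustrative):

import networkx as nx

def graph_radius_and_diameter(graph):
    # One weighted all-pairs computation feeds both reductions.
    sp = dict(nx.shortest_path_length(graph, weight='weight'))
    ecc = nx.eccentricity(graph, sp=sp)
    if not ecc:
        return 0, 0
    return nx.radius(graph, e=ecc), nx.diameter(graph, e=ecc)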
def OrigEccentricity(self):
    '''returns a 2d array containing the eccentricity of the origin node for all edges'''
    sp = self.get_shortest_path_dict()
    # list(...) is needed on Python 3, where dict.values() is a view.
    probas = np.dot(
        np.array(list(nx.eccentricity(self, sp=sp).values()), dtype=float).reshape(-1, 1),
        np.ones((1, self.number_of_nodes())))
    return probas
def get_path_lengths(self):
    # The original tested hasattr(self, "shortest_path_lenghts"), a typo that
    # never matches the attribute actually set below, so everything was
    # recomputed on every call.
    if not hasattr(self, "shortest_paths_lengths") or self.shortest_paths_lengths is None:
        self.shortest_paths_lengths = dict(nx.all_pairs_shortest_path_length(self.G))
        self.avg_shortest_path = sum(length
                                     for sp in self.shortest_paths_lengths.values()
                                     for length in sp.values()) / float(self.N * (self.N - 1))
        self.eccentricity = nx.eccentricity(self.G, sp=self.shortest_paths_lengths)
        self.diameter = nx.diameter(self.G, e=self.eccentricity)
        self.radius = nx.radius(self.G, e=self.eccentricity)
    return self.shortest_paths_lengths
def nodes_by_eccentricity(graph): if len(graph) == 1: return graph.nodes() # need to crop the global shortest paths otherwise get #NetworkXError: Graph not connected: infinite path length eccentricities = {} try: eccentricities = nx.eccentricity(graph) except nx.exception.NetworkXError: # If not strongly connected, perform eccentricities per connected component if not nx.is_strongly_connected(graph): #TODO: provide this function inside ANK, add memoization for intensive operation for component_nodes in nx.strongly_connected_components(graph): eccentricities.update(nx.eccentricity(graph.subgraph(component_nodes))) # sort nodes by name, stability sort ensures that lexical order is used as tie-breaker for equal eccen. nodes_sorted = sorted(graph.nodes(), key = lambda x: x.fqdn) return sorted(nodes_sorted, key = lambda n: eccentricities[n])
def nodes_by_eccentricity(graph): if len(graph) == 1: return graph.nodes() # need to crop the global shortest paths otherwise get #NetworkXError: Graph not connected: infinite path length eccentricities = nx.eccentricity(graph) # sort nodes by name, stability sort ensures that lexical order is used as tie-breaker for equal eccen. nodes_sorted = sorted(graph.nodes(), key = lambda x: x.fqdn) return sorted(nodes_sorted, key = lambda n: eccentricities[n])
def test_eccentricity(testgraph): """ Testing eccentricity function for graphs. """ a, b = testgraph nx_ecc = nx.eccentricity(a) sg_ecc = sg.digraph_distance_measures.eccentricity(b, b.nodes()) for i in range(sg_ecc[0].size): assert sg_ecc[1, i] == nx_ecc[sg_ecc[0, i]]
def calculate(network): try: n = nx.eccentricity(network) except: return 0 if len(n.values()) == 0: return 0 else: return round(sum(n.values())/len(n.values()), 7)
def drawEccentricity(self, filename = 'eccentricity.png'): graph = self.getLCC() eccentricities = [x[1] for x in nx.eccentricity(graph).items()] unique = set(eccentricities) counts = sorted([(v, eccentricities.count(v)) for v in unique], key = lambda x:x[0]) xs = [x[0] for x in counts] ys = [x[1] for x in counts] print(xs, ys) plt.xlabel('Eccentricity') plt.ylabel('Node Count') plt.bar(xs, ys) plt.savefig(filename)
def eccentricity(state):
    G = nx.Graph()
    for node in range(len(state.nodeList)):
        G.add_node(node)
        for edge in state.nodeList[node]:
            G.add_edge(node, edge)
    if nx.is_connected(G):
        avgEccen = 0.0
        nodeEccentricities = nx.eccentricity(G)
        for node in nodeEccentricities:
            avgEccen += nodeEccentricities[node]
        # Average over the nodes; the original divided by the edge count.
        avgEccen /= G.number_of_nodes()
        return avgEccen
    else:
        return 0
def geodesic_distance(plex, root=None): """ Given a simplicial complex in the form of a networkx graph, computes the geodesic distance to each node from a given root node. If the root is not specified, an arbitrary extreme root is found by eccentricity. Returns a list of distances. Assumes the graph is connected. """ if root is None: # compute the eccentricity of each node max_distances = nx.eccentricity(plex) root = max(max_distances.items(), key=lambda x: x[1])[0] # compute the distance from the root to each node distances = nx.shortest_path_length(plex, root) return [distances[v] for v in sorted(plex.nodes())]
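A usage sketch for geodesic_distance on a path graph; with root=None the extreme root found by eccentricity resolves to an endpoint (node 0 here, given max()'s first-wins tie-breaking over the two endpoints):

import networkx as nx

plex = nx.path_graph(5)
# Both endpoints have eccentricity 4; max() keeps node 0, so the
# distances run from that end of the path.
print(geodesic_distance(plex))  # expected: [0, 1, 2, 3, 4]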
def distance_scores(season, graph):
    # Take the largest connected component
    # (connected_component_subgraphs was removed in networkx 2.4; kept as in the original)
    g = graph if nx.is_connected(graph) else max(nx.connected_component_subgraphs(graph), key=len)
    # Ratio of the largest connected component subgraph
    conn = len(max(nx.connected_component_subgraphs(g), key=len)) / float(nx.number_of_nodes(graph))
    conn = np.round(conn, 3)
    # Radius, diameter
    rad = nx.radius(g)
    diam = nx.diameter(g)
    # Average eccentricity (list() so np.mean works on Python 3 dict views)
    ecc = np.mean(list(nx.eccentricity(g).values()))
    ecc = np.round(ecc, 3)
    # Put it all into a dataframe
    df = pd.DataFrame([[season, conn, rad, diam, ecc]],
                      columns=['season', 'conn', 'rad', 'diam', 'ecc'])
    return df
def __init__(self, graph, feature_list=[]): self.no_feature = 39 self.G = graph self.nodes = nx.number_of_nodes(self.G) self.edges = nx.number_of_edges(self.G) self.Lap = nx.normalized_laplacian_matrix(self.G) # ??? how to check whether comparable, addable? self.eigvals = numpy.linalg.eigvals(self.Lap.A).tolist() try: self.radius = nx.radius(self.G) except nx.exception.NetworkXError: self.radius = "ND" try: self.ecc_dic = nx.eccentricity(self.G) except nx.exception.NetworkXError: self.ecc_dic = {} self.degree_dic = nx.average_neighbor_degree(self.G) self.pagerank = nx.pagerank(self.G).values() if feature_list == []: self.feature_list = list(range(1, self.no_feature + 1)) else: self.feature_list = feature_list self.feature_vector = [] self.feature_time = []
def test_eccentricity_invalid(self):
    # sp must be a dict of shortest path lengths; passing 1 is expected to
    # raise networkx.NetworkXError (the raises decorator asserting this is
    # not shown in this snippet).
    G = networkx.Graph([(1, 2), (3, 4)])
    e = networkx.eccentricity(G, sp=1)
def eccentricity(self, v=None): g = to_nx_graph(self) return nx.eccentricity(g, v)
import networkx as nx import plot_multigraph from matplotlib import pylab as plt n = 80 p = 10. / n G = nx.fast_gnp_random_graph(n, p, seed=42) def to_list(dict_): return [dict_[k] for k in G.nodes()] graph_colors = [ ("eccentricity", to_list(nx.eccentricity(G))), ("clustering", to_list(nx.clustering(G))), ("square_clustering", to_list(nx.square_clustering(G))), ] fig = plot_multigraph.plot_color_multigraph(G, graph_colors, 2, 2, node_size=50) plt.savefig('graphs/distance.png', facecolor=fig.get_facecolor())
def extended_stats(G, connectivity=False, anc=False, ecc=False, bc=False, cc=False):
    """
    Calculate extended topological stats and metrics for a graph.

    Many of these algorithms have an inherently high time complexity. Global
    topological analysis of large complex networks is extremely time consuming
    and may exhaust computer memory. Consider using function arguments to not
    run metrics that require computation of a full matrix of paths if they
    will not be needed.

    Parameters
    ----------
    G : networkx multidigraph
    connectivity : bool
        if True, calculate node and edge connectivity
    anc : bool
        if True, calculate average node connectivity
    ecc : bool
        if True, calculate shortest paths, eccentricity, and topological
        metrics that use eccentricity
    bc : bool
        if True, calculate node betweenness centrality
    cc : bool
        if True, calculate node closeness centrality

    Returns
    -------
    stats : dict
        dictionary of network measures containing the following elements (some
        only calculated/returned optionally, based on passed parameters):
          - avg_neighbor_degree
          - avg_neighbor_degree_avg
          - avg_weighted_neighbor_degree
          - avg_weighted_neighbor_degree_avg
          - degree_centrality
          - degree_centrality_avg
          - clustering_coefficient
          - clustering_coefficient_avg
          - clustering_coefficient_weighted
          - clustering_coefficient_weighted_avg
          - pagerank
          - pagerank_max_node
          - pagerank_max
          - pagerank_min_node
          - pagerank_min
          - node_connectivity
          - node_connectivity_avg
          - edge_connectivity
          - eccentricity
          - diameter
          - radius
          - center
          - periphery
          - closeness_centrality
          - closeness_centrality_avg
          - betweenness_centrality
          - betweenness_centrality_avg
    """

    stats = {}
    full_start_time = time.time()

    # create a DiGraph from the MultiDiGraph, for those metrics that require it
    G_dir = nx.DiGraph(G)

    # create an undirected Graph from the MultiDiGraph, for those metrics that
    # require it
    G_undir = nx.Graph(G)

    # get the largest strongly connected component, for those metrics that
    # require strongly connected graphs
    G_strong = get_largest_component(G, strongly=True)

    # average degree of the neighborhood of each node, and average for the graph
    avg_neighbor_degree = nx.average_neighbor_degree(G)
    stats['avg_neighbor_degree'] = avg_neighbor_degree
    stats['avg_neighbor_degree_avg'] = sum(avg_neighbor_degree.values()) / len(avg_neighbor_degree)

    # average weighted degree of the neighborhood of each node, and average for
    # the graph
    avg_weighted_neighbor_degree = nx.average_neighbor_degree(G, weight='length')
    stats['avg_weighted_neighbor_degree'] = avg_weighted_neighbor_degree
    stats['avg_weighted_neighbor_degree_avg'] = sum(avg_weighted_neighbor_degree.values()) / len(avg_weighted_neighbor_degree)

    # degree centrality for a node is the fraction of nodes it is connected to
    degree_centrality = nx.degree_centrality(G)
    stats['degree_centrality'] = degree_centrality
    stats['degree_centrality_avg'] = sum(degree_centrality.values()) / len(degree_centrality)

    # calculate clustering coefficient for the nodes
    stats['clustering_coefficient'] = nx.clustering(G_undir)

    # average clustering coefficient for the graph
    stats['clustering_coefficient_avg'] = nx.average_clustering(G_undir)

    # calculate weighted clustering coefficient for the nodes
    stats['clustering_coefficient_weighted'] = nx.clustering(G_undir, weight='length')

    # average clustering coefficient (weighted) for the graph
    stats['clustering_coefficient_weighted_avg'] = nx.average_clustering(G_undir, weight='length')

    # pagerank: a ranking of the nodes in the graph based on the structure of
    # the incoming links
    pagerank = nx.pagerank(G_dir, weight='length')
    stats['pagerank'] = pagerank

    # node with the highest page rank, and its value
    pagerank_max_node = max(pagerank, key=lambda x: pagerank[x])
    stats['pagerank_max_node'] = pagerank_max_node
    stats['pagerank_max'] = pagerank[pagerank_max_node]

    # node with the lowest page rank, and its value
    pagerank_min_node = min(pagerank, key=lambda x: pagerank[x])
    stats['pagerank_min_node'] = pagerank_min_node
    stats['pagerank_min'] = pagerank[pagerank_min_node]

    # if True, calculate node and edge connectivity
    if connectivity:
        start_time = time.time()

        # node connectivity is the minimum number of nodes that must be removed
        # to disconnect G or render it trivial
        stats['node_connectivity'] = nx.node_connectivity(G_strong)

        # edge connectivity is equal to the minimum number of edges that must be
        # removed to disconnect G or render it trivial
        stats['edge_connectivity'] = nx.edge_connectivity(G_strong)
        log('Calculated node and edge connectivity in {:,.2f} seconds'.format(time.time() - start_time))

    # if True, calculate average node connectivity
    if anc:
        # mean number of internally node-disjoint paths between each pair of
        # nodes in G, i.e., the expected number of nodes that must be removed to
        # disconnect a randomly selected pair of non-adjacent nodes
        start_time = time.time()
        stats['node_connectivity_avg'] = nx.average_node_connectivity(G)
        log('Calculated average node connectivity in {:,.2f} seconds'.format(time.time() - start_time))

    # if True, calculate shortest paths, eccentricity, and topological metrics
    # that use eccentricity
    if ecc:
        # precompute shortest paths between all nodes for eccentricity-based
        # stats
        start_time = time.time()
        sp = {source: dict(nx.single_source_dijkstra_path_length(G_strong, source, weight='length'))
              for source in G_strong.nodes()}
        log('Calculated shortest path lengths in {:,.2f} seconds'.format(time.time() - start_time))

        # eccentricity of a node v is the maximum distance from v to all other
        # nodes in G
        eccentricity = nx.eccentricity(G_strong, sp=sp)
        stats['eccentricity'] = eccentricity

        # diameter is the maximum eccentricity
        diameter = nx.diameter(G_strong, e=eccentricity)
        stats['diameter'] = diameter

        # radius is the minimum eccentricity
        radius = nx.radius(G_strong, e=eccentricity)
        stats['radius'] = radius

        # center is the set of nodes with eccentricity equal to radius
        center = nx.center(G_strong, e=eccentricity)
        stats['center'] = center

        # periphery is the set of nodes with eccentricity equal to the diameter
        periphery = nx.periphery(G_strong, e=eccentricity)
        stats['periphery'] = periphery

    # if True, calculate node closeness centrality
    if cc:
        # closeness centrality of a node is the reciprocal of the sum of the
        # shortest path distances from u to all other nodes
        start_time = time.time()
        closeness_centrality = nx.closeness_centrality(G, distance='length')
        stats['closeness_centrality'] = closeness_centrality
        stats['closeness_centrality_avg'] = sum(closeness_centrality.values()) / len(closeness_centrality)
        log('Calculated closeness centrality in {:,.2f} seconds'.format(time.time() - start_time))

    # if True, calculate node betweenness centrality
    if bc:
        # betweenness centrality of a node is the sum of the fraction of
        # all-pairs shortest paths that pass through node
        start_time = time.time()
        betweenness_centrality = nx.betweenness_centrality(G, weight='length')
        stats['betweenness_centrality'] = betweenness_centrality
        stats['betweenness_centrality_avg'] = sum(betweenness_centrality.values()) / len(betweenness_centrality)
        log('Calculated betweenness centrality in {:,.2f} seconds'.format(time.time() - start_time))

    log('Calculated extended stats in {:,.2f} seconds'.format(time.time() - full_start_time))
    return stats
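A brief usage sketch for extended_stats, assuming an OSMnx-style street network with 'length' edge attributes and that the helpers it relies on (get_largest_component, log) are in scope; the place query is illustrative:

import osmnx as ox

# Build a small drivable street network and request only the cheap
# metrics plus the eccentricity-based ones.
G = ox.graph_from_place('Piedmont, California, USA', network_type='drive')
stats = extended_stats(G, ecc=True)
print(stats['radius'], stats['diameter'])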
def a_eccentricity(G):
    # list(...) so np.average works on Python 3, where .values() is a view
    return np.average(list(nx.eccentricity(G).values()))
def test_eccentricity_infinite(self):
    # Disconnected graph: eccentricity is expected to raise
    # networkx.NetworkXError ("infinite path length"); the raises decorator
    # asserting this is not shown in this snippet.
    G = networkx.Graph([(1, 2), (3, 4)])
    e = networkx.eccentricity(G)