def betweenness_centrality_labeling(self, graph, approx=None):
    """Rank nodes of *graph* by betweenness centrality and label them.

    Each node receives an integer ``'labeling'`` attribute: 0 for the most
    central node, 1 for the next, and so on.

    :param graph: a networkx graph (any graph accepted by ``nx.Graph``).
    :param approx: optional int; when given, centrality is approximated
        using ``k=approx`` pivot nodes instead of the exact algorithm.
    :return: dict with keys
        ``'labeled_graph'`` (copy of *graph* carrying the labels),
        ``'sorted_centrality'`` (list of (node, score), descending), and
        ``'ordered_nodes'`` (tuple of nodes, most central first).
    """
    result = {}
    labeled_graph = nx.Graph(graph)
    if approx is None:
        centrality = nx.betweenness_centrality(graph)
    else:
        centrality = nx.betweenness_centrality(graph, k=approx)
    sorted_centrality = sorted(
        centrality.items(), key=lambda pair: pair[1], reverse=True)
    # Rank position becomes the label: 0 = most central.
    labels = {node: rank for rank, (node, _score) in enumerate(sorted_centrality)}
    nx.set_node_attributes(labeled_graph, labels, 'labeling')
    # Original used list(zip(*sorted_centrality))[0], which raises
    # IndexError on an empty graph; this yields () instead.
    ordered_nodes = tuple(node for node, _score in sorted_centrality)
    result['labeled_graph'] = labeled_graph
    result['sorted_centrality'] = sorted_centrality
    result['ordered_nodes'] = ordered_nodes
    return result
def addCentralities(self):
    """Add centrality coefficients for the final version of the graph.

    Centralities include: Katz centrality, global clustering, square
    clustering, closeness centrality, harmonic centrality and betweenness
    centrality.  For each one either the max or the mean over all nodes is
    kept.  More about these centralities can be found in the networkX
    algorithms documentation.
    """
    graph = self.graph
    # Max-based coefficients.
    self.kalz_coef = max(nx.katz_centrality(graph).values())
    self.harmonic_coef = max(nx.harmonic_centrality(graph).values())
    self.betweenness_coef = max(nx.betweenness_centrality(graph).values())
    self.closeness_coef = max(nx.closeness_centrality(graph).values())
    # Mean-based coefficients.
    self.glob_clust_coef = mean(list(nx.clustering(graph).values()))
    self.square_clustering_coef = mean(
        list(nx.square_clustering(graph).values()))
def _update_stat_json(self, stat_name):
    """
    Callback function for updating the statistic shown.

    :param stat_name: (str) Name of the statistic to display (e.g. graph_summary).
    :return: (json) Json of the graph information depending on chosen statistic.
    """
    # Map each statistic name to a zero-argument callable so that only the
    # requested statistic is computed.  The original built a dict of already
    # computed values, which ran EVERY statistic (including the expensive
    # betweenness centrality) on each callback invocation.
    switcher = {
        "graph_summary": self.get_graph_summary,
        "average_degree_connectivity":
            lambda: nx.average_degree_connectivity(self.graph.get_graph()),
        "average_neighbor_degree":
            lambda: nx.average_neighbor_degree(self.graph.get_graph()),
        "betweenness_centrality":
            lambda: nx.betweenness_centrality(self.graph.get_graph()),
    }
    if type(self.graph).__name__ == "PMFG":
        switcher["disparity_measure"] = self.graph.get_disparity_measure
    compute = switcher.get(stat_name)
    # Unknown stat_name serializes to "null", matching the original
    # switcher.get(stat_name) -> None behaviour.
    return json.dumps(compute() if compute is not None else None, indent=2)
def graph_properties(G, n, p):
    """Export matrices/centrality of *G* to CSV and print basic stats.

    Writes the adjacency matrix, Laplacian matrix and betweenness
    centrality of *G* into ``Q8_Assets/`` (file names parameterised by
    *n* and *p*), then prints the average clustering coefficient and the
    diameter.

    :param G: networkx graph.
    :param n: value embedded in the output file names (graph size).
    :param p: value embedded in the output file names (edge probability).
    """
    adjacency = nx.adjacency_matrix(G).toarray()
    laplacian = nx.laplacian_matrix(G).toarray()
    centrality = nx.betweenness_centrality(G)
    # Structured array: integer node id paired with its float64 score.
    record_dtype = dict(names=['id', 'data'], formats=['i', 'f8'])
    centrality_array = np.array(list(centrality.items()), dtype=record_dtype)
    clustering = nx.average_clustering(G)
    diameter = nx.diameter(G)
    # Shared file-name suffix, built once.
    suffix = '_N_' + str(n) + '_P_' + str(p) + '.csv'
    np.savetxt('Q8_Assets/Adjacency' + suffix, adjacency, delimiter=",")
    np.savetxt('Q8_Assets/Laplacian' + suffix, laplacian, delimiter=",")
    np.savetxt('Q8_Assets/Betweeness' + suffix, centrality_array, delimiter=",")
    print('Clustering = ', clustering, '\n')
    print('Diameter = ', diameter, '\n')
def calculate(self, graph, params):
    """Return the betweenness centrality of *graph*.

    :param graph: project graph object, converted via util.to_networkx.
    :param params: unused; kept for interface compatibility.
    :return: dict mapping node -> betweenness centrality score.
    """
    return nx.betweenness_centrality(util.to_networkx(graph))
# NOTE(review): legacy Python 2 script (print statements, dict.iteritems);
# it will not run under Python 3 without porting.
__author__ = 'nourl'

from common.transform import *
from PyCommDete import *
# NOTE(review): 'from networkx import nx' only resolves on old networkx
# releases that shipped an 'nx' alias -- verify against the pinned version.
from networkx import nx
from inputs.formal_edgelist import *
from multiprocessing import Pool
from sys import exit

#C = nx.read_gml(filelist[1])
# Build the benchmark graph from an edge-list file.
C = nx.Graph(formal_edgelist('./benchmarks/network.dat'))
print "length:", len(C)

# Rank every node by betweenness centrality, highest first.
betw_ori = nx.betweenness_centrality(C)
degr_ori = C.degree()
nodes = C.nodes()
betw_ori = sorted(betw_ori.iteritems(), key=lambda x: x[1], reverse=True)

# Keep only nodes scoring at least 20% of the average betweenness.
average = sum(x[1] for x in betw_ori) / len(betw_ori)
betw = [x for x in betw_ori if x[1] >= 0.2 * average]
betw_ex_nei = [x[0] for x in betw]

# bitmap is indexed by node id - 1: assumes node ids are consecutive
# 1-based integers -- TODO confirm against formal_edgelist output.
bitmap = [0] * len(betw_ori)
print "len of betw_ex_nei:", len(betw_ex_nei)
for x in betw_ex_nei:
    bitmap[x - 1] = x
recover = []
for x in betw_ex_nei:
    if bitmap[x - 1] > 0:
        recover.append(x)
        # NOTE(review): this chunk ends mid-computation; presumably the
        # neighbours fetched here are then cleared from bitmap (compare
        # get_all_nodes elsewhere in this file) -- confirm in full file.
        nei = C.neighbors(x)
# NOTE(review): Python 2 script and a near-verbatim duplicate of the
# preceding fragment -- consider deduplicating.  Will not run on Python 3.
__author__ = 'nourl'

from common.transform import *
from PyCommDete import *
# NOTE(review): 'from networkx import nx' only resolves on old networkx
# releases that shipped an 'nx' alias -- verify against the pinned version.
from networkx import nx
from inputs.formal_edgelist import *
from multiprocessing import Pool
from sys import exit

#C = nx.read_gml(filelist[1])
# Build the benchmark graph from an edge-list file.
C = nx.Graph(formal_edgelist('./benchmarks/network.dat'))
print "length:", len(C)

# Rank every node by betweenness centrality, highest first.
betw_ori = nx.betweenness_centrality(C)
degr_ori = C.degree()
nodes = C.nodes()
betw_ori = sorted(betw_ori.iteritems(), key=lambda x: x[1], reverse=True)

# Keep only nodes scoring at least 20% of the average betweenness.
average = sum(x[1] for x in betw_ori) / len(betw_ori)
betw = [x for x in betw_ori if x[1] >= 0.2 * average]
betw_ex_nei = [x[0] for x in betw]

# bitmap is indexed by node id - 1: assumes node ids are consecutive
# 1-based integers -- TODO confirm against formal_edgelist output.
bitmap = [0] * len(betw_ori)
print "len of betw_ex_nei:", len(betw_ex_nei)
for x in betw_ex_nei:
    bitmap[x - 1] = x
recover = []
for x in betw_ex_nei:
    if bitmap[x - 1] > 0:
        recover.append(x)
        # NOTE(review): fragment ends mid-computation; presumably the
        # neighbours fetched here are then cleared from bitmap (compare
        # get_all_nodes elsewhere in this file) -- confirm in full file.
        nei = C.neighbors(x)
def get_all_nodes(netw,seeds_type): if seeds_type == 1: orig = C.degree() elif seeds_type == 2: orig = nx.betweenness_centrality(netw) elif seeds_type == 3: betw = nx.betweenness_centrality(netw) degr = netw.degree() nodes = netw.nodes() betw = sorted(betw.iteritems(), key=lambda x:x[1],reverse=True) bitmap = [0]*len(betw) betw_ex_nei=[x[0] for x in betw] print "len of betw_ex_nei:",len(betw_ex_nei) for x in betw_ex_nei: bitmap[x-1]=x recover=[] for x in betw_ex_nei: if bitmap[x-1] >0: recover.append(x) nei = C.neighbors(x) for n in nei: bitmap[n-1] = 0 for x in recover: bitmap[x-1]=x seed_betw = [node for node in nodes if bitmap[node-1]] print "len of betw:",len(seed_betw) print "seed_betw:",seed_betw degr = sorted(degr.iteritems(), key=lambda x:x[1],reverse=True) bitmap = [0]*len(degr) degr_ex_nei=[x[0] for x in degr] print "len of degr_ex_nei:",len(degr_ex_nei) for x in degr_ex_nei: bitmap[x-1]=x recover=[] for x in degr_ex_nei: if bitmap[x-1] >0: recover.append(x) nei = C.neighbors(x) for n in nei: bitmap[n-1] = 0 for x in recover: bitmap[x-1]=x seed_degr = [node for node in nodes if bitmap[node-1]] print "len of degr:",len(seed_degr) print "seed_degr",seed_degr seed_cross=list(set(seed_betw).intersection(set(seed_degr))) print "len of seed_cross:",len(seed_cross) print "seed_cross:",seed_cross return seed_cross nodes = netw.nodes() orig=sorted(orig.iteritems(), key=lambda x:x[1],reverse=True) bitmap = [0]*len(orig) average = sum(x[1] for x in orig)/len(orig) orig=[x for x in orig if x[1]>=average] orig_over_ave=[x[0] for x in orig] for x in orig_over_ave: bitmap[x-1]=x recover=[] for x in orig_over_ave: if bitmap[x-1] >0: recover.append(x) nei = C.neighbors(x) for n in nei: bitmap[n-1] = 0 for x in recover: bitmap[x-1]=x seed = [node for node in nodes if bitmap[node-1]] return seed