def timeAlg(gName, alg):
    # Benchmark harness: time 10 runs of VoteRank (top 250 nodes) on the graph
    # loaded from gName, then print mean and std of the wall-clock durations.
    # NOTE(review): the `alg` parameter is currently unused — the call that
    # would use it (and tSet) is commented out below; only nx.voterank is timed.
    G = readG(gName)
    l = []
    for i in range(10):
        # tSet is computed but unused while the alg(...) call stays commented
        # out; kept so random-number consumption matches the original runs.
        tSet = algor.randseed(G, 0.02, 250)
        begin = time.time()
        #alg(G, 0.02,250, tSet=tSet, r=5)
        nx.voterank(G, number_of_nodes=250)
        l.append(time.time() - begin)
    l = numpy.array(l)
    print('mean time is ', numpy.average(l), ' std is ', numpy.std(l))
def test_voterank_centrality_5(self):
    """VoteRank on a MultiDiGraph with parallel edges ranks [2, 0, 5, 4]."""
    G = nx.MultiDiGraph()
    edges = [
        (0, 1), (0, 1), (1, 2), (2, 5), (2, 5),
        (5, 6), (5, 6), (2, 4), (4, 3),
    ]
    G.add_edges_from(edges)
    assert nx.voterank(G) == [2, 0, 5, 4]
def selectMixZonesByMetricAndRegion(n_mixzones, G, k_anonymity, radius_mixzone, metric):
    """Select up to n_mixzones mix-zone nodes, at most one per region,
    walking nodes in decreasing centrality order.

    metric selects the ordering: utils.EIGENVECTOR_METRIC uses eigenvector
    centrality; anything else uses VoteRank. Returns the result of
    utils.generateMixZonesObjects(...) once n_mixzones placements are made,
    or None (implicitly) if there are fewer distinct regions than requested.
    """
    regions_placement = {}
    selected_mixzones = []
    number_of_mixzones_placed = 0
    if metric == utils.EIGENVECTOR_METRIC:
        centrality = nx.eigenvector_centrality(G, max_iter=1000)
        ranked = sorted(centrality.items(), key=operator.itemgetter(1), reverse=True)
        nodes_ordered = [node for node, _ in ranked]
    else:
        # BUG FIX: nx.voterank() takes no max_iter argument — the old
        # nx.voterank(G, max_iter=1000) raised TypeError on every call.
        nodes_ordered = nx.voterank(G)
    # Single placement loop (the two branches previously duplicated it).
    for node in nodes_ordered:
        if G.node[node]["region"] not in regions_placement:
            regions_placement[G.node[node]["region"]] = 1
            selected_mixzones.append(node)
            number_of_mixzones_placed += 1
            if number_of_mixzones_placed == n_mixzones:
                return utils.generateMixZonesObjects(
                    selected_mixzones, G, k_anonymity, radius_mixzone)
def selectSeeds(graph, forSequential):
    """Pick `forSequential` seed nodes via VoteRank, falling back to random
    selection if VoteRank yields too few; also return the VoteRank duration."""
    started = datetime.datetime.now()
    seeds = nx.voterank(createNxGraph(graph), forSequential)
    finished = datetime.datetime.now()
    # Too few ranked nodes: replace the whole set with random seeds.
    if len(seeds) < forSequential:
        seeds = copy.copy(selectSeedsRandomly(graph, forSequential))
    return seeds, (finished - started)
def voterank(G):
    """Turn the VoteRank ordering of G into approximate per-node scores.

    A node at position i in the ranking gets score 1 / log(i + 2), so the
    top node scores highest and scores decay with rank.
    """
    ranking = nx.voterank(G)
    return {node: 1 / math.log(position + 2)
            for position, node in enumerate(ranking)}
def selectSeeds(graph, forSequential):
    """Pick `forSequential` seeds via VoteRank (random fallback on shortfall)
    and return them with the VoteRank wall-clock time as a timedelta."""
    begin = timer()
    seeds = nx.voterank(createNxGraph(graph), forSequential)
    # Elapsed time covers only the VoteRank call, not the fallback.
    elapsed = timedelta(seconds=timer() - begin)
    if len(seeds) < forSequential:
        seeds = copy.copy(selectSeedsRandomly(graph, forSequential))
    return seeds, elapsed
def simulate(spreader_count, gamma, time):
    # Run a multi-seed SIQR epidemic on the module-level graph G, seeded with
    # the top `spreader_count` VoteRank spreaders, then plot and save the four
    # compartment curves (S, I, Q, R) over time.
    # NOTE(review): the parameter name `time` shadows any imported `time`
    # module within this function — confirm that is intentional.
    spreaders = nx.voterank(G, spreader_count)
    s_scale, i_scale, q_scale, r_scale = Multi_SIQR(10, spreaders, gamma, time)
    plt.plot(range(len(r_scale)), r_scale, '-b', label = "Recovered")
    plt.plot(range(len(i_scale)), i_scale, '-r', label = "Infected")
    plt.plot(range(len(q_scale)), q_scale, '-g', label = "Quarantined")
    plt.plot(range(len(s_scale)), s_scale, '-m', label = "Susceptible")
    plt.xlabel('Time')
    plt.ylabel('F(t)')
    plt.legend(loc='best')
    # NOTE(review): `s` is not defined in this function — presumably a
    # module-level string used in the filename; verify before relying on it.
    plt.savefig('/home/devalfa/Network/Corona'+str(gamma)+'_'+s+'_stay_in'+'.png')
    plt.show()
def ranking():
    """Print a table of authors from the module-level graph G, ordered by
    their VoteRank influence, with each node's stored voterank attributes."""
    ranked_authors = nx.voterank(G)
    table = prettytable.PrettyTable([
        'Rank', 'Author ID', 'Name', 'Final voting score', 'Final voting ability'
    ])
    for rank, author_id in enumerate(ranked_authors):
        attrs = G.nodes[author_id]
        table.add_row([
            rank,
            author_id,
            attrs['name'],
            attrs['voterank'][0],
            attrs['voterank'][1],
        ])
    print(table)
def get_graphFeatures(self, G=None):
    # Compute a fixed-order list of global graph features for G, or return
    # None when no graph is given. Averages of per-node centralities are taken
    # over the NON-ZERO entries only.
    if G is not None:
        #density
        density = nx.density(G)
        #average degree
        avg_degree = self.get_average_degree(G)
        #average closeness centrality
        avg_closeness = np.array(
            list(nx.closeness_centrality(G, distance='weight').values()))
        avg_closeness = np.mean(avg_closeness[avg_closeness != 0.0])
        #average betweenness centrality
        btw_centrality = np.array(
            list(nx.betweenness_centrality(G, weight='weight').values()))
        btw_centrality = np.mean(btw_centrality[btw_centrality != 0.0])
        #average harmonic centrality
        harmonic = np.array(
            list(nx.harmonic_centrality(G, distance='weight').values()))
        harmonic = np.mean(harmonic[harmonic != 0.0])
        #get eccentricity, radius, efficiency
        eccen = nx.eccentricity(G)
        radius = nx.radius(G, e=eccen)  # reuse the precomputed eccentricities
        eccen = np.array(list(eccen.values()))
        avg_eccen = np.mean(eccen[eccen != 0.0])
        efficiency = nx.global_efficiency(G)
        #get transitivity and average cluster coefficient
        transitivity = nx.transitivity(G)
        avg_cluster_coef = nx.average_clustering(G,
                                                 weight='weight',
                                                 count_zeros=False)
        #avg vote rank
        # NOTE(review): nx.voterank returns an ordered LIST of node labels, so
        # this averages the labels themselves — only meaningful when nodes are
        # numeric IDs; confirm that is the intent.
        avg_voteRank = stat.mean(nx.voterank(G))
        #avg information centrality
        ic = np.array(
            list(nx.information_centrality(G, weight='weight').values()))
        avg_ic = np.mean(ic[ic != 0.0])
        #avg load centrality
        lc = np.array(list(
            nx.load_centrality(G, weight='weight').values()))
        avg_lc = np.mean(lc[lc != 0.0])
        return [
            self.num_nuclei, density, avg_degree, avg_closeness,
            btw_centrality, harmonic, avg_eccen, radius, efficiency,
            transitivity, avg_cluster_coef, avg_voteRank, avg_ic, avg_lc
        ]
    else:
        return None
def voteRank(graph, N):
    """
    Return the N nodes with the largest voterank in the graph

    Uses the networkx package. `graph` is an adjacency mapping
    {node: iterable_of_neighbors}; returns a list of node identifiers.
    """
    # Build an undirected networkx graph from the adjacency mapping.
    G = nx.Graph()
    for node, neighbors in graph.items():
        for neighbor in neighbors:
            G.add_edge(node, neighbor)
    # BUG FIX: nx.voterank() accepts no max_iter keyword — passing
    # max_iter=2000 raised TypeError. Also removed the unused `topN`
    # placeholder list the original initialized and never read.
    return nx.voterank(G, N)
def test_voterank_centrality_1(self):
    """VoteRank on a small undirected graph picks hubs 0, 7, 6 in order."""
    edges = [
        (7, 8), (7, 5), (7, 9), (5, 0),
        (0, 1), (0, 2), (0, 3), (0, 4),
        (1, 6), (2, 6), (3, 6), (4, 6),
    ]
    G = nx.Graph()
    G.add_edges_from(edges)
    assert nx.voterank(G) == [0, 7, 6]
def get_top_voterank(G, percent):
    """Return the top `percent`% of G's nodes (count rounded up) as ranked
    by the VoteRank algorithm, most influential first."""
    top_n = math.ceil(int(percent) * len(G.nodes()) / 100)
    return nx.voterank(G, top_n)
def getOrderedNodesByMetric(G, n_regions, metric):
    """Group G's nodes by their 'region' attribute, each region's list ordered
    by decreasing centrality.

    metric selects the ordering: utils.EIGENVECTOR_METRIC uses eigenvector
    centrality; anything else uses VoteRank. Returns a list of length
    n_regions whose entries are node lists (or None for empty regions).
    """
    regions_ordered_nodes = [None] * n_regions
    if metric == utils.EIGENVECTOR_METRIC:
        centrality = nx.eigenvector_centrality(G, max_iter=1000)
        ranked = sorted(centrality.items(), key=operator.itemgetter(1), reverse=True)
        nodes_ordered = [node for node, _ in ranked]
    else:
        # BUG FIX: nx.voterank() takes no max_iter argument — the old
        # nx.voterank(G, max_iter=1000) raised TypeError on every call.
        nodes_ordered = nx.voterank(G)
    # Single bucketing loop (the two branches previously duplicated it).
    for node in nodes_ordered:
        region = int(G.node[node]['region'])
        if regions_ordered_nodes[region] is None:
            regions_ordered_nodes[region] = [node]
        else:
            regions_ordered_nodes[region].append(node)
    return regions_ordered_nodes
def _fit_author_attributes(self, X: pd.DataFrame):
    """Populate self.author_attributes with per-author statistics:
    row counts from X and graph measures from self.g_author."""
    self.author_attributes = {}
    # Number of rows per author (first column of the grouped count),
    # keyed by author for later merging.
    self.author_attributes['author_implication'] = (
        X.groupby('author').count().iloc[:, 0].to_dict()
    )
    # Centrality measures on the author graph.
    centrality = nx.centrality
    self.author_attributes["author_degree_centrality"] = (
        centrality.degree_centrality(self.g_author))
    self.author_attributes["author_out_degree_centrality"] = (
        centrality.out_degree_centrality(self.g_author))
    self.author_attributes["author_in_degree_centrality"] = (
        centrality.in_degree_centrality(self.g_author))
    # Flag the (up to) 1000 most influential authors found by VoteRank.
    influential = nx.voterank(self.g_author, 1000)
    self.author_attributes['author_is_influential'] = {
        author: 1 for author in influential
    }
def graph_properties(self, graph):
    """Annotate every node of `graph` (in place) with standard centrality
    measures and return the graph.

    The "VoteRank Score" of a node is its reverse position in the VoteRank
    ordering (len(VR) for the top node), or 0 if VoteRank did not rank it.
    """
    K = dict(nx.degree(graph))
    CC = dict(nx.closeness_centrality(graph))
    BC = dict(nx.betweenness_centrality(graph))
    EBC = dict(nx.edge_betweenness_centrality(graph))
    EC = dict(nx.eigenvector_centrality(graph, max_iter=1000))
    C = dict(nx.clustering(graph))
    VR = nx.voterank(graph)
    # FIX: the original called VR.index(node) per node inside a bare
    # try/except — O(n^2), and the bare except could mask unrelated errors.
    # A precomputed rank map yields identical scores in O(n).
    position = {node: idx for idx, node in enumerate(VR)}
    VRS = {node: (len(VR) - position[node]) if node in position else 0
           for node in graph.nodes()}
    nx.set_node_attributes(graph, K, "Degree")
    nx.set_node_attributes(graph, CC, "Closeness Centrality")
    nx.set_node_attributes(graph, BC, "Betweenness Centrality")
    # NOTE(review): EBC is keyed by EDGES; passing it to set_node_attributes
    # matches the original code but likely attaches nothing to any node.
    nx.set_node_attributes(graph, EBC, "Edge Betweenness Centrality")
    nx.set_node_attributes(graph, EC, "Eigenvector Centrality")
    nx.set_node_attributes(graph, C, "Clustering Coefficient")
    nx.set_node_attributes(graph, VRS, "VoteRank Score")
    return graph
def test_voterank_centrality_3(self):
    """VoteRank on a seeded GNC digraph: only 3 spreaders exist even though
    4 were requested."""
    G = nx.gnc_graph(10, seed=7)
    result = nx.voterank(G, 4)
    assert result == [3, 6, 8]
def test_voterank_centrality_2(self):
    """Top-4 VoteRank spreaders of the Florentine families graph."""
    G = nx.florentine_families_graph()
    result = nx.voterank(G, 4)
    assert result == ["Medici", "Strozzi", "Guadagni", "Castellani"]
def selectSeeds(graph, forSequential):
    """Return `forSequential` VoteRank seed nodes together with the
    wall-clock time the ranking took, as a timedelta."""
    begin = timer()
    seeds = nx.voterank(createNxGraph(graph), forSequential)
    elapsed = timedelta(seconds=timer() - begin)
    return seeds, elapsed
def test_voterank_centrality_2(self):
    """Top-4 VoteRank spreaders of the Florentine families graph."""
    G = nx.florentine_families_graph()
    expected = ['Medici', 'Strozzi', 'Guadagni', 'Castellani']
    assert_equal(expected, nx.voterank(G, 4))
def get_influential_nodes_voterank(graph):
    """Return the 10 most influential nodes of `graph` per VoteRank."""
    print("Calculando a influencia dos vertices")
    return nx.voterank(graph)[:10]
def test_voterank_centrality_1(self):
    """VoteRank on a small undirected graph picks hubs 0, 7, 6 in order."""
    edges = [(7, 8), (7, 5), (7, 9), (5, 0),
             (0, 1), (0, 2), (0, 3), (0, 4),
             (1, 6), (2, 6), (3, 6), (4, 6)]
    G = nx.Graph()
    G.add_edges_from(edges)
    assert_equal([0, 7, 6], nx.voterank(G))
def compute_subgraph_center(self, subgraph):
    """Pick a representative 'center' node of `subgraph` according to
    self.method.

    Most branches compute a per-node score mapping and return its argmax;
    the default branch uses nx.center() and picks one of the graph centers
    at random.
    """
    if self.method == 'betweenness_centrality':
        d = nx.betweenness_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'betweenness_centrality_subset':
        d = nx.betweenness_centrality_subset(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'information_centrality':
        d = nx.information_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'local_reaching_centrality':
        # NOTE(review): this branch scores nodes of the FULL graph self.G,
        # not `subgraph`, so the chosen center may lie outside the subgraph —
        # confirm this is intentional.
        d = {}
        for n in self.G.nodes():
            d[n] = nx.local_reaching_centrality(self.G, n, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'voterank':
        # BUG FIX: nx.voterank returns an ordered LIST of nodes, not a dict,
        # so the old max(d, key=d.get) raised AttributeError. The most
        # influential node is the first entry of the ranking.
        ranked = nx.voterank(subgraph)
        center = ranked[0]
    elif self.method == 'percolation_centrality':
        d = nx.percolation_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'subgraph_centrality':
        d = nx.subgraph_centrality(subgraph)
        center = max(d, key=d.get)
    elif self.method == 'subgraph_centrality_exp':
        d = nx.subgraph_centrality_exp(subgraph)
        center = max(d, key=d.get)
    elif self.method == 'estrada_index':
        # NOTE(review): nx.estrada_index returns a single float (a
        # whole-graph index), so max(d, key=d.get) will fail here — this
        # branch needs a per-node measure (e.g. subgraph_centrality) instead.
        d = nx.estrada_index(subgraph)
        center = max(d, key=d.get)
    elif self.method == 'second_order_centrality':
        d = nx.second_order_centrality(subgraph)
        center = max(d, key=d.get)
    elif self.method == 'eigenvector_centrality':
        d = nx.eigenvector_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'load_centrality':
        d = nx.load_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'closeness_centrality':
        d = nx.closeness_centrality(subgraph)
        center = max(d, key=d.get)
    elif self.method == 'current_flow_closeness_centrality':
        d = nx.current_flow_closeness_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'current_flow_betweenness_centrality':
        d = nx.current_flow_betweenness_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'current_flow_betweenness_centrality_subset':
        d = nx.current_flow_betweenness_centrality_subset(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'approximate_current_flow_betweenness_centrality':
        d = nx.approximate_current_flow_betweenness_centrality(
            subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'harmonic_centrality':
        d = nx.harmonic_centrality(subgraph)
        center = max(d, key=d.get)
    elif self.method == 'page_rank':
        d = nx.pagerank(subgraph, weight='weight')
        center = max(d, key=d.get)
    elif self.method == 'hits':
        d = nx.hits(subgraph)
        center = max(d, key=d.get)
    elif self.method == 'katz_centrality':
        d = nx.katz_centrality(subgraph, weight='weight')
        center = max(d, key=d.get)
    else:
        new_centers = nx.center(subgraph)
        # new_centers gives a list of centers and here we just pick one randomly --not good for stability
        # to do : find a better way to choose the center--make it stable
        index = random.randint(0, len(new_centers) - 1)
        center = new_centers[index]
    return center
def voterank_centrality(G):
    """Normalized VoteRank ordering of G's nodes."""
    ranked_nodes = nx.voterank(G)
    return normalize(ranked_nodes)
# Script section: compute a battery of centrality metrics on `Graph`
# (defined earlier in the file) and print per-metric wall-clock timings.
# NOTE(review): `st`, `st1` and (later) `bet_cent` are set earlier in the
# script, outside this section — confirm they exist before running it alone.
try:
    eig_cent = nx.eigenvector_centrality(Graph)
except nx.PowerIterationFailedConvergence:
    # Best-effort: on non-convergence only a message is printed and
    # eig_cent keeps whatever value it had (or stays undefined).
    print("Eigenvector Centrality Failed")
ft = time.perf_counter()
print(f"Calculated eig_cent in {(ft - st):.4f} seconds")
st = time.perf_counter()
close_cent = nx.closeness_centrality(Graph)
ft = time.perf_counter()
print(f"Calculated close_cent in {(ft - st):.4f} seconds")
st = time.perf_counter()
load_cent = nx.load_centrality(Graph)
ft = time.perf_counter()
print(f"Calculated load_cent in {(ft - st):.4f} seconds")
st = time.perf_counter()
vote_rank = nx.voterank(Graph)
ft = time.perf_counter()
print(f"Calculated vote_rank in {(ft - st):.4f} seconds")
st = time.perf_counter()
page_rank = nx.pagerank(Graph)
ft = time.perf_counter()
print(f"Calculated page_rank in {(ft - st):.4f} seconds")
ft1 = time.perf_counter()
print(f"Calculated ALL the metrics in {(ft1 - st1):.4f} seconds")
# Visualize
st = time.perf_counter()
pos = nx.spring_layout(Graph)
# Node colors scale with degree; node sizes with betweenness centrality.
node_color = [20000.0 * Graph.degree(v) for v in Graph]
node_size = [v * 10000 for v in bet_cent.values()]
plt.figure(figsize=(20, 20))