def test_eigenvector(self):
    """Eigenvector community detection returns a list of communities,
    each community being a list of string node identifiers.
    """
    g = get_string_graph()
    com = algorithms.eigenvector(g)
    # assertIsInstance gives clearer failure messages than comparing type()
    # and also accepts subclasses, which is the intended contract here.
    self.assertIsInstance(com.communities, list)
    if com.communities:  # detection may legitimately find no communities
        self.assertIsInstance(com.communities[0], list)
        self.assertIsInstance(com.communities[0][0], str)
plt.title(f'{name} algo of {graph_name}')
plt.show()
# plot the graph
viz.plot_community_graph(nx_g, pred_coms, figsize=(5, 5))
plt.title(f'Communities for {name} algo of {graph_name}.')
plt.show()

# %% inspect edges touching ports that looked anomalous in the community plot
odd_ports = ['ATLANTIC CITY', 'OCEAN CITY', 'KEY WEST']
df_odd_ports = df_edgelist[(df_edgelist['Source'].isin(odd_ports)) |
                           (df_edgelist['Target'].isin(odd_ports))]

# %% explore communities in communities
pred_coms = algorithms.eigenvector(nx_g)
communities = pred_coms.communities
# map community index -> list of member nodes
# (dict comprehension replaces the original range(len)/append loop)
coms_dict = {c: list(members) for c, members in enumerate(communities)}
# edges with at least one endpoint inside community 0
df_com0 = df_edgelist[(df_edgelist['Source'].isin(coms_dict[0])) |
                      (df_edgelist['Target'].isin(coms_dict[0]))]

# %% re-run eigenvector detection on the subgraph induced by community 0
nx_com0 = nx.from_edgelist(df_com0[['Source', 'Target']].values)
pred_coms_com0 = algorithms.eigenvector(nx_com0)
# Builds the weighted graph from parsed edge fields, prints its size, then
# dispatches the community-detection algorithm named by options.method,
# forwarding **clust_kwargs.  Note the inconsistent keyword spelling across
# branches (weights= for leiden/cpm/rber_pots vs weight= for
# louvain/greedy_modularity) — that mirrors the cdlib API, not a typo.
# NOTE(review): this chunk is truncated — the final "elif ... == 'rb_pots':"
# branch has no visible body, so the chain is left byte-identical here; a
# dict-of-callables dispatch would be the cleaner refactor once the full
# chain is in view.
g.add_edge(fields[0], fields[1], weight=float(fields[2])) print(g.number_of_nodes()) print(g.number_of_edges()) if (options.method == 'leiden'): communities = algorithms.leiden(g, weights='weight', **clust_kwargs) elif (options.method == 'louvain'): communities = algorithms.louvain(g, weight='weight', **clust_kwargs) elif (options.method == 'cpm'): communities = algorithms.cpm(g, weights='weight', **clust_kwargs) elif (options.method == 'der'): communities = algorithms.der(g, **clust_kwargs) elif (options.method == 'edmot'): communities = algorithms.edmot(g, **clust_kwargs) elif (options.method == 'eigenvector'): communities = algorithms.eigenvector(g, **clust_kwargs) elif (options.method == 'gdmp2'): communities = algorithms.gdmp2(g, **clust_kwargs) elif (options.method == 'greedy_modularity'): communities = algorithms.greedy_modularity(g, weight='weight', **clust_kwargs) #elif(options.method == 'infomap'): # communities = algorithms.infomap(g) elif (options.method == 'label_propagation'): communities = algorithms.label_propagation(g, **clust_kwargs) elif (options.method == 'markov_clustering'): communities = algorithms.markov_clustering(g, **clust_kwargs) elif (options.method == 'rber_pots'): communities = algorithms.rber_pots(g, weights='weight', **clust_kwargs) elif (options.method == 'rb_pots'):
# Runs five community-detection algorithms on g1 ten times and writes, per
# node, the index of the community each algorithm assigned it (columns
# community_spinglass / community_eigenvector / ... in nodes1), initialised
# to -1 for unassigned nodes.
# NOTE(review): the inner loops reuse the name "i", shadowing the outer
# "for i in range(10)" counter.  Python's for re-binds i on each outer
# iteration so the loop count is unaffected, but the shadowing is confusing
# and worth renaming once the full loop body is visible.
# NOTE(review): chunk is truncated mid-loop (the last "for i in range(...)"
# has no visible body), so the code is left byte-identical here.
# (Original Italian header comments: "ATTENZIONE" = this takes a few minutes;
# "CONSIGLIO" = skip to the next cell, which loads results from file.)
# # ATTENZIONE: richiede qualche minuto # # CONSIGLIO: passare alla cella successiva che carica i risultati da file # In[30]: accuracy_spinglass = 0 accuracy_eigenvector = 0 accuracy_leiden = 0 accuracy_cpm = 0 accuracy_rber_pots = 0 for i in range(10): result_spinglass_tmp = algorithms.spinglass(g1) result_eigenvector_tmp = algorithms.eigenvector(g1) result_leiden_tmp = algorithms.leiden(g1) result_cpm_tmp = algorithms.cpm(g1, resolution_parameter=.00018) result_rber_pots_tmp = algorithms.rber_pots(g1, resolution_parameter=.32) #definizione colonne che servono per calcolare l'accuracy nodes1['community_spinglass'] = -1 for i in range(len(result_spinglass_tmp.communities)): for j in result_spinglass_tmp.communities[i]: nodes1.loc[j, 'community_spinglass'] = i nodes1['community_eigenvector'] = -1 for i in range(len(result_eigenvector_tmp.communities)): for j in result_eigenvector_tmp.communities[i]: nodes1.loc[j, 'community_eigenvector'] = i nodes1['community_leiden'] = -1 for i in range(len(result_leiden_tmp.communities)):
LFR_G = generate_lfr(mixing_parameter)
# ground-truth communities are stored per node as sets; dedupe via frozenset
set_comm = {frozenset(LFR_G.nodes[v]["community"]) for v in LFR_G}
comm_list = [node_set for node_set in set_comm]
true_labels = extract_communities_list(comm_list)
nx.draw(LFR_G, nx.spring_layout(LFR_G), node_color=true_labels,
        cmap=plt.cm.get_cmap('rainbow'), node_size=30)
# NOTE(review): len(true_labels) counts labelled nodes, not communities —
# len(comm_list) may be what was intended here; confirm before relying on it.
comm_num = len(true_labels)
# BUG FIX: plt.title takes a single label string; its second positional
# argument is fontdict, so the count must be formatted into the string.
plt.title('len: %i' % comm_num)
plt.show()

############################### Infomap ###############################
infomap_partition = cd.infomap(LFR_G)  # Partition graph with Infomap
infomap_labels = extract_communities_list(infomap_partition.communities)
nmi_infomap.append(normalized_mutual_info_score(true_labels, infomap_labels))

############################### Leading Eigenvector ###############################
eigenvector_partition = cd.eigenvector(LFR_G)
eigenvector_labels = extract_communities_list(eigenvector_partition.communities)
nmi_eigenvector.append(normalized_mutual_info_score(true_labels, eigenvector_labels))

############################### Louvian ###############################
louvian_partition = cd.louvain(LFR_G)
louvian_labels = extract_communities_list(louvian_partition.communities)
nmi_louvian.append(normalized_mutual_info_score(true_labels, louvian_labels))

############################### Leiden ###############################
leiden_partition = cd.leiden(LFR_G)
leiden_labels = extract_communities_list(leiden_partition.communities)
# BUG FIX: previously scored louvian_labels here (copy-paste error), which
# made the Leiden NMI duplicate the Louvain result.
nmi_leiden.append(normalized_mutual_info_score(true_labels, leiden_labels))

############################### Walktrap ###############################
walktrap_partition = cd.walktrap(LFR_G)
############################### Infomap ###############################
# Time Infomap, then score it against ground truth on three metrics.
start_time = time.time()
infomap_partition = cd.infomap(G)
infomap_time = time.time() - start_time
infomap_communities = extract_communities_list(infomap_partition.communities, G)
infomap_partitions = get_partitions(infomap_communities)
nmi_infomap = normalized_mutual_info_score(true_communities, infomap_communities)
ari_infomap = adjusted_rand_score(true_communities, infomap_communities)
vi_infomap = variation_of_information(true_partitions, infomap_partitions)

############################### Leading Eigenvector ###############################
# Same timing + scoring pattern for the leading-eigenvector method.
start_time = time.time()
eigenvector_partition = cd.eigenvector(G)
# NOTE(review): "eifenvector_time" and "eigenvector_paritions" are misspelled,
# but the names are kept verbatim because code outside this chunk may refer
# to them by these exact spellings.
eifenvector_time = time.time() - start_time
eigenvector_communities = extract_communities_list(
    eigenvector_partition.communities, G)
eigenvector_paritions = get_partitions(eigenvector_communities)
nmi_eigenvector = normalized_mutual_info_score(true_communities,
                                               eigenvector_communities)
ari_eigenvector = adjusted_rand_score(true_communities, eigenvector_communities)
vi_eigenvector = variation_of_information(true_partitions, eigenvector_paritions)

############################### Louvian ###############################
start_time = time.time()
louvian_partition = cd.louvain(G)
louvian_time = time.time() - start_time