def test_nx_cluster(self):
    """Smoke-test viz.plot_network_clusters on both a crisp and an
    overlapping partition of the karate-club graph, saving and then
    deleting the rendered PDF each time."""
    graph = nx.karate_club_graph()

    # Crisp partition (Louvain), default rendering options.
    partition = algorithms.louvain(graph)
    layout = nx.spring_layout(graph)
    viz.plot_network_clusters(graph, partition, layout)
    plt.savefig("cluster.pdf")
    os.remove("cluster.pdf")

    # Overlapping partition (ANGEL) with node labels and overlap markers.
    partition = algorithms.angel(graph, 0.25)
    layout = nx.spring_layout(graph)
    viz.plot_network_clusters(graph, partition, layout,
                              plot_labels=True, plot_overlaps=True)
    plt.savefig("cluster.pdf")
    os.remove("cluster.pdf")
# Build the entity co-occurrence graph.
G = nx.Graph(filtered_entities)
print(G.nodes())
print(G.edges())

# Remove self-loops and peripheral nodes: keep only the 2-core.
G.remove_edges_from(nx.selfloop_edges(G))
G = nx.k_core(G, k=2)

# Community detection (Infomap; Louvain kept for reference) and plots.
# coms = algorithms.louvain(G)
coms = algorithms.infomap(G)
pos = nx.spring_layout(G)
viz.plot_community_graph(G, coms, figsize=(8, 8), node_size=200,
                         plot_overlaps=False, plot_labels=True,
                         cmap=None, top_k=None, min_size=None)
viz.plot_network_clusters(G, coms, position=None, figsize=(8, 8),
                          node_size=200, plot_overlaps=False,
                          plot_labels=False, cmap=None,
                          top_k=None, min_size=None)

# Modularity density of the detected partition.
# NOTE(review): the original computed and printed this identical value
# twice in a row (the first copy carried an unrelated comment about
# "converting to an nx graph"); deduplicated to a single computation.
mod = evaluation.modularity_density(G, coms)
print(mod)

# Purity / average embeddedness need ground-truth labels — left disabled.
# communities = eva(G, coms)
# pur = evaluation.purity(communities)
# print(pur)
plt.show()

#%%
# NOTE(review): `unet` vs `net` — presumably the undirected copy of the
# network; confirm both names are defined by the earlier cells.
evaluation.newman_girvan_modularity(unet, algorithms.infomap(net))

#%%
import networkx as nx
from cdlib import algorithms, viz

# Detect and draw Infomap communities over the network.
coms = algorithms.infomap(net)
pos = nx.spring_layout(net)
viz.plot_network_clusters(net, coms, pos, plot_labels=True)

#%%
# not in book
from heapq import nlargest

# HITS scores; keep the two strongest hubs.
h, a = nx.hits(net)
hubs = nlargest(2, h, key=h.get)

# Label only the hub nodes, using the node name itself as the label.
labels = {}
for node in net.nodes():
    if node in hubs:
        labels[node] = node
# Collect the ground-truth communities: one frozenset of member nodes
# per unique label in the 'truth' column.
truth_coms = df_lcc_truth['truth'].unique()
communities_set = set()
for com in truth_coms:
    com_nodes = df_lcc_truth[df_lcc_truth['truth'] == com].iloc[:, 0].values
    communities_set.add(frozenset(com_nodes))

# Wrap them in a cdlib NodeClustering so the evaluation/viz helpers
# accept them like any detected partition.
ground_truth_com = NodeClustering(communities=communities_set, graph=nx_g,
                                  method_name="ground_truth")

#%% define and plot ground truth
# Fix node positions once so every cluster plot drawn later shares the
# same layout and stays visually comparable.
pos = nx.fruchterman_reingold_layout(nx_g)

#%%
# Original network coloured by the ground-truth communities.
viz.plot_network_clusters(nx_g, ground_truth_com, pos, figsize=(5, 5))
# nx.draw_networkx_labels(nx_g, pos=pos)
plt.title(f'Ground Truth of {graph_name}')
plt.show()

#%% evaluate ground-truth community metrics
viz.plot_com_properties_relation(ground_truth_com, evaluation.size,
                                 evaluation.internal_edge_density)
plt.show()

#%%
viz.plot_com_stat(ground_truth_com, evaluation.conductance)
plt.show()
viz.plot_com_stat(ground_truth_com, evaluation.average_internal_degree)
plt.show()
# Map each node to the index of its predicted community.
for c, members in enumerate(communities):
    for i in members:
        coms_dict[i] = [c]

# Tabulate the assignment and fold it into the running per-node table.
df_clusts = pd.DataFrame.from_dict(coms_dict).T.reset_index()
df_clusts.columns = ['node', name]
df_nodes = pd.merge(df_nodes, df_clusts, how='left',
                    left_on='node', right_on='node')

# Network coloured by this algorithm's communities.
viz.plot_network_clusters(nx_g, pred_coms, pos, figsize=(5, 5))
plt.title(f'{name} algo of {graph_name}')
plt.show()

# Condensed community-level graph for the same partition.
viz.plot_community_graph(nx_g, pred_coms, figsize=(5, 5))
plt.title(f'Communities for {name} algo of {graph_name}.')
plt.show()

#%%
# Edges touching any of these three ports, for closer inspection.
odd_ports = ['ATLANTIC CITY', 'OCEAN CITY', 'KEY WEST']
df_odd_ports = df_edgelist[(df_edgelist['Source'].isin(odd_ports))
                           | (df_edgelist['Target'].isin(odd_ports))]

#%% explore communities in communities
# NOTE(review): this chunk opens mid-loop — the `for` header enclosing
# `if i == j: ...` lies outside this view, so the nesting of
# `l.remove(i)` cannot be reconstructed safely; code left untouched.
# What follows the counting/printing: builds a subgraph F containing
# only edges of G with weight > 500 (copying each endpoint's 'labels'
# and 'properties' node attributes), then draws `coms` over F with a
# spring layout. Trailing backtick appears to be a paste artifact.
if i == j: count_leiden += 1 l.remove(i) print("Equal community:", count_leiden) print("#total communtites in leiden:", len(coms_leiden.communities)) print("#total communtites in graph:", len(coms_graph)) # Visualising a subset of nodes # Subset import numpy as np F = nx.Graph() for (u, v, wt) in G.edges.data('weight'): if wt > 500: print(u, v, wt) if u not in F.nodes(): F.add_node(u, labels=G.nodes()[u]['labels'], properties=G.nodes()[u]['properties']) if v not in F.nodes(): F.add_node(v, labels=G.nodes()[v]['labels'], properties=G.nodes()[v]['properties']) F.add_edge(u, v, weight=wt) pos = nx.spring_layout(F, k=10 / np.sqrt(len(F.nodes())), iterations=20) viz.plot_network_clusters(F, coms, pos, plot_labels=True, node_size=400) #if __name__ == '__main__': #create_networkx_graph()`
def draw_community_graph(g, coms, filename):
    """Draw the communities `coms` over graph `g` using a spring layout
    and save the figure to `filename`."""
    layout = nx.spring_layout(g)
    viz.plot_network_clusters(g, coms, layout)
    plt.savefig(filename)
# NOTE(review): this chunk opens mid-expression — everything through
# `return cs` is the tail of a `clusters(...)` helper whose start lies
# outside this view, so the code is left untouched. After the helper:
# loads the 'karate' dataset via `read` (alternative datasets commented
# out), prints its info, benchmarks girvan-newman / label-propagation /
# louvain through `clusters`, and plots the louvain result.
cs.erdos_renyi_modularity().score)) print("{0:>15s} | {1:.6f}".format( 'Robustness', cs.normalized_mutual_information(alg(G)).score)) print("{0:>15s} | {1:.1f} sec\n".format('Timing', time() - tic)) return cs # G = read('toy') G = read('karate') # G = read('women') # G = read('dolphins') # G = read('got-appearance') # G = read('diseasome') # G = read('wars') # G = read('transport') # G = read('java') # G = read('imdb') # G = read('wikileaks') info(G) clusters(G, lambda G: algorithms.girvan_newman(G, level=1)) clusters(G, lambda G: algorithms.label_propagation(G)) cs = clusters(G, lambda G: algorithms.louvain(G)) # clusters(G, lambda G: algorithms.leiden(G)) # clusters(G, lambda G: algorithms.sbm_dl(G)) viz.plot_network_clusters(G, cs, nx.spring_layout(G)) plt.show()