def gn(dataset_path):
    """Run Girvan-Newman on the dataset and return a flat per-node label vector."""
    G = load_graph(dataset_path)
    algorithm = GN(G)
    a = algorithm.execute()
    # a = np.array(algorithm.execute())
    # nodes are assumed to be labelled 1..1000; node n receives the index of the
    # community that contains it (unassigned slots keep the placeholder 5)
    gn_pred = [5] * 1000
    for i in range(len(a)):
        for j in range(len(a[i])):
            gn_pred[a[i][j] - 1] = i
    return gn_pred
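# A minimal, assumed sketch of how gn()'s label vector could be scored against
# ground-truth communities. Neither the labels file ('ground_truth.txt') nor the
# use of scikit-learn's NMI comes from this project; both are illustrative.
from sklearn.metrics import normalized_mutual_info_score


def evaluate_gn(dataset_path, labels_path='ground_truth.txt'):
    pred = gn(dataset_path)
    with open(labels_path) as f:
        # assumed format: one integer label per line, line i holding node i+1's label
        real = [int(line.strip()) for line in f if line.strip()]
    # score only the nodes that actually have a ground-truth label
    return normalized_mutual_info_score(real, pred[:len(real)])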
import networkx as nx

# load_graph, clone_graph and cal_Q are project-local helpers
# (a sketch of them is given below)


class GN:
    def __init__(self, G):
        self._G_cloned = clone_graph(G)  # untouched copy, used for modularity
        self._G = G
        self._partition = [[n for n in G.nodes()]]
        self._max_Q = 0.0

    def execute(self):
        # Girvan-Newman: repeatedly remove the edge with the highest betweenness
        # and remember the partition with the best modularity Q seen so far
        while len(self._G.edges()) != 0:
            edge = max(nx.edge_betweenness_centrality(self._G).items(),
                       key=lambda item: item[1])[0]
            self._G.remove_edge(edge[0], edge[1])
            components = [list(c) for c in nx.connected_components(self._G)]
            if len(components) != len(self._partition):
                # the removal split a component; evaluate the new partition
                cur_Q = cal_Q(components, self._G_cloned)
                if cur_Q > self._max_Q:
                    self._max_Q = cur_Q
                    self._partition = components
        print(self._max_Q)
        print(self._partition)
        return self._partition


if __name__ == '__main__':
    G = load_graph('../network/club.txt')
    algorithm = GN(G)
    algorithm.execute()
    # print(len(G.edges(None, False)))
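# The GN class above relies on three project-local helpers that are not shown
# here. The following is only a sketch of what they could look like, assuming a
# plain whitespace-separated edge-list file and the standard Newman modularity;
# the real helpers in this project may differ.
import networkx as nx


def load_graph(path):
    # one "u v" edge per line
    G = nx.Graph()
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) >= 2:
                G.add_edge(int(parts[0]), int(parts[1]))
    return G


def clone_graph(G):
    # independent copy, so that edge removals in GN.execute() leave the original intact
    return G.copy()


def cal_Q(partition, G):
    # modularity: Q = sum over communities of (e_c / m - (d_c / 2m)^2),
    # where e_c is the number of intra-community edges and d_c the degree sum
    m = G.number_of_edges()
    Q = 0.0
    for community in partition:
        nodes = set(community)
        e_c = sum(1 for u, v in G.edges() if u in nodes and v in nodes)
        d_c = sum(G.degree(n) for n in nodes)
        Q += e_c / m - (d_c / (2.0 * m)) ** 2
    return Q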
import sys
sys.path.append('../../')
sys.path.append('../../algorithm')

from algorithm import LPA
from util import graph_helper
import time

if __name__ == '__main__':
    G = graph_helper.load_graph('staticInOut1.csv')
    max_iter = 2000000  # default is 200
    algorithm = LPA.LPA(G, max_iter)

    start = time.time()
    communities = algorithm.execute()
    end = time.time()
    print("Algorithm done in " + str(end - start) + "s.")

    # write one "node,community" row per vertex
    community_id = 1
    with open("vertices_LPA_1.csv", 'w') as FILEOUT:
        print("Id,community_id", file=FILEOUT)
        for cset in communities:
            for c in cset:
                print(str(c) + "," + str(community_id), file=FILEOUT)
            community_id = community_id + 1
    print("Done")
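# The LPA class imported above lives in algorithm/LPA.py and is not shown here.
# As a rough, assumed sketch of how such a label-propagation step typically
# works (asynchronous updates, majority label among neighbours), it might look
# like this; the project's actual implementation may differ in its details.
import random
from collections import Counter


class LPA:
    def __init__(self, G, max_iter=200):
        self._G = G
        self._max_iter = max_iter

    def execute(self):
        labels = {n: n for n in self._G.nodes()}  # every node starts alone
        for _ in range(self._max_iter):
            changed = False
            nodes = list(self._G.nodes())
            random.shuffle(nodes)
            for n in nodes:
                neighbour_labels = [labels[v] for v in self._G.neighbors(n)]
                if not neighbour_labels:
                    continue
                best = Counter(neighbour_labels).most_common(1)[0][0]
                if labels[n] != best:
                    labels[n] = best
                    changed = True
            if not changed:  # converged before max_iter
                break
        # group nodes by their final label, matching the list-of-sets shape
        # expected by the writer loop above
        communities = {}
        for n, label in labels.items():
            communities.setdefault(label, set()).add(n)
        return list(communities.values())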
                    clique_neighbor[j].add(i)

        # depth-first search over clique neighbours: grow each community by
        # merging every clique reachable through the adjacency built above
        communities = []
        for i, c in enumerate(cliques):
            if i in remained and len(c) >= self._k:
                # print('remained cliques', len(remained))
                communities.append(set(c))
                neighbors = list(clique_neighbor[i])
                while len(neighbors) != 0:
                    n = neighbors.pop()
                    if n in remained:
                        # if len(remained) % 100 == 0:
                        #     print('remained cliques', len(remained))
                        communities[len(communities) - 1].update(cliques[n])
                        remained.remove(n)
                        for nn in clique_neighbor[n]:
                            if nn in remained:
                                neighbors.append(nn)
        return communities


if __name__ == '__main__':
    G = load_graph('../network/community.txt')
    algorithm = CPM(G, 4)
    communities = algorithm.execute()
    for community in communities:
        print(community)
    nx.draw_networkx(G, with_labels=False)
    plt.show()
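# The fragment above is the tail of CPM.execute(); the earlier part, which
# enumerates the cliques and builds clique_neighbor, is not shown. A sketch of
# that missing half, under the usual clique-percolation rule (two cliques are
# adjacent when they share at least k-1 nodes), could look like this; the names
# mirror the fragment, but the project's real code may differ.
import networkx as nx


def build_clique_adjacency(G, k):
    cliques = [frozenset(c) for c in nx.find_cliques(G)]  # maximal cliques
    remained = set(range(len(cliques)))
    clique_neighbor = [set() for _ in cliques]
    for i in range(len(cliques)):
        for j in range(i + 1, len(cliques)):
            if len(cliques[i] & cliques[j]) >= k - 1:
                clique_neighbor[i].add(j)
                clique_neighbor[j].add(i)
    return cliques, remained, clique_neighbor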
        # hard assignment: each node joins the community with the highest
        # responsibility q[i][j]
        for i in range(self._n):
            c_id = 0
            cur_max = q[i][0]
            for j in range(1, self._k):
                if q[i][j] > cur_max:
                    cur_max = q[i][j]
                    c_id = j
            communities[c_id].add(i)
        return list(communities.values())


if __name__ == '__main__':
    path_list = ['../network/club.txt',
                 '../../Dataset/facebook/facebook_combined.txt']
    graph_path = path_list[0]
    G = load_graph(graph_path)
    # G = nx.karate_club_graph()
    algorithm = EM(G, 2)

    start_time = time.time()
    communities = algorithm.execute()
    execution_time = time.time() - start_time
    time_length = '{0:.2f} s'.format(execution_time)
    print(time_length)

    for c in communities:
        print(len(c))
        print(sorted(c))
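# A possible follow-up, not part of the original script: scoring the partition
# returned by EM with networkx's built-in modularity. This assumes the
# communities cover every node of G exactly once and use the same node ids as G.
from networkx.algorithms.community import modularity


def score_partition(G, communities):
    return modularity(G, [set(c) for c in communities])

# e.g. print('Q = {0:.4f}'.format(score_partition(G, communities)))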