def load_centrality(gnx, f, ft):
    start = timer.start(ft, 'load_centrality')
    load_centrality_dict = nx.load_centrality(gnx)
    timer.stop(ft, start)
    for k in load_centrality_dict:
        f.writelines(str(k) + ',' + str(load_centrality_dict[k]) + '\n')
    return load_centrality_dict
def louvainCommunityDetection(f, ft, gnx):
    start = timer.start(ft, 'Louvain')
    bp = community.best_partition(gnx)
    comSizeBp = getCommunitySize(gnx, bp)
    timer.stop(ft, start)
    writeTofile(comSizeBp, f)
    return comSizeBp
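# A minimal usage sketch for the Louvain feature (hedged: assumes the
# python-louvain package supplies `community.best_partition`, and that
# `getCommunitySize`/`writeTofile`/`timer` are this module's helpers;
# the file names below are hypothetical):
#
#     import networkx as nx
#
#     gnx = nx.karate_club_graph()
#     with open('times.txt', 'w') as ft, open('louvain.txt', 'w') as f:
#         community_sizes = louvainCommunityDetection(f, ft, gnx)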
def k_core(f, ft, gnx):
    start = timer.start(ft, 'K-Core')
    result = nx.core_number(gnx)
    timer.stop(ft, start)
    for k in result:
        f.writelines(str(k) + ',' + str(result[k]) + '\n')
    return result
def edge_betweenness_centrality(f, ft, gnx):
    start = timer.start(ft, 'Edge Betweenness Centrality')
    result = nx.edge_betweenness_centrality(gnx)
    timer.stop(ft, start)
    for k in result:
        f.writelines(str(k) + ',' + str(result[k]) + '\n')
    return result
def communicability_centrality(gnx, f, ft):
    start = timer.start(ft, 'communicability_centrality')
    communicability_centrality_dict = nx.communicability_centrality(gnx)
    timer.stop(ft, start)
    for k in communicability_centrality_dict:
        f.writelines(str(k) + ',' + str(communicability_centrality_dict[k]) + '\n')
    return communicability_centrality_dict
def average_neighbor_degree(gnx, f, ft):
    start = timer.start(ft, 'average_neighbor_degree')
    average_neighbor_degree_dict = nx.average_neighbor_degree(gnx)
    timer.stop(ft, start)
    for k in average_neighbor_degree_dict:
        f.writelines(str(k) + ',' + str(average_neighbor_degree_dict[k]) + '\n')
    return average_neighbor_degree_dict
def page_rank(gnx, f, ft):
    start = timer.start(ft, 'Page Rank')
    page_rank_values = nx.pagerank(gnx, alpha=0.9)
    timer.stop(ft, start)
    for k in page_rank_values:
        f.writelines(str(k) + ',' + str(page_rank_values[k]) + '\n')
    return page_rank_values
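# Hedged sketch of calling the PageRank feature on its own (alpha=0.9 is the
# damping factor hard-coded above; nx.pagerank also accepts a `weight` keyword
# for weighted graphs). The file names here are hypothetical:
#
#     import networkx as nx
#
#     gnx = nx.gnp_random_graph(100, 0.05, directed=True)
#     with open('times.txt', 'w') as ft, open('pagerank.txt', 'w') as f:
#         pr = page_rank(gnx, f, ft)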
def closeness_centrality(f, ft, gnx):
    start = timer.start(ft, 'Closeness Centrality')
    result = nx.closeness_centrality(gnx)
    timer.stop(ft, start)
    for k in result:
        f.writelines(str(k) + ',' + str(result[k]) + '\n')
    return result
def fiedlerVector(gnx, f, ft):
    start = timer.start(ft, 'fiedler_vector')
    # local name must not shadow this function's own name
    fiedler_vector = nx.fiedler_vector(gnx)
    timer.stop(ft, start)
    fiedlerMap = {}
    nodes = list(gnx.nodes())
    for i in range(len(fiedler_vector)):
        f.writelines(str(nodes[i]) + ',' + str(fiedler_vector[i]) + '\n')
        fiedlerMap[nodes[i]] = fiedler_vector[i]
    return fiedlerMap
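# Note: nx.fiedler_vector is defined only for connected undirected graphs and
# raises an error otherwise, so a guarded call is safer. A minimal sketch
# (the file handles f/ft are hypothetical stand-ins):
#
#     if not gnx.is_directed() and nx.is_connected(gnx):
#         fiedler_map = fiedlerVector(gnx, f, ft)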
def attractor_basin(gnx, f, ft):
    # attractor basin is defined only for directed graphs
    if not gnx.is_directed():
        return
    start = timer.start(ft, 'Attractor Basin')
    attractor_dict = calc_attractor_basin(gnx)
    timer.stop(ft, start)
    for k in attractor_dict:
        f.writelines(str(k) + ',' + str(attractor_dict[k]) + '\n')
    return attractor_dict
def flow_mesure(f, ft, gnx, threshold):
    start = timer.start(ft, 'Flow Measure')
    flow_map = calculate_flow_index(gnx, threshold)
    timer.stop(ft, start)
    for n in flow_map:
        f.writelines(str(n) + ',' + str(flow_map[n]) + '\n')
    return flow_map
def hierarchy_energy(gnx, f, ft):
    start = timer.start(ft, 'hierarchyEnergy')
    hierarchyEnergy_list, vet_index = calculate_hierarchyEnergy_index(gnx)
    timer.stop(ft, start)
    hierarchyEnergy_map = {}
    # write the results to file
    for num, n in enumerate(vet_index):
        f.writelines(str(n) + ',' + str(hierarchyEnergy_list[num][0]) + '\n')
        hierarchyEnergy_map[n] = hierarchyEnergy_list[num][0]
    return hierarchyEnergy_map
def find_all_motifs(f, ft, ggt, motifs_number):
    motif_variations = get_motif_veriation_list(motifs_number)
    start = timer.start(ft, 'Find Motifs ' + str(motifs_number) + ' ')
    result = gt.clustering.motifs(ggt, motif_list=motif_variations,
                                  k=motifs_number, return_maps=True)
    timer.stop(ft, start)
    return parse_motif_result(f, ft, ggt, motifs_number, result, motif_variations)
def find_all_circuits(f, ft, ggt):
    start = timer.start(ft, 'Find Cycles')
    circuits = graph_tool.topology.all_circuits(ggt)
    timer.stop(ft, start)
    for c in circuits:
        f.writelines('[' + ','.join(str(ggt.vp.id[v]) for v in c) + ']\n')
def edge_based_degree_undirected(gnx, f, ft):
    start = timer.start(ft, 'Edge based degree')
    nodes_dict = ReadFeatureFile.fileToMap_vertices('general.txt')
    edge_dict = {}
    for edge in gnx.edges():
        f.write(str(edge[0]) + ',' + str(edge[1]) + ' ')
        deg_u = float(nodes_dict[edge[0]][0])
        deg_v = float(nodes_dict[edge[1]][0])
        sub = deg_u - deg_v
        mean = (deg_u + deg_v) / 2  # parenthesized: average of the two degrees
        f.write(str(sub) + ',')
        f.write(str(mean) + '\n')
        edge_dict[edge] = [sub, mean]
    timer.stop(ft, start)
    return edge_dict
def general_information_undirected(gnx, f, ft):
    start = timer.start(ft, 'General information')
    degrees = [[n, gnx.degree(n)] for n in gnx.nodes()]
    timer.stop(ft, start)
    for degree in degrees:
        f.writelines(str(degree[0]) + ',' + str(degree[1]) + '\n')
    map_degree = {degree[0]: [degree[1]] for degree in degrees}
    return map_degree
def bfs_distance_distribution(f, ft, gnx):
    start = timer.start(ft, 'BFS distance distribution')
    bfs_dist = calc_bfs_dist(gnx)
    dist_moments = {}
    for key in bfs_dist:
        # first two moments of each node's BFS distance histogram:
        # a mean weighted by 1-based bin position, and the standard deviation
        weighted_mean = float(np.average(bfs_dist[key],
                                         weights=range(1, len(bfs_dist[key]) + 1)))
        std_dev = float(np.std(bfs_dist[key]))
        dist_moments[key] = [weighted_mean, std_dev]
    timer.stop(ft, start)
    write_bfs_moments_to_file(dist_moments, f)
    return dist_moments
def general_information_directed(gnx, f, ft):
    start = timer.start(ft, 'General information')
    nodes = list(gnx.nodes())
    out_deg = [[n, gnx.out_degree(n)] for n in nodes]
    in_deg = [[n, gnx.in_degree(n)] for n in nodes]
    timer.stop(ft, start)
    for i in range(len(nodes)):
        f.writelines(str(nodes[i]) + ',' + str(in_deg[i][1]) + ',' +
                     str(out_deg[i][1]) + '\n')
    map_degree = {}
    for n in range(len(nodes)):
        map_degree[nodes[n]] = [in_deg[n][1], out_deg[n][1]]
    return map_degree
def betweenness_centrality(gnx, f, ft, normalized=False):
    start = timer.start(ft, 'Betweenness Centrality')
    # pass the caller's flag through instead of hard-coding normalized=False
    result = nx.betweenness_centrality(gnx, normalized=normalized)
    timer.stop(ft, start)
    for k in result:
        f.writelines(str(k) + ',' + str(result[k]) + '\n')
    return result

# graph-tool variant, kept for reference:
# def betweenness_centrality(ggt, f, ft, normalized=False):
#     b_prop = ggt.new_vertex_property('float')
#     ggt.vp.bc = b_prop
#     start = timer.start(ft, 'Betweenness Centrality')
#     graph_tool.centrality.betweenness(ggt, vprop=b_prop, norm=normalized)
#     timer.stop(ft, start)
#     for v in ggt.vertices():
#         f.writelines(ggt.vp.id[v] + ',' + str(ggt.vp.bc[v]) + '\n')
def find_all_motifs(f, ft, gnx, motif_path, motifs_number=3, calculate_edges=True):
    gnx_copy = gnx.copy()
    start = timer.start(ft, 'Find Motifs ' + str(motifs_number) + ' ')
    if motifs_number == 3:
        motifs_hist = find_motifs_3(gnx_copy, motif_path, calculate_edges)
    elif motifs_number == 4:
        motifs_hist = find_motifs_4(gnx_copy, motif_path, calculate_edges)
    else:
        raise ValueError('motifs_number must be 3 or 4')
    timer.stop(ft, start)

    print('start write to file: ' + str(datetime.now()))
    motifs_vertices_hist = motifs_hist['v']
    for i in motifs_vertices_hist:
        line = str(i)
        for h in motifs_vertices_hist[i]:
            line = line + ',' + str(h)
        f.writelines(line + '\n')
    print('finish write to file: ' + str(datetime.now()))

    if calculate_edges:
        path = f.name.split('.txt')[0]
        f_edges = open(path + '_directed_edges.txt', 'w')  # text mode: we write str lines
        motifs_edges_hist = motifs_hist['e']
        for e in motifs_edges_hist:
            line = str(e[0]) + ',' + str(e[1]) + ' '
            for h in motifs_edges_hist[e]:
                line = line + ',' + str(h)
            f_edges.write(line + '\n')
        f_edges.close()
    return motifs_hist
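# A hedged end-to-end sketch for the networkx motif counter above. `motif_path`
# points at the project's pre-computed motif-variation files, whose location is
# deployment-specific; all paths below are hypothetical:
#
#     gnx = nx.read_edgelist('graph.edgelist', create_using=nx.DiGraph())
#     with open('times.txt', 'w') as ft, open('motifs3.txt', 'w') as f:
#         hist = find_all_motifs(f, ft, gnx, motif_path='motif_variations',
#                                motifs_number=3, calculate_edges=False)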