def test_degree_p4_weighted(self):
    """Weighted average_degree_connectivity on P4 with one heavy edge."""
    graph = nx.path_graph(4)
    graph[1][2]["weight"] = 4

    # Weighted undirected: the weight-4 edge lifts the degree-2 average.
    assert nx.average_degree_connectivity(graph, weight="weight") == {1: 2.0, 2: 1.8}
    # Unweighted undirected baseline.
    assert nx.average_degree_connectivity(graph) == {1: 2.0, 2: 1.5}

    # Directed copy: total degrees double, weighted averages track.
    digraph = graph.to_directed()
    assert nx.average_degree_connectivity(digraph, weight="weight") == {2: 2.0, 4: 1.8}

    # Restricting the degree kind used for source/target nodes.
    expected = {1: 2.0, 2: 1.8}
    digraph = graph.to_directed()
    result = nx.average_degree_connectivity(
        digraph, weight="weight", source="in", target="in"
    )
    assert result == expected
    digraph = graph.to_directed()
    result = nx.average_degree_connectivity(
        digraph, source="in", target="out", weight="weight"
    )
    assert result == expected
def test_degree_p4_weighted(self):
    # Weighted P4: the weight-4 edge (1, 2) raises the weighted average
    # neighbor degree of degree-2 nodes from 1.5 to 1.8.
    G=nx.path_graph(4)
    G[1][2]['weight']=4
    answer={1:2.0,2:1.8}
    nd = nx.average_degree_connectivity(G,weight='weight')
    assert_equal(nd,answer)
    # Unweighted baseline for comparison.
    answer={1:2.0,2:1.5}
    nd = nx.average_degree_connectivity(G)
    assert_equal(nd,answer)
    # Directed copy doubles every total degree.
    D=G.to_directed()
    answer={2:2.0,4:1.8}
    nd = nx.average_degree_connectivity(D,weight='weight')
    assert_equal(nd,answer)
    # Restrict both source and target degree to in-degree.
    answer={1:2.0,2:1.8}
    D=G.to_directed()
    nd = nx.average_degree_connectivity(D,weight='weight', source='in', target='in')
    assert_equal(nd,answer)
    # The in -> out combination gives the same averages on this graph.
    D=G.to_directed()
    nd = nx.average_degree_connectivity(D,source='in',target='out', weight='weight')
    assert_equal(nd,answer)
def test_weight_keyword(self):
    # Same as the weighted P4 test, but the weight lives under a custom
    # edge-attribute name ('other') selected through the `weight` keyword.
    G=nx.path_graph(4)
    G[1][2]['other']=4
    answer={1:2.0,2:1.8}
    nd = nx.average_degree_connectivity(G,weight='other')
    assert_equal(nd,answer)
    # weight=None ignores the attribute entirely.
    answer={1:2.0,2:1.5}
    nd = nx.average_degree_connectivity(G,weight=None)
    assert_equal(nd,answer)
    D=G.to_directed()
    answer={2:2.0,4:1.8}
    nd = nx.average_degree_connectivity(D,weight='other')
    assert_equal(nd,answer)
    answer={1:2.0,2:1.8}
    D=G.to_directed()
    nd = nx.average_degree_connectivity(D,weight='other', source='in', target='in')
    assert_equal(nd,answer)
    # NOTE(review): this repeats the previous source='in', target='in' call
    # verbatim; presumably a different target (e.g. 'out') was intended —
    # confirm against the upstream test suite.
    D=G.to_directed()
    nd = nx.average_degree_connectivity(D,weight='other',source='in', target='in')
    assert_equal(nd,answer)
def test_in_out_weight(self):
    """With every edge weight equal to 1, weighted and unweighted agree."""
    graph = nx.DiGraph()
    graph.add_edge(1, 2, weight=1)
    graph.add_edge(1, 3, weight=1)
    graph.add_edge(3, 1, weight=1)
    for src, tgt in permutations(["in", "out", "in+out"], 2):
        plain = nx.average_degree_connectivity(graph, source=src, target=tgt)
        weighted = nx.average_degree_connectivity(
            graph, source=src, target=tgt, weight="weight"
        )
        assert plain == weighted
def test_degree_barrat(self):
    """Barrat-style weighted degree connectivity on a star with a fan."""
    graph = nx.star_graph(5)
    graph.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
    graph[0][5]["weight"] = 5
    # Unweighted average neighbor degree of node 5's degree class.
    assert nx.average_degree_connectivity(graph)[5] == 1.8
    expected = pytest.approx(3.222222, abs=1e-5)
    assert nx.average_degree_connectivity(graph, weight="weight")[5] == expected
    # k_nearest_neighbors is an alias of average_degree_connectivity.
    assert nx.k_nearest_neighbors(graph, weight="weight")[5] == expected
def test_degree_barrat(self):
    # Star graph with an extra 4-edge fan on node 5 and a heavy (weight 5)
    # edge from the hub; checks the Barrat weighted definition.
    G=nx.star_graph(5)
    G.add_edges_from([(5,6),(5,7),(5,8),(5,9)])
    G[0][5]['weight']=5
    nd = nx.average_degree_connectivity(G)[5]
    assert_equal(nd,1.8)
    nd = nx.average_degree_connectivity(G,weight='weight')[5]
    assert_almost_equal(nd,3.222222,places=5)
    # k_nearest_neighbors is an alias for average_degree_connectivity.
    nd = nx.k_nearest_neighbors(G,weight='weight')[5]
    assert_almost_equal(nd,3.222222,places=5)
def test_in_out_weight(self):
    # Every edge weight is 1, so weighted and unweighted connectivity must
    # coincide for every source/target degree combination.
    G = nx.DiGraph()
    G.add_edge(1, 2, weight=1)
    G.add_edge(1, 3, weight=1)
    G.add_edge(3, 1, weight=1)
    for s, t in permutations(['in', 'out', 'in+out'], 2):
        c = nx.average_degree_connectivity(G, source=s, target=t)
        cw = nx.average_degree_connectivity(G, source=s, target=t, weight='weight')
        assert_equal(c, cw)
def all_or_top(graph, n=0):
    """Describe the whole graph, or only the top-n nodes per metric.

    With n == 0, print every node's description plus the full degree ->
    average-degree-connectivity table; otherwise delegate to the top-n
    metric printers.
    """
    if n == 0:
        description_for_every_node_in_graph(graph)
        print("------------------------------------")
        connectivity = nx.average_degree_connectivity(graph)
        ordered = collections.OrderedDict(sorted(connectivity.items()))
        for degree, avg in ordered.items():
            print("Degree: " + str(degree) + "\t Average degree connectivity: " + str(avg))
        print("------------------------------------")
    else:
        bs.print_top_n_by_metric(nx.average_neighbor_degree(graph), "Average neighbor degree", n)
        bs.print_top_n_by_metric(nx.average_degree_connectivity(graph), "Average degree Connectivity", n)
def gen_graph_features(G):
    """Compute whole-graph and largest-component features for *G*.

    Returns ``[node_count, density(largest component),
    fraction of nodes in largest component,
    mean degree connectivity of largest component]``.
    """
    # GRAPH STATISTICS
    node_connectivity_deg = nx.average_degree_connectivity(G)
    # BUGFIX: average the connectivity VALUES — iterating the dict directly
    # averaged the degree keys instead.
    graph_connectivity_deg = np.mean(list(node_connectivity_deg.values()))
    graph_density = nx.density(G)
    graph_node_count = G.number_of_nodes()  # G.node was removed in networkx 2.4
    graph_edge_count = G.number_of_edges()
    print("\n\t graph density = " + str(graph_density),
          "\n\t graph connectivity degree = " + str(graph_connectivity_deg),
          "\n\t node count = " + str(graph_node_count),
          "\n\t edge count = " + str(graph_edge_count))
    # COMPONENT STATISTICS
    # connected_component_subgraphs() was removed in networkx 2.4; build the
    # component subgraphs explicitly from connected_components().
    components = [G.subgraph(c).copy() for c in nx.connected_components(G)]
    # Size (number of nodes) of the greatest connected component.
    Gc = max(components, key=len)
    max_cc_size = len(Gc)
    percent_GC = max_cc_size / graph_node_count
    # Components sorted by size, largest first.
    cc_list = sorted(components, key=len, reverse=True)
    # "Major" components: at least half the size of the largest.
    major_cc_list = []
    LCCs_node_count = 0
    for cc in cc_list:
        if len(cc) >= max_cc_size / 2:
            major_cc_list.append(cc)
            LCCs_node_count += len(cc)
    percent_LCCs = LCCs_node_count / graph_node_count
    # Largest-connected-component statistics.
    major_cc = Gc
    node_connectivity_deg = nx.average_degree_connectivity(major_cc)
    major_cc_connectivity_deg = np.mean(list(node_connectivity_deg.values()))
    major_cc_density = nx.density(major_cc)
    LCC_node_count = len(major_cc)
    LCC_edge_count = major_cc.number_of_edges()
    avg_node_deg = LCC_edge_count / LCC_node_count
    normalized_size = LCC_node_count / max_cc_size
    return [graph_node_count, nx.density(major_cc), percent_GC, major_cc_connectivity_deg]
def knn_pack(graph, **kwargs):
    """Collect assortativity and knn statistics for *graph* into a dict.

    Any extra keyword arguments are copied into the result unchanged.

    BUGFIX: the parameter was declared ``*kwargs`` (a positional tuple) but
    the body indexed it as a mapping (``kwargs[k]``), which raised TypeError
    for any extra argument.  It is now a genuine keyword mapping; callers
    that passed no extras are unaffected.
    """
    t = dict(kwargs)
    t['asr'] = nx.degree_assortativity_coefficient(graph)
    t['weighted_asr'] = nx.degree_assortativity_coefficient(graph, weight='weight')
    if graph.is_directed():
        # Directed: correlate out-degree of sources with in-degree of targets.
        t['knn'] = nx.average_degree_connectivity(graph, source='out', target='in')
        if len(nx.get_edge_attributes(graph, 'weight')):
            t['weighted_knn'] = nx.average_degree_connectivity(
                graph, source='out', target='in', weight='weight')
    else:
        t['knn'] = nx.average_degree_connectivity(graph)
        if len(nx.get_edge_attributes(graph, 'weight')):
            t['weighted_knn'] = nx.average_degree_connectivity(graph, weight='weight')
    return t
def summary_statistics(self) -> pd.DataFrame:
    """Build the graph, then return its summary statistics.

    :return: pandas DataFrame with one 'Values' column
    """
    if self.ppi_file is not None:
        # A raw PPI file was provided: compile fresh node/edge lists first.
        self.compile_files("new_node_list.tsv", "new_edge_list.tsv")
        self.import_graph("new_edge_list.tsv")
    else:
        # Edge and node files were supplied directly.
        self.import_graph(self.edge_list)
    connectivity = nx.average_degree_connectivity(self.graph)
    network_dict = {
        'number_of_nodes': nx.number_of_nodes(self.graph),
        'number_of_edges': nx.number_of_edges(self.graph),
        'density': nx.density(self.graph),
        'average_degree_connectivity': np.mean(np.array(list(connectivity.values()))),
    }
    return pd.DataFrame(network_dict.values(),
                        index=network_dict.keys(),
                        columns=['Values'])
def test_single_node(self):
    """A single node passed as `nodes` yields a one-entry mapping.

    TODO: is returning {0: 0} really intended here, rather than the bare
    connectivity value?
    """
    graph = nx.trivial_graph()
    result = nx.average_degree_connectivity(graph, nodes=0)
    assert result == {0: 0}
def analyse_graph(G):
    """Print structural summary statistics for graph *G*."""
    print(nx.info(G))
    n_components = nx.number_connected_components(G)
    print("Number of connected components:", n_components)
    if n_components > 1:
        components = sorted(nx.connected_components(G), key=len, reverse=True)
        component_sizes = [len(c) for c in components]
        print("Connected component sizes:", component_sizes)
        lcc_percent = 100 * component_sizes[0] / G.number_of_nodes()
        print(f"LCC: {lcc_percent}%")
    avg_c = nx.average_clustering(G)
    print("Average clustering coefficient:", avg_c)
    degree_assortativity = nx.degree_pearson_correlation_coefficient(G)
    print("Degree assortativity:", degree_assortativity)
    if nx.is_connected(G):
        print("Average distance:", nx.average_shortest_path_length(G))
    else:
        # Disconnected graph: report one average distance per component.
        avg_distances = []
        for component in nx.connected_components(G):
            subgraph = G.subgraph(component).copy()
            avg_distances.append(nx.average_shortest_path_length(subgraph))
        print("Average distances:", avg_distances)
    print("Average degree connectivity:", nx.average_degree_connectivity(G))
def draw_graph(nodes, edges, graphs_dir, default_lang='all'):
    # Build a directed multigraph of language links, print several
    # centrality/connectivity statistics (labels partly in Russian), then
    # render the graph with graphviz and save a .dot file and a .png image.
    # `edges` maps (src, dst) pairs to a numeric weight (0 = unweighted).
    lang_graph = nx.MultiDiGraph()
    lang_graph.add_nodes_from(nodes)
    for edge in edges:
        if edges[edge] == 0:
            lang_graph.add_edge(edge[0], edge[1])
        else:
            lang_graph.add_edge(edge[0], edge[1], weight=float(edges[edge]),
                                label=str(edges[edge]))
    # print graph info in stdout
    # degree centrality
    print('-----------------\n\n')
    print(default_lang)
    print(nx.info(lang_graph))
    try:
        # When ties are associated to some positive aspects such as friendship or collaboration,
        # indegree is often interpreted as a form of popularity, and outdegree as gregariousness.
        DC = nx.degree_centrality(lang_graph)
        max_dc = max(DC.values())
        max_dc_list = [item for item in DC.items() if item[1] == max_dc]
    except ZeroDivisionError:
        # Empty graph: degree centrality is undefined.
        max_dc_list = []
    # https://ru.wikipedia.org/wiki/%D0%9A%D0%BE%D0%BC%D0%BF%D0%BB%D0%B5%D0%BA%D1%81%D0%BD%D1%8B%D0%B5_%D1%81%D0%B5%D1%82%D0%B8
    print('maxdc', str(max_dc_list), sep=': ')
    # assortativity coef
    AC = nx.degree_assortativity_coefficient(lang_graph)
    print('AC', str(AC), sep=': ')
    # connectivity (labels below are Russian: weak/strong connectivity,
    # component counts, node/edge connectivity numbers)
    print("Слабо-связный граф: ", nx.is_weakly_connected(lang_graph))
    print("количество слабосвязанных компонент: ", nx.number_weakly_connected_components(lang_graph))
    print("Сильно-связный граф: ", nx.is_strongly_connected(lang_graph))
    print("количество сильносвязанных компонент: ", nx.number_strongly_connected_components(lang_graph))
    # NOTE(review): this string was split by extraction; reconstructed as a
    # single literal — confirm against the original file.
    print("рекурсивные? компоненты: ", nx.number_attracting_components(lang_graph))
    print("число вершинной связности: ", nx.node_connectivity(lang_graph))
    print("число рёберной связности: ", nx.edge_connectivity(lang_graph))
    # other info
    print("average degree connectivity: ", nx.average_degree_connectivity(lang_graph))
    print("average neighbor degree: ", sorted(nx.average_neighbor_degree(lang_graph).items(),
                                              key=itemgetter(1), reverse=True))
    # best for small graphs, and our graphs are pretty small
    # NOTE(review): pagerank_numpy was removed in networkx 3.0 — requires an
    # older networkx.
    print("pagerank: ", sorted(nx.pagerank_numpy(lang_graph).items(),
                               key=itemgetter(1), reverse=True))
    plt.figure(figsize=(16.0, 9.0), dpi=80)
    plt.axis('off')
    pos = graphviz_layout(lang_graph)
    nx.draw_networkx_edges(lang_graph, pos, alpha=0.5, arrows=True)
    nx.draw_networkx(lang_graph, pos, node_size=1000, font_size=12,
                     with_labels=True, node_color='green')
    nx.draw_networkx_edge_labels(lang_graph, pos, edges)
    # saving file to draw it with dot-graphviz
    # changing overall graph view, default is top-bottom
    lang_graph.graph['graph'] = {'rankdir': 'LR'}
    # marking with blue nodes with maximum degree centrality
    # NOTE(review): Graph.node was removed in networkx 2.4 — old API.
    for max_dc_node in max_dc_list:
        lang_graph.node[max_dc_node[0]]['fontcolor'] = 'blue'
    write_dot(lang_graph, os.path.join(graphs_dir, default_lang + '_links.dot'))
    # plt.show()
    plt.savefig(os.path.join(graphs_dir, 'python_' + default_lang + '_graph.png'), dpi=100)
    plt.close()
def summary_statistics(self) -> None:
    """Generates summary statistics for a network."""
    # Count nodes by molecule type, keeping explicit zeros for absent types.
    molecule_counter = dict.fromkeys((PROTEIN, RNA, DNA), 0)
    for metadata in self.nodes.values():
        molecule_counter[metadata[MOLECULE]] += 1
    sum_stats = {
        'Nodes': sum(molecule_counter.values()),
        'Protein Nodes': molecule_counter[PROTEIN],
        'RNA Nodes': molecule_counter[RNA],
        'DNA Nodes': molecule_counter[DNA],
        'Graph Density': nx.density(self.graph),
        'Average Degree Connectivity': str(nx.average_degree_connectivity(self.graph)),
    }
    # Add one count per relationship type found on the edges.
    rel_types = [attr['rel_type'] for _, _, attr in self.graph.edges(data=True)]
    sum_stats.update(Counter(rel_types))
    self.sum_stats = pd.DataFrame({
        'Stat': list(sum_stats.keys()),
        'Value': list(sum_stats.values()),
    })
def computeKnn(graph, knn_file, weight=None):
    # Python 2 code (print statements, Graph.edge): writes the normalized knn
    # frequency per degree — from the maximum degree down to the minimum — to
    # `knn_file` as "degree,frequency" lines.
    # Demo call on a toy path graph; the result is only printed, never used.
    G = nx.path_graph(4)
    G.edge[1][2]['weight'] = 3
    print nx.k_nearest_neighbors(G)
    knnfs = codecs.open(knn_file, 'w+', encoding='utf-8')
    knn = nx.average_degree_connectivity(graph)
    print graph, 'knn as follows:'
    print knn
    sumknn = sum(knn.values())
    minknn = min(knn.keys())
    maxknn = max(knn.keys())
    index = maxknn
    currentSum = 0.0
    # Walk degrees downward, skipping degrees absent from the dict.
    while index >= minknn:
        if index in knn.keys():
            currentSum = knn[index]
        else:
            index -= 1
            continue
        # Fraction of the total knn mass contributed by this degree class.
        freq = currentSum * 1.0 / sumknn
        knnfs.write(str(index) + ',' + str(freq) + '\r\n')
        print index, freq
        index -= 1
    #for (key, value) in knn.items():
    #    knnfs.write(str(key)+ ',' + str(value) + '\r\n')
    knnfs.flush()
    knnfs.close()
def avg_degree(G):
    '''
    Compute the average degree connectivity of graph.

    :param G: a networkx graph
    :return: dict mapping degree k to the average neighbor degree of
             degree-k nodes (not a single scalar average)
    '''
    connectivity = nx.average_degree_connectivity(G)
    return connectivity
def test_single_node(self):
    # TODO Is this really the intended behavior for providing a
    # single node as the argument `nodes`? Shouldn't the function
    # just return the connectivity value itself?
    # The trivial graph's lone node has degree 0, mapped to average 0.
    G = nx.trivial_graph()
    conn = nx.average_degree_connectivity(G, nodes=0)
    assert_equal(conn, {0: 0})
def analyze_net(G):
    """Print connectivity, clustering, and distance summaries for *G*."""
    print("Analysis:")
    # NOTE: this prints the per-degree connectivity dict, not a single
    # mean-degree number, despite the label.
    avg_k = nx.average_degree_connectivity(G)
    print("Average degree:", avg_k)
    print("Average clustering coefficient:", nx.average_clustering(G))
    print("Average distance:", nx.average_shortest_path_length(G))
def getresults(graph):
    # Print connectivity, clustering, per-component path length, and
    # betweenness statistics (labels are in Portuguese).
    print("Analisando grafo...")
    # NOTE(review): despite the label ("mean node degree") this prints the
    # whole degree-connectivity dict, not a single mean value.
    print("Média do grau dos nodos: " + str(nx.average_degree_connectivity(graph)))
    print("Coeficiente de clusterização: " + str(nx.average_clustering(graph)))
    # NOTE(review): connected_component_subgraphs() and edge_betweenness()
    # were removed from modern networkx — this requires an older release.
    for g in nx.connected_component_subgraphs(graph):
        print("Distância média dos nós: " + str(nx.average_shortest_path_length(g)))
    print("Betweenness das arestas: " + str(nx.edge_betweenness(graph)))
    print("Betweenness dos nodos: " + str(nx.betweenness_centrality(graph)))
def plotDegreeCorrelationFunction(G):
    """Show a log-log scatter of the degree correlation function k -> knn(k)."""
    knn = nx.average_degree_connectivity(G)
    ordered = dict(sorted(knn.items()))
    plt.title('Degree Correlation Function')
    plt.ylabel('Knn(K)')
    plt.xlabel('K')
    plt.loglog(list(ordered.keys()), list(ordered.values()), '.')
    plt.show()
def descriptives(G, grouping=None):
    """Save degree, neighbor-degree, and degree-connectivity histograms."""
    # Plain degree histogram as a bar chart.
    histogram = nx.degree_histogram(G)
    plt.bar(x=range(len(histogram)), height=histogram)
    plt.savefig('images/degree_hist.png')
    plt.close()
    # Per-node and per-degree-class neighbor-degree distributions.
    dict_to_hist(nx.average_neighbor_degree(G), 'neighbor_degree')
    dict_to_hist(nx.average_degree_connectivity(G), 'degree_conn')
def grow_graphs_using_krongen(graph, gn, recurrence_nbr=1, graph_vis_bool=False, nbr_runs = 1):
    """ grow graph using krongen given orig graph, gname, and # of recurrences
    Returns
    -------
    nth graph --<kpgm>--
    """
    # Python 2 code: fits a 2x2 Kronecker seed matrix with kronfit, shells out
    # to a platform-specific `krongen` binary, and reads back each generated
    # graph as an edge list.  Returns a list of nbr_runs graphs.
    import math
    from pami import kronfit
    from os import environ
    import subprocess
    tsvGraphName = "/tmp/{}kpgraph.tsv".format(gn)
    # tmpGraphName = "/tmp/{}kpgraph.tmp".format(gn)
    # if environ['HOME'] == '/home/saguinag':
    #   args = ("time/bin/linux/krongen", "-i:{}".format(tsvGraphName),"-n0:2", "-m:\"0.9 0.6; 0.6 0.1\"", "-gi:5")
    # elif environ['HOME'] == '/Users/saguinag':
    #   args = ("time/bin/mac/krongen", "-i:{}".format(tsvGraphName),"-n0:2", "-m:\"0.9 0.6; 0.6 0.1\"", "-gi:5")
    # else:
    #   args = ('./kronfit.exe -i:tmp.txt -n0:2 -m:"0.9 0.6; 0.6 0.1" -gi:5')
    kp_graphs = []
    # Number of Kronecker iterations needed to cover the node count.
    k = int(math.log(graph.number_of_nodes(),2))+1 # Nbr of Iterations
    if 0:
        print 'k:',k,'n',graph.number_of_nodes()
    print " --- Model inference, kronfit learn a Kronecker seed matrix"
    P = kronfit(graph) #[[0.9999,0.661],[0.661, 0.01491]]
    M = '-m:"{} {}; {} {}"'.format(P[0][0], P[0][1], P[1][0], P[1][1])
    # Pick the krongen binary for the current machine.
    if environ['HOME'] == '/home/saguinag':
        args = ("time/bin/linux/krongen", "-o:"+tsvGraphName, M, "-i:{}".format(k))
    elif environ['HOME'] == '/Users/saguinag':
        print tsvGraphName
        args = ("bin/macos/krongen", "-o:"+tsvGraphName, M, "-i:{}".format(k))
    else:
        args = ('./krongen.exe -o:{} '.format(tsvGraphName) +M +'-i:{}'.format(k+1))
    for i in range(nbr_runs):
        popen = subprocess.Popen(args, stdout=subprocess.PIPE)
        popen.wait()
        #output = popen.stdout.read()
        if os.path.exists(tsvGraphName):
            KPG = nx.read_edgelist(tsvGraphName, nodetype=int)
        else:
            print "!! Error, file is missing"
        # Drop self-loops produced by the generator.
        for u,v in KPG.selfloop_edges():
            KPG.remove_edge(u, v)
        kp_graphs.append( KPG )
    if DBG:
        print 'Avg Deg:', nx.average_degree_connectivity(graph)
    import phoenix.visadjmatrix as vis
    # vis.draw_sns_adjacency_matrix(graph)
    vis.draw_sns_graph(graph)
    return kp_graphs # returns a list of kp graphs
def runAnalytics(G):
    """Print density, mean degree connectivity, and node/edge counts for *G*.

    Returns the graph unchanged so calls can be chained.
    """
    # GRAPH STATISTICS
    node_connectivity_deg = nx.average_degree_connectivity(G)
    # BUGFIX: average the connectivity VALUES — iterating the dict directly
    # averaged the degree keys instead.
    graph_connectivity_deg = np.mean(list(node_connectivity_deg.values()))
    graph_density = nx.density(G)
    # G.node was removed in networkx 2.4; use number_of_nodes().
    graph_node_count = G.number_of_nodes()
    graph_edge_count = G.number_of_edges()
    print('''
    density: {}
    connectivity degree: {}
    node count: {}
    edge count: {}
    ---------------------------
    '''.format(graph_density, graph_connectivity_deg, graph_node_count, graph_edge_count))
    # (A large block of commented-out component statistics was removed; see
    # version control history if it is ever needed again.)
    return G
def test_degree_p4(self):
    # Unweighted connectivity on P4; the directed copy doubles every degree.
    G=nx.path_graph(4)
    answer={1:2.0,2:1.5}
    nd = nx.average_degree_connectivity(G)
    assert_equal(nd,answer)
    D=G.to_directed()
    answer={2:2.0,4:1.5}
    nd = nx.average_degree_connectivity(D)
    assert_equal(nd,answer)
    answer={1:2.0,2:1.5}
    D=G.to_directed()
    nd = nx.average_degree_connectivity(D, source='in', target='in')
    assert_equal(nd,answer)
    # NOTE(review): exact duplicate of the previous call; a different
    # source/target combination was probably intended.
    D=G.to_directed()
    nd = nx.average_degree_connectivity(D, source='in', target='in')
    assert_equal(nd,answer)
def test_degree_p4(self):
    # Pre-2.0 networkx API: average_in/out_degree_connectivity were later
    # folded into average_degree_connectivity(source=..., target=...).
    G=nx.path_graph(4)
    answer={1:2.0,2:1.5}
    nd = nx.average_degree_connectivity(G)
    assert_equal(nd,answer)
    D=G.to_directed()
    answer={2:2.0,4:1.5}
    nd = nx.average_degree_connectivity(D)
    assert_equal(nd,answer)
    answer={1:2.0,2:1.5}
    D=G.to_directed()
    nd = nx.average_in_degree_connectivity(D)
    assert_equal(nd,answer)
    D=G.to_directed()
    nd = nx.average_out_degree_connectivity(D)
    assert_equal(nd,answer)
def average_degree_connectivityPlots(G):
    """Scatter-plot degree vs. average connectivity; save as 'avg_deg_connectivity'."""
    connectivity = nx.average_degree_connectivity(G)
    degrees = list(connectivity.keys())
    averages = list(connectivity.values())
    plt.figure()
    plt.xlabel('degree')
    plt.ylabel('average connectivity')
    plt.plot(degrees, averages, 'ro')
    plt.savefig('avg_deg_connectivity')
    plt.close()
def assortativity_distribution(graph: nx.Graph) -> None:
    """Plot the log-binned knn(k) curve and save it to the figures folder."""
    pairs = sorted(nx.average_degree_connectivity(graph).items())
    xs, ys = log_binning(dict(pairs), 40)
    plt.figure()
    plt.scatter(xs, ys, c='r', marker='s', s=25, label='')
    plt.title('Assortativity')
    plt.xlabel('k')
    plt.ylabel('$<k_{nn}>$')
    plt.savefig(os.path.join(common.FIGURES_FOLDER, 'assortativity.png'))
def test_degree_p4(self):
    """Unweighted degree connectivity on P4, undirected and directed."""
    graph = nx.path_graph(4)
    assert nx.average_degree_connectivity(graph) == {1: 2.0, 2: 1.5}
    # Directed copy doubles every total degree.
    digraph = graph.to_directed()
    assert nx.average_degree_connectivity(digraph) == {2: 2.0, 4: 1.5}
    expected = {1: 2.0, 2: 1.5}
    digraph = graph.to_directed()
    assert nx.average_degree_connectivity(digraph, source="in", target="in") == expected
    digraph = graph.to_directed()
    assert nx.average_degree_connectivity(digraph, source="in", target="in") == expected
def test_degree_p4_weighted(self):
    # Pre-2.0 networkx API: boolean `weighted=True` and the dedicated
    # average_in/out_degree_connectivity functions were later replaced by
    # the `weight=`/`source=`/`target=` keywords.
    G = nx.path_graph(4)
    G[1][2]['weight'] = 4
    answer = {1: 2.0, 2: 1.8}
    nd = nx.average_degree_connectivity(G, weighted=True)
    assert_equal(nd, answer)
    D = G.to_directed()
    answer = {2: 2.0, 4: 1.8}
    nd = nx.average_degree_connectivity(D, weighted=True)
    assert_equal(nd, answer)
    answer = {1: 2.0, 2: 1.8}
    D = G.to_directed()
    nd = nx.average_in_degree_connectivity(D, weighted=True)
    assert_equal(nd, answer)
    D = G.to_directed()
    nd = nx.average_out_degree_connectivity(D, weighted=True)
    assert_equal(nd, answer)
def test_degree_p4_weighted(self):
    # Same pre-2.0 API test as above, in the original compact formatting.
    G=nx.path_graph(4)
    G[1][2]['weight']=4
    answer={1:2.0,2:1.8}
    nd = nx.average_degree_connectivity(G,weighted=True)
    assert_equal(nd,answer)
    D=G.to_directed()
    answer={2:2.0,4:1.8}
    nd = nx.average_degree_connectivity(D,weighted=True)
    assert_equal(nd,answer)
    answer={1:2.0,2:1.8}
    D=G.to_directed()
    nd = nx.average_in_degree_connectivity(D,weighted=True)
    assert_equal(nd,answer)
    D=G.to_directed()
    nd = nx.average_out_degree_connectivity(D,weighted=True)
    assert_equal(nd,answer)
def graphInfo(graph, weighted=False):
    """Print basic size and connectivity information for *graph*.

    :param graph: a networkx graph
    :param weighted: when True, report size using the 'weight' edge attribute
    """
    print("Number of Vertices = ", graph.number_of_nodes())
    print("Number of Edges = ", graph.number_of_edges())
    print("Number of Connected Components = ", nx.number_connected_components(graph))
    # Idiom fix: truthiness test instead of `== False`.
    if not weighted:
        print("Size of Unweighted Graph = ", graph.size(weight=None))
    else:
        print("Size of Weighted Graph = ", graph.size(weight="weight"))
    # NOTE: despite the name, this is the per-degree weighted average
    # connectivity mapping, not a single average-degree number.
    averageWeightedDegree = nx.average_degree_connectivity(graph, weight="weight")
    print("Average Weighted Degree = ", averageWeightedDegree)
def compute_metrics(G):
    """Return a dict of global structural metrics for *G*."""
    metrics = {
        # Edge to node ratio.
        'e_n_r': nx.number_of_edges(G) / nx.number_of_nodes(G),
        # Average clustering coefficient.
        'av_clu': nx.average_clustering(G),
        'av_mdc': get_weighted_avg(nx.average_degree_connectivity(G)),
        # Average degree.
        'av_deg': get_avg_degree(G),
        # Transitivity.
        'tran': nx.transitivity(G),
        'den': nx.density(G),
        'c_cen': get_avg(nx.closeness_centrality(G)),
        'b_cen': get_avg(nx.betweenness_centrality(G)),
    }
    return metrics
def computeKnn(graph, knn_file, weight=None):
    # Python 2 code (print statements, Graph.edge): dumps every
    # degree -> average-connectivity pair of `graph` to `knn_file`
    # as "degree,value" lines.
    # Demo call on a toy path graph; result is only printed, never used.
    G = nx.path_graph(4)
    G.edge[1][2]['weight'] = 3
    print nx.k_nearest_neighbors(G)
    knnfs = codecs.open(knn_file, 'w+', encoding='utf-8')
    knn = nx.average_degree_connectivity(graph)
    print graph, 'knn as follows:'
    print knn
    for (key, value) in knn.items():
        knnfs.write(str(key) + ',' + str(value) + '\r\n')
    knnfs.flush()
    knnfs.close()
def run(folder, infilename, title):
    # Load a GraphML file and print its average degree connectivity.
    ingraph = read_graphml(folder + os.sep + infilename)
    try:
        #avg_cluster_coeff = nx.average_clustering(ingraph)
        #print('average clustering for ' + title + " = " + str(avg_cluster_coeff))
        avg_deg_coeff = nx.average_degree_connectivity(ingraph)
        print('average degree for ' + title + ' = ' + str(avg_deg_coeff))
    except Exception as e:
        # NOTE(review): broad catch — any failure is reduced to its message.
        print (e.__str__())
'''
def randomGraph(G):
    # Python 2 code: build an Erdos-Renyi G(n, p) graph sized to match G
    # and print its estimated clustering and path length.
    n = G.number_of_nodes()
    d = nx.average_degree_connectivity(G)
    # NOTE(review): `c` sums the knn values over all degree classes; it is
    # then used as if it were the mean degree — confirm this is intended.
    c = sum(i[1] for i in d.items())
    p = c / (n - 1)
    try:
        RG = nx.fast_gnp_random_graph(n, p)
        plot(RG)
        print 'Global Clustering: {0}\t'.format(str(p)),
        # Estimated average path length: ln(n) / ln(c).
        l = math.log(RG.number_of_nodes()) / math.log(c)
        print 'Average path length : {0}\n'.format(str(l))
    except:
        # NOTE(review): this bare string is a no-op expression, not a print —
        # the failure message is never shown.
        'Failed attempt to get connected random graph..Try again!!!'
def randomGraph(G):
    # Duplicate of the randomGraph above with slightly different whitespace;
    # Python 2 code building a matched Erdos-Renyi graph.
    n = G.number_of_nodes()
    d= nx.average_degree_connectivity(G)
    # NOTE(review): sums knn values, used below as if it were the mean degree.
    c = sum(i[1] for i in d.items())
    p = c/(n-1)
    try:
        RG = nx.fast_gnp_random_graph(n,p)
        plot(RG)
        print 'Global Clustering: {0}\t'.format(str(p)),
        l = math.log(RG.number_of_nodes())/math.log(c)
        print 'Average path length : {0}\n'.format(str(l))
    except:
        # NOTE(review): bare string — never printed.
        'Failed attempt to get connected random graph..Try again!!!'
def smallworld(G):
    # Python 2 code: build a Watts-Strogatz small-world graph matched to G
    # and print its predicted clustering and measured path length.
    n = G.number_of_nodes()
    d= nx.average_degree_connectivity(G)
    # NOTE(review): sums knn values, then treated as the mean degree.
    c = sum(i[1] for i in d.items())
    # Lattice clustering coefficient for a ring with c neighbors.
    c0 = 0.75*(c-2)/(c-1)
    beta = random.uniform(0.01,0.1)
    try:
        SG= nx.connected_watts_strogatz_graph(n,int(c),beta)
        plot(SG)
        # Expected clustering after rewiring with probability beta.
        c = ((1-beta)**3)*c0
        print 'Global Clustering: {0}\t'.format(str(c)),
        print 'Average path length : {0}\n'.format(str(nx.average_shortest_path_length(SG)))
    except:
        # NOTE(review): bare string — never printed.
        'Failed attempt to get connected small world graph..Try again!!!'
def loi_puissance(self):
    # Python 2 code: least-squares comparison of the degree distribution
    # against a power law nb * k**(-GAMMA); returns the sum of squared
    # residuals f.
    distri = nx.degree_histogram(self.graphe)
    k_obs=nx.average_degree_connectivity(self.graphe)
    tab_deg=self.graphe.degree()  # compute the degrees of all the nodes
    list_deg=[]
    type(tab_deg)  # NOTE(review): no-op expression, has no effect
    for key,value in tab_deg.iteritems():
        temp = [key,value]
        list_deg.append(temp[1])
    ########################""
    # Sum of squared residuals against the theoretical power law.
    f=0
    for k in range(1,len(distri)):
        f+=(distri[k]-(self.nb* (k**(-GAMMA))))**2
    ########################
    # Empirical degree probabilities.
    pk=[x/self.nb for x in distri]
    gradient, intercept, r_value, p_value, std_err = stats.linregress(distri,pk)
    #stats.kstest(k_theo,k_obs)
    return f
def test_zero_deg(self):
    # Out-star 1 -> {2, 3, 4}: checks every source/target degree
    # combination, including degree-0 classes which must map to 0.
    G=nx.DiGraph()
    G.add_edge(1,2)
    G.add_edge(1,3)
    G.add_edge(1,4)
    c = nx.average_degree_connectivity(G)
    assert_equal(c,{1:0,3:1})
    c = nx.average_degree_connectivity(G, source='in', target='in')
    assert_equal(c,{0:0,1:0})
    c = nx.average_degree_connectivity(G, source='in', target='out')
    assert_equal(c,{0:0,1:3})
    c = nx.average_degree_connectivity(G, source='in', target='in+out')
    assert_equal(c,{0:0,1:3})
    c = nx.average_degree_connectivity(G, source='out', target='out')
    assert_equal(c,{0:0,3:0})
    c = nx.average_degree_connectivity(G, source='out', target='in')
    assert_equal(c,{0:0,3:1})
    c = nx.average_degree_connectivity(G, source='out', target='in+out')
    assert_equal(c,{0:0,3:1})
def parametres(G,nG):
    # Python 2 code: interactive menu (labels in Catalan) that prints the
    # requested graph metrics.  G appears to be an igraph graph and nG the
    # equivalent networkx graph — the numbered options dispatch to one or
    # the other.
    print "\nParametres disponibles:\n"
    ###################################
    # Global parameters menu.
    print "\tParametres Globals: \n"
    print "\t[1] Betweenness Centralization\n"
    print "\t[2] Average path length\n"
    print "\t[3] Assortativity degree\n"
    print "\t[4] Diameter\n"
    print "\t[5] Density\n"
    print "\t[6] Cohesion\n"
    print "\t[7] Radius\n"
    # Per-node parameters menu.
    print "\tParametres per cada node de la xarxa: \n"
    print "\t[11] Betweenness\n"
    print "\t[12] Pagerank\n"
    print "\t[13] EigenVectorCentrality\n"
    print "\t[14] Average degree connectivity\n"
    print "\t[15] Periphery\n"
    print "\t[16] Eccentricity\n"
    print "\t[17] Center nodes\n"
    # Read a comma-separated list of option numbers from the user.
    se = raw_input('Escriu els numeros dels parametres que vulguis amb una coma entre mig:\n')
    secom = se.split(',')
    for i in range(len(secom)):
        if secom[i] == "1":
            print "\nBetweenness Centralization:"
            print betweenness_centralization(G)
        elif secom[i] == "2":
            print "\nAverage path length:"
            print G.average_path_length()
        elif secom[i] == "3":
            print "\nAssortativity degree:"
            print G.assortativity_degree()
        elif secom[i] == "4":
            print "\nDiameter:"
            print G.diameter()
        elif secom[i] == "5":
            print "\nDensity:"
            print G.density()
        elif secom[i] == "6":
            print "\nCohesion:"
            print G.cohesion()
        elif secom[i] == "7":
            print("\nRadius:")
            print nx.radius(nG)
        ###################################
        elif secom[i] == "11":
            print "\nBetweenness:"
            print G.betweenness(directed=False, cutoff=16)
        elif secom[i] == "12":
            print "\nPagerank:"
            print G.pagerank()
        elif secom[i] == "13":
            print "\nEigenVectorCentrality:"
            print G.eigenvector_centrality()
        elif secom[i] == "14":
            print "\nAverage degree connectivity:"
            print nx.average_degree_connectivity(nG)
        elif secom[i] == "15":
            print "\nPeriphery:"
            print nx.periphery(nG)
        elif secom[i] == "16":
            print("\nEccentricity:")
            print nx.eccentricity(nG)
        elif secom[i] == "17":
            print("\nCenter:")
            print nx.center(nG)
    ##### Excluded parameters #####
    #print "Assortativity meu:" # is it the same as degree?
    #print assortativitymeu(G)
    #print("diameter: %d" % nx.diameter(nG))
    #print("density: %s" % nx.density(nG))
    #print("richclub coefficient: %s" % nx.rich_club_coefficient(nG.to_undirected()))
    #print("richclub coefficient: %s" % nx.rich_club_coefficient(nG))
    return 2
def average_degree_connectivity(self):
    """Average degree connectivity of the wrapped graph, keyed by degree."""
    result = nx.average_degree_connectivity(self.g)
    return result
def a_degree_connectivity(G):
    """Return the mean of *G*'s average-degree-connectivity values.

    BUGFIX: under Python 3, ``dict.values()`` is a view; ``np.average`` wraps
    it as a zero-dimensional object array instead of averaging the numbers,
    so the view is materialized into a list first.
    """
    return np.average(list(nx.average_degree_connectivity(G).values()))
def avg_degree_connectivity(self):
    """Return the degree -> average neighbor degree mapping for the graph."""
    connectivity = nx.average_degree_connectivity(self._graph)
    return connectivity
gr = nx.Graph() #[i for i in itertools.combinations(de, 2) for de in df.topics[:100]] gr.add_edges_from([i for de in df.topics.dropna()[0:200] for i in itertools.combinations(de,2)]) # <codecell> gr2 = nx.Graph() [gr2.add_edge(f[0],t[0]) for f,t in zip(ftdf.fields, ftdf.topics) if f is not NaN and t is not NaN] gr2.size() # <codecell> print('topics network has %s edges and %s nodes'%(gr.number_of_edges(), gr.number_of_nodes())) nx.average_degree_connectivity(gr) #nx.draw_networkx(gr) # <codecell> gr.remove_node('machine learning') # <codecell> deg=nx.degree(gr) # <codecell> deg['support vector machine'] deg_sorted = sorted(deg.iteritems(), key=lambda(k,v):(-v,k)) deg_sorted[:50]
def stats(self):
    # Python 2 code: print the degree classes in ascending order.
    # NOTE(review): sorted() over the dict iterates keys only, so the
    # connectivity VALUES are dropped from the output — presumably
    # sorted(avg_deg_con.items()) was intended.
    avg_deg_con = nx.average_degree_connectivity(self.graph)
    print sorted(avg_deg_con)
def getAverageDegreeOfNeighbours(self):
    """Return the per-degree average neighbour degree of the Amazon graph."""
    return nx.average_degree_connectivity(self.amazonGraph)
def parametres(G, nG):
    """Interactively print a metric menu, compute the chosen metrics, and
    collect them as alternating name/value pairs in ``param``.

    Parameters
    ----------
    G : graph exposing igraph-style methods (``average_path_length``,
        ``cohesion``, ``betweenness``, ``pagerank``, ``degree_distribution``,
        ``motifs_randesu_no``, ``similarity_jaccard``...) — presumably an
        igraph ``Graph``; TODO confirm.
    nG : graph accepted by networkx functions (``nx.radius``, ``nx.periphery``
        etc.) — presumably the same network as a networkx graph; TODO confirm.

    Returns
    -------
    list
        ``param``, alternating metric names and values — NOTE(review):
        ``param`` is appended to but never initialised in this function, so
        it must be a module-level list defined elsewhere; verify.
    """
    # --- menu: whole-network (global) metrics -------------------------
    print "\nParametres disponibles:\n"
    ###################################
    print "\tParametres Globals: \n"
    # [1] Network-level centralization score derived from the per-node
    # betweenness centralities; tolerates disconnected networks.
    print "\t[1] Betweenness Centralization\n"
    # [2] Mean shortest-path length between node pairs.
    print "\t[2] Average path length\n"
    # [3] Degree assortativity r: positive = similar-degree nodes connect,
    # negative = dissimilar-degree nodes connect.
    print "\t[3] Assortativity degree\n"
    # [4] Network diameter.
    print "\t[4] Diameter\n"
    # [5] Edges present relative to the maximum possible; near 1 = dense,
    # near 0 = sparse.
    print "\t[5] Density\n"
    # [6] Vertex connectivity: minimum number of vertices whose removal
    # splits the network into two components.
    print "\t[6] Cohesion\n"
    # [7] Minimum eccentricity over all vertices; fails on disconnected
    # networks.
    print "\t[7] Radius\n"
    # --- menu: per-node metrics ---------------------------------------
    print "\tParametres per cada node de la xarxa: \n"
    # [11] Per-node betweenness (high value = central node).
    print "\t[11] Betweenness\n"
    # [12] Google's PageRank, a variant of eigenvector centrality.
    print "\t[12] Pagerank\n"
    # [13] Scores nodes so that links to high-scoring nodes count more
    # than links to low-scoring ones.
    print "\t[13] EigenVectorCentrality\n"
    # [14] Average nearest-neighbour degree of nodes with degree k.
    print "\t[14] Average degree connectivity\n"
    # [15] Nodes whose eccentricity equals the diameter; connected only.
    print "\t[15] Periphery\n"
    # [16] Max distance from node v to every other node; connected only.
    print "\t[16] Eccentricity\n"
    # [17] Nodes whose eccentricity equals the radius; connected only.
    print "\t[17] Center nodes\n"
    # [18] Probability distribution of node degrees over the network.
    print "\t[18] Degree Distribution\n"
    # [19] Total count of small subgraph motifs (size 3 by default);
    # see Wernicke & Rasche, FANMOD, Bioinformatics 22(9), 2006.
    print "\t[19] Count the number of Motif\n"
    # [20] Jaccard similarity: common neighbours / vertices adjacent to
    # at least one of the pair.
    print "\t[20] Similarity Jaccard\n"
    # Ask for a comma-separated list of menu numbers (Python 2 raw_input).
    se = raw_input('Escriu els numeros dels parametres que vulguis amb una coma entre mig:\n')
    secom = se.split(',')
    # Dispatch each requested number; each branch prints the metric and
    # appends its name then its value to ``param``.
    for i in range(len(secom)):
        if secom[i] == "1":
            print "\nBetweenness Centralization:"
            # betweenness_centralization is a helper defined elsewhere.
            p1 = betweenness_centralization(G)
            print p1
            param.append("Betweenness Centralization")
            param.append(p1)
        elif secom[i] == "2":
            print "\nAverage path length:"
            p2 = G.average_path_length()
            print p2
            param.append("Average path length")
            param.append(p2)
        elif secom[i] == "3":
            print "\nAssortativity degree:"
            p3 = G.assortativity_degree()
            print p3
            param.append("Assortativity degree")
            param.append(p3)
        elif secom[i] == "4":
            print "\nDiameter:"
            p4 = G.diameter()
            print p4
            param.append("Diameter")
            param.append(p4)
        elif secom[i] == "5":
            print "\nDensity:"
            p5 = G.density()
            print p5
            param.append("Density")
            param.append(p5)
        elif secom[i] == "6":
            print "\nCohesion:"
            p6 = G.cohesion()
            print p6
            param.append("Cohesion")
            param.append(p6)
        elif secom[i] == "7":
            print("\nRadius:")
            # Radius comes from the networkx graph, not G.
            p7 = nx.radius(nG)
            print p7
            param.append("Radius")
            param.append(p7)
        ###################################
        elif secom[i] == "11":
            print "\nBetweenness:"
            p11 = G.betweenness(directed=False, cutoff=16)
            print p11
            param.append("Betweenness")
            param.append(p11)
        elif secom[i] == "12":
            print "\nPagerank:"
            p12 = G.pagerank()
            print p12
            param.append("Pagerank")
            param.append(p12)
        elif secom[i] == "13":
            print "\nEigenVectorCentrality:"
            p13 = G.eigenvector_centrality()
            print p13
            param.append("EigenVectorCentrality")
            param.append(p13)
        elif secom[i] == "14":
            print "\nAverage degree connectivity:"
            p14 = nx.average_degree_connectivity(nG)
            print p14
            param.append("Average degree connectivity")
            param.append(p14)
        elif secom[i] == "15":
            print "\nPeriphery:"
            p15 = nx.periphery(nG)
            print p15
            param.append("Periphery")
            param.append(p15)
        elif secom[i] == "16":
            print("\nEccentricity:")
            p16 = nx.eccentricity(nG)
            print p16
            param.append("Eccentricity")
            param.append(p16)
        elif secom[i] == "17":
            print("\nCenter:")
            p17 = nx.center(nG)
            print p17
            param.append("Center")
            param.append(p17)
        elif secom[i] == "18":
            print("\nDegree Distribution:")
            p18 = G.degree_distribution()
            print p18
            param.append("Degree Distribution")
            param.append(p18)
        elif secom[i] == "19":
            print("\nTotal number of Motif:")
            p19 = G.motifs_randesu_no()
            print p19
            param.append("Total number of Motif")
            param.append(p19)
        elif secom[i] == "20":
            print("\nSimilarity Jaccard:")
            p20 = G.similarity_jaccard()
            print p20
            param.append("Similarity Jaccard")
            param.append(p20)
        else:
            print "Paramatre incorrecte"
            # NOTE(review): bare return -> None on any bad token, discarding
            # whatever was already collected in ``param``.
            return
    ##### Excluded parameters (kept as historical commented-out code) #####
    #print "Assortativity meu:" # es el mateix que el degree?
    #print assortativitymeu(G)
    #print("diameter: %d" % nx.diameter(nG))
    #print("density: %s" % nx.density(nG))
    #print("richclub coefficient: %s" % nx.rich_club_coefficient(nG.to_undirected()))
    #print("richclub coefficient: %s" % nx.rich_club_coefficient(nG))
    return param
def test_invalid_source(self):
    """Calling with an unknown ``source`` keyword triggers networkx's
    rejection (the expected exception is handled by the test decorator,
    which sits outside this block)."""
    empty_digraph = nx.DiGraph()
    nx.average_degree_connectivity(empty_digraph, source='bogus')
def get_average_degree_connectivity(self, g):
    """Return degree -> average nearest-neighbour degree for graph ``g``."""
    connectivity = nx.average_degree_connectivity(g)
    return connectivity
def test_invalid_target(self):
    """Calling with an unknown ``target`` keyword triggers networkx's
    rejection (the expected exception is handled by the test decorator,
    which sits outside this block)."""
    empty_digraph = nx.DiGraph()
    nx.average_degree_connectivity(empty_digraph, target='bogus')