def get_key_info(G, filename="", weight=None):
    n = G.number_of_nodes()
    m = G.number_of_edges()
    k = m / n
    r = nx.overall_reciprocity(G)
    cc = nx.average_clustering(G, weight=weight)
    # average_closure and average_eight_patterns are not part of stock
    # networkx; they assume a research fork that implements directed
    # closure/clustering patterns.
    ce = nx.average_closure(G, weight=weight)
    eps = nx.average_eight_patterns(G)
    if filename:
        with open(filename, 'w') as f:
            f.write("|V|: %d\n" % n)
            f.write("|E|: %d\n" % m)
            f.write("k: %.2f\n" % k)
            f.write("r: %.3f\n" % r)
            f.write("clustering: %.3f\n" % cc)
            f.write("closure: %.3f\n" % ce)
            f.write("clustering-head: %.3f\n" % eps[4])
            f.write("clustering-end: %.3f\n" % eps[5])
            f.write("clustering-mid: %.3f\n" % eps[6])
            f.write("clustering-cyc: %.3f\n" % eps[7])
            f.write("closure-head: %.3f\n" % eps[0])
            f.write("closure-end: %.3f\n" % eps[1])
            f.write("closure-mid: %.3f\n" % eps[2])
            f.write("closure-cyc: %.3f\n" % eps[3])
    res = [n, m, k, r, cc, ce,
           eps[0], eps[1], eps[2], eps[3], eps[4], eps[5], eps[6], eps[7]]
    return res
def reciprocity(g):
    '''
    Calculate the edge reciprocity of the graph.

    The reciprocity is defined as the number of edges that have a reciprocal
    edge (an edge between the same nodes but in the opposite direction)
    divided by the total number of edges.
    This is also the probability, for any given edge, that its reciprocal
    edge exists.
    By definition, the reciprocity of undirected graphs is 1.

    @todo: check whether we can get this for single nodes for all libraries.

    Parameters
    ----------
    g : :class:`~nngt.Graph`
        Graph to analyze.

    References
    ----------
    .. [nx-reciprocity] :nxdoc:`algorithms.reciprocity.overall_reciprocity`
    '''
    if not g.is_directed():
        return 1.

    return nx.overall_reciprocity(g.graph)
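# A minimal check of the ratio definition above, assuming only stock
# networkx; the toy graph is illustrative, not from any snippet here.
import networkx as nx

G = nx.DiGraph([(0, 1), (1, 0), (1, 2)])  # one reciprocated pair, one one-way edge
assert nx.overall_reciprocity(G) == 2 / 3  # 2 of the 3 edges have a reverse edge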
def compute_features(self):
    self.add_feature(
        "reciprocity",
        lambda graph: nx.overall_reciprocity(graph),
        "fraction of edges pointing in both directions to total number of edges",
        InterpretabilityScore(3),
    )
def oper_num(num):
    # one directed configuration-model graph with the observed degree
    # sequences (in_degree_sequence / out_degree_sequence are globals)
    g1 = nx.directed_configuration_model(in_degree_sequence, out_degree_sequence)
    tmp_dic = nx.triadic_census(g1)
    recip_ratio = nx.overall_reciprocity(g1)
    total_nodes = g1.number_of_nodes()
    total_edges = g1.number_of_edges()
    double_link = recip_ratio * total_edges
    single_dyad = total_edges - double_link
    mutual_dyad = double_link / 2
    null_dyad = total_nodes * (total_nodes - 1) / 2 - single_dyad - mutual_dyad
    tmp_dic['null_dyad'] = null_dyad
    tmp_dic['single_dyad'] = single_dyad
    tmp_dic['mutual_dyad'] = mutual_dyad
    return tmp_dic
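# A minimal sanity check of the dyad counts derived from reciprocity above,
# assuming stock networkx; the toy digraph is illustrative only.
import networkx as nx

g = nx.DiGraph([(0, 1), (1, 0), (1, 2), (3, 0)])
n, m = g.number_of_nodes(), g.number_of_edges()
r = nx.overall_reciprocity(g)              # 0.5: the pair (0, 1) is reciprocated
mutual = r * m / 2                         # mutual dyads: {0, 1} -> 1
single = m - r * m                         # one-way dyads: {1, 2}, {3, 0} -> 2
null = n * (n - 1) / 2 - single - mutual   # remaining pairs -> 3
assert (mutual, single, null) == (1.0, 2.0, 3.0)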
print("No. Of Nodes of Wikipedia Network Graph:", G2.number_of_nodes()) print("No. Of Edges of Wikipedia Network Graph:", G2.number_of_edges()) # Average Clustering Coefficient average_clustering_coefficient_G1 = nx.average_clustering(G1) average_clustering_coefficient_G2 = nx.average_clustering(G2) print("Average Clustering Coefficient of Facebook Social Graph: ", average_clustering_coefficient_G1) print("Average Clustering Coefficient of Wikipedia Network Graph: ", average_clustering_coefficient_G2) # ## 1.3 Reciprocity # In[53]: print("Reciprocity of Facebook Social Graph is: ", nx.overall_reciprocity(G1)) print("Reciprocity of Wikipedia Vote Network Graph is: ", nx.overall_reciprocity(G2)) # ## 1.4 Transitivity # In[40]: print("Transitivity of Facebook Social Graph is: ", nx.transitivity(G1)) print("Transitivity of Wikipedia Vote Network Graph is: ", nx.transitivity(G2)) # # Problem Statement 2: Giant component variation # In[41]: giant_facebook = len(max(nx.connected_components(G1), key=len))
# create a copy (otherwise G cannot be modified)
SG = nx.DiGraph(tmp)
# iterate over members
for n in community:
    # add the node if it is not present in the original graph;
    # this can happen if the player was offline that day
    if n not in SG.nodes:
        SG.add_node(n)

# compute the measures; if one of them fails, fall back to 0
density = 0
try:
    density = nx.density(SG)
except Exception:
    pass

reciprocity = 0
try:
    reciprocity = nx.overall_reciprocity(SG)
except Exception:
    pass

res.loc[len(res)] = [alliance, density, reciprocity]
if density == 0:
    # count one more community with 0 density
    counter_density += 1

zeros.loc[len(zeros)] = [time + 1, counter_density / counter_community]

res.to_csv(save_path + "/{}_community_density_reciprocity".format(edge_type)
           + str(time + 1) + ".csv", index=False)
zeros.to_csv(save_path + "/{}_zeroes_percentage.csv".format(edge_type), index=False)
def main():
    os.system('clear')
    print("################################################################################")
    print("                    Reciprocity computation for each layer                      ")
    print("################################################################################")
    print()

    i = 0
    if os.path.exists(output_dir_json + "reciprocity.json"):
        print("Destination file already exists! " + str(output_dir_json + "reciprocity.json"))
    else:
        create_dirs(output_dir_txt, output_dir_json)  # create directories to save the files
        dataset_json = {}  # data saved in JSON format
        with open(output_dir_txt + "reciprocity.txt", 'w') as out_file:
            for ego, v in dictionary.items():
                i += 1
                # Twitter interaction layers: [friends, followers, retweets, likes, mentions]
                nets = ["n1", "n2", "n3", "n4", "n9"]
                dataset = {}
                for net in nets:
                    if net == "n1":
                        layer = "a"
                    elif net == "n9":
                        layer = "s"
                    elif net == "n2":
                        layer = "r"
                    elif net == "n3":
                        layer = "l"
                    elif net == "n4":
                        layer = "m"
                    else:
                        print("Invalid network")
                        sys.exit()

                    # directory of layer i
                    edge_list = "/home/amaury/graphs_hashmap/" + str(net) + "/graphs_with_ego/"
                    if not os.path.isdir(edge_list):  # check whether the directory exists
                        print("Could not find the edge-list directory: " + str(edge_list))
                    else:
                        source = str(edge_list) + str(ego) + ".edge_list"
                        # load the directed graph of layer i
                        G = nx.read_weighted_edgelist(str(source), create_using=nx.DiGraph())
                        # compute reciprocity for this layer
                        result = nx.overall_reciprocity(G)
                        dataset[layer] = result

                dataset_json[ego] = dataset
                print(i, dataset_json[ego])
                save_file(ego, dataset, out_file)  # save the plain-text file
                print()

        save_json(dataset_json)  # save the JSON file
        print("\n######################################################################\n")
        print("Script finished!")
        print("\n######################################################################\n")
for row in katzCentralities:
    sum += katzCentralities[row]
print('Average Katz Centrality: ' + str(sum / count))

Cluster = nx.clustering(graph)
sum = 0
for row in Cluster:
    sum += Cluster[row]
print('Average Clustering: ' + str(sum / count))

Transitivity = nx.transitivity(graph)
print('Average Transitivity: ' + str(Transitivity))

Reciprocity = nx.overall_reciprocity(graph)
print('Reciprocity of the Directed graph: ' + str(Reciprocity))

#### PLOT for N_G and N RATIO
n = graph.number_of_nodes()
x = []
y = []
k = 0
while k <= 5:
    x.append(k)
    p = k / n
    g_random = nx.gnp_random_graph(n, p)
    random_n = g_random.number_of_nodes()
    random_n_g = 0
    for component in nx.connected_components(g_random):
def test_overall_reciprocity_empty_graph(self):
    with pytest.raises(nx.NetworkXError):
        DG = nx.DiGraph()
        nx.overall_reciprocity(DG)
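# A minimal guard for application code, assuming stock networkx: the error
# the test above expects (reciprocity is undefined without edges) can be
# handled like this rather than crashing.
import networkx as nx

G = nx.DiGraph()
try:
    r = nx.overall_reciprocity(G)
except nx.NetworkXError:
    r = float("nan")  # reciprocity is undefined on an edgeless graph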
def inspect_graph(dataset):
    g = nx.read_edgelist(dataset, create_using=nx.Graph(), nodetype=int)
    directed_g = nx.read_edgelist(dataset, create_using=nx.DiGraph(), nodetype=int)

    sep = '\n----------------------------------------------------------------------------------------------------------------------\n'

    print(sep)
    print('GRAPH INFORMATION for \n' + dataset)
    print(nx.info(g))
    n = g.number_of_nodes()
    # print(g.edges())

    print(sep)
    print('DEGREE CENTRALITY\n')
    print(nx.degree_centrality(g))

    print(sep)
    print('EIGENVECTOR CENTRALITY\n')
    print(nx.eigenvector_centrality(g))

    print(sep)
    print('KATZ CENTRALITY\n')
    print(nx.katz_centrality(g))

    print(sep)
    print('PAGERANK\n')
    print(nx.pagerank(g))

    print(sep)
    print('BETWEENNESS CENTRALITY\n')
    print(nx.betweenness_centrality(g))

    print(sep)
    print('CLOSENESS CENTRALITY\n')
    print(nx.closeness_centrality(g))

    print(sep)
    print('LOCAL CLUSTERING COEFFICIENT\n')
    print(nx.clustering(g))

    print(sep)
    print('AVERAGE CLUSTERING COEFFICIENT\n')
    print(nx.average_clustering(g))

    print(sep)
    print('GLOBAL CLUSTERING COEFFICIENT\n')
    # triangles = sum(nx.triangles(g).values())
    # closed vs. open triads in the directed triadic census
    triangles_set = {'210', '300', '120C', '030C', '120U', '030T', '120D'}
    open_triads_set = {'111D', '201', '021D', '111U', '021U', '021C'}
    triads_dict = nx.triadic_census(directed_g)
    open_triads = 0
    triangles = 0
    for key in triads_dict:
        if key in open_triads_set:
            open_triads += triads_dict[key]
        if key in triangles_set:
            triangles += triads_dict[key]
    GCC = (3 * triangles) / ((3 * triangles) + open_triads)
    print(GCC)

    print(sep)
    print('RECIPROCITY\n')
    # reciprocity is only meaningful on the directed graph
    # (on the undirected copy it degenerates to a constant)
    print(nx.overall_reciprocity(directed_g))

    print(sep)
    print('TRANSITIVITY\n')
    print(nx.transitivity(g))

    print(sep)
    print('GIANT COMPONENT\n')
    n_g = 0
    for component in nx.connected_components(g):
        n_g = max(n_g, len(component))
    print('Size of the giant component - ' + str(n_g))

    print(sep)
    print('PLOT\n')
    x = []
    y = []
    k = 0
    while k <= 5:
        x.append(k)
        p = k / n
        g_random = nx.gnp_random_graph(n, p)
        random_n = g_random.number_of_nodes()
        random_n_g = 0
        for component in nx.connected_components(g_random):
            random_n_g = max(random_n_g, len(component))
        y.append(random_n_g / random_n)
        k += 0.1
    # print(x)
    # print(y)
    plot(x, y, "Average Degree", "N_G/N ratio")
    print(sep)
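# A minimal check of the census-based GCC above, assuming stock networkx:
# a directed 3-cycle is a single closed triad ('030C'), so the formula
# gives GCC = 3*1 / (3*1 + 0) = 1.0.
import networkx as nx

dg = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
assert nx.triadic_census(dg)['030C'] == 1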
# glb_dic collects, per tag, the 16 standard triad-census counts plus the
# dyad counts derived below (head of the dict reconstructed from the usage)
glb_dic = {
    '003': [], '012': [], '102': [], '021D': [], '021U': [], '021C': [],
    '111D': [], '111U': [], '030T': [], '030C': [], '201': [],
    '120D': [], '120U': [], '120C': [], '210': [], '300': [],
    'null_dyad': [], 'single_dyad': [], 'mutual_dyad': []
}

for tag in tags:
    (g1, nodes_has_info) = get_guarantee_network(tag)
    ###### generate 10000 directed configuration model
    tmp_dic = nx.triadic_census(g1)
    for key, value in tmp_dic.items():
        glb_dic[key].append(value)

    recip_ratio = nx.overall_reciprocity(g1)
    total_nodes = g1.number_of_nodes()
    total_edges = g1.number_of_edges()
    double_link = recip_ratio * total_edges

    single_dyad = total_edges - double_link
    glb_dic['single_dyad'].append(single_dyad)

    mutual_dyad = double_link / 2
    glb_dic['mutual_dyad'].append(mutual_dyad)

    null_dyad = total_nodes * (total_nodes - 1) / 2 - single_dyad - mutual_dyad
    glb_dic['null_dyad'].append(null_dyad)

# fw = open('2_3_nodes_motif_occurance.csv', 'a')
df = pd.DataFrame.from_dict(glb_dic)
def analysis_lsde(file_path, address, flag):
    # read gpickle (the pickle already stores a DiGraph)
    G = nx.read_gpickle(file_path)
    important_nodes = [address]  # import important nodes

    ########## for nodes (addresses)
    # nodes_info, clu_coe_filter, avg_coe, cen_in_filter, cen_out_filter, degree_sort
    take_value_nodes = fl.fun_nodes(G)
    nodes_info = take_value_nodes[0]

    ########## for edges (transaction records)
    # edges_info, (important-nodes info, first/last time & time difference),
    # t-value (total) dataframe, polynomial parameters
    take_value_edges = fl.fun_edges(G, important_nodes)
    edges_info = take_value_edges[0]

    ########## for graph characteristics
    # addr_total_number, trans_total_number, value_max, value_min, value_average,
    # value_median, value_onefourth, value_threefourth
    graph_charac = fl.graph_characteristic(G, nodes_info, edges_info)

    ########## for specific nodes
    # somenode, somenode_as_from, somenode_as_to,
    # connected addr info (as from), connected addr info2 (as to)
    specific_node_charac = fl.analy_specific_node(important_nodes[0], nodes_info, edges_info)

    ########## for subgraph
    # G2 = fl.subgraph_total(G)  # subgraph

    ########## summary & analysis
    feature_dic = {}
    addr_total_number = graph_charac[0]
    trans_total_number = graph_charac[1]
    feature_dic['Total addresses'] = addr_total_number
    feature_dic['Total Transaction'] = trans_total_number
    # if p > 0.001, the transaction values meet the normality requirement
    feature_dic['Normality check(p-value)'] = stats.shapiro(edges_info.Value)[1]
    # if p > 0.01, time and value are significantly related, i.e. strongly correlated
    feature_dic['Significance test(p-value)'] = stats.pearsonr(edges_info.Time, edges_info.Value)[1]
    print('24/35 successful!')

    feature_dic['Percentage of cluster coefficient>0 addresses'] = \
        100 * (take_value_nodes[1].shape[0]) / addr_total_number
    feature_dic['Average cluster coefficient of the whole graph'] = take_value_nodes[2]
    feature_dic['Percentage of in centrality>0.01 addresses'] = \
        100 * (take_value_nodes[3].shape[0]) / addr_total_number
    feature_dic['Percentage of out centrality>0.01 addresses'] = \
        100 * (take_value_nodes[4].shape[0]) / addr_total_number
    feature_dic['Max value'] = graph_charac[2]
    feature_dic['Min value'] = graph_charac[3]
    feature_dic['Average value'] = graph_charac[4]
    feature_dic['Median value'] = graph_charac[5]
    feature_dic['One fourth value'] = graph_charac[6]
    feature_dic['Three fourth value'] = graph_charac[7]
    feature_dic['The max degree of all addresses'] = len(take_value_nodes[5]) - 1
    # feature_dic['Polynomial parameters'] = take_value_edges[3]

    ttt1 = len(take_value_edges[1][0][0])
    feature_dic['Important nodes information'] = ttt1
    feature_dic['Important nodes start-time'] = take_value_edges[1][1][0][0]
    feature_dic['Important nodes end-time'] = take_value_edges[1][1][0][1]
    feature_dic['Important nodes time difference'] = take_value_edges[1][1][0][2]

    ttt2 = len(specific_node_charac[1])
    ttt3 = len(specific_node_charac[2])
    if ttt1 == 0:
        feature_dic['This node as from'] = 0
        feature_dic['This node as to'] = 0
    else:
        feature_dic['This node as from'] = ttt2 / ttt1
        feature_dic['This node as to'] = ttt3 / ttt1
    if specific_node_charac[4] == 0:
        feature_dic['This node connected with(as from)'] = 0
    else:
        feature_dic['This node connected with(as from)'] = ttt2 / specific_node_charac[4]
    if specific_node_charac[3] == 0:
        feature_dic['This node connected with(as to)'] = 0
    else:
        feature_dic['This node connected with(as to)'] = ttt3 / specific_node_charac[3]

    if flag:
        add_feature1 = nx.pagerank(G)
        iadd1 = 0
        for v in add_feature1.values():
            if v > 0.01:
                iadd1 = iadd1 + 1

        # transitivity: fraction of all possible triangles present in G;
        # possible triangles are identified by the number of "triads"
        # (two edges with a shared vertex)
        add_feature2 = nx.transitivity(G)
        print('26/35 successful!')

        # assortativity measures the similarity of connections with respect to node degree
        add_feature3 = nx.degree_assortativity_coefficient(G)
        # a graph is chordal if every cycle of length at least 4 has a chord
        # (an edge joining two nodes not adjacent in the cycle)
        add_feature4 = nx.is_chordal(G.to_undirected())
        print('28/35 successful!')

        add_feature5 = nx.is_weakly_connected(G)
        add_feature6 = nx.is_strongly_connected(G)
        print('30/35 successful!')

        # the efficiency of a pair of nodes is the multiplicative inverse of
        # their shortest-path distance; the average global efficiency of a
        # graph is the average efficiency over all pairs of nodes
        add_feature7 = nx.global_efficiency(G.to_undirected())
        add_feature8 = nx.is_branching(G)
        print('32/35 successful!')

        add_feature9 = sorted(nx.immediate_dominators(G, address).items())
        add_domin = len(add_feature9)
        # a simple path is a nonempty node sequence with no repeated nodes in
        # which each adjacent pair of nodes is adjacent in the graph
        add_feature10 = nx.is_simple_path(G, address)
        print('34/35 successful!')

        add_feature11 = nx.overall_reciprocity(G)  # reciprocity of the whole graph
        print('35/35 successful!')

        feature_dic['Percentage of page Rank >0.01 address'] = iadd1 / addr_total_number
        feature_dic['Transitivity'] = add_feature2
        feature_dic['Assortativity'] = add_feature3
        feature_dic['Chordal'] = add_feature4
        # match each label to the right connectivity check
        feature_dic['Strongly connected'] = add_feature6
        feature_dic['Weakly connected'] = add_feature5
        feature_dic['Global efficiency'] = add_feature7
        feature_dic['Is branching'] = add_feature8
        feature_dic['Immediate dominator(numbers)'] = add_domin
        feature_dic['Is simple path'] = add_feature10
        feature_dic['Reciprocity'] = add_feature11

        Header = ['Total addresses', 'Total Transaction', 'Normality check(p-value)',
                  'Significance test(p-value)', 'Percentage of cluster coefficient>0 addresses',
                  'Average cluster coefficient of the whole graph',
                  'Percentage of in centrality>0.01 addresses',
                  'Percentage of out centrality>0.01 addresses',
                  'Max value', 'Min value', 'Average value', 'Median value',
                  'One fourth value', 'Three fourth value', 'The max degree of all addresses',
                  'Important nodes information', 'Important nodes start-time',
                  'Important nodes end-time', 'Important nodes time difference',
                  'This node as from', 'This node as to',
                  'This node connected with(as from)', 'This node connected with(as to)',
                  'Percentage of page Rank >0.01 address', 'Transitivity', 'Assortativity',
                  'Chordal', 'Strongly connected', 'Weakly connected', 'Global efficiency',
                  'Is branching', 'Immediate dominator(numbers)', 'Is simple path', 'Reciprocity']
    else:
        print('Finished')
        Header = ['Total addresses', 'Total Transaction', 'Normality check(p-value)',
                  'Significance test(p-value)', 'Percentage of cluster coefficient>0 addresses',
                  'Average cluster coefficient of the whole graph',
                  'Percentage of in centrality>0.01 addresses',
                  'Percentage of out centrality>0.01 addresses',
                  'Max value', 'Min value', 'Average value', 'Median value',
                  'One fourth value', 'Three fourth value', 'The max degree of all addresses',
                  'Important nodes information', 'Important nodes start-time',
                  'Important nodes end-time', 'Important nodes time difference',
                  'This node as from', 'This node as to',
                  'This node connected with(as from)', 'This node connected with(as to)']

    end = time.time()
    print('running time =', end - start)
    return feature_dic, Header
plt.show()

#%% Estimate edge probabilities at the block level
blockProb, logLik = sbm.estimateBlockProb(adj, clusterId, directed=True)
print(blockProb)
print(logLik)

# View estimated edge probabilities as a heat map
plt.figure()
plt.imshow(blockProb)
plt.colorbar()
plt.show()

#%% Compute reciprocity and transitivity of actual network using NetworkX
net = nx.DiGraph(adj)
recip = nx.overall_reciprocity(net)
print(recip)
trans = nx.transitivity(net)
print(trans)

#%% Simulate new networks from SBM fit to check model goodness of fit
nRuns = 50
blockProbSim = np.zeros((nClusters, nClusters, nRuns))
recipSim = np.zeros(nRuns)
transSim = np.zeros(nRuns)
for run in range(nRuns):
    # Simulate new adjacency matrix and create NetworkX object for it
    adjSim = sbm.generateAdj(clusterId, blockProb, directed=True)
    netSim = nx.DiGraph(adjSim)
    blockProbSim[:, :, run] = sbm.estimateBlockProb(adjSim, clusterId,
def get_feature_vector(self, g, memberships, force_connected):
    nr_singletons = nx.algorithms.isolate.number_of_isolates(g)
    nr_nodes = len(g.nodes)
    nr_edges = len(g.edges)
    nr_communities = np.amax(memberships)
    degree_assortativity_coefficient = \
        nx.algorithms.assortativity.degree_assortativity_coefficient(g)  # weight="weight"
    estrada_index = nx.algorithms.centrality.estrada_index(g)
    transitivity = nx.algorithms.cluster.transitivity(g)
    average_clustering_coefficient = nx.algorithms.cluster.average_clustering(g)  # weight="weight"
    # average_node_connectivity = nx.algorithms.connectivity.connectivity.average_node_connectivity(g)  # time-consuming
    # local_efficiency = nx.algorithms.efficiency_measures.local_efficiency(g)  # <- which for edge reduction
    global_efficiency = nx.algorithms.efficiency_measures.global_efficiency(g)  # <- which for edge reduction
    overall_reciprocity = nx.overall_reciprocity(g)
    s_metric = nx.algorithms.smetric.s_metric(g, normalized=False)

    if force_connected:
        if len(list(nx.connected_components(g))) > 1:
            raise ValueError("The provided graph is not connected. "
                             "Call force_connected() or provide a connected graph.")
        # the measures below fail on disconnected graphs
        average_shortest_path_length = \
            nx.algorithms.shortest_paths.generic.average_shortest_path_length(g)  # weight="weight"
        diameter = nx.algorithms.distance_measures.diameter(g)
        radius = nx.algorithms.distance_measures.radius(g)
        # small-world sigma/omega also require a connected graph and are time-consuming:
        # niter = rewirings per edge (default 100), nrand = number of random graphs (default 10)
        # sw_sigma = nx.algorithms.smallworld.sigma(g, seed=0, niter=100, nrand=10)
        # sw_omega = nx.algorithms.smallworld.omega(g, seed=0, niter=100, nrand=10)
        wiener_index = nx.algorithms.wiener.wiener_index(g)  # weight="weight"

        feature_vector = [
            nr_singletons, nr_nodes, nr_edges, nr_communities,
            degree_assortativity_coefficient, estrada_index, transitivity,
            average_clustering_coefficient,
            # average_node_connectivity,
            # local_efficiency,
            global_efficiency, overall_reciprocity, s_metric,
            average_shortest_path_length, diameter, radius,
            # sw_sigma,
            # sw_omega,
            wiener_index,
        ]
        self.feature_vector_connected = feature_vector
    else:
        feature_vector = [
            nr_singletons, nr_nodes, nr_edges, nr_communities,
            degree_assortativity_coefficient, estrada_index, transitivity,
            average_clustering_coefficient,
            # average_node_connectivity,
            # local_efficiency,
            global_efficiency, overall_reciprocity, s_metric,
        ]
        self.feature_vector = feature_vector

    # node_connectivity = nxapprox.connectivity.node_connectivity(g)  # useless?
    # edge_connectivity = nx.algorithms.connectivity.connectivity.edge_connectivity(g)  # useless?
    return feature_vector
import networkx as nx
import sys

file = sys.argv[1]
G = nx.read_graphml(file)
print("Density " + str(nx.density(G)))
print("Reciprocity " + str(nx.overall_reciprocity(G)))

deg = G.out_degree()
res = 0
for n in G.nodes:
    res += deg[n]
print("Mean number of neighbors " + str(res / len(G.nodes)))
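# Side note (a hedged check, not in the script above): summing out-degrees
# just re-derives |E|, so the mean printed above equals |E| / |V| for a DiGraph.
assert res == G.number_of_edges()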
def test_overall_reciprocity_empty_graph(self):
    # this call raises nx.NetworkXError: reciprocity is not defined
    # for a graph with no edges
    DG = nx.DiGraph()
    nx.overall_reciprocity(DG)
f.write("{:>50}".format(g_dir.number_of_edges()) + " \n") f.write("{:>50}".format("Density:") + " ") density = g_dir.number_of_edges() / (g_dir.number_of_nodes() * (g_dir.number_of_nodes() - 1)) f.write("{:50.3f}".format(density) + " \n") f.write("{:>50}".format("Average in-degree:") + " ") f.write("{:50.3f}".format( sum([d for (n, d) in g_dir.in_degree()]) / float(g_dir.number_of_nodes())) + " \n") f.write("{:>50}".format("Average out-degree:") + " ") f.write("{:50.3f}".format( sum([d for (n, d) in g_dir.out_degree()]) / float(g_dir.number_of_nodes())) + " \n") f.write("{:>50}".format("Number of reciprocal edges:") + " ") f.write("{:>50}".format( int(nx.overall_reciprocity(g_dir) * g_dir.number_of_edges())) + " \n") f.write("{:>50}".format("Reciprocity (ratio definition):") + " ") f.write("{:50.3f}".format(nx.overall_reciprocity(g_dir)) + " \n") f.write("{:>50}".format("Reciprocity (statistical definition):") + " ") f.write("{:50.3f}".format((nx.overall_reciprocity(g_dir) - density) / (1 - density)) + " \n") f.write("{:>50}".format("Number of triangles:") + " ") f.write("{:>50}".format( int( np.sum(np.array(list(nx.algorithms.triangles(g_und).values()))) / 3)) + " \n") f.write( "{:>50}".format("Average undirected local clustering coefficient:") + " ") f.write("{:50.3f}".format(nx.average_clustering(g_und)) + " \n") f.write("{:>50}".format("Undirected global clustering coefficient:") + " ")