import networkx as nx
from networkx.algorithms import distance_measures


def graph_1():
    # small undirected example graph for the distance measures below
    G = nx.Graph()
    G.add_nodes_from([2, 3, 5, 6, 7])
    G.add_edges_from([[2, 3], [5, 3], [6, 7], [7, 2], [5, 7]])
    print(list(G.nodes()))
    print(list(G.edges()))
    print(distance_measures.center(G))
    print(distance_measures.periphery(G))
    # print(distance_measures.center(G, e={12: 2, 13: 3, 15: 2, 16: 3}))
    # print(distance_measures.center(G, e={333: 3}))
    print(distance_measures.eccentricity(G))
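# A minimal sketch checking graph_1()'s measures via the top-level networkx
# API (the helper name is hypothetical; the expected values were hand-computed
# for this 5-node graph, though NetworkX's list ordering may differ):
def graph_1_check():
    H = nx.Graph([(2, 3), (5, 3), (6, 7), (7, 2), (5, 7)])
    assert nx.eccentricity(H) == {2: 2, 3: 3, 5: 2, 6: 3, 7: 2}
    assert set(nx.center(H)) == {2, 5, 7}    # eccentricity == radius == 2
    assert set(nx.periphery(H)) == {3, 6}    # eccentricity == diameter == 3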
import networkx as nx
from networkx.algorithms import components as co
from networkx.algorithms import cluster as cl
from networkx.algorithms import distance_measures as dm
from networkx.algorithms.smallworld import omega, sigma
# `dr` (provides GraphMaker) and `dprint` (debug print) come from the
# surrounding project and are not shown here.


def runWith(fname):
    print(fname)
    gm = dr.GraphMaker()
    gm.load(fname)
    # dpr = gm.pagerank()
    dg = gm.graph()
    # for x in dg: print('VERT::', x)
    print('nodes:', dg.number_of_nodes())
    print('edges:', dg.number_of_edges())
    comps = nx.strongly_connected_components(dg)
    print('strongly connected components:', len(list(comps)))
    # largest strongly connected component, used for the distance measures below
    c = max(nx.strongly_connected_components(dg), key=len)
    mg = dg.subgraph(c)
    print('attracting components:', co.number_attracting_components(dg))
    print('number_weakly_connected_components:', co.number_weakly_connected_components(dg))
    print('Transitivity:', cl.transitivity(dg))
    return  # early exit: the distance and small-world measures below are skipped
    e = dm.eccentricity(mg)
    dprint('ecc:', e)
    cent = dm.center(mg, e=e)
    print('CENTER', cent)
    p = dm.periphery(mg, e=e)
    print('perif:', len(p))
    # dprint('perif:', e)
    print('diameter:', dm.diameter(nx.Graph(mg)))
    print('radius:', dm.radius(nx.Graph(mg)))
    g = nx.Graph(dg)
    print('omega:', omega(g))
    print('sigma:', sigma(g))
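# Hypothetical sketch of the pattern runWith() uses: compute center and
# periphery on the largest strongly connected component of a directed graph.
# The function name and the toy digraph are assumptions; only the networkx
# calls are real.
def scc_center_periphery_demo():
    dg = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4)])    # toy digraph
    c = max(nx.strongly_connected_components(dg), key=len)
    mg = dg.subgraph(c)                                   # largest SCC: {1, 2, 3}
    e = nx.eccentricity(mg)
    return nx.center(mg, e=e), nx.periphery(mg, e=e)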
def getPeriphery(self) -> list:
    """Return the periphery of the wrapped graph: the nodes whose
    eccentricity equals the diameter."""
    return periphery(self.graph)
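# Hypothetical minimal wrapper in which a getPeriphery() method like the one
# above could live (class name and constructor are assumptions; only
# `periphery` is the real networkx call):
import networkx as nx
from networkx.algorithms.distance_measures import periphery


class GraphWrapper:
    def __init__(self, graph: nx.Graph) -> None:
        self.graph = graph

    def getPeriphery(self) -> list:
        return periphery(self.graph)

# e.g. GraphWrapper(nx.cycle_graph(4)).getPeriphery() returns all four nodes,
# since every node of a cycle has the same eccentricity.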
import matplotlib.pyplot as plt
from networkx.classes import function
from networkx.algorithms import (approximation, assortativity, centrality,
                                 cluster, distance_measures, link_analysis,
                                 smallworld)


def ver_medidas(G):
    print(function.info(G))  # graph summary (removed in NetworkX 3.0)
    """
    Minimum number of nodes that must be removed to disconnect G.
    """
    print("Minimum number of nodes to remove to disconnect G: " + str(approximation.node_connectivity(G)))
    """
    Average clustering coefficient of G (approximate).
    """
    print("Average clustering coefficient of G: " + str(approximation.average_clustering(G)))
    """
    Density of a graph.
    """
    print("Density of G: " + str(function.density(G)))
    """
    Assortativity measures the similarity of connections in the graph with
    respect to the node degree. Positive values of r indicate a correlation
    between nodes of similar degree, while negative values indicate
    correlations between nodes of different degree.
    """
    print("Degree assortativity: " + str(assortativity.degree_assortativity_coefficient(G)))
    """
    Assortativity measures the similarity of connections in the graph with
    respect to the given attribute.
    """
    print("Assortativity for node attributes: " + str(assortativity.attribute_assortativity_coefficient(G, "crime")))
    """
    Average neighbor degree.
    """
    plt.plot(list(assortativity.average_neighbor_degree(G).values()))
    plt.title("Average neighbor degree")
    plt.xlabel("Node")
    plt.ylabel("Degree")
    plt.show()
    """
    Degree centrality of each node.
    """
    plt.plot(list(centrality.degree_centrality(G).values()))
    plt.title("Degree centrality")
    plt.xlabel("Node")
    plt.ylabel("Centrality")
    plt.show()
    """
    Clustering coefficient of each node.
    """
    plt.plot(list(cluster.clustering(G).values()))
    plt.title("Clustering coefficient")
    plt.xlabel("Node")
    plt.show()
    """
    Mean clustering coefficient.
    """
    print("Clustering coefficient of G: " + str(cluster.average_clustering(G)))
    """
    Center of the graph.
    The center of a graph G is the subgraph induced by the set of vertices of
    minimum eccentricity. The eccentricity of v in V is the maximum distance
    from v to any other vertex of G along shortest paths.
    """
    print("Center of G: " + str(distance_measures.center(G)))
    """
    Diameter of a graph.
    The diameter is the maximum eccentricity.
    """
    print("Diameter of G: " + str(distance_measures.diameter(G)))
    """
    Eccentricity of each node.
    The eccentricity of a node v is the maximum distance from v to all other nodes in G.
    """
    plt.plot(list(distance_measures.eccentricity(G).values()))
    plt.title("Eccentricity of each node")
    plt.xlabel("Node")
    plt.show()
    """
    Periphery.
    The periphery is the set of nodes with eccentricity equal to the diameter.
    """
    print("Periphery of G:")
    print(distance_measures.periphery(G))
    """
    Radius.
    The radius is the minimum eccentricity.
    """
    print("Radius of G: " + str(distance_measures.radius(G)))
    """
    PageRank computes a ranking of the nodes in graph G based on the structure
    of the incoming links. It was originally designed as an algorithm to rank
    web pages.
    """
    plt.plot(list(link_analysis.pagerank_alg.pagerank(G).values()))
    plt.title("Score of each node")
    plt.xlabel("Node")
    plt.show()
    """
    Small-world coefficient (sigma).
    A graph is commonly classified as small-world if sigma > 1.
    """
    print("Small-world coefficient (sigma): " + str(smallworld.sigma(G)))
    """
    The small-world coefficient omega ranges between -1 and 1. Values close to
    0 mean G has small-world characteristics. Values close to -1 mean G has a
    lattice shape, whereas values close to 1 mean G is a random graph.
    """
    print("Omega coefficient: " + str(smallworld.omega(G)))
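# Hypothetical driver for ver_medidas(): the function expects a connected
# graph whose nodes carry a "crime" attribute (required by
# attribute_assortativity_coefficient). The helper name and the dummy
# attribute values are assumptions; note that sigma/omega are slow.
import networkx as nx


def demo_ver_medidas():
    G = nx.karate_club_graph()
    # attach a dummy "crime" label so the attribute assortativity call works
    nx.set_node_attributes(G, {n: ("high" if n % 2 else "low") for n in G}, "crime")
    ver_medidas(G)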
import networkx as nx
import numpy as np
import pandas as pd
from networkx.algorithms.assortativity import degree_assortativity_coefficient
from networkx.algorithms.centrality import (betweenness_centrality,
                                            closeness_centrality,
                                            eigenvector_centrality)
from networkx.algorithms.distance_measures import periphery
from networkx.algorithms.flow import maximum_flow
from networkx.algorithms.wiener import wiener_index


def run_weighted_GT_calcs(G, just_data, Do_kdist, Do_BCdist, Do_CCdist, Do_ECdist,
                          Do_ANC, Do_Ast, Do_WI, multigraph):
    # includes weight in the calculations
    # NOTE: per-node results are indexed by integer, so node labels are
    # expected to be 0..n-1
    klist = [0]
    BCdist = [0]
    CCdist = [0]
    ECdist = [0]
    if multigraph:
        Do_BCdist = 0
        Do_ECdist = 0
        Do_ANC = 0

    if Do_ANC:
        connected_graph = nx.is_connected(G)

    wdata_dict = {"x": [], "y": []}

    if Do_kdist == 1:
        # weighted average degree
        klist1 = nx.degree(G, weight='weight')
        ksum = 0
        klist = np.zeros(len(klist1))
        for j in range(len(klist1)):
            ksum = ksum + klist1[j]
            klist[j] = klist1[j]
        k = round(ksum / len(klist1), 5)
        just_data.append(k)
        wdata_dict["x"].append("Weighted average degree")
        wdata_dict["y"].append(k)

    if Do_WI == 1:
        WI = round(wiener_index(G, weight='length'), 1)
        just_data.append(WI)
        wdata_dict["x"].append("Length-weighted Wiener Index")
        wdata_dict["y"].append(WI)

    if Do_ANC == 1:
        if connected_graph:
            # maximum flow between pairs of periphery nodes
            max_flow = float(0)
            p = periphery(G)
            q = len(p) - 1
            for s in range(0, q - 1):
                for t in range(s + 1, q):
                    flow_value = maximum_flow(G, p[s], p[t], capacity='weight')[0]
                    if flow_value > max_flow:
                        max_flow = flow_value
            max_flow = round(max_flow, 5)
        else:
            max_flow = 'NaN'
        just_data.append(max_flow)
        wdata_dict["x"].append("Max flow between periphery")
        wdata_dict["y"].append(max_flow)

    if Do_Ast == 1:
        Ast = round(degree_assortativity_coefficient(G, weight='pixel width'), 5)
        just_data.append(Ast)
        wdata_dict["x"].append("Weighted assortativity coefficient")
        wdata_dict["y"].append(Ast)

    if Do_BCdist == 1:
        BCdist1 = betweenness_centrality(G, weight='weight')
        Bsum = 0
        BCdist = np.zeros(len(BCdist1))
        for j in range(len(BCdist1)):
            Bsum += BCdist1[j]
            BCdist[j] = BCdist1[j]
        Bcent = round(Bsum / len(BCdist1), 5)
        just_data.append(Bcent)
        wdata_dict["x"].append("Width-weighted average betweenness centrality")
        wdata_dict["y"].append(Bcent)

    if Do_CCdist == 1:
        CCdist1 = closeness_centrality(G, distance='length')
        Csum = 0
        CCdist = np.zeros(len(CCdist1))
        for j in range(len(CCdist1)):
            Csum += CCdist1[j]
            CCdist[j] = CCdist1[j]
        Ccent = round(Csum / len(CCdist1), 5)
        just_data.append(Ccent)
        wdata_dict["x"].append("Length-weighted average closeness centrality")
        wdata_dict["y"].append(Ccent)

    if Do_ECdist == 1:
        try:
            ECdist1 = eigenvector_centrality(G, max_iter=100, weight='weight')
        except nx.PowerIterationFailedConvergence:
            ECdist1 = eigenvector_centrality(G, max_iter=10000, weight='weight')
        Esum = 0
        ECdist = np.zeros(len(ECdist1))
        for j in range(len(ECdist1)):
            Esum += ECdist1[j]
            ECdist[j] = ECdist1[j]
        Ecent = round(Esum / len(ECdist1), 5)
        just_data.append(Ecent)
        wdata_dict["x"].append("Width-weighted average eigenvector centrality")
        wdata_dict["y"].append(Ecent)

    wdata = pd.DataFrame(wdata_dict)

    return wdata, just_data, klist, BCdist, CCdist, ECdist
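# Hypothetical call of run_weighted_GT_calcs() on a toy weighted graph. Nodes
# are labelled 0..n-1 (the function indexes per-node results by integer) and
# every edge carries 'weight', 'length' and 'pixel width' attributes; the
# helper name and attribute values are assumptions.
def demo_weighted_GT():
    G = nx.path_graph(5)
    for u, v in G.edges():
        G[u][v].update({'weight': 1.0, 'length': 2.0, 'pixel width': 3.0})
    wdata, just_data, klist, BCdist, CCdist, ECdist = run_weighted_GT_calcs(
        G, [],  # just_data starts empty
        Do_kdist=1, Do_BCdist=1, Do_CCdist=1, Do_ECdist=1,
        Do_ANC=1, Do_Ast=1, Do_WI=1,
        multigraph=False)
    print(wdata)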
def d_periphery(self):
    """Periphery of the wrapped graph, serialized via to_json()."""
    from networkx.algorithms import distance_measures
    return self.to_json(distance_measures.periphery(self.__graph))
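# Hypothetical context for d_periphery(): a thin JSON-returning wrapper around
# a networkx graph. The class name, the __graph attribute, and to_json() are
# assumptions, not from the original source.
import json
import networkx as nx


class GraphService:
    def __init__(self, graph: nx.Graph) -> None:
        self.__graph = graph

    def to_json(self, obj):
        return json.dumps(obj)

    def d_periphery(self):
        from networkx.algorithms import distance_measures
        return self.to_json(distance_measures.periphery(self.__graph))

# e.g. GraphService(nx.path_graph(4)).d_periphery() -> "[0, 3]"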