def graph_1():
    """Build a small 5-node demo graph and print its nodes, edges, and
    distance measures (center, periphery, eccentricity)."""
    demo = nx.Graph()
    demo.add_nodes_from([2, 3, 5, 6, 7])
    demo.add_edges_from([[2, 3], [5, 3], [6, 7], [7, 2], [5, 7]])
    print(list(demo.nodes()))
    print(list(demo.edges()))
    print(distance_measures.center(demo))
    print(distance_measures.periphery(demo))
    print(distance_measures.eccentricity(demo))
def runWith(fname):
    """Load a directed graph from *fname* via GraphMaker and print a batch
    of connectivity metrics (component counts, transitivity).

    NOTE(review): the bare ``return`` below was in the original and disables
    the distance/small-world section; it is preserved so behavior is
    unchanged, but the dead code after it has had its typos fixed
    (undefined ``dprint`` -> ``print``; periphery length now computed from
    ``p`` instead of the eccentricity dict ``e``).
    """
    print(fname)
    gm = dr.GraphMaker()
    gm.load(fname)
    # dpr = gm.pagerank()
    dg = gm.graph()
    # for x in dg: print('VERT::', x)
    print('nodes:', dg.number_of_nodes())
    print('edges:', dg.number_of_edges())
    comps = nx.strongly_connected_components(dg)
    print('strongly connected components:', len(list(comps)))
    # recompute: the generator above was consumed by list()
    c = max(nx.strongly_connected_components(dg), key=len)
    mg = dg.subgraph(c)  # largest strongly connected component
    print('attracting components:', co.number_attracting_components(dg))
    print('number_weakly_connected_components:',
          co.number_weakly_connected_components(dg))
    print('Transitivity:', cl.transitivity(dg))
    return
    # --- unreachable below (kept from original, typos fixed) ---
    e = dm.eccentricity(mg)
    print('ecc:', e)
    cent = dm.center(mg, e=e)
    print('CENTER', cent)
    p = dm.periphery(mg, e=e)
    print('perif:', len(list(p)))
    # print('perif:', p)
    print('diameter:', dm.diameter(nx.Graph(mg)))
    print('radius:', dm.radius(nx.Graph(mg)))
    g = nx.Graph(dg)
    print('omega:', omega(g))
    print('sigma:', sigma(g))
def get_center_nodes(pattern):
    """Return the center node(s) of the *pattern* graph."""
    return center(pattern)
def print_Measures(self,
                   G,
                   blnCalculateDimater=False,
                   blnCalculateRadius=False,
                   blnCalculateExtremaBounding=False,
                   blnCalculateCenterNodes=False,
                   fileName_to_print=None):
    """Print basic measures of graph *G* and optionally save them to a file.

    Parameters
    ----------
    G : graph
        The graph to measure (networkx-compatible).
    blnCalculateDimater, blnCalculateRadius, blnCalculateExtremaBounding,
    blnCalculateCenterNodes : bool
        Toggles for the individual measures (parameter names kept as-is,
        including the original ``Dimater`` spelling, for caller
        compatibility).
    fileName_to_print : str or None
        If given, the measures are also written to this path (directories
        are created as needed).
    """
    # Diameter/radius/center require a connected graph; is_connected raises
    # for directed or empty graphs, so treat any failure as "not connected".
    try:
        blnGraphConnected = is_connected(G)
    except Exception:  # narrowed from bare except
        blnGraphConnected = False

    no_nodes = str(len(G.nodes()))
    no_edges = str(len(G.edges()))
    print("# Nodes: " + no_nodes)
    print("# Edges: " + no_edges)

    # Calculate and print Diameter
    if blnCalculateDimater:
        if blnGraphConnected:
            diameter_value = str(distance_measures.diameter(G))
            print("Diameter: " + diameter_value)
        else:
            diameter_value = "Not possible to calculate diameter. Graph must be connected"
            print(diameter_value)

    # Calculate and print Radius
    if blnCalculateRadius:
        if blnGraphConnected:
            radius_value = str(distance_measures.radius(G))
            print("Radius: " + radius_value)
        else:
            radius_value = "Not possible to calculate radius. Graph must be connected"
            print(radius_value)

    # Calculate and print Extrema bounding
    if blnCalculateExtremaBounding:
        if blnGraphConnected:
            extrema_bounding_value = str(
                distance_measures.extrema_bounding(G))
            print("Extrema bounding: " + extrema_bounding_value)
        else:
            extrema_bounding_value = "Not possible to calculate Extrema bounding. Graph must be connected"
            print(extrema_bounding_value)

    # Calculate and print Centers (sorted by degree, descending)
    if blnCalculateCenterNodes:
        str_centers_nodes = ""
        if blnGraphConnected:
            centers_nodes = distance_measures.center(G)
            str_centers_nodes = str(
                sorted(G.degree(centers_nodes),
                       key=lambda x: x[1],
                       reverse=True))
            print("Centers with their degree: " + str_centers_nodes)
        else:
            centers_nodes = "Not possible to calculate Centers. Graph must be connected"
            print(centers_nodes)

    # If a file name is passed, save the measures into that file.
    if fileName_to_print is not None:
        # create the parent directory if it does not exist
        if not os.path.exists(os.path.dirname(fileName_to_print)):
            os.makedirs(os.path.dirname(fileName_to_print))
        # with-statement guarantees the handle is closed on error
        with open(fileName_to_print, "w") as f:
            f.write("# Nodes: " + no_nodes + "\n")
            f.write("# Edges: " + no_edges + "\n")
            if blnCalculateDimater:
                f.write("Diameter: " + diameter_value + "\n")
            if blnCalculateRadius:
                f.write("Radius: " + radius_value + "\n")
            # if blnCalculateBaryCenter == True:
            #     f.write("Bary Center: " + barycenter_node + "\n")
            if blnCalculateExtremaBounding:
                f.write("Extrema bounding: " + extrema_bounding_value + "\n")
            if blnCalculateCenterNodes:
                # NOTE(review): when the graph is disconnected this writes an
                # empty value (original behavior, preserved).
                f.write("Centers with their degree: " + str_centers_nodes + "\n")
def getCenter(self) -> Union[int, list, dict, float, Any, None]:
    """Return the center node(s) of ``self.graph``.

    Fix: the original annotation referenced ``Unknown`` (a Pylance
    placeholder pasted from editor hover output), which would raise
    ``NameError`` at import time unless defined elsewhere; replaced with
    an equivalent valid annotation.
    """
    return center(self.graph)
def reassignLandowners(self, graph):
    """Partition the graph's nodes among ``self.numLandowners`` owners.

    Places landowner "centers" on a circle of radius ``radius(graph)``
    around the graph center, then assigns every node to the nearest
    center by Euclidean distance on the stored coordinates.

    Returns a tuple ``(graph, nodeList, AreaBelongsToLandowners)`` where
    ``nodeList`` maps owner index -> list of owned nodes and the last
    element lists each owner's area (``Cell_area`` * node count).

    NOTE(review): uses ``graph.node[...]`` (NetworkX 1.x API; removed in
    2.x in favor of ``graph.nodes[...]``) — confirm the pinned networkx
    version.
    """
    # Attach stored coordinates to every node (keys in nodeCoordinates
    # are strings).
    for node in graph.nodes():
        graph.node[node]["coordinates"] = self.nodeCoordinates[str(node)]
    '''
    if self.numLandowners == 3:
        centers = [129, 145, 487]
    if self.numLandowners == 4:
        centers = [130, 143, 455, 468]
    if self.numLandowners == 5:
        centers = [78, 96, 312, 528, 546]
    if self.numLandowners == 6:
        centers = [129, 137, 145, 479, 487, 495]
    if self.numLandowners == 7:
        centers = [77, 97, 162, 312, 487, 527, 547]
    if self.numLandowners == 8:
        centers = [153, 159, 165, 171, 453, 459, 465, 471]
    if self.numLandowners == 9:
        centers = [84, 90, 101, 123, 312, 501, 523, 534, 540]
    if self.numLandowners == 10:
        centers = [127, 132, 137, 142, 147, 477, 482, 487, 492, 497]
    '''
    # Graph-theoretic center/radius of the whole graph anchor the layout.
    centerNode = center(graph)[0]
    graphRadius = radius(graph)
    xCenter = graph.node[centerNode]["coordinates"]["x"]
    yCenter = graph.node[centerNode]["coordinates"]["y"]
    landCenters = []
    # Angular slice per landowner (full circle / owner count).
    landArea = 2*math.pi/self.numLandowners
    for piece in range(self.numLandowners):
        angle = landArea*piece
        landAreaX = math.floor(xCenter + graphRadius*math.cos(angle))
        landAreaY = math.floor(yCenter + graphRadius*math.sin(angle))
        # (x, y) -> node id; assumes a 25-wide grid numbering — TODO confirm
        nodeNum = landAreaX*25 + landAreaY
        landCenters.append(nodeNum)
    # Mark each chosen center node with its owner index.
    owner = 0
    for landCenter in landCenters:
        graph.node[landCenter]["owner"] = owner
        owner = owner + 1
    # Assign every node to the Euclidean-nearest land center; ties go to
    # the lowest owner index (list.index returns the first minimum).
    for node in graph.nodes():
        distList = []
        for landCenter in landCenters:
            distance = ((graph.node[landCenter]["coordinates"]["x"] - graph.node[node]["coordinates"]["x"])**2 + (graph.node[landCenter]["coordinates"]["y"] - graph.node[node]["coordinates"]["y"])**2)**0.5
            distList.append(distance)
        graph.node[node]["owner"] = distList.index(min(distList))
    '''
    for node in graph.nodes():
        distList = []
        for center in centers:
            distance = nx.shortest_path_length(graph, source=node, target=center)
            distList.append(distance)
        graph.node[node]["owner"] = distList.index(min(distList))
    '''
    # Collect each owner's node list (O(owners * nodes) scan).
    nodeList = {}
    for owner in range(self.numLandowners):
        nodeList[owner] = []
        for node in graph.nodes():
            if graph.node[node]["owner"] == owner:
                nodeList[owner].append(node)
    # Area per owner = cell area * number of owned nodes.
    AreaBelongsToLandowners = []
    for owner in nodeList:
        AreaBelongsToLandowners.append(self.Cell_area*len(nodeList[owner]))
    return graph, nodeList, AreaBelongsToLandowners
def next_steps(self):
    """Run the Sludge-vs-Green seepage game over a range of source
    vertices, tracking contamination numbers and writing per-vertex and
    combined result files plus a final experiment log.

    NOTE(review): this function reads many names that are not defined
    locally or as parameters (``g``, ``k``, ``ell``, ``lap_time``,
    ``start_time``, ``start_string``, ``range_of_test``, ``num_of_exper``,
    ``sludge_algo``, ``green_algo``, ``file_path``, ``vertex_file_path``,
    ``graph_file_path``) — presumably globals or leftovers from a script;
    ``g = nx.convert_node_labels_to_integers(g)`` in particular raises
    UnboundLocalError unless ``g`` is global. Also ``time.clock()`` was
    removed in Python 3.8 (use ``time.perf_counter()``). TODO confirm.
    """
    #TODO: Refactor everything in this function
    g = nx.convert_node_labels_to_integers(g)
    center_list = center(g)
    contam_num_data = []
    # graph_pos_data = []
    graph_data = []
    sludge_move_data = []
    green_move_data = []
    polluted_vertices_data = []
    green_vertices_data = []
    temp_contam_num_dict = {}
    extended_experiment_data = []
    vertex_time_data = []
    # begin testing loop: one pass per candidate source vertex v_s
    for i in range_of_test:
        v_s_start_time = time.clock()
        j_time_data = []
        # print(i)
        v_s = i
        # NOTE(review): {v_s, 'Source'} is a *set*, while the calls below
        # pass a dict ({v_i: "POL"}); set_node_attributes expects a dict
        # here — looks like a bug, TODO confirm intended labeling.
        nx.classes.function.set_node_attributes(g, {v_s, 'Source'}, 'label')
        # Clear Local Lists
        local_contam_num_data = []
        # local_graph_pos_data = []
        local_graph_data = []
        local_sludge_move_data = []
        local_green_move_data = []
        local_pol_vertices_data = []
        local_green_vertices_data = []
        # --- begin results loop for source vertex
        for j in range(num_of_exper):
            j_start_time = time.clock()
            k = k + 1
            # initiate move lists
            sludge_move_num = 0
            green_move_num = 0
            sludge_move_list = []
            green_move_list = []
            sludge_move_list.append((sludge_move_num, v_s))
            # initiate thr (threatened) vertices: neighbors of the source
            list_thr = list(g.neighbors(v_s))
            V_thr = g.subgraph(g.neighbors(v_s))
            # initiate pol (polluted) vertices: just the source
            V_pol = g.subgraph(v_s)
            list_pol = list(V_pol.nodes())
            # initiate Green (protected) Vertices: empty
            V_pro = nx.generators.classic.empty_graph(0)
            list_green_vertices = []
            # --- Sludge Move (Sludge always moves first)
            sludge_move_num = sludge_move_num + 1
            v_i = Functions.sludge_vertex_selector(g, V_pol, V_pro, sludge_algo, list_thr, center_list, v_s)
            # print 'The vertex selected by Sludge is: ' + str(max_deg_thr)
            # pollute vertex v_i
            nx.classes.function.set_node_attributes(
                g, {v_i: "POL"}, 'label')
            # print(list_thr)
            list_thr.remove(v_i)
            list_pol.append(v_i)
            # update polluted list
            V_pol = g.subgraph(list_pol)  # update polluted vertices
            for v in g.neighbors(v_i):  # update threatened list
                # only vertices not already green/polluted/threatened
                if (v in list_green_vertices or v in list_pol
                        or v in list_thr):
                    continue
                else:
                    list_thr.append(v)
            V_thr = g.subgraph(list_thr)  # update threatened subgraph
            sludge_move_list.append((sludge_move_num, v_i))
            # --- Green-Sludge Move Sequence: alternate until nothing
            # is threatened
            while len(list_thr) > 0:
                # Check to see if Green can move
                if len(list_thr) == 0:
                    break
                # --- Green's Move
                else:
                    green_move_num = green_move_num + 1
                    v_i = Functions.green_vertex_selector(
                        g, V_pol, V_pro, green_algo, list_thr,
                        center_list, v_s)
                    # print 'The vertex selected by Green is: ' + str(v_i)
                    nx.classes.function.set_node_attributes(
                        g, {v_i: "GRN"}, 'label')
                    # g.set_vertex(v_i, "GRN")  # protect vertex v_i
                    list_thr.remove(v_i)
                    V_thr = g.subgraph(list_thr)
                    # update data tracking
                    list_green_vertices.append(v_i)
                    V_pro = g.subgraph(list_green_vertices)
                    green_move_list.append((green_move_num, v_i))
                # Check to see if Sludge can move
                if len(list_thr) == 0:
                    break
                # --- Sludge's Move
                else:
                    sludge_move_num = sludge_move_num + 1
                    v_i = Functions.sludge_vertex_selector(
                        g, V_pol, V_pro, sludge_algo, list_thr,
                        center_list, v_s)
                    # print 'The vertex slected by Sludge is: ' + str(v_i)
                    nx.classes.function.set_node_attributes(
                        g, {v_i: "POL"}, 'label')
                    # g.set_vertex(v_i, "POL")  # pollute vertex v_i
                    list_pol.append(v_i)
                    list_thr.remove(v_i)
                    # update polluted vertices subgraph
                    V_pol = g.subgraph(list_pol)
                    # update threatened list with new frontier neighbors
                    for v in g.neighbors(v_i):
                        if (v in list_green_vertices or v in list_pol
                                or v in list_thr):
                            continue
                        else:
                            list_thr.append(v)
                    # update threatened subgraph
                    V_thr = g.subgraph(list_thr)
                    # update Sludge's move tracker
                    sludge_move_list.append((sludge_move_num, v_i))
            # log results time
            j_end_time = time.clock()
            j_time = j_end_time - j_start_time
            j_time_data.append((j, j_time))
            # Update Local Contamination Number Data
            # (contamination number = size of the polluted subgraph)
            local_contam_num_data.append((j, V_pol.order()))
            # print('Contam num data(local):' + str(local_contam_num_data))
            # local_graph_pos_data.append((j, g.get_pos()))
            local_graph_data.append((j, nx.convert.to_dict_of_lists(g)))
            local_sludge_move_data.append((j, sludge_move_list))
            local_green_move_data.append((j, green_move_list))
            local_pol_vertices_data.append((j, V_pol))
            local_green_vertices_data.append((j, list_green_vertices))
        # log vertex time data
        v_s_end_time = time.clock()
        v_s_time = v_s_end_time - v_s_start_time
        vertex_time_data.append((v_s, v_s_time))
        # --- Map Local Data: convert the (key, value) pair lists into
        # dicts keyed by trial index j
        local_contam_num_dict = {}
        local_graph_pos_dict = {}
        local_graph_dict = {}
        local_sludge_move_dict = {}
        local_green_move_dict = {}
        local_pol_vertices_dict = {}
        local_green_vertices_dict = {}
        local_contam_num_dict.update(local_contam_num_data)
        # local_graph_pos_dict.update(local_graph_pos_data)
        local_graph_dict.update(local_graph_data)
        local_sludge_move_dict.update(local_sludge_move_data)
        local_green_move_dict.update(local_green_move_data)
        local_pol_vertices_dict.update(local_pol_vertices_data)
        local_green_vertices_dict.update(local_green_vertices_data)
        # --- Process Data and Transfer to Global Lists: keep only the
        # worst (max-contamination) trial for this source vertex
        # print(local_contam_num_dict)
        local_max_key = max(local_contam_num_dict,
                            key=lambda key: local_contam_num_dict[key])
        contam_num_data.append((v_s, local_contam_num_dict[local_max_key]))
        # print(local_max_key)
        # graph_pos_data.append((v_s,local_graph_pos_dict[local_max_key]))
        graph_data.append((v_s, local_graph_dict[local_max_key]))
        sludge_move_data.append(
            (v_s, local_sludge_move_dict[local_max_key]))
        green_move_data.append((v_s, local_green_move_dict[local_max_key]))
        polluted_vertices_data.append(
            (v_s, local_pol_vertices_dict[local_max_key]))
        green_vertices_data.append(
            (v_s, local_green_vertices_dict[local_max_key]))
        # Store and Write Vertex Data
        extended_experiment_data.append((v_s, local_contam_num_dict))
        vertex_experiment_data = {
            'Log_time': str(time.ctime()),
            'abs_time': time.clock(),
            'vertex_time': v_s_time,
            'j_test_time': j_time_data,
            'local_max_key': local_max_key,
            'local_contam_num_dict': local_contam_num_dict,
            'local_graph_pos_dict': local_graph_pos_dict,
            'local_graph_dict': local_graph_dict,
            'local_sludge_move_dict': local_sludge_move_dict,
            'local_green_move_dict': local_green_move_dict,
            'local_pol_vertices_dict': local_pol_vertices_dict,
            'local_green_vertices_dict': local_green_vertices_dict
        }
        # one file per source vertex, repr-dumped
        v_s_file_name = vertex_file_path + str(v_s)
        v_s_file_out = open(v_s_file_name, 'w')
        v_s_file_out.write(str(vertex_experiment_data))
        v_s_file_out.close()
        # Progress Outputs: report roughly every `ell` percent
        progress = k / (num_of_exper * len(range_of_test)) * 100
        if int(progress) > 0 and (int(progress) % ell == 0):
            # temporary data for updates
            temp_contam_num_dict.update(contam_num_data)
            # temporary max vertex
            v_temp_max = max(temp_contam_num_dict,
                             key=(lambda key: temp_contam_num_dict[key]))
            current_time = time.clock()
            elapsed_time = current_time - lap_time
            lap_time = current_time
            print(
                ("Progress: %s percent. With %s/%s with an interval of %s"
                 "seconds, the vertex with the highest contamination "
                 "number is %s" %
                 (progress, k, str(num_of_exper * len(range_of_test)),
                  elapsed_time, v_temp_max)))
            ell = ell + 10
        # Clear Local Data (Optional)
        local_contam_num_dict = {}
        local_graph_pos_dict = {}
        local_graph_dict = {}
        local_sludge_move_dict = {}
        local_green_move_dict = {}
        local_pol_vertices_dict = {}
        local_green_vertices_dict = {}
        local_contam_num_data = []
        # local_graph_pos_data = []
        local_graph_data = []
        local_sludge_move_data = []
        local_green_move_data = []
        local_pol_vertices_data = []
        local_green_vertices_data = []
        # print 'Data Cleared'
    # --- Data for File
    # --- Prepare Experiment Data
    # Time Measurements
    experiment_end_time = time.ctime()
    end_time = time.clock()
    elapsed_time = (end_time) - (start_time)
    # data for storage
    experiment_data_list = []
    experiment_data_list.extend(contam_num_data)
    experiment_dict = {}
    experiment_dict.update(experiment_data_list)
    extended_experiment_dict = {}
    extended_experiment_dict.update(extended_experiment_data)
    combined_experiment_data = {
        'extended_experiment_dict': extended_experiment_dict,
        'experiment_dict': experiment_dict
    }
    # write data to file (directory keyed by start_time)
    file_name = str(
        './Data/SageGraphs/Sage-Output-Files/Test_Results/' +
        str(start_time) + '/combined_experiment_data.txt')
    file_out = open(file_name, 'w')
    file_out.write(str(combined_experiment_data))
    file_out.close()
    # --- Find Contamination Number: the vertex with the overall maximum
    contam_num_dict = dict(contam_num_data)
    # graph_pos_dict = dict(graph_pos_data)
    graph_dict = dict(graph_data)
    sludge_move_dict = dict(sludge_move_data)
    green_move_dict = dict(green_move_data)
    # polluted_vertices_dict = dict(polluted_vertices_data)
    # green_vertices_dict = dict(green_vertices_data)
    contam_num_key = max(contam_num_dict,
                         key=lambda key: contam_num_dict[key])
    # --- Experiment Results Output
    # NOTE(review): the stray "' + '" inside this string looks like a
    # leftover from an earlier concatenation — TODO confirm wording.
    results_string = ("""\n Results: \n\n' + 'The vertex with the highest contamination number was %s with a contamination number of \n The mean contamination number for the vertices tested is %s \n""" % (contam_num_key, contam_num_dict[contam_num_key]))
    # print results_string
    # --- Test for graphing and evidence collection ###
    sludge_moves = sludge_move_dict[contam_num_key]
    green_moves = green_move_dict[contam_num_key]
    # polluted_vertices_subgraph = polluted_vertices_dict[contam_num_key];
    # polluted_vertices_subgraph.remove_node(contam_num_key)
    # list_orange = polluted_vertices_subgraph.nodes()
    # list_green = green_vertices_dict[contam_num_key]
    # --- Print Graph for reference
    # GP = Graph(graph_dict[contam_num_key])
    # GP.set_pos(graph_pos_dict[contam_num_key]);
    # d = {'#00FF00':list_green,
    #      '#FF9900':list_orange, '#FF0000':[contam_num_key]};
    # GP.graphplot(vertex_colors=d).show(figsize=[10, 10])
    file_types = ['.eps', '.pdf', '.png', '.ps', '.sobj', '.svg']
    # NOTE(review): the loop variable i is unused in the body; the same
    # graph is drawn once per file type without being saved.
    for i in file_types:
        # graph_plot_file_name = graph_file_path+'Maximal-Seepage-Game'+i
        graph_plot = Graph(graph_dict[contam_num_key])
        nx.drawing.nx_pylab.draw_networkx(graph_plot)
        # graph_plot.set_pos(graph_pos_dict[contam_num_key]);
        # graph_plot.graphplot(vertex_colors=d).plot(figsize \
        #     = [10, 10]).save(graph_plot_file_name)
    conclusion_string = (
        """\n Conclusion: \n\n These were Sludge's Moves: \n %s \n\n These were Green's moves: \n %s \n\n The directory for this results can be found here: \n %s \n The overall contamination number data for this experiment is in: \n combined_experiment_data.txt\n The directory containing detailed information for the tests on each vertex is: \n %s \n The directory containing the graphs of the maximal seepage situation is: \n %s \n\n The experiment ended at: %s after %s seconds of computation.\n\n Experiment conducted by: Peter Nicks""" %
        (sludge_moves, green_moves, file_path, vertex_file_path,
         graph_file_path, experiment_end_time, elapsed_time))
    # final human-readable experiment log
    experiment_log_file_name = file_path + 'experiment_log.txt'
    experiment_log_file = open(experiment_log_file_name, 'w')
    experiment_log_file.write(start_string + '\n' + results_string + '\n' +
                              conclusion_string + '\n')
    experiment_log_file.close()
    return
def ver_medidas(G):
    """Print and plot a battery of measures for graph *G*: connectivity,
    clustering, density, assortativity, centrality, distance measures,
    PageRank, and small-world coefficients.

    NOTE(review): the Spanish bare triple-quoted strings below are
    expression statements used as commentary in the original; they are
    left untouched. ``smallworld.sigma``/``omega`` are expensive
    (randomized rewiring) — expect long runtimes on large graphs.
    """
    print(function.info(G))
    """
    Numero minimo de nodos que deben ser removidos
    para desconectar G
    """
    # Minimum number of nodes whose removal disconnects G (approximate).
    print("Numero minimo de nodos que deben ser removidos para desconectar G :"+str(approximation.node_connectivity(G)))
    """
    average clustering coefficient of G.
    """
    print("average clustering coefficient of G: "+str(approximation.average_clustering(G)))
    """
    Densidad de un Grafo
    """
    # Graph density.
    print("Densidad de G: "+str(function.density(G)))
    """
    Assortativity measures the similarity of connections in
    the graph with respect to the node degree.
    Valores positivos de r indican que existe una correlacion entre nodos
    con grado similar, mientras que un valor negativo indica
    correlaciones entre nodos de diferente grado
    """
    print("degree assortativity:"+str(assortativity.degree_assortativity_coefficient(G)))
    """
    Assortativity measures the similarity of connections
    in the graph with respect to the given attribute.
    """
    # Assumes every node carries a "crime" attribute — TODO confirm.
    print("assortativity for node attributes: "+str(assortativity.attribute_assortativity_coefficient(G,"crime")))
    """
    Grado promedio vecindad
    """
    # Plot: average neighbor degree per node.
    plt.plot(assortativity.average_neighbor_degree(G).values())
    plt.title("Grado promedio vecindad")
    plt.xlabel("Nodo")
    plt.ylabel("Grado")
    plt.show();
    """
    Grado de Centralidad de cada nodo
    """
    # Plot: degree centrality per node.
    plt.plot(centrality.degree_centrality(G).values())
    plt.title("Grado de centralidad")
    plt.xlabel("Nodo")
    plt.ylabel("Centralidad")
    plt.show();
    """
    Calcular el coeficiente de agrupamiento para nodos
    """
    # Plot: clustering coefficient per node.
    plt.plot(cluster.clustering(G).values())
    plt.title("coeficiente de agrupamiento")
    plt.xlabel("Nodo")
    plt.show();
    """
    Media coeficiente de Agrupamiento
    """
    # Mean clustering coefficient.
    print("Coeficiente de agrupamiento de G:"+str(cluster.average_clustering(G)))
    """
    Centro del grafo
    El centro de un grafo G es el subgrafo inducido por el conjunto de
    vertices de excentricidad minima. La excentricidad de v in V se define
    como la distancia maxima desde v a cualquier otro vertice del grafo G
    siguiendo caminos de longitud minima.
    """
    # Graph center: nodes of minimum eccentricity.
    print("Centro de G:"+ str(distance_measures.center(G)))
    """
    Diametro de un grafo
    The diameter is the maximum eccentricity.
    """
    print("Diametro de G:"+str(distance_measures.diameter(G)))
    """
    Excentricidad de cada Nodo
    The eccentricity of a node v is the maximum distance
    from v to all other nodes in G.
    """
    # Plot: eccentricity per node.
    plt.plot(distance_measures.eccentricity(G).values())
    plt.title("Excentricidad de cada Nodo")
    plt.xlabel("Nodo")
    plt.show();
    """
    Periferia
    The periphery is the set of nodes with eccentricity equal
    to the diameter.
    """
    print("Periferia de G:")
    print(distance_measures.periphery(G))
    """
    Radio
    The radius is the minimum eccentricity.
    """
    print("Radio de G:"+str(distance_measures.radius(G)))
    """
    PageRank calcula una clasificacion de los nodos en el grafico G en
    funcion de la estructura de los enlaces entrantes. Originalmente fue
    disenado como un algoritmo para clasificar paginas web.
    """
    # Plot: PageRank score per node.
    plt.plot(link_analysis.pagerank_alg.pagerank(G).values())
    plt.title("Puntaje de cada Nodo")
    plt.xlabel("Nodo")
    plt.show();
    """
    Coeficiente de Small World.
    A graph is commonly classified as small-world if sigma>1.
    """
    print("Coeficiente de Small World: " + str(smallworld.sigma(G)))
    """
    The small-world coefficient (omega) ranges between -1 and 1.
    Values close to 0 means the G features small-world characteristics.
    Values close to -1 means G has a lattice shape whereas values close
    to 1 means G is a random graph.
    """
    print("Omega coeficiente: "+str(smallworld.omega(G)))
def d_central(self):
    """Return the center node(s) of the wrapped graph, serialized with
    ``self.to_json``."""
    from networkx.algorithms import distance_measures
    central_nodes = distance_measures.center(self.__graph)
    return self.to_json(central_nodes)