def large_scale_structure(el, dels):
    """Track how the number of attracting components grows as edges are
    added delay-by-delay, relative to the complete graph.

    Parameters
    ----------
    el : pandas.DataFrame
        Edge list with a "MELD_DELAY" column giving the reporting delay
        of each edge.
    dels : iterable
        Delay values to process, in order; edges are accumulated into one
        growing DiGraph across iterations.

    Returns
    -------
    list of tuple
        One ``(delay, noac_total, noac_so_far, noac_so_far / noac_total)``
        entry per delay, where ``noac_total`` is the attracting-component
        count of the complete graph.
    """
    # TODO: rework so that the different large-scale structures
    # (strongly/weakly connected components, attracting components,
    # average shortest path length, ...) can be selected by the caller.
    G = from_edgelist(el, nx.DiGraph())
    noac = nx.number_attracting_components(G)
    print("number of attractings components : ", noac)
    print("number of nodes : ", nx.number_of_nodes(G))
    # Alternative measures on the complete graph, kept for reference:
    # gwcc_G = len(max(nx.strongly_connected_components(G), key=len))
    # aspl = nx.average_shortest_path_length(G)

    resultslist = []
    G = nx.DiGraph()
    with click.progressbar(dels) as delays:
        for d in delays:
            # .loc replaces the DataFrame.ix indexer removed in pandas 1.0;
            # with a boolean mask the two are equivalent.
            sel = el.loc[el["MELD_DELAY"] == d]
            G = from_edgelist(sel, G)
            # Per-delay alternatives, kept for reference:
            # gwcc_Gd = len(max(nx.strongly_connected_components(G), key=len))
            # aspl_d = nx.average_shortest_path_length(G)
            noac_d = nx.number_attracting_components(G)
            resultslist.append((d, noac, noac_d, float(noac_d) / float(noac)))
    return resultslist
def draw_graph(nodes, edges, graphs_dir, default_lang='all'):
    """Build a MultiDiGraph from `nodes` and `edges`, print a battery of
    graph metrics to stdout, then render the graph to a PNG and a .dot file
    inside `graphs_dir`.

    `edges` is assumed to be a mapping {(src, dst): weight}; a weight of 0
    produces an unweighted edge.  -- TODO confirm against callers.

    NOTE(review): this uses several APIs removed in newer networkx releases
    (`nx.info`, `nx.pagerank_numpy`, `Graph.node`) — it appears to target
    networkx 1.x/2.x; verify before upgrading.
    """
    lang_graph = nx.MultiDiGraph()
    lang_graph.add_nodes_from(nodes)
    for edge in edges:
        if edges[edge] == 0:
            # zero weight -> plain edge with no weight/label attributes
            lang_graph.add_edge(edge[0], edge[1])
        else:
            lang_graph.add_edge(edge[0], edge[1], weight=float(edges[edge]),
                                label=str(edges[edge]))
    # print graph info in stdout
    # degree centrality
    print('-----------------\n\n')
    print(default_lang)
    print(nx.info(lang_graph))
    try:
        # When ties are associated to some positive aspects such as friendship or collaboration,
        # indegree is often interpreted as a form of popularity, and outdegree as gregariousness.
        DC = nx.degree_centrality(lang_graph)
        max_dc = max(DC.values())
        max_dc_list = [item for item in DC.items() if item[1] == max_dc]
    except ZeroDivisionError:
        # degree_centrality divides by (n - 1); graphs with <= 1 node raise.
        max_dc_list = []
    # https://ru.wikipedia.org/wiki/%D0%9A%D0%BE%D0%BC%D0%BF%D0%BB%D0%B5%D0%BA%D1%81%D0%BD%D1%8B%D0%B5_%D1%81%D0%B5%D1%82%D0%B8
    print('maxdc', str(max_dc_list), sep=': ')
    # assortativity coef
    AC = nx.degree_assortativity_coefficient(lang_graph)
    print('AC', str(AC), sep=': ')
    # connectivity
    print("Слабо-связный граф: ", nx.is_weakly_connected(lang_graph))
    print("количество слабосвязанных компонент: ", nx.number_weakly_connected_components(lang_graph))
    print("Сильно-связный граф: ", nx.is_strongly_connected(lang_graph))
    print("количество сильносвязанных компонент: ", nx.number_strongly_connected_components(lang_graph))
    print("рекурсивные? компоненты: ", nx.number_attracting_components(lang_graph))
    print("число вершинной связности: ", nx.node_connectivity(lang_graph))
    print("число рёберной связности: ", nx.edge_connectivity(lang_graph))
    # other info
    print("average degree connectivity: ", nx.average_degree_connectivity(lang_graph))
    print("average neighbor degree: ", sorted(nx.average_neighbor_degree(lang_graph).items(),
                                              key=itemgetter(1), reverse=True))
    # best for small graphs, and our graphs are pretty small
    print("pagerank: ", sorted(nx.pagerank_numpy(lang_graph).items(),
                               key=itemgetter(1), reverse=True))
    plt.figure(figsize=(16.0, 9.0), dpi=80)
    plt.axis('off')
    pos = graphviz_layout(lang_graph)
    nx.draw_networkx_edges(lang_graph, pos, alpha=0.5, arrows=True)
    nx.draw_networkx(lang_graph, pos, node_size=1000, font_size=12,
                     with_labels=True, node_color='green')
    # third positional argument is edge_labels in legacy networkx
    nx.draw_networkx_edge_labels(lang_graph, pos, edges)
    # saving file to draw it with dot-graphviz
    # changing overall graph view, default is top-bottom
    lang_graph.graph['graph'] = {'rankdir': 'LR'}
    # marking with blue nodes with maximum degree centrality
    for max_dc_node in max_dc_list:
        # NOTE(review): Graph.node was removed in networkx 2.4 (use .nodes)
        lang_graph.node[max_dc_node[0]]['fontcolor'] = 'blue'
    write_dot(lang_graph, os.path.join(graphs_dir, default_lang + '_links.dot'))
    # plt.show()
    plt.savefig(os.path.join(graphs_dir, 'python_' + default_lang + '_graph.png'), dpi=100)
    plt.close()
def write_components_info(G, report_file):
    """Append a summary of the component structure of directed graph `G`
    (strongly/weakly connected, attracting, semiconnectedness) to the
    open file-like object `report_file`."""
    summary_lines = [
        "===COMPONENTS_INFO===\n",
        "Number of strongly connected components: {}\n".format(
            nx.number_strongly_connected_components(G)),
        "Number of weakly connected components: {}\n".format(
            nx.number_weakly_connected_components(G)),
        "Number of attractive components: {}\n".format(
            nx.number_attracting_components(G)),
        "Is semiconnected: {}\n".format(nx.is_semiconnected(G)),
    ]
    for line in summary_lines:
        report_file.write(line)
def do_analysis(crate_path):
    """Emit one CSV row (to stdout) per function's NLL facts found under
    ``crate_path / "nll-facts"``.

    Each row holds the crate name, the raw fact counts, unique
    loan/variable/region counts, and CFG-level metrics (node count,
    density, transitivity, attracting components).
    """
    # crate_name is loop-invariant; the original recomputed it on every
    # iteration — hoisted here.
    crate_name = crate_path.stem
    facts_path = crate_path / "nll-facts"
    writer = csv.writer(sys.stdout)
    for fn_path in nll_fn_paths(facts_path):
        fn_facts = read_fn_nll_facts(fn_path)
        cfg = block_cfg_from_facts(fn_facts)
        writer.writerow([
            crate_name,
            *facts_to_row(fn_facts),
            unique_loans(fn_facts),
            unique_variables(fn_facts),
            unique_regions(fn_facts),
            cfg.number_of_nodes(),
            nx.density(cfg),
            nx.transitivity(cfg),
            nx.number_attracting_components(cfg),
        ])
def test_number_attacting_components(self):
    """Attracting-component counts for the fixture graphs G1..G3.

    (Method name keeps the historical "attacting" typo so test
    collection is unchanged.)
    """
    cases = ((self.G1, 3), (self.G2, 1), (self.G3, 2))
    for graph, expected in cases:
        assert_equal(nx.number_attracting_components(graph), expected)
pos=nx.spring_layout(G,k=0.15,iterations=10) # pos=nx.graphviz_layout(G) # pos=layout(G) G.remove_nodes_from(nx.isolates(G)) print str(" ") print 'ATTRACTING CONNECTEDNESS OF DIRECTED GRAPHS' print str(" ") print str(" ") print G.name print str(" ") print 'Does the graph G consist of a single attracting component?', nx.is_attracting_component(G) print 'The number of attracting components of G is:', nx.number_attracting_components(G) print str(" ") print 'List of attracting components:' print sorted(nx.attracting_components(G), key = len, reverse=True) print str(" ") lc=sorted(nx.strongly_connected_components(G), key = len, reverse=True) print 'List of strongly connected components:' print lc print str(" ") colors_list=['c','b','g','y','k','m'] colors_to_select=list(colors_list) graphs =sorted(nx.attracting_component_subgraphs(G), key = len, reverse=True) attracting_component_subgraphs_edges=[]
def summary(h, destin):
    """Collect an ordered dictionary of network metrics for the subgraph
    and shortest-path results of a single destination/exit.

    Parameters are opaque project types: `h` exposes per-destination
    subgraphs (`h.SG[destin]`) and precomputed results
    (`h.results[destin][...]`) — presumably an evacuation/route-analysis
    object; verify against the caller.  Python 2 code (`dict.iteritems`).

    Returns an OrderedDict mapping metric name -> value.
    """
    r = OrderedDict()
    r['num_edges'] = h.SG[destin].number_of_edges()
    r['num_nodes'] = h.SG[destin].number_of_nodes()
    # Spatial distance shortest path
    sdsp = h.results[destin]['spatial_distance']
    # Shortest path from all the nodes in the catchment area to the exit
    # min,max,mean shortest path length
    r['min_sdsp'] = np.min(sdsp.values())
    r['max_sdsp'] = np.max(sdsp.values())
    r['mean_sdsp'] = np.mean(sdsp.values())
    # 10%,50% and 90% shortest path lengths
    r['10pc_sdsp'] = np.percentile(sdsp.values(), 10)
    r['50pc_sdsp'] = np.percentile(sdsp.values(), 50)
    r['90pc_sdsp'] = np.percentile(sdsp.values(), 90)
    # Node sets within each spatial-distance threshold (reused further down
    # by the centrality aggregation in _means).
    nodes_mean_sdsp = [
        key for key, value in sdsp.iteritems() if value <= r['mean_sdsp']
    ]
    nodes_10pc_sdsp = [
        key for key, value in sdsp.iteritems() if value <= r['10pc_sdsp']
    ]
    nodes_50pc_sdsp = [
        key for key, value in sdsp.iteritems() if value <= r['50pc_sdsp']
    ]
    nodes_90pc_sdsp = [
        key for key, value in sdsp.iteritems() if value <= r['90pc_sdsp']
    ]
    r['num_nodes_mean_sdsp'] = len(nodes_mean_sdsp)
    r['num_nodes_10pc_sdsp'] = len(nodes_10pc_sdsp)
    r['num_nodes_50pc_sdsp'] = len(nodes_50pc_sdsp)
    r['num_nodes_90pc_sdsp'] = len(nodes_90pc_sdsp)
    r['frac_nodes_mean_sdsp'] = len(nodes_mean_sdsp) / float(r['num_nodes'])
    r['frac_nodes_10pc_sdsp'] = len(nodes_10pc_sdsp) / float(r['num_nodes'])
    r['frac_nodes_50pc_sdsp'] = len(nodes_50pc_sdsp) / float(r['num_nodes'])
    r['frac_nodes_90pc_sdsp'] = len(nodes_90pc_sdsp) / float(r['num_nodes'])
    # number of nodes less than 1000,500,250,125 metres away
    # could be a good indicator of how likely the bottlenecks are
    nodes_2000m = [key for key, value in sdsp.iteritems() if value <= 2000]
    nodes_1000m = [key for key, value in sdsp.iteritems() if value <= 1000]
    nodes_500m = [key for key, value in sdsp.iteritems() if value <= 500]
    nodes_250m = [key for key, value in sdsp.iteritems() if value <= 250]
    r['num_nodes_2000m'] = len(nodes_2000m)
    r['num_nodes_1000m'] = len(nodes_1000m)
    r['num_nodes_500m'] = len(nodes_500m)
    r['num_nodes_250m'] = len(nodes_250m)
    r['frac_nodes_2000m'] = len(nodes_2000m) / float(r['num_nodes'])
    r['frac_nodes_1000m'] = len(nodes_1000m) / float(r['num_nodes'])
    r['frac_nodes_500m'] = len(nodes_500m) / float(r['num_nodes'])
    r['frac_nodes_250m'] = len(nodes_250m) / float(r['num_nodes'])
    # Topological distance (hop counts) — mirrors the spatial block above.
    tdsp = h.results[destin]['topological_distance']
    # Shortest path from all the nodes in the catchment area to the exit
    # min,max,mean shortest path length
    r['min_tdsp'] = np.min(tdsp.values())
    r['max_tdsp'] = np.max(tdsp.values())
    r['mean_tdsp'] = np.mean(tdsp.values())
    # 10%,50% and 90% shortest path topological lengths
    r['10pc_tdsp'] = np.percentile(tdsp.values(), 10)
    r['50pc_tdsp'] = np.percentile(tdsp.values(), 50)
    r['90pc_tdsp'] = np.percentile(tdsp.values(), 90)
    nodes_mean_tdsp = [
        key for key, value in tdsp.iteritems() if value <= r['mean_tdsp']
    ]
    nodes_10pc_tdsp = [
        key for key, value in tdsp.iteritems() if value <= r['10pc_tdsp']
    ]
    nodes_50pc_tdsp = [
        key for key, value in tdsp.iteritems() if value <= r['50pc_tdsp']
    ]
    nodes_90pc_tdsp = [
        key for key, value in tdsp.iteritems() if value <= r['90pc_tdsp']
    ]
    r['num_nodes_mean_tdsp'] = len(nodes_mean_tdsp)
    r['num_nodes_10pc_tdsp'] = len(nodes_10pc_tdsp)
    r['num_nodes_50pc_tdsp'] = len(nodes_50pc_tdsp)
    r['num_nodes_90pc_tdsp'] = len(nodes_90pc_tdsp)
    r['frac_nodes_mean_tdsp'] = len(nodes_mean_tdsp) / float(r['num_nodes'])
    r['frac_nodes_10pc_tdsp'] = len(nodes_10pc_tdsp) / float(r['num_nodes'])
    r['frac_nodes_50pc_tdsp'] = len(nodes_50pc_tdsp) / float(r['num_nodes'])
    r['frac_nodes_90pc_tdsp'] = len(nodes_90pc_tdsp) / float(r['num_nodes'])
    # number of nodes in this topological distance from the exit
    nodes_8_tdsp = [key for key, value in tdsp.iteritems() if value <= 8]
    nodes_4_tdsp = [key for key, value in tdsp.iteritems() if value <= 4]
    nodes_2_tdsp = [key for key, value in tdsp.iteritems() if value <= 2]
    nodes_1_tdsp = [key for key, value in tdsp.iteritems() if value <= 1]
    r['num_nodes_8_tdsp'] = len(nodes_8_tdsp)
    r['num_nodes_4_tdsp'] = len(nodes_4_tdsp)
    r['num_nodes_2_tdsp'] = len(nodes_2_tdsp)
    r['num_nodes_1_tdsp'] = len(nodes_1_tdsp)
    r['frac_nodes_8_tdsp'] = len(nodes_8_tdsp) / float(r['num_nodes'])
    r['frac_nodes_4_tdsp'] = len(nodes_4_tdsp) / float(r['num_nodes'])
    r['frac_nodes_2_tdsp'] = len(nodes_2_tdsp) / float(r['num_nodes'])
    r['frac_nodes_1_tdsp'] = len(nodes_1_tdsp) / float(r['num_nodes'])
    # Total length and area
    r['sum_edge_length'] = h.SG[destin].size(weight='distance')
    r['sum_edge_area'] = h.SG[destin].size(weight='area')
    # Average edge length,width,area
    r['mean_edge_length'] = r['sum_edge_length'] / r['num_edges']
    r['mean_edge_area'] = r['sum_edge_area'] / r['num_edges']
    r['mean_edge_width'] = r['sum_edge_area'] / r['sum_edge_length']
    # Components
    # ----------
    r['num_strgconcom'] = nx.number_strongly_connected_components(h.SG[destin])
    r['num_weakconcom'] = nx.number_weakly_connected_components(h.SG[destin])
    r['num_attractcom'] = nx.number_attracting_components(h.SG[destin])
    # Degree
    # ------
    r['mean_degree'] = 2 * r['num_edges'] / float(r['num_nodes'])
    # Number of nodes with no incoming edges - based on leaf node
    # NOTE(review): in_degree()/out_degree() returning a dict is networkx 1.x
    # behaviour — confirm the pinned networkx version.
    r['num_nodes_0_in_deg'] = len(
        [n for n, d in h.SG[destin].in_degree().items() if d == 0])
    r['num_nodes_1_in_deg'] = len(
        [n for n, d in h.SG[destin].in_degree().items() if d == 1])
    r['num_nodes_2_in_deg'] = len(
        [n for n, d in h.SG[destin].in_degree().items() if d == 2])
    r['num_nodes_3+_in_deg'] = len(
        [n for n, d in h.SG[destin].in_degree().items() if d >= 3])
    r['num_nodes_0_out_deg'] = len(
        [n for n, d in h.SG[destin].out_degree().items() if d == 0])
    r['num_nodes_1_out_deg'] = len(
        [n for n, d in h.SG[destin].out_degree().items() if d == 1])
    r['num_nodes_2_out_deg'] = len(
        [n for n, d in h.SG[destin].out_degree().items() if d == 2])
    r['num_nodes_3+_out_deg'] = len(
        [n for n, d in h.SG[destin].out_degree().items() if d >= 3])
    r['frac_nodes_0_in_deg'] = r['num_nodes_0_in_deg'] / float(r['num_nodes'])
    r['frac_nodes_1_in_deg'] = r['num_nodes_1_in_deg'] / float(r['num_nodes'])
    r['frac_nodes_2_in_deg'] = r['num_nodes_2_in_deg'] / float(r['num_nodes'])
    r['frac_nodes_3+_in_deg'] = r['num_nodes_3+_in_deg'] / float(
        r['num_nodes'])
    r['frac_nodes_0_out_deg'] = r['num_nodes_0_out_deg'] / float(
        r['num_nodes'])
    r['frac_nodes_1_out_deg'] = r['num_nodes_1_out_deg'] / float(
        r['num_nodes'])
    r['frac_nodes_2_out_deg'] = r['num_nodes_2_out_deg'] / float(
        r['num_nodes'])
    r['frac_nodes_3+_out_deg'] = r['num_nodes_3+_out_deg'] / float(
        r['num_nodes'])
    # Centralities
    # ------------

    def _means(key, all):
        # Mean of a per-node metric (`all`: node -> value), overall and
        # restricted to each node subset computed above.
        # NOTE(review): parameter name `all` shadows the builtin.
        return [
            ('mean_{}'.format(key), np.mean(all.values())),
            ('mean_{}_mean_sdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_mean_sdsp])),
            ('mean_{}_10pc_sdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_10pc_sdsp])),
            ('mean_{}_50pc_sdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_50pc_sdsp])),
            ('mean_{}_90pc_sdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_90pc_sdsp])),
            ('mean_{}_2000m'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_2000m])),
            ('mean_{}_1000m'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_1000m])),
            ('mean_{}_500m'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_500m])),
            ('mean_{}_250m'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_250m])),
            ('mean_{}_mean_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_mean_tdsp])),
            ('mean_{}_10pc_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_10pc_tdsp])),
            ('mean_{}_50pc_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_50pc_tdsp])),
            ('mean_{}_90pc_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_90pc_tdsp])),
            ('mean_{}_8_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_8_tdsp])),
            ('mean_{}_4_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_4_tdsp])),
            ('mean_{}_2_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_2_tdsp])),
            ('mean_{}_1_tdsp'.format(key),
             np.mean([v for k, v in all.iteritems() if k in nodes_1_tdsp])),
        ]

    # Degree Centrality
    r.update(_means('deg_cen', h.results[destin]['degree_centrality']))
    # In degree centrality
    r.update(_means('in_deg_cen', h.results[destin]['in_degree_centrality']))
    # Out degree centrality
    r.update(_means('out_deg_cen', h.results[destin]['out_degree_centrality']))
    # Betweenness centrality
    r.update(_means('bet_cen', h.results[destin]['betweenness_centrality']))
    # Betweenness centrality subset for exit
    r.update(
        _means('bet_cen_exit',
               h.results[destin]['betweenness_centrality_exit']))
    # Edge betweenness centrality
    r['mean_edge_bet_cen'] = np.mean(
        h.results[destin]['edge_betweenness_centrality'].values())
    # Edge betweenness centrality subset for exit
    r['mean_edge_bet_cen_exit'] = np.mean(
        h.results[destin]['edge_betweenness_centrality_exit'].values())
    # Load centrality
    r.update(_means('load_cen_exit', h.results[destin]['load_centrality']))
    # Eigenvector centrality
    r.update(
        _means('eivec_cen_exit', h.results[destin]['eigenvector_centrality']))
    # Closeness centrality
    r.update(
        _means('close_cen_exit', h.results[destin]['closeness_centrality']))
    # Density - 0 for no edges, 1 for fully connected
    r['density'] = nx.density(h.SG[destin])
    # Clustering
    # ----------
    # Transitivity is the fraction of all possible triangles present in G.
    r['transitivity'] = nx.transitivity(h.SG[destin])
    # Mean square_clustering
    r.update(_means('sq_clust', h.results[destin]['square_clustering']))
    return r
#list(nx.find_cliques(G)) #list(nx.make_max_clique_graph(G)) #list(nx.make_clique_bipartite(G)) #nx.graph_clique_number(G) #nx.graph_number_of_cliques(G) # components nx.is_strongly_connected(G) nx.number_strongly_connected_components(G) scc = nx.strongly_connected_components(G) nx.strongly_connected_components_recursive(G) nx.condensation(G, scc) # attracting components nx.is_attracting_component(G) nx.number_attracting_components(G) nx.attracting_components(G) # directed acyclic graphs nx.is_directed_acyclic_graph(G) nx.is_aperiodic(G) # distance measure (all for connected graph) nx.center(Gcc) nx.diameter(Gcc) nx.eccentricity(Gcc) nx.periphery(Gcc) nx.radius(Gcc) # flows (seg fault currently) #nx.max_flow(Gcc, 1, 2)
def test_number_attacting_components(self):
    """Expected attracting-component counts for fixtures G1..G4.

    (Method name keeps the historical "attacting" typo so test
    collection is unchanged.)
    """
    cases = [(self.G1, 3), (self.G2, 1), (self.G3, 2), (self.G4, 0)]
    for graph, expected in cases:
        assert nx.number_attracting_components(graph) == expected
def number_basal_components(graph):
    """Count the basal components of a directed graph.

    A basal component is an attracting component of the reversed graph,
    i.e. a strongly connected set with no edges pointing into it.
    """
    reversed_graph = nx.reverse(graph)
    return nx.number_attracting_components(reversed_graph)
# pos=nx.graphviz_layout(G) # pos=layout(G) G.remove_nodes_from(nx.isolates(G)) print str(" ") print 'ATTRACTING CONNECTEDNESS OF DIRECTED GRAPHS' print str(" ") print str(" ") print G.name print str(" ") print 'Does the graph G consist of a single attracting component?', nx.is_attracting_component( G) print 'The number of attracting components of G is:', nx.number_attracting_components( G) print str(" ") print 'List of attracting components:' print sorted(nx.attracting_components(G), key=len, reverse=True) print str(" ") lc = sorted(nx.strongly_connected_components(G), key=len, reverse=True) print 'List of strongly connected components:' print lc print str(" ") colors_list = ['c', 'b', 'g', 'y', 'k', 'm'] colors_to_select = list(colors_list) graphs = sorted(nx.attracting_component_subgraphs(G), key=len, reverse=True) attracting_component_subgraphs_edges = []
def compute_features(self):
    """Register all connectivity/component features on this feature class.

    Each self.add_feature call takes (name, callable, description,
    InterpretabilityScore) — presumably defined on the base feature
    class; verify against it.
    """
    self.add_feature(
        "is_connected",
        # * 1 coerces the bool to an int feature value
        lambda graph: nx.is_connected(graph) * 1,
        "Whether the graph is connected or not",
        InterpretabilityScore(5),
    )
    self.add_feature(
        "num_connected_components",
        lambda graph: len(list(nx.connected_components(graph))),
        "The number of connected components",
        InterpretabilityScore(5),
    )

    @lru_cache(maxsize=None)
    def eval_connectedcomponents(graph):
        """this evaluates the main function and cach it for speed up."""
        return list(nx.connected_components(graph))

    self.add_feature(
        "largest_connected_component",
        # NOTE(review): assumes connected_components yields the largest
        # component first — TODO confirm; networkx does not guarantee order.
        lambda graph: len(eval_connectedcomponents(graph)[0]),
        "The size of the largest connected component",
        InterpretabilityScore(4),
    )

    def ratio_largest(graph):
        # Ratio of the two largest components; 0 when there is only one.
        if len(eval_connectedcomponents(graph)) == 1:
            return 0
        return len(eval_connectedcomponents(graph)[0]) / len(
            eval_connectedcomponents(graph)[1]
        )

    self.add_feature(
        "ratio_largest_connected_components",
        ratio_largest,
        "The size ratio of the two largest connected components",
        InterpretabilityScore(4),
    )

    def ratio_min_max(graph):
        # Ratio of first to last component in the cached list; 0 when single.
        if len(eval_connectedcomponents(graph)) == 1:
            return 0
        return len(eval_connectedcomponents(graph)[0]) / len(
            eval_connectedcomponents(graph)[-1]
        )

    self.add_feature(
        "ratio_maxmin_connected_components",
        ratio_min_max,
        "The size ratio of the max and min largest connected components",
        InterpretabilityScore(4),
    )

    self.add_feature(
        "number_strongly_connected_components",
        lambda graph: nx.number_strongly_connected_components(graph),
        "A strongly connected component is a set of nodes in a directed graph such \
that each node in the set is reachable from any other node in that set",
        InterpretabilityScore(3),
    )
    self.add_feature(
        "strongly_connected_component_sizes",
        lambda graph: [len(i) for i in nx.strongly_connected_components(graph)],
        "the distribution of strongly connected component sizes",
        InterpretabilityScore(3),
        statistics="centrality",
    )
    self.add_feature(
        "condensation_nodes",
        lambda graph: nx.condensation(graph).number_of_nodes(),
        "number of nodes in the condensation of the graph",
        InterpretabilityScore(3),
    )
    self.add_feature(
        "condensation_edges",
        lambda graph: nx.condensation(graph).number_of_edges(),
        "number of edges in the condensation of the graph",
        InterpretabilityScore(3),
    )
    self.add_feature(
        "number_weakly_connected_components",
        lambda graph: nx.number_weakly_connected_components(graph),
        "A weakly connected component is a set of nodes in a directed graph such that \
there exists as edge between each node and at least one other node in the set",
        InterpretabilityScore(3),
    )
    self.add_feature(
        "weakly_connected_component_sizes",
        lambda graph: [len(i) for i in nx.weakly_connected_components(graph)],
        "the distribution of weakly connected component sizes",
        InterpretabilityScore(3),
        statistics="centrality",
    )
    self.add_feature(
        "number_attracting_components",
        lambda graph: nx.number_attracting_components(graph),
        "An attracting component is a set of nodes in a directed graph such that that \
once in that set, all other nodes outside that set are not reachable",
        InterpretabilityScore(3),
    )
    self.add_feature(
        "attracting_component_sizes",
        lambda graph: [len(i) for i in nx.attracting_components(graph)],
        "the distribution of attracting component sizes",
        InterpretabilityScore(3),
        statistics="centrality",
    )
    self.add_feature(
        # NOTE(review): feature name has a space ("number basal_components")
        # unlike every sibling — likely a typo, but renaming would change
        # the emitted feature key, so it is kept as-is.
        "number basal_components",
        # basal components = attracting components of the reversed graph
        lambda graph: nx.number_attracting_components(nx.reverse(graph)),
        "An basal component is a set of nodes in a directed graph such that there are no \
edges pointing into that set",
        InterpretabilityScore(3),
    )
    self.add_feature(
        "basal_component_sizes",
        lambda graph: [len(i) for i in nx.attracting_components(nx.reverse(graph))],
        "the distribution of basal component sizes",
        InterpretabilityScore(3),
        statistics="centrality",
    )
def output_graphmetrics(pathadd,paths,file_name,data_dir):
    '''
    output_graphmetrics()
    Builds an 8-connected grid graph from the raster `pathadd` (cells with
    value -9999.0 are treated as nodata and skipped), computes graph-theory
    metrics with NetworkX, and writes them as CSV-style lines to
    data_dir + file_name.

    Parameters
    ----------
    pathadd : 2D sequence of floats
        Raster grid; -9999.0 marks nodata cells.
    paths : sequence
        Path records; element [2] of each is the path length.
    file_name, data_dir : str
        Output file name and directory (simply concatenated).

    Fixes over the previous revision:
    - neighbor edge numbers for "spot 0/2"-style cells used the wrong row
      term (ncols*(irow-1) while testing row irow+1); all eight neighbors
      now share one bounds-checked offset computation
    - except clause referenced nonexistent IOerror/OSerror names and a
      malformed format string with an undefined variable
    - fout.close was referenced but never called, leaking the file handle
    '''
    # Graph package
    try:
        import networkx as nx
    except ImportError:
        raise ImportError("NetworkX required.")

    pathG = nx.Graph()

    # Get nrows and columns
    nrows = len(pathadd)
    ncols = len(pathadd[0])

    # 8-connected neighborhood offsets (row, col): the spots 0..7 that the
    # previous revision enumerated case-by-case for corners/edges/interior.
    neighbor_offsets = ((-1, -1), (-1, 0), (-1, 1),
                        (0, -1),           (0, 1),
                        (1, -1),  (1, 0),  (1, 1))

    for irow in range(nrows):
        for icol in range(ncols):
            # Skip -9999. (nodata) cells
            if pathadd[irow][icol] == -9999.0:
                continue
            # Node number is the row-major cell index
            nodenumber = ncols*irow + icol
            pathG.add_node(nodenumber)
            # Connect to every in-bounds, non-nodata neighbor
            for drow, dcol in neighbor_offsets:
                nrow = irow + drow
                ncol = icol + dcol
                if 0 <= nrow < nrows and 0 <= ncol < ncols:
                    if pathadd[nrow][ncol] != -9999.0:
                        pathG.add_edge(nodenumber, ncols*nrow + ncol)

    # Calculate properties from path lengths: min, max, average
    pathlen = [paths[i][2] for i in range(len(paths))]

    # Create file to write info to
    try:
        fout = open(data_dir+file_name, 'w')
    except (IOError, OSError) as e:
        print("UNICOROutputs %s, error %s" % (file_name, e))
        sys.exit(-1)

    # Write header information
    fout.write('Minimum Path Length,')
    fout.write(str(min(pathlen))+'\n')
    fout.write('Maximum Path Length,')
    fout.write(str(max(pathlen))+'\n')
    fout.write('Average Path Length,')
    fout.write(str(sum(pathlen)/len(paths))+'\n')
    fout.write('Density of Graph,')
    fout.write(str(nx.density(pathG))+'\n')
    fout.write('Number of nodes,')
    fout.write(str(nx.number_of_nodes(pathG))+'\n')
    fout.write('Number of edges,')
    fout.write(str(nx.number_of_edges(pathG))+'\n')
    fout.write('Is the graph a bipartite,')
    fout.write(str(nx.is_bipartite(pathG))+'\n')
    fout.write('Size of the largest clique,')
    fout.write(str(nx.graph_clique_number(pathG))+'\n')
    fout.write('Number of maximal cliques,')
    fout.write(str(nx.graph_number_of_cliques(pathG))+'\n')
    fout.write('Transitivity,')
    fout.write(str(nx.transitivity(pathG))+'\n')
    fout.write('Average clustering coefficient,')
    fout.write(str(nx.average_clustering(pathG))+'\n')
    fout.write('Test graph connectivity,')
    fout.write(str(nx.is_connected(pathG))+'\n')
    fout.write('Number of connected components,')
    fout.write(str(nx.number_connected_components(pathG))+'\n')
    fout.write('Consists of a single attracting component,')
    fout.write(str(nx.is_attracting_component(pathG))+'\n')
    if nx.is_attracting_component(pathG):
        fout.write('Number of attracting components,')
        fout.write(str(nx.number_attracting_components(pathG))+'\n')
    if nx.is_connected(pathG):
        # Distance metrics are only defined for connected graphs
        fout.write('Center,')
        fout.write(str(nx.center(pathG))+'\n')
        fout.write('Diameter,')
        fout.write(str(nx.diameter(pathG))+'\n')
        #fout.write('Eccentricity,')
        #fout.write(str(nx.eccentricity(pathG))+'\n')
        fout.write('Periphery,')
        fout.write(str(nx.periphery(pathG))+'\n')
        fout.write('Radius,')
        fout.write(str(nx.radius(pathG))+'\n')
    # NOTE(review): degree_assortativity/degree_pearsonr are networkx 1.x
    # APIs — kept as in the original; confirm the pinned networkx version.
    fout.write('Degree assortativity,')
    fout.write(str(nx.degree_assortativity(pathG))+'\n')
    fout.write('Degree assortativity Pearsons r,')
    fout.write(str(nx.degree_pearsonr(pathG))+'\n')
    # Close file (previous revision wrote `fout.close` without calling it)
    fout.close()
    del pathG
    # End::output_graphmetrics()
# Sweep over all 256 elementary cellular-automaton rules and several ring
# sizes, loading each precomputed state-transition graph (STG) and
# collecting attractor statistics per (rule, n_cells) row.
# (Fragment is truncated mid-loop at the end of this excerpt.)
rules = range(2**8)
n_cells = [12, 14, 16]
for rule in rules:
    for n_cell in n_cells:
        row = {}
        row['rule'] = rule
        row['n_cells'] = n_cell
        # load the graph
        STG = nx.read_graphml(snakemake.input.stg_dir + str(rule) + '/' + str(n_cell) + '_cells.graphml')
        # number and lengths of attractors
        row['m_attractors'] = nx.number_attracting_components(STG)
        cycles = nx.simple_cycles(STG)
        cycle_lens = [len(c) for c in cycles]
        row['max_period'] = np.max(cycle_lens)
        # transient lengths
        t_len = []
        # simple_cycles returns a generator, exhausted above — regenerate it
        cycles = nx.simple_cycles(STG)
        # need to do this for each attractor
        for cycle in cycles:
            # set to ignore
            cyc_nodes = set(cycle)
            for node in cycle:
# Script: analyze the state-transition graph (STG) of one elementary CA
# rule, given as the first CLI argument. Computes the attractor count,
# a per-state period length, and (truncated below) transient lengths via
# BFS from each attractor state.
import networkx as nx
import sys

rule = int(sys.argv[1])
edgelist = 'data/eca/stgs/rule_' + str(rule) + '.edgelist'
STG = nx.read_edgelist(edgelist, create_using=nx.DiGraph)

# number of attractors
n_attrs = nx.number_attracting_components(STG)

# we need to get a measure of period length for every state in the STG so that
# we can sample from it later
c_len = []
CC = [STG.subgraph(c).copy() for c in nx.weakly_connected_components(STG)]
for i, cc in enumerate(CC):
    # each weakly connected component holds exactly one cycle (the attractor);
    # every state in the component gets that cycle's length as its period
    period = len(next(nx.simple_cycles(cc)))
    c_len.extend([period] * len(cc))

# transient lengths
t_len = []
cycles = nx.simple_cycles(STG)
# need to do this for each attractor
for cycle in cycles:
    # set to ignore
    cyc_nodes = set(cycle)
    for node in cycle:
        # initialize the queue and our distance counter
        queue = [node]
        level = {node: 0}
        # keep going til the queue is done