def generate_graph_children(g: nx.Graph):
    """Build every graph obtained from ``g`` by adding one complement edge.

    Returns a pair of dicts sharing the same keys: ``-1`` maps to the
    original graph, and ``0..k-1`` map to the k single-edge additions.
    The first dict holds graph6 byte encodings (no header), the second
    the average edge Wiener impact of each graph.
    """
    complement_edges = list(nx.edges(nx.complement(g)))

    # Seed both dictionaries with the unmodified graph under key -1.
    base_calc = WienerCalculator(g)
    wiener_impact_dict = {-1: base_calc.average_edge_wiener_impact}
    graph_g6_dict = {-1: nx.to_graph6_bytes(g, header=False)}

    # One child graph per edge missing from g.
    for idx, edge in enumerate(complement_edges):
        child = deepcopy(g)
        child.add_edges_from([edge])
        child_calc = WienerCalculator(child)
        wiener_impact_dict[idx] = child_calc.average_edge_wiener_impact
        graph_g6_dict[idx] = nx.to_graph6_bytes(child, header=False)

    return graph_g6_dict, wiener_impact_dict
def generate_graph_generator(self, gen_name, vertex_num, graphs_num, file_name):
    """Generate ``graphs_num`` graphs via generator ``gen_name`` and stream
    them to disk.

    Writes graph6 encodings to ``<file_name>.g6`` and one statistics row
    per graph (from ``standardized_data_analysis``) to ``<file_name>.csv``,
    flushing both buffers every ``self.bunchsize`` graphs.
    """
    g6_chunks = []  # buffered graph6 byte strings (joined on flush, avoids quadratic +=)
    stat_rows = []  # buffered csv statistic rows
    with open(file_name + '.csv', 'w', newline='') as stat_file, \
            open(file_name + '.g6', 'wb') as graph_file:
        stat_file_writer = csv.writer(stat_file, delimiter=',')
        for _ in range(graphs_num):
            g = self.ana_tool.get_graph_by_gen(gen_name, vertex_num)
            g6_chunks.append(nx.to_graph6_bytes(g))
            stat_rows.append(
                self.ana_tool.standardized_data_analysis(g, vertex_num))
            # Flush AFTER appending so each bunch holds exactly
            # ``bunchsize`` graphs; the original flushed at the top of the
            # iteration and so wrote its first bunch one item short.
            if len(stat_rows) >= self.bunchsize:
                stat_file_writer.writerows(stat_rows)
                graph_file.write(b"".join(g6_chunks))
                g6_chunks = []
                stat_rows = []
        # Write whatever is left in the buffers.
        stat_file_writer.writerows(stat_rows)
        graph_file.write(b"".join(g6_chunks))
def all_graphs(d):
    # Generator: pairs every red adjacency with every blue adjacency,
    # embeds them as diagonal blocks of a fixed (2d+1)-vertex adjacency
    # matrix, and yields the graph6 encoding of each result. Only the
    # very first yield carries the ">>graph6<<" header.
    header = True
    ra = list(red_adjacencies(d))
    ba = list(blue_adjacencies(d))
    # Fixed scaffold shared by all candidates. canvas[d:2*d] is a numpy
    # view, so the chained indexing below assigns into canvas in place.
    canvas = np.zeros((2 * d + 1, 2 * d + 1))
    canvas[d:2 * d][:, 0:d] += np.identity(d)   # link vertex i to vertex d+i
    canvas[d:2 * d][:, 2 * d] += 1              # link vertices d..2d-1 to the apex 2d
    # Mirror to make the scaffold symmetric; both blocks above are strictly
    # off-diagonal, so no entry is doubled.
    canvas += canvas.T
    stderr.write('%d possibilities\n' % (len(ra) * len(ba)))
    for r in tqdm(ra):
        for b in ba:
            # Keep only pairs whose entrywise product is all-zero, i.e.
            # r and b share no edge position (< 0.5 is a float-safe "== 0").
            if np.sum(r * b) < 0.5:
                # Overwrite the two diagonal blocks; the scaffold between
                # them is untouched across iterations. Presumably r and b
                # are symmetric 0/1 d-by-d matrices -- TODO confirm.
                canvas[0:d][:, 0:d] = r
                canvas[d:2 * d][:, d:2 * d] = b
                # NOTE(review): nx.from_numpy_matrix was removed in
                # networkx 3.0 (from_numpy_array is its replacement);
                # this code therefore pins networkx < 3.
                yield nx.to_graph6_bytes(nx.from_numpy_matrix(
                    np.array(canvas, dtype=int)), header=header)
                header = False
def main():
    """Generate a random connected graph and profile treewidth algorithms.

    Builds a G(N, P) random graph, joins its connected components into one,
    prints basic stats plus the graph6 encoding, then (depending on the
    TEST_APPROXIMATE / TEST_EXACT flags) profiles the heuristic and exact
    treewidth computations with cProfile.
    """
    G = nx.fast_gnp_random_graph(N, P, directed=False)

    # Materialize the component list BEFORE adding edges: the original
    # mutated G while iterating the lazy nx.connected_components
    # generator, which only works by accident of its traversal order.
    components = list(nx.connected_components(G))
    # Chain consecutive components with one edge each, using an arbitrary
    # representative node from either side.
    for prev_comp, curr_comp in zip(components, components[1:]):
        G.add_edge(next(iter(prev_comp)), next(iter(curr_comp)))

    print("Generated graph: ")
    print("Number nodes: ", G.number_of_nodes())
    print("Number edges: ", G.number_of_edges())
    print("Graph6 Format: ")
    print(nx.to_graph6_bytes(G, header=False).decode('utf-8'))

    if TEST_APPROXIMATE:
        # Profile the upper bound obtained via the min-degree heuristic.
        pr = cProfile.Profile()
        decomp, tree_width = pr.runcall(treewidth_decomp, G, min_degree_heuristic)
        print("Approximate Treewidth: ", tree_width)
        pr.print_stats(sort="cumtime")

    if TEST_EXACT:
        # Profile the exact treewidth computation.
        pr = cProfile.Profile()
        tree_width = pr.runcall(exact_tree_width, G)
        print("Exact Treewidth: ", tree_width)
        pr.print_stats(sort="cumtime")
    # NOTE(review): the loops below are the tail of a generator (they
    # ``yield``) whose ``def`` line -- apparently ``vecgen`` given the call
    # further down -- is outside this view; indentation is reconstructed
    # and should be confirmed against the full file.
    for i in range(len(combs)):
        for pos in combs[i]:
            comb_vecs[i, pos] = 1  # set the chosen positions of each 0/1 vector
    # Yield every (head, first, second, third) combination appended to the
    # fixed prefix ``top``.
    for head in header:
        for first in comb_vecs:
            for second in comb_vecs:
                for third in comb_vecs:
                    yield np.concatenate((top, head, first, second, third))


# --- top-level driver script ---
filepath = sys.argv[1]  # input file path from the command line
# Open the input file and create the '<input>.out' output file.
with open(filepath, 'rb') as fp:
    with open(filepath + '.out', 'wb') as op:
        # One matrix per input line; loop until EOF.
        s = fp.readline()
        while s:
            mat_inv = stringtomat(s.strip())
            cache_size = 0
            gen = vecgen()
            for idx, v in enumerate(gen):
                results = checkVert(v, mat_inv)
                # [2.] appears to be a sentinel for a rejected vertex --
                # TODO confirm against checkVert.
                if not np.array_equiv(results[0], np.array([2.])):
                    # Cache the accepted vertex and its product vector.
                    verts[cache_size], vecs_prod[cache_size] = results
                    cache_size += 1
                if idx % 10000 == 0:
                    # Progress report: index out of the 2**N candidates,
                    # percent done, and accepted count so far.
                    print(idx, '/', 1 << N, '\t',
                          round(idx * 100 / (1 << N), 10), '%',
                          '\t', cache_size, 'hits')
            # Build the graph from the cached vertices and write it in
            # graph6 format (no header).
            A = constructGraph(cache_size, vecs_prod, verts)
            op.write(nx.to_graph6_bytes(nx.Graph(A), header=False))
            s = fp.readline()
def to_graph6(graph: nx.Graph) -> str:
    """Convert a networkx graph to a string in graph6 format.

    The result carries neither the ``>>graph6<<`` header nor the trailing
    newline that ``nx.to_graph6_bytes`` appends.
    """
    # Ask networkx to omit the header instead of slicing a magic 10 bytes
    # off the front, then drop the trailing newline. graph6 payload bytes
    # are all in the printable range 63-126, so rstrip cannot eat data.
    return nx.to_graph6_bytes(graph, header=False).rstrip(b"\n").decode("utf-8")
    # NOTE(review): the statements below are the tail of a helper (they end
    # in ``return``) whose ``def`` line is outside this view; indentation
    # is reconstructed and should be confirmed against the full file. The
    # use of dict.iteritems() means this is Python 2 code.
    mo = [i for (i, j) in hd.items() if j == 2]  # keys of hd with value 2 -- semantics unclear, confirm
    # Inverse mapping of ``outers`` restricted to keys not in ``mo``.
    middle12 = {v: k for (k, v) in outers.iteritems() if k not in mo}
    # Edge endpoints of the induced subgraph, translated through middle12.
    new10 = [(middle12[x], middle12[y])
             for (x, y) in G.subgraph(list(middle12)).edges()]
    # Re-key those pairs as fresh node ids appended after the existing ones.
    new10 = {(i + len(G)): j for (i, j) in enumerate(new10)}
    G.add_nodes_from(list(new10))
    # Connect each new node to both endpoints of its pair.
    G.add_edges_from([(x, z) for (x, y) in new10.iteritems() for z in y])
    # Forbid pairing two new nodes whose endpoint pairs overlap
    # (fewer than 4 distinct endpoints), keeping each unordered pair once.
    forbidden = [(k, l)
                 for (k, v) in new10.iteritems()
                 for (l, w) in new10.iteritems()
                 if (len(set(v + w)) != 4)]
    forbidden = set([t for t in forbidden if (t[0] < t[1])])
    invols = list(involutions(list(new10), forbidden))
    return (G, invols)


if __name__ == '__main__':
    # Emit every involution-augmented graph for every case in graph6
    # format; only the very first graph written carries the g6 header.
    header = True
    for c in tqdm(get_cases()):
        G, invols = subdivide_case(c)
        for iv in invols:
            H = G.copy()
            H.add_edges_from(iv)
            stdout.write(nx.to_graph6_bytes(H, header=header))
            header = False