def test_slow_hafnian_four(self):
    """Tests slow hafnian function against four-by-four graphs"""
    for i in range(1):  # how many tests to perform
        graph = gnp_random_graph(10, 0.5)
        nx.draw(graph)
        plt.show()  # displays graph (cumbersome for large tests)
        adj = to_numpy_array(graph)  # create adjacency matrix
        walrus = thewalrus.hafnian(adj)  # calculate hafnian with the Xanadu library
        haf = hafnian.slow_hafnian(adj)  # calculate my hafnian
        self.assertEqual(walrus, haf)  # compare!
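# The hafnian.slow_hafnian module under test is not shown in this snippet. As a
# point of reference only, a minimal sketch of a naive hafnian (an assumption,
# not the tested implementation): it expands the recursion
# haf(A) = sum_{j>0} A[0, j] * haf(A with rows/columns 0 and j removed),
# which sums the weights of all perfect matchings of the adjacency matrix.
import numpy as np

def naive_hafnian(A):
    """Recursively sum over all perfect matchings of the symmetric matrix A."""
    n = A.shape[0]
    if n == 0:
        return 1.0  # the empty matrix has exactly one (empty) matching
    if n % 2 == 1:
        return 0.0  # odd-sized matrices admit no perfect matching
    total = 0.0
    for j in range(1, n):
        # remove rows/columns 0 and j, then recurse on the remaining submatrix
        keep = [k for k in range(n) if k not in (0, j)]
        total += A[0, j] * naive_hafnian(A[np.ix_(keep, keep)])
    return total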
def __init__(self, raw_data):
    # read raw edge-list data into a graph
    with open(raw_data, 'r') as f_handle:
        V, E = f_handle.readline()[:-1].split(' ')
        self.V, self.E = int(V), int(E)
        self.G = nx.Graph()
        # loop through the edge list
        for l in range(self.E):
            i, j = f_handle.readline()[:-1].split(' ')
            i, j = int(i), int(j)
            self.G.add_edge(i, j)
    # convert the graph to a dense numpy adjacency matrix
    self.adj_mat = to_numpy_array(self.G)
    # sanity-check the adjacency matrix shape and the edge count
    assert self.adj_mat.shape == (self.V, self.V)
    assert self.G.number_of_edges() == self.E
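# A hypothetical usage example for the loader above (the class name GraphData and
# the file name are assumptions; the expected file format is inferred from the
# parsing code: a header line "V E" followed by one "i j" pair per edge):
#
#   3 2
#   0 1
#   1 2
#
# data = GraphData("toy_graph.txt")
# print(data.adj_mat)                 # 3x3 dense adjacency matrix
# print(data.G.number_of_edges())     # 2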
import networkx as nx
import matplotlib.pyplot as plt
from networkx.convert import to_dict_of_lists, to_edgelist
from networkx.convert_matrix import to_numpy_array

"""
https://networkx.org/documentation/stable/reference/convert.html
"""

n_nodes = 20
n_edges = 40
G = nx.generators.random_graphs.gnm_random_graph(n_nodes, n_edges)

print("G nodes")
print(G.nodes)
for node in G.nodes:
    print(node)

print("\nG edges")
print(G.edges)

print("\nG as dictionary of lists")
print(to_dict_of_lists(G))

print("\nG as a numpy array")
print(to_numpy_array(G))
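# to_edgelist is imported above but not exercised; a small added example
# (not in the original script) showing its output, a list of (u, v, data) tuples:
print("\nG as an edge list")
print(list(to_edgelist(G)))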
    'GNN_layer_activation': tf.nn.relu,
    'mlp_activation': tf.nn.relu,
    'pairs_sample_size': 50,
    'margin': 1
}

files = ["email.txt"]
for file in files:
    file_tokens = file.split(".")
    file_name = file_tokens[0]
    file_type = file_tokens[1]
    g = get_graph("graphs/" + file, file_type)
    info_dict = get_bc_info(g)
    adj_matrix = to_numpy_array(g)
    adj_matrix_t = np.transpose(adj_matrix)
    print(adj_matrix.shape)
    flat_adj_matrix = np.ndarray.flatten(adj_matrix)
    flat_adj_matrix_t = np.ndarray.flatten(adj_matrix_t)
    real_BCs = np.asarray(list(info_dict['BC_dict'].values()))
    optimizer = keras.optimizers.Adam(params['learning_rate'])
    gnn = GNN_BC(flat_adj_matrix.shape[0], adj_matrix.shape[0], params, real_BCs)
    gnn.compile(optimizer)
    gnn.fit(flat_adj_matrix, real_BCs)  # further fit arguments are truncated in the source
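# get_graph, get_bc_info, and GNN_BC are defined elsewhere in this project and are
# not shown here. As an illustration only, a minimal sketch of how the target
# betweenness-centrality dictionary could be produced with networkx (an assumption,
# not necessarily what get_bc_info actually does):
import networkx as nx

def get_bc_info_sketch(g):
    """Return a dict with a 'BC_dict' entry mapping each node to its betweenness centrality."""
    return {'BC_dict': nx.betweenness_centrality(g)}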
            # Update dist value of the adjacent vertices
            # of the picked vertex only if the current
            # distance is greater than the new distance and
            # the vertex is not in the shortest path tree
            for v in range(self.V):
                if self.graph[u][v] > 0 and sptSet[v] == False and \
                        dist[v] > dist[u] + self.graph[u][v]:
                    dist[v] = dist[u] + self.graph[u][v]

        # self.printSolution(dist)


# Driver program
lst = [10, 50, 100, 500, 1000, 5000, 10000]
for i in lst:
    print(i)
    a = nx.fast_gnp_random_graph(i, 0.5)
    a = nm.to_numpy_array(a)
    g = Graph(i)
    g.graph = a.tolist()
    start = timeit.default_timer()
    g.dijkstra(0)
    stop = timeit.default_timer()
    print('Time: ', stop - start, "\n")

# This code is contributed by Divyanshu Mehta
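# The Graph class used by the driver above is only partially shown (just the
# relaxation loop). A minimal sketch of the missing pieces it assumes -- imports,
# constructor, the minDistance helper, and the surrounding dijkstra method -- as an
# illustration, not necessarily the original code; nm is assumed to alias
# networkx.convert_matrix:
import sys
import timeit
import networkx as nx
from networkx import convert_matrix as nm

class Graph:
    def __init__(self, vertices):
        self.V = vertices
        self.graph = [[0] * vertices for _ in range(vertices)]

    def minDistance(self, dist, sptSet):
        # pick the unvisited vertex with the smallest tentative distance
        min_val, min_index = sys.maxsize, 0
        for v in range(self.V):
            if dist[v] < min_val and not sptSet[v]:
                min_val, min_index = dist[v], v
        return min_index

    def dijkstra(self, src):
        dist = [sys.maxsize] * self.V
        dist[src] = 0
        sptSet = [False] * self.V
        for _ in range(self.V):
            u = self.minDistance(dist, sptSet)
            sptSet[u] = True
            # relaxation loop, as in the fragment above
            for v in range(self.V):
                if self.graph[u][v] > 0 and not sptSet[v] and \
                        dist[v] > dist[u] + self.graph[u][v]:
                    dist[v] = dist[u] + self.graph[u][v]
        return dist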