def test_bad_edge_costs_2(self):
    """make_graph must reject a non-integer edge cost (4.3 here)."""
    edges = [
        ("A", "B", 5),
        ("B", "C", 4.3),
    ]
    # Keep only the raising call inside the assertRaises context so a
    # failure elsewhere can't be mistaken for the expected error.
    with self.assertRaises(errors.BadEdgeCostError) as context:
        graph_utils.make_graph(edges)
    # assertIn gives a clear failure message, unlike assertTrue(... in ...).
    self.assertIn('must be an integer', str(context.exception))
def test_good_edge_costs_1(self):
    """make_graph accepts integer edge costs and returns a defaultdict."""
    from collections import defaultdict

    edges = [
        ("A", "B", 5),
        ("B", "C", 43),
    ]
    graph = graph_utils.make_graph(edges)
    # assertIsInstance replaces the `type(x) == T` anti-pattern and
    # reports the actual type on failure.
    self.assertIsInstance(graph, defaultdict)
def setUp(self):
    """Build the shared fixture graph used by the tests in this case."""
    edge_list = [
        ("A", "B", 5),
        ("B", "C", 4),
        ("C", "D", 8),
        ("D", "C", 8),
        ("D", "E", 6),
        ("A", "D", 5),
        ("C", "E", 2),
        ("E", "B", 3),
        ("A", "E", 7),
    ]
    self.graph = graph_utils.make_graph(edge_list)
def node_out_128(inp_dir, out_dir, out_coor):
    """Exclude the out_coor and -out_coor values from the node attribute
    txt files in inp_dir and write new node attribute files to out_dir.

    Parameters:
        inp_dir: directory containing the input graph txt files.
        out_dir: directory (prefix) where filtered files are written.
        out_coor: coordinate value to strip (both +out_coor and -out_coor).
    """
    file_names = [f for f in listdir(inp_dir) if isfile(join(inp_dir, f))]
    for name in file_names:
        print(name)
        # Read and close the file deterministically (the original leaked
        # the handle by never calling close()).
        with open(inp_dir + name, 'r') as gph:
            cont = gph.readlines()
        ls_node, ls_edge = gphtols_view(cont, False)
        graph = make_graph(ls_node, ls_edge, range(len(ls_node)))
        nodes = np.asarray(ls_node)
        # Indices of nodes whose coordinate equals +out_coor or -out_coor.
        pos_idx = np.where(nodes == out_coor)[0]
        neg_idx = np.where(nodes == -out_coor)[0]
        # Union of the two index sets; remove_n presumably drops the node
        # from the graph (project helper — behavior assumed, confirm).
        for idx in set(pos_idx) | set(neg_idx):
            graph.remove_n(idx)
        adj = nx.attr_matrix(graph.get_graph())[0]
        # Collect directed edges wherever the adjacency matrix has a 1.
        edges = [
            [j, k]
            for j in range(adj.shape[0])
            for k in range(adj.shape[1])
            if adj[j, k] == 1.
        ]
        kept_nodes = list(
            nx.get_node_attributes(graph.get_graph(), name='coor').values())
        write_gph(out_dir + name, kept_nodes, edges)
def createDataRecord(out_filename, addrs_y, img_path, gph_path):
    """Serialize image / graph pairs into a TFRecord file.

    For each entry in addrs_y, reads the image from img_path and the
    matching graph txt from gph_path, quantile-transforms and rescales the
    node attributes, and writes (image, node attributes, adjacency matrix)
    as one tf.train.Example to out_filename.

    Parameters:
        out_filename: path of the TFRecord file to create.
        addrs_y: iterable of image file names (stem is reused for the
            graph txt file — e.g. "x.png" -> "x.txt").
        img_path: directory prefix for the images.
        gph_path: directory prefix for the graph txt files.
    """
    array = np.load('./data/numpy_arrays/nodes_out.npy')
    qt = QuantileTransformer(output_distribution='normal')
    # Fit only — the original bound fit_transform's result to an unused
    # variable; the fitted state is all that later transform() needs.
    qt.fit(array)
    # Affine rescaling parameters applied after the quantile transform.
    a = np.load('./data/numpy_arrays/range/a.npy')
    b = np.load('./data/numpy_arrays/range/b.npy')

    writer = tf.io.TFRecordWriter(out_filename)
    try:
        for i in range(len(addrs_y)):
            print(i)
            if i == 0:
                print(addrs_y[i])
            img_y = cv2.imread(img_path + str(addrs_y[i]))
            img_y = img_y / 255
            # All data has to be converted to np.float32 before writing.
            img_y = np.asarray(img_y, dtype=np.float32)

            # Read and close the graph file deterministically (the
            # original leaked the handle by never calling close()).
            with open(gph_path + addrs_y[i].split('.')[0] + '.txt',
                      'r') as gph:
                cont = gph.readlines()
            ls_node, ls_edge = gphtols_view(cont, flip=False)
            if i == 0:
                print(ls_node)
            if len(ls_node) == 0:
                continue

            node_attr = np.asarray(ls_node, dtype=np.float32)
            node_attr = qt.transform(node_attr)
            node_attr = np.asarray((a * node_attr) + b, dtype=np.float32)

            graph = make_graph(ls_node, ls_edge, range(len(ls_node)))
            adj_mtx = np.asarray(graph.get_adj(), dtype=np.float32)

            if i == 0:
                print(node_attr, node_attr.shape)
                print(adj_mtx)
                print(img_y)

            # tobytes() replaces the deprecated tostring(); it produces
            # byte-identical output.
            feature = {
                'image_y': _bytes_feature(img_y.tobytes()),
                'gph_nodes': _bytes_feature(node_attr.tobytes()),
                'gph_adj': _bytes_feature(adj_mtx.tobytes())
            }
            example = tf.train.Example(
                features=tf.train.Features(feature=feature))
            writer.write(example.SerializeToString())
    finally:
        # Close the writer even if a read/transform step raises; the
        # original only closed it on the success path.
        writer.close()
    sys.stdout.flush()