def test_edge_mapping_toy(self):
    uv_ids = np.array([[0, 1],
                       [0, 2],
                       [1, 2],
                       [2, 3],
                       [2, 4],
                       [2, 5],
                       [2, 6],
                       [3, 4],
                       [3, 6],
                       [4, 5]], dtype='int64')
    # node labeling:          0  1  2  3  4  5  6
    node_labeling = np.array([0, 1, 0, 2, 1, 2, 3], dtype='uint64')
    self.assertEqual(len(node_labeling), len(np.unique(uv_ids)))
    edge_mapping = nt.EdgeMapping(uv_ids, node_labeling)

    new_uv_ids = edge_mapping.newUvIds()
    new_uv_ids_exp = np.array([[0, 1],
                               [0, 2],
                               [0, 3],
                               [1, 2],
                               [2, 3]], dtype='uint64')
    self.assertEqual(new_uv_ids.shape, new_uv_ids_exp.shape)
    self.assertTrue((new_uv_ids == new_uv_ids_exp).all())

    # test edge mappings
    # mapping
    # edge 0: 0 -> 1 == 0 -> 1 : 0
    # edge 1: 0 -> 2 == 0 -> 0 : Null
    # edge 2: 1 -> 2 == 1 -> 0 : 0
    # edge 3: 2 -> 3 == 0 -> 2 : 1
    # edge 4: 2 -> 4 == 0 -> 1 : 0
    # edge 5: 2 -> 5 == 0 -> 2 : 1
    # edge 6: 2 -> 6 == 0 -> 3 : 2
    # edge 7: 3 -> 4 == 2 -> 1 : 3
    # edge 8: 3 -> 6 == 2 -> 3 : 4
    # edge 9: 4 -> 5 == 1 -> 2 : 3
    edge_values = np.ones(len(uv_ids), dtype='float32')

    # sum mapping
    new_values = edge_mapping.mapEdgeValues(edge_values, "sum")
    new_values_exp = np.array([3, 2, 1, 2, 1], dtype='float32')
    self.assertEqual(new_values.shape, new_values_exp.shape)
    self.assertTrue(np.allclose(new_values, new_values_exp))

    # mean mapping
    new_values = edge_mapping.mapEdgeValues(edge_values, "mean")
    new_values_exp = np.array([1, 1, 1, 1, 1], dtype='float32')
    self.assertEqual(new_values.shape, new_values_exp.shape)
    self.assertTrue(np.allclose(new_values, new_values_exp))

    edge_values = np.ones(len(uv_ids), dtype='float32')
    edge_values[::2] = 0

    # min mapping
    # edges:   0  1  2  3  4  5  6  7  8  9
    # values:  0  1  0  1  0  1  0  1  0  1
    new_values = edge_mapping.mapEdgeValues(edge_values, "min")
    new_values_exp = np.array([0, 1, 0, 1, 0], dtype='float32')
    self.assertEqual(new_values.shape, new_values_exp.shape)
    self.assertTrue(np.allclose(new_values, new_values_exp))

    # max mapping
    # edges:   0  1  2  3  4  5  6  7  8  9
    # values:  0  1  0  1  0  1  0  1  0  1
    new_values = edge_mapping.mapEdgeValues(edge_values, "max")
    new_values_exp = np.array([0, 1, 0, 1, 0], dtype='float32')
    self.assertEqual(new_values.shape, new_values_exp.shape)
    self.assertTrue(np.allclose(new_values, new_values_exp))
def orphan_assignments(job_id, config_path):

    fu.log("start processing job %i" % job_id)
    fu.log("reading config from %s" % config_path)

    # get the config
    with open(config_path) as f:
        config = json.load(f)

    # load from config
    assignment_path = config['assignment_path']
    assignment_key = config['assignment_key']
    graph_path = config['graph_path']
    graph_key = config['graph_key']
    output_path = config['output_path']
    output_key = config['output_key']
    relabel = config['relabel']
    n_threads = config.get('threads_per_job', 1)

    # load the uv-ids and assignments
    with vu.file_reader(graph_path) as f:
        ds = f['%s/edges' % graph_key]
        ds.n_threads = n_threads
        uv_ids = ds[:]
    with vu.file_reader(assignment_path) as f:
        ds = f[assignment_key]
        ds.n_threads = n_threads
        chunks = ds.chunks
        assignments = ds[:]
    n_new_nodes = int(assignments.max()) + 1

    # find the new uv-ids
    edge_mapping = nt.EdgeMapping(uv_ids, assignments, numberOfThreads=n_threads)
    new_uv_ids = edge_mapping.newUvIds()

    # find all orphans = segments that have node degree one
    ids, node_degrees = np.unique(new_uv_ids, return_counts=True)
    orphans = ids[node_degrees == 1]
    n_orphans = len(orphans)
    fu.log("Found %i orphans of %i clusters" % (n_orphans, n_new_nodes))

    # make graph for fast neighbor search
    graph = nifty.graph.undirectedGraph(n_new_nodes)
    graph.insertEdges(new_uv_ids)

    orphan_assignments = np.array([next(graph.nodeAdjacency(orphan_id))[0]
                                   for orphan_id in orphans])
    assert len(orphan_assignments) == n_orphans, "%i, %i" % (len(orphan_assignments), n_orphans)
    assignments[orphans] = orphan_assignments.astype('uint64')

    if relabel:
        vigra.analysis.relabelConsecutive(assignments, out=assignments,
                                          start_label=1, keep_zeros=True)

    with vu.file_reader(output_path) as f:
        ds = f.require_dataset(output_key, shape=assignments.shape, chunks=chunks,
                               compression='gzip', dtype='uint64')
        ds[:] = assignments

    fu.log_job_success(job_id)
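# Minimal sketch of the JSON config consumed by orphan_assignments above. The
# required keys are taken from the function; all paths, dataset keys, and the
# concrete values below are illustrative placeholders, not values from the
# actual pipeline. Note that the edge uv-ids are expected under
# '<graph_key>/edges' inside the graph container.
import json

example_config = {
    "assignment_path": "/path/to/assignments.n5",  # hypothetical container path
    "assignment_key": "node_labels",               # hypothetical dataset key
    "graph_path": "/path/to/graph.n5",             # hypothetical container path
    "graph_key": "graph",                          # uv-ids read from 'graph/edges'
    "output_path": "/path/to/output.n5",           # hypothetical container path
    "output_key": "node_labels_no_orphans",        # hypothetical dataset key
    "relabel": True,
    "threads_per_job": 4
}

with open("orphan_assignments_config.json", "w") as f:
    json.dump(example_config, f)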
def get_new_edges(uv_ids, node_labeling, costs, cost_accumulation, n_threads):
    # map the edges and costs of the original graph onto the contracted graph
    # defined by the node labeling
    edge_mapping = nt.EdgeMapping(uv_ids, node_labeling, numberOfThreads=n_threads)
    new_uv_ids = edge_mapping.newUvIds()
    # edge_labeling maps each original edge to its edge id in the contracted graph
    edge_labeling = edge_mapping.edgeMapping()
    # accumulate the costs of all original edges that map to the same new edge
    new_costs = edge_mapping.mapEdgeValues(costs, cost_accumulation,
                                           numberOfThreads=n_threads)
    assert len(new_uv_ids) == len(new_costs)
    assert len(edge_labeling) == len(uv_ids)
    return new_uv_ids, edge_labeling, new_costs
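# Illustrative use of get_new_edges on a tiny graph. The node labeling and
# costs are made up for this example, and `nt` is assumed to be the
# `nifty.tools` module used by the surrounding code; the expected results in
# the comments follow from the "sum" accumulation shown in the unit test above.
import numpy as np
import nifty.tools as nt  # assumed alias for `nt`

uv_ids = np.array([[0, 1], [0, 2], [1, 2], [2, 3]], dtype='uint64')
node_labeling = np.array([0, 0, 1, 1], dtype='uint64')  # merge {0, 1} and {2, 3}
costs = np.array([0.5, -0.2, 0.1, 0.3], dtype='float32')

new_uv_ids, edge_labeling, new_costs = get_new_edges(uv_ids, node_labeling,
                                                     costs, "sum", n_threads=1)
# Only edges crossing the two merged groups survive, so new_uv_ids should be
# [[0, 1]], and new_costs should hold the summed costs of the original edges
# (0, 2) and (1, 2): -0.2 + 0.1 = -0.1.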
def _get_new_edges(uv_ids, node_labeling, costs, accumulation_method, n_threads):
    # same mapping of edges and costs onto the contracted graph as in
    # get_new_edges above, with an additional sanity check on the new node ids
    edge_mapping = nt.EdgeMapping(uv_ids, node_labeling, numberOfThreads=n_threads)
    new_uv_ids = edge_mapping.newUvIds()
    edge_labeling = edge_mapping.edgeMapping()
    new_costs = edge_mapping.mapEdgeValues(costs, accumulation_method,
                                           numberOfThreads=n_threads)
    # the new node ids must not exceed the maximal label of the node labeling
    assert new_uv_ids.max() <= node_labeling.max(), "%i, %i" % (new_uv_ids.max(),
                                                                node_labeling.max())
    assert len(new_uv_ids) == len(new_costs)
    assert len(edge_labeling) == len(uv_ids)
    return new_uv_ids, edge_labeling, new_costs