def get_final_graph(self, graph_path, scale, n_blocks, n_threads=8):
    """Merge all sub-graphs at ``scale`` into the final 'graph' dataset and load it.

    Parameters
    ----------
    graph_path : str
        Path to the container that holds the per-block sub-graphs.
    scale : int
        Scale level whose sub-graphs are merged.
    n_blocks : int
        Number of sub-graph blocks at this scale; blocks 0..n_blocks-1 are merged.
    n_threads : int, optional
        Number of threads used for the merge. Defaults to 8, the value
        that was previously hard-coded.

    Returns
    -------
    The merged graph as returned by ``self.load_graph``.
    """
    ndist.mergeSubgraphs(graph_path,
                         blockPrefix="sub_graphs/s%i/block_" % scale,
                         blockIds=list(range(n_blocks)),
                         outKey='graph',
                         numberOfThreads=n_threads)
    graph = self.load_graph(graph_path, 'graph')
    return graph
def merge_block(block_id):
    # Merge every previous-scale sub-graph whose bounding box falls
    # inside this block into one sub-graph at the current `scale`.
    # (Reads `blocking`, `previous_blocking`, `graph_path` and `scale`
    # from the enclosing scope.)
    target_block = blocking.getBlock(block_id)
    sub_block_ids = previous_blocking.getBlockIdsInBoundingBox(
        roiBegin=target_block.begin,
        roiEnd=target_block.end,
        blockHalo=[0, 0, 0])
    ndist.mergeSubgraphs(graph_path,
                         blockPrefix="sub_graphs/s%i/block_" % (scale - 1),
                         blockIds=sub_block_ids.tolist(),
                         outKey='sub_graphs/s%i/block_%i' % (scale, block_id))
def _merge_graph(graph_path, output_key, scale, block_list, blocking, shape, n_threads):
    """Merge the sub-graphs listed in ``block_list`` at ``scale`` into
    ``output_key`` and record the volume shape on the result dataset."""
    merge_kwargs = dict(blockPrefix='s%i/sub_graphs/block_' % scale,
                        blockIds=block_list,
                        outKey=output_key,
                        numberOfThreads=n_threads)
    ndist.mergeSubgraphs(graph_path, **merge_kwargs)
    # attach the volume shape so downstream consumers can read it back
    with vu.file_reader(graph_path) as f:
        f[output_key].attrs['shape'] = shape
def _merge_graph(graph_path, output_key, scale, block_list, blocking, shape, n_threads):
    """Merge the sub-graphs listed in ``block_list`` at ``scale`` into
    ``output_key`` (non-varlen serialization) and record the volume shape."""
    ndist.mergeSubgraphs(graph_path,
                         subgraphKey='s%i/sub_graphs' % scale,
                         blockIds=block_list,
                         outKey=output_key,
                         numberOfThreads=n_threads,
                         serializeToVarlen=False)
    # attach the volume shape so downstream consumers can read it back
    with vu.file_reader(graph_path) as f:
        f[output_key].attrs['shape'] = shape
def merge_subblocks(block_id, blocking, previous_blocking, graph_path, scale):
    """Merge previous-scale sub-graphs overlapping ``block_id`` into a
    single sub-graph at ``scale``.

    Returns the elapsed wall-clock time in seconds.
    """
    start = time.time()
    this_block = blocking.getBlock(block_id)
    overlapping = previous_blocking.getBlockIdsInBoundingBox(
        roiBegin=this_block.begin,
        roiEnd=this_block.end,
        blockHalo=[0, 0, 0])
    ndist.mergeSubgraphs(graph_path,
                         blockPrefix='sub_graphs/s%i/block_' % (scale - 1,),
                         blockIds=overlapping.tolist(),
                         outKey='sub_graphs/s%i/block_%i' % (scale, block_id))
    return time.time() - start
def _merge_subblocks(block_id, blocking, previous_blocking, graph_path, scale):
    """Merge previous-scale sub-graphs overlapping ``block_id`` into a
    single sub-graph at ``scale``, logging start and success."""
    fu.log("start processing block %i" % block_id)
    this_block = blocking.getBlock(block_id)
    overlapping = previous_blocking.getBlockIdsInBoundingBox(
        roiBegin=this_block.begin,
        roiEnd=this_block.end,
        blockHalo=[0, 0, 0])
    ndist.mergeSubgraphs(graph_path,
                         blockPrefix='sub_graphs/s%i/block_' % (scale - 1,),
                         blockIds=overlapping.tolist(),
                         outKey='sub_graphs/s%i/block_%i' % (scale, block_id))
    # log block success
    fu.log_block_success(block_id)
def _merge_subblocks(block_id, blocking, previous_blocking, graph_path, output_key, scale):
    """Merge previous-scale sub-graphs overlapping ``block_id`` into
    ``output_key`` (varlen serialization), logging start and success."""
    fu.log("start processing block %i" % block_id)
    this_block = blocking.getBlock(block_id)
    overlapping = previous_blocking.getBlockIdsInBoundingBox(
        roiBegin=this_block.begin,
        roiEnd=this_block.end,
        blockHalo=[0, 0, 0])
    ndist.mergeSubgraphs(graph_path,
                         subgraphKey='s%i/sub_graphs' % (scale - 1,),
                         blockIds=overlapping.tolist(),
                         outKey=output_key,
                         serializeToVarlen=True)
    # log block success
    fu.log_block_success(block_id)
def compute_region_graph(labels_path, labels_key, blocks, graph_path):
    """Compute the full region graph for a labeled volume.

    Extracts per-block sub-graphs at scale 0, merges them at scale 1,
    merges everything into the final 'graph' dataset, and maps the
    block-local edge ids of both scales to the global edge ids.

    Parameters
    ----------
    labels_path, labels_key : str
        Location of the label volume.
    blocks : block shape used for the scale-0 sub-graph extraction.
    graph_path : str
        Path of the n5 container that receives all graph datasets.
    """
    n_blocks0, shape = subgraphs_from_blocks(labels_path, labels_key,
                                             blocks, graph_path)
    # FIX: was hard-coded './graph.n5', silently ignoring `graph_path`
    n_blocks1 = merge_subgraphs(graph_path, 1, blocks, shape)
    block_list0 = list(range(n_blocks0))
    block_list1 = list(range(n_blocks1))
    ndist.mergeSubgraphs(graph_path, 'sub_graphs/s1/block_', block_list1, "graph")
    # map block-local edge ids to the global edge ids at both scales
    ndist.mapEdgeIds(graph_path, 'graph', 'sub_graphs/s0/block_', block_list0)
    ndist.mapEdgeIds(graph_path, 'graph', 'sub_graphs/s1/block_', block_list1)
    # FIX: was hard-coded './graph.n5'
    z5py.File(graph_path)['graph'].attrs['shape'] = shape
def compute_graph(self, n_threads=8):
    """Extract per-block sub-graphs at scale 0 (in parallel), merge them
    into the final 'graph' dataset and map block-local edge ids to the
    global edge ids.

    Parameters
    ----------
    n_threads : int, optional
        Thread count used for extraction, merging and edge-id mapping.
        Defaults to 8, the value that was previously hard-coded in
        three separate places.
    """
    halo = [1, 1, 1]
    f_graph = z5py.File(self.graph_path, use_zarr_format=False)
    f_graph.create_group('sub_graphs/s0')

    def extract_subgraph(block_id):
        block = self.blocking.getBlockWithHalo(block_id, halo)
        outer_block, inner_block = block.outerBlock, block.innerBlock
        # we only need the halo into one direction,
        # hence we use the outer-block only for the end coordinate
        begin = inner_block.begin
        end = outer_block.end
        block_key = 'sub_graphs/s0/block_%i' % block_id
        ndist.computeMergeableRegionGraph(self.path, self.labels_key,
                                          begin, end,
                                          self.graph_path, block_key)

    with futures.ThreadPoolExecutor(n_threads) as tp:
        tasks = [tp.submit(extract_subgraph, block_id)
                 for block_id in range(self.blocking.numberOfBlocks)]
        [t.result() for t in tasks]

    n_blocks = self.blocking.numberOfBlocks
    ndist.mergeSubgraphs(self.graph_path,
                         blockPrefix="sub_graphs/s0/block_",
                         blockIds=list(range(n_blocks)),
                         outKey='graph',
                         numberOfThreads=n_threads)
    ndist.mapEdgeIdsForAllBlocks(self.graph_path, 'graph',
                                 blockPrefix='sub_graphs/s0/block_',
                                 numberOfBlocks=n_blocks,
                                 numberOfThreads=n_threads)
def graph_step3(graph_path, last_scale, initial_block_shape, n_threads):
    """Final merge step: combine all sub-graphs of ``last_scale`` into
    the 'graph' dataset and record the volume shape on it."""
    t0 = time.time()
    # block shape at the last scale doubles with every scale level
    scale_factor = 2 ** last_scale
    scaled_block_shape = [scale_factor * bs for bs in initial_block_shape]

    f_graph = z5py.File(graph_path)
    shape = f_graph.attrs['shape']
    blocking = nifty.tools.blocking(roiBegin=[0, 0, 0],
                                    roiEnd=list(shape),
                                    blockShape=scaled_block_shape)

    output_key = 'graph'
    ndist.mergeSubgraphs(graph_path,
                         blockPrefix='sub_graphs/s%i/block_' % last_scale,
                         blockIds=list(range(blocking.numberOfBlocks)),
                         outKey=output_key,
                         numberOfThreads=n_threads)
    f_graph[output_key].attrs['shape'] = shape

    print("Success")
    print("In %f s" % (time.time() - t0,))
def merge_graph(graph_path, last_scale, config_file, tmp_folder):
    """Merge all sub-graphs of ``last_scale`` into the final 'graph'
    dataset, honoring an optional ROI from the config, and write the
    elapsed time to a log file in ``tmp_folder``."""
    t0 = time.time()
    with open(config_file) as f:
        config = json.load(f)
    initial_block_shape = config['block_shape']
    n_threads = config['n_threads']
    roi = config.get('roi', None)

    # block shape at the last scale doubles with every scale level
    scale_factor = 2 ** last_scale
    scaled_block_shape = [scale_factor * bs for bs in initial_block_shape]

    f_graph = z5py.File(graph_path)
    shape = f_graph.attrs['shape']
    blocking = nifty.tools.blocking(roiBegin=[0, 0, 0],
                                    roiEnd=list(shape),
                                    blockShape=scaled_block_shape)

    # restrict to the blocks overlapping the ROI if one was given
    if roi is None:
        block_list = list(range(blocking.numberOfBlocks))
    else:
        block_list = blocking.getBlockIdsOverlappingBoundingBox(
            roi[0], roi[1], [0, 0, 0]).tolist()

    output_key = 'graph'
    ndist.mergeSubgraphs(graph_path,
                         blockPrefix='sub_graphs/s%i/block_' % last_scale,
                         blockIds=block_list,
                         outKey=output_key,
                         numberOfThreads=n_threads)
    f_graph[output_key].attrs['shape'] = shape

    res_file = os.path.join(tmp_folder, 'log_merge_graph.log')
    with open(res_file, 'w') as f:
        json.dump({'t': time.time() - t0}, f)