def test_worker_failure(self):

    total_roi = daisy.Roi((0,), (100,))
    read_roi = daisy.Roi((0,), (5,))
    write_roi = daisy.Roi((0,), (3,))

    outdir = self.path_to()

    ret = daisy.run_blockwise(
        total_roi=total_roi,
        read_roi=read_roi,
        write_roi=write_roi,
        process_function=lambda: self.worker(outdir, fail=16),
        num_workers=10)

    outfiles = glob.glob(os.path.join(outdir, '*.block'))
    block_ids = sorted([
        int(path.split('/')[-1].split('.')[0])
        for path in outfiles
    ])

    # 32 write blocks fit into the total ROI; block 16 was set up to fail,
    # so run_blockwise reports failure and every other block finishes
    self.assertFalse(ret)
    expected_block_ids = list(range(32))
    expected_block_ids.remove(16)
    self.assertEqual(block_ids, expected_block_ids)
def test_multidim(self):

    total_roi = daisy.Roi(
        (199, -100, -100, -100),
        (12, 5140, 2248, 2369))
    block_write_roi = daisy.Roi(
        (0, 0, 0, 0),
        (5, 500, 500, 500))
    block_read_roi = block_write_roi.grow(
        (1, 100, 100, 100),
        (1, 100, 100, 100))

    outdir = self.path_to()

    ret = daisy.run_blockwise(
        total_roi,
        block_read_roi,
        block_write_roi,
        process_function=lambda b: self.process_block(outdir, b),
        num_workers=8,
        processes=False,
        fit='shrink')

    outfiles = glob.glob(os.path.join(outdir, '*.block'))
    block_ids = sorted([
        int(path.split('/')[-1].split('.')[0])
        for path in outfiles
    ])

    # the writable region is the total ROI shrunk by the context, i.e.,
    # shape (10, 4940, 2048, 2169), which tiles into 2*10*5*5 = 500 blocks
    # with fit='shrink'
    self.assertTrue(ret)
    self.assertEqual(len(block_ids), 500)
def parallel_lsd_agglomerate(
        lsds,
        fragments,
        rag_provider,
        lsd_extractor,
        block_size,
        context,
        num_workers):
    '''Agglomerate fragments in parallel using only the shape descriptors.

    Args:

        lsds (:class:`daisy.Array`):

            An array containing the LSDs.

        fragments (:class:`daisy.Array`):

            An array containing fragments.

        rag_provider (:class:`SharedRagProvider`):

            A RAG provider to read nodes from and write found edges to.

        lsd_extractor (``LsdExtractor``):

            The local shape descriptor object used to compute the difference
            between the segmentation and the target LSDs.

        block_size (``tuple`` of ``int``):

            The size of the blocks to process in parallel, in world units.

        context (``tuple`` of ``int``):

            The context to consider for agglomeration, in world units.

        num_workers (``int``):

            The number of parallel workers.

    Returns:

        True, if all tasks succeeded.
    '''

    assert fragments.data.dtype == np.uint64

    shape = lsds.shape[1:]
    context = daisy.Coordinate(context)

    total_roi = lsds.roi.grow(context, context)
    read_roi = daisy.Roi(
        (0,) * lsds.roi.dims(), block_size).grow(context, context)
    write_roi = daisy.Roi((0,) * lsds.roi.dims(), block_size)

    return daisy.run_blockwise(
        total_roi,
        read_roi,
        write_roi,
        lambda b: agglomerate_in_block(
            lsds,
            fragments,
            rag_provider,
            lsd_extractor,
            b),
        lambda b: block_done(b, rag_provider),
        num_workers=num_workers,
        read_write_conflict=False,
        fit='shrink')
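# A minimal usage sketch for parallel_lsd_agglomerate, assuming the LSDs and
# fragments live in zarr containers and the RAG in MongoDB. The container,
# dataset, and database names as well as the LsdExtractor parameters are
# hypothetical placeholders.

import daisy
import lsd

lsds = daisy.open_ds('predictions.zarr', 'volumes/lsds')
fragments = daisy.open_ds('fragments.zarr', 'volumes/fragments')
rag_provider = daisy.persistence.MongoDbGraphProvider(
    'example_rag_db', 'localhost', mode='r+')

success = parallel_lsd_agglomerate(
    lsds,
    fragments,
    rag_provider,
    lsd_extractor=lsd.LsdExtractor(sigma=(80.0,) * 3),  # hypothetical sigma
    block_size=(2000, 2000, 2000),  # world units
    context=(400, 400, 400),        # world units
    num_workers=8)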
def test_negative_offset(self):

    logger.warning("A warning")

    total_roi = daisy.Roi(
        (-100,),
        (2369,))
    block_write_roi = daisy.Roi(
        (0,),
        (500,))
    block_read_roi = block_write_roi.grow(
        (100,),
        (100,))

    outdir = self.path_to()

    ret = daisy.run_blockwise(
        total_roi,
        block_read_roi,
        block_write_roi,
        process_function=lambda b: self.process_block(outdir, b),
        num_workers=1,
        fit='shrink')

    outfiles = glob.glob(os.path.join(outdir, '*.block'))
    block_ids = sorted([
        int(path.split('/')[-1].split('.')[0])
        for path in outfiles
    ])

    self.assertTrue(ret)
    self.assertEqual(len(block_ids), 5)
def run_test_graph_write_attributes(self, provider_factory):

    graph_provider = provider_factory('w')
    graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    graph.add_node(2, comment="without position")
    graph.add_node(42, position=(1, 1, 1))
    graph.add_node(23, position=(5, 5, 5), swip='swap')
    graph.add_node(57, position=daisy.Coordinate((7, 7, 7)), zap='zip')
    graph.add_edge(42, 23)
    graph.add_edge(57, 23)
    graph.add_edge(2, 42)

    graph.write_nodes(attributes=['position', 'swip'])
    graph.write_edges()

    graph_provider = provider_factory('r')
    compare_graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    nodes = []
    for node, data in graph.nodes(data=True):
        if node == 2:
            continue
        if 'zap' in data:
            del data['zap']
        data['position'] = list(data['position'])
        nodes.append((node, data))

    compare_nodes = compare_graph.nodes(data=True)
    compare_nodes = [
        (node_id, data)
        for node_id, data in compare_nodes
        if len(data) > 0
    ]

    self.assertCountEqual(nodes, compare_nodes)
def fetch(
        in_vol,
        voxel_size,
        roi_offset,
        roi_shape,
        out_file,
        out_ds,
        num_workers):

    total_roi = daisy.Roi(roi_offset, roi_shape)
    read_roi = daisy.Roi((0,) * 3, (4800, 1280, 1280))
    write_roi = read_roi

    logging.info('Creating out dataset...')

    raw_out = daisy.prepare_ds(
        out_file,
        out_ds,
        total_roi,
        voxel_size,
        dtype=np.uint8,
        write_roi=write_roi)

    logging.info('Writing to dataset...')

    daisy.run_blockwise(
        total_roi,
        read_roi,
        write_roi,
        process_function=lambda b: fetch_in_block(
            b,
            voxel_size,
            in_vol,
            raw_out),
        fit='shrink',
        num_workers=num_workers)
def test_workers_close(tmp_path):

    set_log_basedir(tmp_path)

    num_workers = 5

    def start_worker():
        subprocess.run(
            [sys.executable, "tests/process_block.py", f"{tmp_path}"])

    task = daisy.Task(
        "test_server_task",
        total_roi=daisy.Roi((0,), (42,)),
        read_roi=daisy.Roi((0,), (10,)),
        write_roi=daisy.Roi((1,), (8,)),
        process_function=start_worker,
        check_function=None,
        read_write_conflict=True,
        fit="valid",
        num_workers=num_workers,
        max_retries=2,
        timeout=None,
    )

    server = daisy.Server()
    server.run_blockwise([task])

    # if a worker did not release its lock file on shutdown, acquiring it
    # here times out and the test fails
    for i in range(num_workers):
        with FileLock(f"{tmp_path}/worker_{i}.lock", timeout=0.1):
            pass
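# A sketch of what a worker script like tests/process_block.py could look
# like. The actual file is not shown here; this is an assumption based on
# daisy's documented client pattern. Each spawned worker connects back to
# the server, holds a per-worker lock file while alive, and acquires blocks
# until the server signals that the task is done.

import sys

import daisy
from filelock import FileLock

if __name__ == "__main__":
    tmp_path = sys.argv[1]
    client = daisy.Client()
    with FileLock(f"{tmp_path}/worker_{client.worker_id}.lock"):
        while True:
            with client.acquire_block() as block:
                if block is None:
                    break
                # process the block here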
def solve(
        predict_config,
        worker_config,
        data_config,
        graph_config,
        solve_config,
        num_block_workers,
        block_size,
        roi_offset,
        roi_size,
        context,
        solve_block,
        base_dir,
        experiment,
        train_number,
        predict_number,
        graph_number,
        solve_number,
        queue,
        singularity_container,
        mount_dirs,
        **kwargs):

    source_roi = daisy.Roi(
        daisy.Coordinate(roi_offset),
        daisy.Coordinate(roi_size))

    solve_setup_dir = os.path.join(
        base_dir,
        experiment,
        "04_solve/setup_t{}_p{}_g{}_s{}".format(
            train_number,
            predict_number,
            graph_number,
            solve_number))

    block_write_roi = daisy.Roi((0, 0, 0), block_size)
    block_read_roi = block_write_roi.grow(context, context)
    total_roi = source_roi.grow(context, context)

    logger.info("Solving in %s", total_roi)

    daisy.run_blockwise(
        total_roi,
        block_read_roi,
        block_write_roi,
        process_function=lambda: start_worker(
            predict_config,
            worker_config,
            data_config,
            graph_config,
            solve_config,
            queue,
            singularity_container,
            mount_dirs,
            solve_block,
            solve_setup_dir),
        num_workers=num_block_workers,
        fit='shrink')

    logger.info("Finished solving, parameters id is %s", solve_number)
def test_graph_read_unbounded_roi(self):

    graph_provider = self.get_mongo_graph_provider('w')

    roi = daisy.Roi((0, 0, 0), (10, 10, 10))
    unbounded_roi = daisy.Roi((None, None, None), (None, None, None))

    graph = graph_provider[roi]

    graph.add_node(2, position=(2, 2, 2), selected=True, test='test')
    graph.add_node(42, position=(1, 1, 1), selected=False, test='test2')
    graph.add_node(23, position=(5, 5, 5), selected=True, test='test2')
    graph.add_node(
        57, position=daisy.Coordinate((7, 7, 7)), selected=True, test='test')
    graph.add_edge(42, 23, selected=False, a=100, b=3)
    graph.add_edge(57, 23, selected=True, a=100, b=2)
    graph.add_edge(2, 42, selected=True, a=101, b=3)

    graph.write_nodes()
    graph.write_edges()

    graph_provider = self.get_mongo_graph_provider('r+')
    limited_graph = graph_provider.get_graph(
        unbounded_roi,
        node_attrs=['selected'],
        edge_attrs=['c'])

    seen = []
    for node, data in limited_graph.nodes(data=True):
        # only the requested 'selected' attribute should have been read
        self.assertFalse('test' in data)
        self.assertTrue('selected' in data)
        data['selected'] = True
        seen.append(node)

    self.assertCountEqual(seen, [2, 42, 23, 57])
def run_test_graph_io(self, provider_factory):

    graph_provider = provider_factory('w')

    graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    graph.add_node(2, comment="without position")
    graph.add_node(42, position=(1, 1, 1))
    graph.add_node(23, position=(5, 5, 5), swip='swap')
    graph.add_node(57, position=daisy.Coordinate((7, 7, 7)), zap='zip')
    graph.add_edge(42, 23)
    graph.add_edge(57, 23)
    graph.add_edge(2, 42)

    graph.write_nodes()
    graph.write_edges()

    graph_provider = provider_factory('r')
    compare_graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    nodes = sorted(list(graph.nodes()))
    nodes.remove(2)  # node 2 has no position and will not be queried
    compare_nodes = sorted(list(compare_graph.nodes()))

    edges = sorted(list(graph.edges()))
    edges.remove((2, 42))  # node 2 has no position and will not be queried
    compare_edges = sorted(list(compare_graph.edges()))

    self.assertEqual(nodes, compare_nodes)
    self.assertEqual(edges, compare_edges)
def run_test_graph_write_roi(self, provider_factory):

    graph_provider = provider_factory('w')

    graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    graph.add_node(2, comment="without position")
    graph.add_node(42, position=(1, 1, 1))
    graph.add_node(23, position=(5, 5, 5), swip='swap')
    graph.add_node(57, position=daisy.Coordinate((7, 7, 7)), zap='zip')
    graph.add_edge(42, 23)
    graph.add_edge(57, 23)
    graph.add_edge(2, 42)

    write_roi = daisy.Roi((0, 0, 0), (6, 6, 6))
    graph.write_nodes(roi=write_roi)
    graph.write_edges(roi=write_roi)

    graph_provider = provider_factory('r')
    compare_graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    nodes = sorted(list(graph.nodes()))
    nodes.remove(2)   # node 2 has no position and will not be queried
    nodes.remove(57)  # node 57 is outside of the write_roi
    compare_nodes = compare_graph.nodes(data=True)
    compare_nodes = [
        node_id
        for node_id, data in compare_nodes
        if len(data) > 0
    ]
    compare_nodes = sorted(list(compare_nodes))

    edges = sorted(list(graph.edges()))
    edges.remove((2, 42))  # node 2 has no position and will not be queried
    compare_edges = sorted(list(compare_graph.edges()))

    self.assertEqual(nodes, compare_nodes)
    self.assertEqual(edges, compare_edges)
def test_graph():

    graph_provider = daisy.persistence.MongoDbGraphProvider(
        'test_daisy_graph',
        '10.40.4.51',
        nodes_collection='nodes',
        edges_collection='edges',
        mode='w')

    graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    graph.add_node(2, comment="without position")
    graph.add_node(42, position=(1, 1, 1))
    graph.add_node(23, position=(5, 5, 5), swip='swap')
    graph.add_node(57, position=daisy.Coordinate((7, 7, 7)), zap='zip')
    graph.add_edge(42, 23)

    for i in range(10000):
        graph.add_node(
            i + 100,
            position=(
                random.randint(0, 10),
                random.randint(0, 10),
                random.randint(0, 10)))

    start = time.time()
    graph.write_nodes()
    graph.write_edges()
    print("Wrote graph in %.3fs" % (time.time() - start))

    start = time.time()
    graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]
    print("Read graph in %.3fs" % (time.time() - start))
def overlap():

    run_datetime = datetime.datetime.now(
        pytz.timezone('US/Eastern')).strftime('%Y%m%dT%H%M%S.%f%z')
    temp_dir = os.path.join(config.temp_path, run_datetime)
    os.makedirs(temp_dir)

    fragments = daisy.open_ds(config.fragments_zarr, config.fragments_ds)
    groundtruth = daisy.open_ds(
        config.groundtruth_zarr, config.groundtruth_ds)

    total_roi = daisy.Roi(offset=config.roi_offset, shape=config.roi_shape)

    start = time.time()
    daisy.run_blockwise(
        total_roi=total_roi,
        read_roi=daisy.Roi(offset=(0, 0, 0), shape=config.block_size),
        write_roi=daisy.Roi(offset=(0, 0, 0), shape=config.block_size),
        process_function=lambda block: overlap_in_block(
            block=block,
            fragments=fragments,
            groundtruth=groundtruth,
            tmp_path=temp_dir),
        fit='shrink',
        num_workers=config.num_workers,
        read_write_conflict=False,
        max_retries=1)

    logger.info(
        f"Blockwise overlapping of fragments and ground truth "
        f"in {time.time() - start:.3f}s")
    logger.debug(
        f"num blocks: "
        f"{np.prod(np.ceil(np.array(config.roi_shape) / np.array(config.block_size)))}")

    frag_to_gt = overlap_reduce(tmp_path=temp_dir)

    pickle.dump(
        frag_to_gt,
        open(os.path.join(temp_dir, 'frag_to_gt.pickle'), 'wb'))

    return frag_to_gt
def relabel_connected_components(array_in, array_out, block_size, num_workers):
    '''Relabel connected components in an array in parallel.

    Args:

        array_in (``daisy.Array``):

            The array to relabel.

        array_out (``daisy.Array``):

            The array to write to. Should initially be empty (i.e., all
            zeros).

        block_size (``daisy.Coordinate``):

            The size of the blocks to relabel in, in world units.

        num_workers (``int``):

            The number of workers to use.
    '''

    # first pass: find connected components per block and record merges
    # across block boundaries
    write_roi = daisy.Roi((0,) * len(block_size), block_size)
    read_roi = write_roi.grow(array_in.voxel_size, array_in.voxel_size)
    total_roi = array_in.roi.grow(array_in.voxel_size, array_in.voxel_size)

    num_voxels_in_block = (read_roi / array_in.voxel_size).size()

    with tempfile.TemporaryDirectory() as tmpdir:

        daisy.run_blockwise(
            total_roi,
            read_roi,
            write_roi,
            process_function=lambda b: find_components_in_block(
                array_in,
                array_out,
                num_voxels_in_block,
                b,
                tmpdir),
            num_workers=num_workers,
            fit='shrink')

        nodes, edges = read_cross_block_merges(tmpdir)

    components = find_components(nodes, edges)

    logger.debug("Num nodes: %s", len(nodes))
    logger.debug("Num edges: %s", len(edges))
    logger.debug("Num components: %s", len(components))

    # second pass: relabel each block according to the global components
    write_roi = daisy.Roi((0,) * len(block_size), block_size)
    read_roi = write_roi
    total_roi = array_in.roi

    daisy.run_blockwise(
        total_roi,
        read_roi,
        write_roi,
        process_function=lambda b: relabel_in_block(
            array_out,
            nodes,
            components,
            b),
        num_workers=num_workers,
        fit='shrink')
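# A minimal usage sketch for relabel_connected_components, assuming input
# and output datasets in zarr containers (all names are hypothetical). The
# output dataset starts out empty and has the same ROI and voxel size as
# the input.

import daisy

array_in = daisy.open_ds('fragments.zarr', 'volumes/fragments')
array_out = daisy.prepare_ds(
    'relabelled.zarr',
    'volumes/relabelled',
    total_roi=array_in.roi,
    voxel_size=array_in.voxel_size,
    dtype=array_in.dtype)

relabel_connected_components(
    array_in,
    array_out,
    block_size=daisy.Coordinate((2000, 2000, 2000)),  # world units
    num_workers=8)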
def extract_edges_blockwise(
        db_host,
        db_name,
        sample,
        edge_move_threshold,
        block_size,
        num_workers,
        frames=None,
        frame_context=1,
        data_dir='../01_data',
        use_pv_distance=False,
        **kwargs):

    voxel_size, source_roi = get_source_roi(data_dir, sample)

    # limit to specific frames, if given
    if frames:
        begin, end = frames
        begin -= frame_context
        end += frame_context
        crop_roi = daisy.Roi(
            (begin, None, None, None),
            (end - begin, None, None, None))
        source_roi = source_roi.intersect(crop_roi)

    # block size in world units
    block_write_roi = daisy.Roi(
        (0,) * 4,
        daisy.Coordinate(block_size))

    pos_context = daisy.Coordinate((0,) + (edge_move_threshold,) * 3)
    neg_context = daisy.Coordinate((1,) + (edge_move_threshold,) * 3)
    logger.debug("Set neg context to %s", neg_context)

    input_roi = source_roi.grow(neg_context, pos_context)
    block_read_roi = block_write_roi.grow(neg_context, pos_context)

    print("Following ROIs in world units:")
    print("Input ROI = %s" % input_roi)
    print("Block read ROI = %s" % block_read_roi)
    print("Block write ROI = %s" % block_write_roi)
    print("Output ROI = %s" % source_roi)

    print("Starting block-wise processing...")

    # process block-wise
    daisy.run_blockwise(
        input_roi,
        block_read_roi,
        block_write_roi,
        process_function=lambda b: extract_edges_in_block(
            db_name,
            db_host,
            edge_move_threshold,
            b,
            use_pv_distance=use_pv_distance),
        check_function=lambda b: check_function(
            b,
            'extract_edges',
            db_name,
            db_host),
        num_workers=num_workers,
        processes=True,
        read_write_conflict=False,
        fit='shrink')
def test_intersect(self):

    a = daisy.Array(
        np.arange(0, 10).reshape(2, 5),
        daisy.Roi((0, 0), (2, 5)),
        (1, 1))

    b = a.intersect(daisy.Roi((1, 1), (10, 10)))

    assert b.roi == daisy.Roi((1, 1), (1, 4))
    np.testing.assert_array_equal(b.to_ndarray(), [[6, 7, 8, 9]])
def parse_rois(
        block_offsets,
        block_shapes,
        padded_offsets=None,
        padded_shapes=None):

    # 'start' was referenced below but never set; initialize it here
    start = time.time()

    sub_blocks_per_block = [3, 3, 3]

    edge_index_list = []
    edge_attr_list = []
    pos_list = []
    node_ids_list = []
    mask_list = []
    y_list = []

    for i in range(len(block_offsets)):

        logger.info('read block {} ...'.format(i))

        # load the padded block, if given
        if padded_offsets and padded_shapes:
            roi = daisy.Roi(list(padded_offsets[i]), list(padded_shapes[i]))
        else:
            roi = daisy.Roi(list(block_offsets[i]), list(block_shapes[i]))

        # alternatively, read the graph block-wise:
        # node_attrs, edge_attrs = graph_provider.read_blockwise(
        #     roi=roi,
        #     block_size=daisy.Coordinate(
        #         (block_shape_default / sub_blocks_per_block).astype(int)),
        #     num_workers=config['num_workers'])
        node_attrs = graph_provider.read_nodes(roi=roi)
        edge_attrs = graph_provider.read_edges(roi=roi, nodes=node_attrs)

        if len(node_attrs) == 0:
            raise ValueError('No nodes found in roi %s' % roi)
        if len(edge_attrs) == 0:
            raise ValueError('No edges found in roi %s' % roi)

        edge_index, edge_attr, pos, node_ids, mask, y = parse_rag_excerpt(
            node_attrs, edge_attrs)

        # restrict the mask to edges of the inner (unpadded) block
        if padded_offsets and padded_shapes:
            mask = mask_target_edges(
                edge_index_padded=edge_index,
                node_ids_padded=node_ids,
                inner_roi=daisy.Roi(
                    list(block_offsets[i]),
                    list(block_shapes[i])),
                mask=mask)

        mask_list.append(mask)
        edge_index_list.append(edge_index)
        edge_attr_list.append(edge_attr)
        pos_list.append(pos)
        node_ids_list.append(node_ids)
        y_list.append(y)

    logger.info("Parse set of ROIs in %.3fs" % (time.time() - start))

    return (
        edge_index_list,
        edge_attr_list,
        pos_list,
        node_ids_list,
        mask_list,
        y_list)
def prepare_for_fragments(self):
    '''Get the fragment ID for each site in site_ids.'''

    logging.info(
        f"Preparing evaluation for fragments in {self.fragments_file}...")

    if not os.path.exists(self.site_fragment_lut_directory):

        logging.info("site-fragment LUT does not exist, creating it...")

        os.makedirs(self.site_fragment_lut_directory)
        daisy.run_blockwise(
            self.roi,
            daisy.Roi((0, 0, 0), (9000, 9000, 9000)),
            daisy.Roi((0, 0, 0), (9000, 9000, 9000)),
            lambda b: self.store_lut_in_block(b),
            num_workers=48,
            fit='shrink')

    else:

        logging.info(
            "site-fragment LUT already exists, skipping preparation")

    logging.info(
        f"Reading site-fragment LUTs from "
        f"{self.site_fragment_lut_directory}...")

    lut_files = glob.glob(
        os.path.join(self.site_fragment_lut_directory, '*.npz'))
    site_fragment_lut = np.concatenate(
        [np.load(f)['site_fragment_lut'] for f in lut_files],
        axis=1)
    self.num_bg_sites = int(
        np.sum([np.load(f)['num_bg_sites'] for f in lut_files]))
    assert site_fragment_lut.dtype == np.uint64

    logging.info(
        f"Found {len(site_fragment_lut[0])} sites in site-fragment LUT")

    # convert to dictionary
    site_fragment_lut = {
        site: fragment
        for site, fragment in zip(site_fragment_lut[0], site_fragment_lut[1])
    }

    # create fragment ID array congruent to site_ids
    self.site_fragment_ids = np.array([
        site_fragment_lut[s] if s in site_fragment_lut else 0
        for s in self.site_ids
    ], dtype=np.uint64)
def test_graph_nonmatching_meta_values(self):

    roi = daisy.Roi((0, 0, 0), (10, 10, 10))
    roi2 = daisy.Roi((1, 0, 0), (10, 10, 10))

    self.get_mongo_graph_provider('w', True, None)

    # re-opening with a different 'directed' value should fail
    with self.assertRaises(ValueError):
        self.get_mongo_graph_provider('r', False, None)

    self.get_mongo_graph_provider('w', None, roi)

    # re-opening with a different ROI should fail
    with self.assertRaises(ValueError):
        self.get_mongo_graph_provider('r', None, roi2)
def extract_edges(
        db_host,
        db_name,
        soft_mask_container,
        soft_mask_dataset,
        roi_offset,
        roi_size,
        distance_threshold,
        block_size,
        num_block_workers,
        graph_number,
        **kwargs):

    # define ROIs:
    source_roi = daisy.Roi(roi_offset, roi_size)
    block_write_roi = daisy.Roi(
        (0,) * 3,
        daisy.Coordinate(block_size))

    pos_context = daisy.Coordinate((distance_threshold,) * 3)
    neg_context = daisy.Coordinate((distance_threshold,) * 3)
    logger.debug("Set pos context to %s", pos_context)
    logger.debug("Set neg context to %s", neg_context)

    input_roi = source_roi.grow(neg_context, pos_context)
    block_read_roi = block_write_roi.grow(neg_context, pos_context)

    logger.info("Following ROIs in world units:")
    logger.info("Input ROI = %s" % input_roi)
    logger.info("Block read ROI = %s" % block_read_roi)
    logger.info("Block write ROI = %s" % block_write_roi)
    logger.info("Output ROI = %s" % source_roi)

    logger.info("Starting block-wise processing...")

    # process block-wise
    daisy.run_blockwise(
        input_roi,
        block_read_roi,
        block_write_roi,
        process_function=lambda b: extract_edges_in_block(
            db_name,
            db_host,
            soft_mask_container,
            soft_mask_dataset,
            distance_threshold,
            graph_number,
            b),
        num_workers=num_block_workers,
        processes=True,
        read_write_conflict=False,
        fit='shrink')
def prepare(self):

    total_roi = daisy.Roi((0,), (10,))
    read_roi = daisy.Roi((0,), (1,))
    write_roi = daisy.Roi((0,), (1,))

    self.schedule(
        total_roi,
        read_roi,
        write_roi,
        process_function=TestMultipleTasks.process_block_null,
        max_retries=0,
        fit='shrink')
def prepare(self):

    total_roi = daisy.Roi((20,), (10,))
    read_roi = daisy.Roi((0,), (1,))
    write_roi = daisy.Roi((0,), (1,))

    self.schedule(
        total_roi,
        read_roi,
        write_roi,
        process_function=lambda: TestMultipleTasks.worker(self.outdir),
        max_retries=0,
        fit='shrink')
def test_materialize(self):

    a = daisy.Array(
        np.arange(0, 10).reshape(2, 5),
        daisy.Roi((0, 0), (2, 5)),
        (1, 1))

    b = a[daisy.Roi((0, 0), (2, 2))]

    # underlying data did not change
    assert a.data.shape == b.data.shape
    assert b.shape == (2, 2)

    b.materialize()

    # after materializing, the underlying data is restricted to b's ROI
    assert b.shape == (2, 2)
    assert b.data.shape == (2, 2)
def overlay_segmentation(
        db_host,
        db_name,
        roi_offset,
        roi_size,
        selected_attr,
        solved_attr,
        edge_collection,
        segmentation_container,
        segmentation_dataset,
        segmentation_number,
        voxel_size=(40, 4, 4)):

    graph_provider = MongoDbGraphProvider(
        db_name,
        db_host,
        directed=False,
        position_attribute=['z', 'y', 'x'],
        edges_collection=edge_collection)

    graph_roi = daisy.Roi(roi_offset, roi_size)
    segmentation = daisy.open_ds(
        segmentation_container, segmentation_dataset)
    intersection_roi = segmentation.roi.intersect(graph_roi).snap_to_grid(
        voxel_size)

    nx_graph = graph_provider.get_graph(
        intersection_roi,
        nodes_filter={selected_attr: True},
        edges_filter={selected_attr: True})

    for node_id, data in nx_graph.nodes(data=True):
        # note: the original passed data["x"] as a second argument to
        # daisy.Coordinate; all three components belong in one tuple
        node_position = daisy.Coordinate((data["z"], data["y"], data["x"]))
        nx_graph.nodes[node_id]["segmentation_{}".format(
            segmentation_number)] = segmentation[node_position]

    graph_provider.write_nodes(intersection_roi)
def run_test_graph_connected_components(self, provider_factory):

    graph_provider = provider_factory('w')
    graph = graph_provider[daisy.Roi((0, 0, 0), (10, 10, 10))]

    graph.add_node(2, comment="without position")
    graph.add_node(42, position=(1, 1, 1))
    graph.add_node(23, position=(5, 5, 5), swip='swap')
    graph.add_node(57, position=daisy.Coordinate((7, 7, 7)), zap='zip')
    graph.add_edge(57, 23)
    graph.add_edge(2, 42)

    components = graph.get_connected_components()
    self.assertEqual(len(components), 2)

    c1, c2 = components
    n1 = sorted(list(c1.nodes()))
    n2 = sorted(list(c2.nodes()))

    compare_n1 = [2, 42]
    compare_n2 = [23, 57]

    # the order of the components is arbitrary
    if 2 in n2:
        n1, n2 = n2, n1

    self.assertCountEqual(n1, compare_n1)
    self.assertCountEqual(n2, compare_n2)
def read_data_config(data_config):

    config = configparser.ConfigParser()
    config.read(data_config)

    cfg_dict = {}

    # Data
    cfg_dict["sample"] = config.get("Data", "sample")

    offset = config.get("Data", "roi_offset")
    size = config.get("Data", "roi_size")
    cfg_dict["roi_offset"] = daisy.Coordinate(
        tuple(int(x) for x in offset.split(", "))
        if not offset == "None"
        else [None] * 3
    )
    cfg_dict["roi_size"] = daisy.Coordinate(
        tuple(int(x) for x in size.split(", "))
        if not size == "None"
        else [None] * 3
    )
    cfg_dict["roi"] = daisy.Roi(cfg_dict["roi_offset"], cfg_dict["roi_size"])

    cfg_dict["location_attr"] = config.get("Data", "location_attr")
    cfg_dict["penalty_attr"] = config.get("Data", "penalty_attr")
    cfg_dict["target_edge_len"] = int(config.get("Data", "target_edge_len"))

    # Database
    cfg_dict["consensus_db"] = (
        f"mouselight-{cfg_dict['sample']}-"
        f"{config.get('Data', 'consensus_db')}"
    )
    cfg_dict["subdivided_db"] = (
        f"mouselight-{cfg_dict['sample']}-"
        f"{config.get('Data', 'subdivided_db')}"
    )
    cfg_dict["db_host"] = config.get("Data", "db_host")

    return cfg_dict
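# For reference, a hypothetical INI file matching the keys read above (all
# values are placeholders; note that roi_offset and roi_size are parsed with
# a comma-and-space separator and may be the literal string "None"):
#
#   [Data]
#   sample = 2018-08-01
#   roi_offset = 0, 0, 0
#   roi_size = 1000, 1000, 1000
#   location_attr = position
#   penalty_attr = penalty
#   target_edge_len = 1000
#   consensus_db = consensus
#   subdivided_db = subdivided
#   db_host = mongodb://localhost:27017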
def setUp(self):

    # y: 1| 0(s)------1(s)------2(s)
    #    2|
    #    3| 3(s)                6(s)
    #    4|  |                   |
    #    5| 4(s)-(ns)-9(s)-(ns)-7(s)
    #    6|  |                   |
    #    7| 5(s)                8(s)
    #     |-------------------------->
    # x:     1         2         3
    #
    # s = selected
    # ns = not selected

    self.nodes = [
        {'id': 0, 'z': 1, 'y': 1, 'x': 1, 'selected': True, 'solved': True},
        {'id': 1, 'z': 1, 'y': 1, 'x': 2, 'selected': True, 'solved': True},
        {'id': 2, 'z': 1, 'y': 1, 'x': 3, 'selected': True, 'solved': True},
        {'id': 3, 'z': 1, 'y': 3, 'x': 1, 'selected': True, 'solved': True},
        {'id': 4, 'z': 1, 'y': 5, 'x': 1, 'selected': True, 'solved': True},
        {'id': 5, 'z': 1, 'y': 7, 'x': 1, 'selected': True, 'solved': True},
        {'id': 6, 'z': 1, 'y': 3, 'x': 3, 'selected': True, 'solved': True},
        {'id': 7, 'z': 1, 'y': 5, 'x': 3, 'selected': True, 'solved': True},
        {'id': 8, 'z': 1, 'y': 7, 'x': 3, 'selected': True, 'solved': True},
        {'id': 9, 'z': 1, 'y': 5, 'x': 2, 'selected': True, 'solved': True},
    ]

    self.edges = [
        {'u': 0, 'v': 1, 'evidence': 0.5, 'selected': True, 'solved': True},
        {'u': 1, 'v': 2, 'evidence': 0.5, 'selected': True, 'solved': True},
        {'u': 3, 'v': 4, 'evidence': 0.5, 'selected': True, 'solved': True},
        {'u': 4, 'v': 5, 'evidence': 0.5, 'selected': True, 'solved': True},
        {'u': 6, 'v': 7, 'evidence': 0.5, 'selected': True, 'solved': True},
        {'u': 7, 'v': 8, 'evidence': 0.5, 'selected': True, 'solved': True},
        {'u': 4, 'v': 9, 'evidence': 0.5, 'selected': False, 'solved': False},
        {'u': 7, 'v': 9, 'evidence': 0.5, 'selected': False, 'solved': False},
    ]

    self.db_name = 'micron_test_solver'

    config = configparser.ConfigParser()
    config.read(os.path.expanduser("../mongo.ini"))
    self.db_host = "mongodb://{}:{}@{}:{}".format(
        config.get("Credentials", "user"),
        config.get("Credentials", "password"),
        config.get("Credentials", "host"),
        config.get("Credentials", "port"))

    self.graph_provider = MongoDbGraphProvider(
        self.db_name,
        self.db_host,
        mode='w',
        position_attribute=['z', 'y', 'x'])

    self.roi = daisy.Roi((0, 0, 0), (4, 4, 4))
    self.graph = self.graph_provider[self.roi]
    self.graph.add_nodes_from([(node['id'], node) for node in self.nodes])
    self.graph.add_edges_from([
        (edge['u'], edge['v'], edge) for edge in self.edges])

    self.solve_params = {
        "graph": self.graph,
        "evidence_factor": 12,
        "comb_angle_factor": 14,
        "start_edge_prior": 180,
        "selection_cost": -80}
def getTrack1(self):

    cells = [
        (1, {'t': 0, 'z': 0, 'y': 0, 'x': 0}),
        (2, {'t': 1, 'z': 0, 'y': 0, 'x': 0}),
        (3, {'t': 2, 'z': 0, 'y': 0, 'x': 0}),
        (4, {'t': 3, 'z': 0, 'y': 0, 'x': 0}),
    ]
    edges = [(2, 1), (3, 2), (4, 3)]
    roi = daisy.Roi((0, 0, 0, 0), (4, 4, 4, 4))

    return cells, edges, roi
def test_basic(self):

    task = daisy.Task(
        'test_server_task',
        total_roi=daisy.Roi((0,), (100,)),
        read_roi=daisy.Roi((0,), (10,)),
        write_roi=daisy.Roi((1,), (8,)),
        process_function=lambda b: self.process_block(b),
        check_function=None,
        read_write_conflict=True,
        fit='valid',
        num_workers=1,
        max_retries=2,
        timeout=None)

    server = daisy.Server()
    server.run_blockwise([task])
def get_array(data_container, data_set, begin, end, context=(0, 0, 0)):

    context = np.array(context)
    # grow the ROI by half the context on each side
    roi = daisy.Roi(begin - context / 2, end - begin + context)
    dataset = daisy.open_ds(data_container, data_set)
    data_array = dataset[roi].to_ndarray()

    return data_array
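# A hypothetical invocation of get_array (container and dataset names are
# placeholders). begin and end are world-unit coordinates; the requested ROI
# spans [begin - context/2, end + context/2).

import numpy as np

raw = get_array(
    'sample.zarr',
    'volumes/raw',
    begin=np.array((400, 400, 400)),
    end=np.array((600, 600, 600)),
    context=(20, 20, 20))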