# imports assumed by the workflow scripts below; the SkeletonWorkflow import
# path follows the cluster_tools package layout
import os
import json

import luigi
import z5py
from cluster_tools.skeletons import SkeletonWorkflow


def skeletons(path, max_jobs, target):
    """ Skeletonize segmentation. """
    input_key = 'volumes/segmentation/multicut'
    output_key = 'skeletons'

    config_dir = './configs'
    tmp_folder = './tmp_skeletons'
    os.makedirs(config_dir, exist_ok=True)

    # write the global config with the python interpreter used by the jobs
    config = SkeletonWorkflow.get_config()
    global_config = config['global']
    shebang = '#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster-new/bin/python'
    global_config.update({'shebang': shebang})
    with open(os.path.join(config_dir, 'global.config'), 'w') as f:
        json.dump(global_config, f)

    resolution = [40, 4, 4]
    size_threshold = 2500
    task = SkeletonWorkflow(tmp_folder=tmp_folder, config_dir=config_dir,
                            max_jobs=max_jobs, target=target,
                            input_path=path, input_key=input_key,
                            output_path=path, output_key=output_key,
                            resolution=resolution, size_threshold=size_threshold)
    success = luigi.build([task], local_scheduler=True)
    assert success
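A hedged usage sketch; the container path, job count and cluster target below are placeholders for illustration, not values from the original script:

if __name__ == '__main__':
    # hypothetical invocation: adjust path, job count and target to your setup
    skeletons('/path/to/data.n5', max_jobs=32, target='slurm')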
def skeletons(sample, max_jobs=8, target='local'):
    input_path = '/g/kreshuk/data/cremi/realigned/sample%s_small.n5' % sample
    input_prefix = 'segmentation/multicut'
    output_prefix = 'skeletons/multicut'

    config_dir = './config_skeletons'
    tmp_folder = './tmp_skeletons_%s' % sample
    os.makedirs(config_dir, exist_ok=True)

    config = SkeletonWorkflow.get_config()
    global_config = config['global']
    shebang = '#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env/bin/python'
    global_config.update({'shebang': shebang})
    with open(os.path.join(config_dir, 'global.config'), 'w') as f:
        json.dump(global_config, f)

    ds_config = config['skeletonize']
    ds_config.update({'threads_per_job': 8})
    with open(os.path.join(config_dir, 'skeletonize.config'), 'w') as f:
        json.dump(ds_config, f)

    # pass max_jobs and target through instead of hard-coding them,
    # so the function arguments actually take effect
    task = SkeletonWorkflow(tmp_folder=tmp_folder, max_jobs=max_jobs,
                            config_dir=config_dir, target=target,
                            input_path=input_path, output_path=input_path,
                            input_prefix=input_prefix, output_prefix=output_prefix,
                            work_scale=2)
    success = luigi.build([task], local_scheduler=True)

    if success and target == 'local':
        with z5py.File(input_path) as f:
            # load skeletons, raw data and segmentation at the working scale
            ds = f['skeletons/multicut/s2']
            ds.n_threads = 8
            skels = ds[:]

            ds = f['raw/s2']
            ds.n_threads = 8
            raw = ds[:]

            ds = f['segmentation/multicut/s2']
            ds.n_threads = 8
            seg = ds[:]
        # 'view' is assumed to come from an external viewer helper
        view([raw, seg, skels], ['raw', 'seg', 'skels'])
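A possible invocation, assuming the CREMI sample naming seen in the path template (sample 'A' is just an illustration):

if __name__ == '__main__':
    # hypothetical invocation for CREMI sample A, run and viewed locally
    skeletons('A', max_jobs=8, target='local')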
def test_skeletons_swc(self):
    config = SkeletonWorkflow.get_config()['skeletonize']
    config.update({'chunk_len': 50})
    with open(os.path.join(self.config_folder, 'skeletonize.config'), 'w') as f:
        json.dump(config, f)
    self._run_skel_wf(format_='swc', max_jobs=8)

    # check output for correctness
    seg, ids = self.ids_and_seg()
    out_folder = os.path.join(self.output_path, self.output_prefix, 's0')
    for seg_id in ids:
        # read the result from file
        out_file = os.path.join(out_folder, '%i.swc' % seg_id)
        skel_ids, coords, parents = su.read_swc(out_file)
        coords = np.array(coords, dtype='float')

        # compute the expected result
        mask = seg == seg_id
        skel_vol = skeletonize_3d(mask)
        try:
            pix_graph, coords_exp, _ = csr.skeleton_to_csgraph(skel_vol)
        except ValueError:
            continue

        # check coordinates
        coords_exp = coords_exp[1:]
        self.assertEqual(coords.shape, coords_exp.shape)
        self.assertTrue(np.allclose(coords, coords_exp))
def setUp(self):
    self._mkdir(self.tmp_folder)
    self._mkdir(self.config_folder)
    global_config = SkeletonWorkflow.get_config()['global']
    global_config['shebang'] = self.shebang
    global_config['block_shape'] = [10, 256, 256]
    with open(os.path.join(self.config_folder, 'global.config'), 'w') as f:
        json.dump(global_config, f)
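The test methods reference class attributes and helpers (self.path, self.shebang, self._mkdir, self.ids_and_seg, the su module) that sit outside this excerpt. A minimal sketch of the scaffolding they assume; every concrete value here is an illustrative assumption, not the original configuration:

import unittest
import os
import json

import numpy as np
import luigi
import z5py
from skimage.morphology import skeletonize_3d
from skan import csr
# 'su' is the project's skeleton utility module providing read_swc / read_n5;
# its exact import path is not shown in this excerpt


class TestSkeletons(unittest.TestCase):
    # all values below are placeholders for illustration
    target = 'local'
    tmp_folder = './tmp'
    config_folder = './configs'
    shebang = '#! /usr/bin/python'
    path = './test_data.n5'
    input_prefix = 'volumes/segmentation'
    output_path = './test_output.n5'
    output_prefix = 'skeletons'

    @staticmethod
    def _mkdir(dirpath):
        os.makedirs(dirpath, exist_ok=True)

    def ids_and_seg(self):
        # load the test segmentation and the ids to verify (sketch);
        # skip the background id 0
        with z5py.File(self.path) as f:
            seg = f[os.path.join(self.input_prefix, 's0')][:]
        ids = np.unique(seg)
        ids = ids[ids != 0]
        return seg, ids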
def _run_skel_wf(self, format_, max_jobs):
    task = SkeletonWorkflow(tmp_folder=self.tmp_folder, config_dir=self.config_folder,
                            target=self.target, max_jobs=max_jobs,
                            input_path=self.path, input_prefix=self.input_prefix,
                            output_path=self.output_path, output_prefix=self.output_prefix,
                            work_scale=0, skeleton_format=format_)
    ret = luigi.build([task], local_scheduler=True)
    self.assertTrue(ret)

    # check that the output group and the scale 0 dataset were created
    f = z5py.File(self.output_path)
    self.assertTrue(self.output_prefix in f)
    out_key = os.path.join(self.output_prefix, 's0')
    self.assertTrue(out_key in f)
def skeletonize(scale, target, max_jobs):
    path = '/g/kreshuk/data/FIB25/data.n5'
    input_key = 'volumes/paintera/multicut/data/s%i' % scale
    output_key = 'skeletons/s%i' % scale

    config_dir = './configs'
    tmp_folder = './tmp_skeletons_%i' % scale
    os.makedirs(config_dir, exist_ok=True)

    config = SkeletonWorkflow.get_config()
    global_config = config['global']
    shebang = '#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster-new/bin/python'
    global_config.update({'shebang': shebang})
    with open(os.path.join(config_dir, 'global.config'), 'w') as f:
        json.dump(global_config, f)

    config = config['skeletonize']
    config.update({'time_limit': 600, 'mem_limit': 16})
    with open(os.path.join(config_dir, 'skeletonize.config'), 'w') as f:
        json.dump(config, f)

    resolution = [8, 8, 8]
    size_threshold = 2000
    max_id = z5py.File(path)['volumes/paintera/multicut/data'].attrs['maxId']
    task = SkeletonWorkflow(tmp_folder=tmp_folder, config_dir=config_dir,
                            max_jobs=max_jobs, target=target,
                            input_path=path, input_key=input_key,
                            output_path=path, output_key=output_key,
                            resolution=resolution, size_threshold=size_threshold,
                            max_id=max_id)
    success = luigi.build([task], local_scheduler=True)
    assert success
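A hedged invocation sketch for the FIB25 script; scale, target and job count are illustrative, not taken from the original:

if __name__ == '__main__':
    # hypothetical invocation: skeletonize the scale 2 segmentation on the cluster
    skeletonize(scale=2, target='slurm', max_jobs=200)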
def test_skeletons_volume(self):
    config = SkeletonWorkflow.get_config()['skeletonize']
    config.update({'threads_per_job': 8})
    with open(os.path.join(self.config_folder, 'skeletonize.config'), 'w') as f:
        json.dump(config, f)
    self._run_skel_wf(format_='volume', max_jobs=1)

    # check output for correctness
    seg, ids = self.ids_and_seg()
    out_key = os.path.join(self.output_prefix, 's0')
    result = z5py.File(self.output_path)[out_key][:]
    for seg_id in ids:
        res = result == seg_id
        mask = seg == seg_id
        exp = skeletonize_3d(mask) > 0
        self.assertTrue(np.allclose(res, exp))
def test_skeletons_n5(self):
    config = SkeletonWorkflow.get_config()['skeletonize']
    config.update({'chunk_len': 50})
    with open(os.path.join(self.config_folder, 'skeletonize.config'), 'w') as f:
        json.dump(config, f)
    self._run_skel_wf(format_='n5', max_jobs=8)

    # check output for correctness
    seg, ids = self.ids_and_seg()
    out_key = os.path.join(self.output_prefix, 's0')
    ds = z5py.File(self.output_path)[out_key]
    for seg_id in ids:
        # read the result from file
        coords, edges = su.read_n5(ds, seg_id)

        # compute the expected result
        mask = seg == seg_id
        skel_vol = skeletonize_3d(mask)
        try:
            pix_graph, coords_exp, _ = csr.skeleton_to_csgraph(skel_vol)
        except ValueError:
            continue

        # check coordinates
        coords_exp = coords_exp[1:].astype('uint64')
        self.assertEqual(coords.shape, coords_exp.shape)
        self.assertTrue(np.allclose(coords, coords_exp))

        # check edges
        graph = csr.numba_csgraph(pix_graph)
        n_points = len(coords)
        edges_exp = [[u, v] for u in range(1, n_points + 1)
                     for v in graph.neighbors(u) if u < v]
        edges_exp = np.array(edges_exp)
        edges_exp -= 1
        self.assertEqual(edges.shape, edges_exp.shape)
        self.assertTrue(np.allclose(edges, edges_exp))
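To run the tests standalone, the standard unittest entry point applies:

if __name__ == '__main__':
    unittest.main()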