def setup_tsv_input(setup_dvid_repo):
    dvid_address, repo_uuid = setup_dvid_repo
    input_segmentation_name = 'segmentation-decimatemeshes-input'
    test_volume, object_boxes, object_sizes = create_test_segmentation()

    create_labelmap_instance(dvid_address, repo_uuid, input_segmentation_name, max_scale=3)
    post_labelmap_voxels(dvid_address, repo_uuid, input_segmentation_name, (0,0,0), test_volume,
                         downres=True, noindexing=False)

    tsv_name = 'segmentation-decimatemeshes-tsv'
    create_tarsupervoxel_instance(dvid_address, repo_uuid, tsv_name, input_segmentation_name, '.drc')

    # Post supervoxel meshes
    meshes = Mesh.from_label_volume(test_volume, progress=False)
    meshes_data = {f"{label}.drc": mesh.serialize(fmt='drc') for label, mesh in meshes.items()}
    post_load(dvid_address, repo_uuid, tsv_name, meshes_data)

    # Merge two of the objects (100 and 300)
    post_merge(dvid_address, repo_uuid, input_segmentation_name, 100, [300])
    object_boxes[100] = box_union(object_boxes[100], object_boxes[300])
    del object_boxes[300]

    object_sizes[100] += object_sizes[300]
    del object_sizes[300]

    meshes[100] = Mesh.concatenate_meshes((meshes[100], meshes[300]))
    del meshes[300]

    return dvid_address, repo_uuid, tsv_name, object_boxes, object_sizes, meshes

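# The box_union() call above is assumed to operate on boxes of the form
# [(z0,y0,x0), (z1,y1,x1)].  A minimal sketch of that assumed behavior, for
# illustration only (the fixture uses the library's own implementation):
def _box_union_sketch(box_a, box_b):
    box_a, box_b = np.asarray(box_a), np.asarray(box_b)
    start = np.minimum(box_a[0], box_b[0])  # elementwise min of the two start corners
    stop = np.maximum(box_a[1], box_b[1])   # elementwise max of the two stop corners
    return np.array([start, stop])
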
def test_post_hierarchical_cleaves(labelmap_setup):
    dvid_server, dvid_repo, _merge_table_path, _mapping_path, _supervoxel_vol = labelmap_setup
    uuid = post_branch(dvid_server, dvid_repo, 'segmentation-post_hierarchical_cleaves', '')
    instance_info = dvid_server, uuid, 'segmentation-post_hierarchical_cleaves'
    create_labelmap_instance(*instance_info)

    svs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    groups = [1, 1, 2, 2, 3, 3, 3, 3, 3, 4]

    svs = np.asarray(svs, np.uint64)

    # Post some supervoxels in multiple blocks, just to prove that post_hierarchical_cleaves()
    # doesn't assume that the labelindex has the same length as the mapping.
    sv_vol = np.zeros((128, 64, 64), np.uint64)
    sv_vol[0, 0, :len(svs)] = svs
    sv_vol[64, 0, 0:len(svs):2] = svs[::2]

    post_labelmap_voxels(*instance_info, (0, 0, 0), sv_vol)

    post_merge(*instance_info, 1, svs[1:])

    group_mapping = pd.Series(index=svs, data=groups)
    final_table = post_hierarchical_cleaves(*instance_info, 1, group_mapping)

    assert (fetch_mapping(*instance_info, svs) == final_table['body'].values).all()
    assert (final_table.drop_duplicates(['group']) == final_table.drop_duplicates(['group', 'body'])).all().all()
    assert (final_table.drop_duplicates(['body']) == final_table.drop_duplicates(['group', 'body'])).all().all()

    # Since the mapping included all supervoxels in the body,
    # the last group is left with the original label.
    assert final_table.iloc[-1]['body'] == 1

    # Now merge them all together and try again, but leave
    # two supervoxels out of the groups this time.
    merges = set(pd.unique(final_table['body'].values)) - set([1])
    post_merge(*instance_info, 1, list(merges))

    group_mapping = pd.Series(index=svs[:-2], data=groups[:-2])
    final_table = post_hierarchical_cleaves(*instance_info, 1, group_mapping)

    assert len(final_table.query('body == 1')) == 0, \
        "Did not expect any of the groups to retain the original body ID!"
    assert (fetch_mapping(*instance_info, svs[:-2]) == final_table['body'].values).all()
    assert (final_table.drop_duplicates(['group']) == final_table.drop_duplicates(['group', 'body'])).all().all()
    assert (final_table.drop_duplicates(['body']) == final_table.drop_duplicates(['group', 'body'])).all().all()
    assert (fetch_mapping(*instance_info, svs[-2:]) == 1).all()

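# For reference, the group_mapping passed to post_hierarchical_cleaves() above
# is a pandas Series of supervoxel -> group, e.g.:
#
#     >>> pd.Series(index=np.asarray([1, 2, 3], np.uint64), data=[1, 1, 2])
#     1    1
#     2    1
#     3    2
#     dtype: int64
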
def test_fetch_sparsevol_coarse_via_labelindex(labelmap_setup):
    dvid_server, dvid_repo, _merge_table_path, _mapping_path, _supervoxel_vol = labelmap_setup

    # Create a labelmap volume with 3 blocks.
    #
    # Supervoxels are arranged like this:
    #
    #   | 1 2 | 3 4 | 5 6 |
    #
    # After merging [3,4,5] into 2, the bodies will be:
    #
    #   | 1 2 | 2 2 | 2 6 |
    #
    vol_shape = (64, 64, 256)
    sv_vol = np.zeros(vol_shape, np.uint64)
    sv_vol[:, :, 0:32] = 1
    sv_vol[:, :, 32:64] = 2
    sv_vol[:, :, 64:96] = 3
    sv_vol[:, :, 96:128] = 4
    sv_vol[:, :, 128:160] = 5
    sv_vol[:, :, 160:192] = 6

    instance_info = dvid_server, dvid_repo, 'segmentation-test-sparsevol-coarse'
    create_labelmap_instance(*instance_info)
    post_labelmap_voxels(*instance_info, (0, 0, 0), sv_vol)

    post_merge(*instance_info, 2, [3, 4, 5])

    body_svc = fetch_sparsevol_coarse_via_labelindex(*instance_info, 2, method='protobuf')
    expected_body_svc = fetch_sparsevol_coarse(*instance_info, 2)
    assert sorted(body_svc.tolist()) == sorted(expected_body_svc.tolist())

    body_svc = fetch_sparsevol_coarse_via_labelindex(*instance_info, 2, method='pandas')
    expected_body_svc = fetch_sparsevol_coarse(*instance_info, 2)
    assert sorted(body_svc.tolist()) == sorted(expected_body_svc.tolist())

    sv_svc = fetch_sparsevol_coarse_via_labelindex(*instance_info, 3, supervoxels=True, method='protobuf')
    expected_sv_svc = fetch_sparsevol_coarse(*instance_info, 3, supervoxels=True)
    assert sorted(sv_svc.tolist()) == sorted(expected_sv_svc.tolist())

    sv_svc = fetch_sparsevol_coarse_via_labelindex(*instance_info, 3, supervoxels=True, method='pandas')
    expected_sv_svc = fetch_sparsevol_coarse(*instance_info, 3, supervoxels=True)
    assert sorted(sv_svc.tolist()) == sorted(expected_sv_svc.tolist())

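# fetch_sparsevol_coarse() returns *block* coordinates rather than voxel
# coordinates (DVID's default block width is 64).  A minimal sketch for
# converting one block coordinate to a voxel bounding box, assuming the
# default block width (hypothetical helper, not used by the test):
def _block_to_voxel_box(block_coord, block_width=64):
    block_coord = np.asarray(block_coord)
    return np.array([block_coord * block_width, (block_coord + 1) * block_width])
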
def test_extract_edges_with_large_gap(labelmap_setup):
    """
    If a large gap exists between a supervoxel and the rest of the body,
    we won't find an edge for it, but there should be no crash.
    """
    dvid_server, dvid_repo, merge_table_path, mapping_path, _supervoxel_vol = labelmap_setup
    orig_merge_table = load_merge_table(merge_table_path, mapping_path, normalize=True)

    merge_graph = LabelmapMergeGraph(merge_table_path)
    merge_graph.apply_mapping(mapping_path)

    uuid = post_branch(dvid_server, dvid_repo, 'test_extract_edges_large_gap', '')

    # Exercise a corner case:
    # Add a new supervoxel to the body, far away from the others.
    # (No edge will be added for that supervoxel.)
    block_99 = 99 * np.ones((64, 64, 64), np.uint64)
    DVIDNodeService(dvid_server, uuid).put_labels3D('segmentation', block_99, (128, 0, 0))
    post_merge(dvid_server, uuid, 'segmentation', 1, [99])

    root_logger = logging.getLogger()
    oldlevel = root_logger.level
    try:
        # Hide warnings for this call; they're intentional.
        root_logger.setLevel(logging.ERROR)
        _mutid, dvid_supervoxels, edges, _scores = merge_graph.extract_edges(
            dvid_server, uuid, 'segmentation', 1)
    finally:
        root_logger.setLevel(oldlevel)

    assert (dvid_supervoxels == [1, 2, 3, 4, 5, 99]).all()
    assert (orig_merge_table[['id_a', 'id_b']].values == edges).all().all(), \
        f"Original merge table doesn't match fetched:\n{orig_merge_table}\n\n{edges}\n"

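# The save/restore logger dance above could also be packaged as a reusable
# context manager.  A minimal sketch (hypothetical helper, not part of the
# test suite):
from contextlib import contextmanager

@contextmanager
def _quiet_root_logger(level=logging.ERROR):
    """Temporarily raise the root logger's level, then restore it."""
    root = logging.getLogger()
    oldlevel = root.level
    root.setLevel(level)
    try:
        yield
    finally:
        root.setLevel(oldlevel)
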
def test_fetch_mutations(labelmap_setup):
    dvid_server, dvid_repo, _merge_table_path, _mapping_path, _supervoxel_vol = labelmap_setup
    uuid = post_branch(dvid_server, dvid_repo, 'segmentation-fetch_mutations', '')
    instance = 'segmentation-fetch_mutations'
    create_labelmap_instance(dvid_server, uuid, instance)

    voxels = np.zeros((64, 64, 64), dtype=np.uint64)
    voxels[0, 0, :10] = [*range(1, 11)]
    post_labelmap_voxels(dvid_server, uuid, instance, (0, 0, 0), voxels)

    post_merge(dvid_server, uuid, instance, 1, [2, 3, 4])
    post_merge(dvid_server, uuid, instance, 5, [6, 7, 8])

    post_commit(dvid_server, uuid, '')
    uuid2 = post_newversion(dvid_server, uuid, '')

    post_merge(dvid_server, uuid2, instance, 9, [10])
    post_merge(dvid_server, uuid2, instance, 1, [5, 10])

    mut_df = fetch_mutations(dvid_server, uuid2, instance, dag_filter='leaf-only')
    assert len(mut_df) == 2
    assert (mut_df['uuid'] == uuid2).all()
    assert (mut_df['action'] == 'merge').all()
    assert (mut_df['target_body'] == [9, 1]).all()

    mut_df = fetch_mutations(dvid_server, uuid2, instance, dag_filter='leaf-and-parents')
    assert len(mut_df) == 4
    assert (mut_df['uuid'] == [uuid, uuid, uuid2, uuid2]).all()
    assert (mut_df['action'] == 'merge').all()
    assert (mut_df['target_body'] == [1, 5, 9, 1]).all()

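# Note on the dag_filter values exercised above: 'leaf-only' returns just the
# mutations posted to the leaf node (uuid2), while 'leaf-and-parents' also
# includes mutations from the leaf's ancestors (uuid), in DAG order.
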
def setup_dvid_segmentation_input(setup_dvid_repo, random_segmentation):
    dvid_address, repo_uuid = setup_dvid_repo

    # Since the same UUID is re-used for each test case,
    # this counter is a little hack used to make sure the segmentation
    # has a unique name each time, so that previous test cases don't
    # affect subsequent test cases.
    global test_case_counter
    test_case_counter += 1

    # Normally the MaskSegmentation workflow is used to update
    # a segmentation instance from a parent uuid to a child uuid.
    # But for this test, we'll simulate that by writing to two
    # different instances in the same uuid.
    input_segmentation_name = f'masksegmentation-input-{test_case_counter}'
    output_segmentation_name = f'masksegmentation-output-from-dvid-{test_case_counter}'

    # Agglomerate some supervoxels into bodies.
    # Choose supervoxels that intersect three Z-planes at 64, 128, 192.
    svs_1 = np.unique(random_segmentation[64])
    svs_2 = np.unique(random_segmentation[128])
    svs_3 = np.unique(random_segmentation[192])

    for instance in (input_segmentation_name, output_segmentation_name):
        create_labelmap_instance(dvid_address, repo_uuid, instance, max_scale=MAX_SCALE)

        # Start with an empty mapping (the repo/instance are re-used for each test case)
        post_labelmap_voxels(dvid_address, repo_uuid, instance, (0, 0, 0), random_segmentation, downres=True)
        post_merge(dvid_address, repo_uuid, instance, svs_1[0], svs_1[1:])
        post_merge(dvid_address, repo_uuid, instance, svs_2[0], svs_2[1:])
        post_merge(dvid_address, repo_uuid, instance, svs_3[0], svs_3[1:])

    # Create an ROI to test with -- a sphere with scale-5 resolution
    shape_s5 = np.array(random_segmentation.shape) // 2**5
    midpoint_s5 = shape_s5 / 2
    radius = midpoint_s5.min()

    coords_s5 = ndindex_array(*shape_s5)
    distances = np.sqrt(np.sum((coords_s5 - midpoint_s5)**2, axis=1))
    keep = (distances < radius)
    coords_s5 = coords_s5[keep, :]

    roi_ranges = runlength_encode_to_ranges(coords_s5)
    roi_name = 'masksegmentation-test-roi'

    try:
        create_instance(dvid_address, repo_uuid, roi_name, 'roi')
    except HTTPError as ex:
        # The ROI instance is re-used across test cases; only tolerate
        # the "already exists" error, and re-raise anything else.
        if ex.response is None or 'already exists' not in ex.response.content.decode('utf-8'):
            raise

    post_roi(dvid_address, repo_uuid, roi_name, roi_ranges)

    roi_mask_s5 = np.zeros(shape_s5, dtype=bool)
    roi_mask_s5[(*coords_s5.transpose(),)] = True

    template_dir = tempfile.mkdtemp(suffix="masksegmentation-from-dvid-template")

    config_text = textwrap.dedent(f"""\
        workflow-name: masksegmentation
        cluster-type: {CLUSTER_TYPE}

        input:
          dvid:
            server: {dvid_address}
            uuid: {repo_uuid}
            segmentation-name: {input_segmentation_name}
            supervoxels: true

          geometry:
            # Choose a brick that doesn't cleanly divide into the bounding box
            message-block-shape: [192,64,64]

        output:
          dvid:
            server: {dvid_address}
            uuid: {repo_uuid}
            segmentation-name: {output_segmentation_name}
            supervoxels: true
            disable-indexing: true

        masksegmentation:
          mask-roi: {roi_name}
          batch-size: 5
          block-statistics-file: erased-block-statistics.h5
    """)

    with open(f"{template_dir}/workflow.yaml", 'w') as f:
        f.write(config_text)

    yaml = YAML()
    with StringIO(config_text) as f:
        config = yaml.load(f)

    return (template_dir, config, random_segmentation, dvid_address, repo_uuid,
            roi_mask_s5, input_segmentation_name, output_segmentation_name)

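# For reference, runlength_encode_to_ranges() is assumed to collapse the
# [Z,Y,X] coordinate list into DVID ROI ranges of the form [Z, Y, X0, X1],
# where each run of consecutive-X coordinates becomes a single row, e.g.
# (illustrative only):
#
#     coords = [[0,0,0], [0,0,1], [0,0,2], [0,1,5]]
#     ranges = [[0, 0, 0, 2],
#               [0, 1, 5, 5]]
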
def init_labelmap_nodes():
    # Five supervoxels are each 1x3x3, arranged in a single row like this:
    # [[[1 1 1 2 2 2 3 3 3 4 4 4 5 5 5]
    #   [1 1 1 2 2 2 3 3 3 4 4 4 5 5 5]
    #   [1 1 1 2 2 2 3 3 3 4 4 4 5 5 5]]]
    supervoxel_vol = np.zeros((1, 3, 15), np.uint64)
    supervoxel_vol[:] = (np.arange(15, dtype=np.uint64) // 3).reshape(1, 1, 15)
    supervoxel_vol += 1
    np.set_printoptions(linewidth=100)
    #print(supervoxel_vol)

    # Merge table: Merge them all together
    id_a = np.array([1, 2, 3, 4], np.uint64)
    id_b = np.array([2, 3, 4, 5], np.uint64)
    xa = np.array([2, 5, 8, 11], np.uint32)
    ya = np.array([1, 1, 1, 1], np.uint32)
    za = np.array([0, 0, 0, 0], np.uint32)
    xb = np.array([3, 6, 9, 12], np.uint32)
    yb = np.array([1, 1, 1, 1], np.uint32)
    zb = np.array([0, 0, 0, 0], np.uint32)

    # Weak edge between 3 and 4
    score = np.array([0.4, 0.4, 0.8, 0.4], np.float32)

    merge_table = pd.DataFrame({'id_a': id_a, 'id_b': id_b,
                                'xa': xa, 'ya': ya, 'za': za,
                                'xb': xb, 'yb': yb, 'zb': zb,
                                'score': score})
    merge_table = merge_table[['id_a', 'id_b', 'xa', 'ya', 'za', 'xb', 'yb', 'zb', 'score']]

    merge_table_path = f'{TEST_DATA_DIR}/merge-table.npy'
    np.save(merge_table_path, merge_table.to_records(index=False))

    create_labelmap_instance(TEST_SERVER, TEST_REPO, 'segmentation', max_scale=2)
    create_labelmap_instance(TEST_SERVER, TEST_REPO, 'segmentation-scratch', max_scale=2)

    # Expand to 64**3
    supervoxel_block = np.zeros((64, 64, 64), np.uint64)
    supervoxel_block[:1, :3, :15] = supervoxel_vol
    post_labelmap_voxels(TEST_SERVER, TEST_REPO, 'segmentation', (0, 0, 0), supervoxel_block)
    post_labelmap_voxels(TEST_SERVER, TEST_REPO, 'segmentation-scratch', (0, 0, 0), supervoxel_block)

    post_commit(TEST_SERVER, TEST_REPO, 'supervoxels')

    # # Create a child node for agglo mappings
    # r = requests.post(f'http://{TEST_SERVER}/api/node/{TEST_REPO}/newversion', json={'note': 'agglo'})
    # r.raise_for_status()
    # agglo_uuid = r.json()["child"]

    # Merge everything
    agglo_uuid = TEST_REPO
    post_merge(TEST_SERVER, agglo_uuid, 'segmentation', 1, [2, 3, 4, 5])

    mapping = np.array([[1,1], [2,1], [3,1], [4,1], [5,1]], np.uint64)
    #mapping = pd.DataFrame(mapping, columns=['sv', 'body']).set_index('sv')['body']

    mapping_path = f'{TEST_DATA_DIR}/mapping.npy'
    np.save(mapping_path, mapping)

    return merge_table_path, mapping_path, supervoxel_vol

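# A minimal sketch (hypothetical, not part of the fixture) showing how a test
# could load the saved mapping back into the pandas form suggested by the
# commented-out conversion above:
def _load_mapping_sketch(mapping_path):
    mapping = np.load(mapping_path)  # columns: sv, body
    return pd.Series(mapping[:, 1], index=mapping[:, 0], name='body').rename_axis('sv')
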