import glob
import os
import re

import cloudvolume
import numpy as np
from cloudvolume.lib import Bbox
from tqdm import tqdm

# Local helpers (load_inference, prepare_precomputed, get_bbox_from_cv,
# make_labels_contiguous, find_remap, find_remap_v2, perform_remap,
# update_mips, clean_up) are assumed to be defined elsewhere in this module.


def agglomerate(cv_path_1, cv_path_2, contiguous=False, inplace=False,
                no_zero=True):
    """Given two cloudvolumes, intersect and perform agglomeration."""
    cv_args = dict(
        bounded=True, fill_missing=True, autocrop=False,
        cache=False, compress_cache=None, cdn_cache=False,
        progress=False, provenance=None, compress=True,
        non_aligned_writes=True, parallel=True)
    cv1 = cloudvolume.CloudVolume('file://' + cv_path_1, mip=0, **cv_args)
    cv2 = cloudvolume.CloudVolume('file://' + cv_path_2, mip=0, **cv_args)

    bb1 = get_bbox_from_cv(cv1)
    bb2 = get_bbox_from_cv(cv2)
    int_bb = Bbox.intersection(bb1, bb2)

    data_1 = cv1[int_bb]
    data_2 = cv2[int_bb]
    if contiguous:
        data_1, map_1 = make_labels_contiguous(data_1)
        data_2, map_2 = make_labels_contiguous(data_2)

    data_1 = np.uint32(data_1)
    data_2 = np.uint32(data_2)

    # Find the remap from volume 2 labels onto volume 1 labels.
    remap_label = find_remap(data_2, data_1)
    if no_zero:
        # Drop pairs in which either the key or the value is background (0).
        remap_label = {
            k: v for k, v in remap_label.items() if k != 0 and v != 0
        }
    data_2_full = cv2[bb2]
    data_2_full_remap = perform_remap(data_2_full, remap_label)
    if inplace:
        cv2[bb2] = data_2_full_remap
        update_mips(cv_path_2, bb2, **cv_args)
    return remap_label

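# A minimal usage sketch for this version of `agglomerate`, assuming two
# overlapping precomputed volumes already exist on local disk. The paths are
# hypothetical placeholders; with inplace=True, volume 2 is rewritten with
# its labels remapped onto volume 1 and its mip levels refreshed.
def _example_agglomerate_inplace():
    remap = agglomerate('/tmp/precomputed-0-0-0',    # hypothetical volume 1
                        '/tmp/precomputed-0-0-512',  # hypothetical volume 2
                        inplace=True)
    print('%d labels of volume 2 remapped onto volume 1' % len(remap))
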
def update_ids_and_write(seg_map, sub_indices):
    pbar = tqdm(sub_indices, desc='Update ids to be globally unique')
    for k in pbar:
        s = seg_map[k]['input']
        seg, offset_zyx = load_inference(s)
        offset_xyz = offset_zyx[::-1]
        size_xyz = seg.shape[::-1]
        bbox = seg_map[k]['bbox']
        seg = np.transpose(seg, [2, 1, 0])

        # Make labels contiguous and globally unique, but keep 0 (background)
        # intact across the shift.
        seg = np.uint32(seg)
        zeros = seg == 0
        seg, _ = make_labels_contiguous(seg)
        seg = np.uint32(seg)
        seg += seg_map[k]['global_offset']
        seg[zeros] = 0

        # Convert to precomputed format.
        precomputed_path = seg_map[k]['output']
        resolution = seg_map[k]['resolution']
        chunk_size = seg_map[k]['chunk_size']
        cv = prepare_precomputed(precomputed_path, offset_xyz, size_xyz,
                                 resolution, chunk_size)
        cv[bbox] = seg

def get_seg_map(input_dir, output_dir, resolution, chunk_size, sub_indices,
                post_clean_up=False):
    if not sub_indices.size:
        return {}
    get_name = lambda s: re.sub('seg-', 'precomputed-', s)
    seg_list = glob.glob(os.path.join(input_dir, 'seg*'))
    seg_list.sort()
    seg_map = dict()
    pbar = tqdm(sub_indices, desc='Generating seg map')
    for i in pbar:
        seg_path = seg_list[i]
        # Skip blocks for which no inference results were written.
        if not len(glob.glob(os.path.join(seg_path, '**/*.npz'),
                             recursive=True)):
            continue
        seg, offset_zyx = load_inference(seg_path)
        offset_xyz = offset_zyx[::-1]
        size_xyz = seg.shape[::-1]
        bbox = Bbox(a=offset_xyz, b=offset_xyz + size_xyz)
        seg = np.transpose(seg, [2, 1, 0])

        if post_clean_up:
            min_particle = 1000
            clean_up(seg, min_particle)

        # Make labels contiguous within this block; the global offset is
        # applied later in update_ids_and_write.
        seg = np.uint32(seg)
        seg, _ = make_labels_contiguous(seg)
        seg = np.uint32(seg)
        local_max = np.max(seg)

        precomputed_path = os.path.join(output_dir,
                                        get_name(os.path.basename(seg_path)))
        seg_map[i] = {
            'bbox': bbox,
            'input': os.path.abspath(seg_path),
            'output': os.path.abspath(precomputed_path),
            'resolution': resolution,
            'chunk_size': chunk_size,
            'local_max': local_max
        }
    return seg_map

def get_seg_map(input_dir, output_dir, resolution, chunk_size, sub_indices,
                post_clean_up=False, verbose=False):
    if not sub_indices.size:
        return {}
    get_name = lambda s: re.sub('seg-', 'precomputed-', s)
    seg_list = glob.glob(os.path.join(input_dir, 'seg*'))
    seg_list.sort()
    seg_map = dict()
    if verbose:
        pbar = tqdm(sub_indices, desc='Generating seg map')
    else:
        pbar = sub_indices
    for i in pbar:
        seg_path = seg_list[i]
        # Record an empty entry for blocks without inference results so that
        # downstream consumers can skip them explicitly.
        if not len(glob.glob(os.path.join(seg_path, '**/*.npz'),
                             recursive=True)):
            seg_map[i] = {}
            continue
        seg, offset_zyx = load_inference(seg_path)
        offset_xyz = offset_zyx[::-1]
        size_xyz = seg.shape[::-1]
        bbox = Bbox(a=offset_xyz, b=offset_xyz + size_xyz)
        seg = np.transpose(seg, [2, 1, 0])

        if post_clean_up:
            min_particle = 1000
            clean_up(seg, min_particle)

        # Make labels contiguous within this block; the global offset is
        # applied later in update_ids_and_write.
        seg = np.uint32(seg)
        seg, _ = make_labels_contiguous(seg)
        seg = np.uint32(seg)
        local_max = np.max(seg)

        precomputed_path = os.path.join(output_dir,
                                        get_name(os.path.basename(seg_path)))
        seg_map[i] = {
            'bbox': bbox,
            'input': os.path.abspath(seg_path),
            'output': os.path.abspath(precomputed_path),
            'resolution': resolution,
            'chunk_size': chunk_size,
            'local_max': local_max
        }
    return seg_map

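# A sketch of how get_seg_map and update_ids_and_write likely fit together.
# get_seg_map only records each block's 'local_max', while
# update_ids_and_write expects a 'global_offset' per block; the cumulative
# sum below is an assumed derivation, and the directories, resolution, and
# chunk size are hypothetical placeholders.
def _example_build_and_write():
    input_dir = '/tmp/inference'     # hypothetical
    output_dir = '/tmp/precomputed'  # hypothetical
    sub_indices = np.arange(4)       # hypothetical: first four blocks
    seg_map = get_seg_map(input_dir, output_dir, [8, 8, 33], [64, 64, 64],
                          sub_indices)
    # Give each block an id range disjoint from every preceding block.
    offset = 0
    for i in sorted(seg_map):
        if not seg_map[i]:
            continue
        seg_map[i]['global_offset'] = offset
        offset += int(seg_map[i]['local_max'])
    # Only pass blocks that actually produced segmentation.
    valid = np.array([i for i in sub_indices if seg_map.get(i)])
    update_ids_and_write(seg_map, valid)
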
def agglomerate(cv_path_1, cv_path_2, overlap_thresh, contiguous=False,
                mutual_exclusive=False, no_zero=True):
    """Given two cloudvolumes, intersect and perform agglomeration."""
    cv_args = dict(
        bounded=True, fill_missing=True, autocrop=False,
        cache=False, compress_cache=None, cdn_cache=False,
        progress=False, provenance=None, compress=True,
        non_aligned_writes=True, parallel=False)
    cv1 = cloudvolume.CloudVolume('file://' + cv_path_1, mip=0, **cv_args)
    cv2 = cloudvolume.CloudVolume('file://' + cv_path_2, mip=0, **cv_args)

    bb1 = get_bbox_from_cv(cv1)
    bb2 = get_bbox_from_cv(cv2)
    int_bb = Bbox.intersection(bb1, bb2)

    data_1 = cv1[int_bb]
    data_2 = cv2[int_bb]
    if contiguous:
        data_1, map_1 = make_labels_contiguous(data_1)
        data_2, map_2 = make_labels_contiguous(data_2)

    data_1 = np.uint32(data_1)
    data_2 = np.uint32(data_2)

    # Find the remap from volume 2 labels onto volume 1 labels; the v2
    # variant enforces a mutually exclusive (one-to-one) assignment.
    if mutual_exclusive:
        remap_label = find_remap_v2(data_2, data_1, overlap_thresh)
    else:
        remap_label = find_remap(data_2, data_1, overlap_thresh)
    if no_zero:
        # Drop pairs in which either the key or the value is background (0).
        # (The original left this parameter unused; the filter mirrors the
        # inplace variant above.)
        remap_label = {
            k: v for k, v in remap_label.items() if k != 0 and v != 0
        }
    return remap_label

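# A hedged sketch of consuming the remap returned by this variant of
# `agglomerate`: unlike the inplace version above, the caller applies the
# mapping itself, e.g. with perform_remap on the second volume. The paths
# and threshold are hypothetical placeholders.
def _example_apply_remap():
    remap = agglomerate('/tmp/precomputed-a', '/tmp/precomputed-b',
                        overlap_thresh=0.5, mutual_exclusive=True)
    cv_b = cloudvolume.CloudVolume('file:///tmp/precomputed-b', mip=0)
    bb = get_bbox_from_cv(cv_b)
    cv_b[bb] = perform_remap(cv_b[bb], remap)
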
def get_seg_map(input_dir, output_dir, resolution, chunk_size, sub_indices):
    if not sub_indices.size:
        return {}
    get_name = lambda s: re.sub('seg-', 'precomputed-', s)
    seg_list = glob.glob(os.path.join(input_dir, 'seg*'))
    seg_list.sort()
    seg_map = dict()
    pbar = tqdm(sub_indices, desc='Generating seg map')
    for i in pbar:
        seg_path = seg_list[i]
        seg, offset_zyx = load_inference(seg_path)
        offset_xyz = offset_zyx[::-1]
        size_xyz = seg.shape[::-1]
        bbox = Bbox(a=offset_xyz, b=offset_xyz + size_xyz)
        seg = np.transpose(seg, [2, 1, 0])

        # Make labels contiguous within this block; the global offset is
        # applied later in update_ids_and_write.
        seg = np.uint32(seg)
        seg, _ = make_labels_contiguous(seg)
        seg = np.uint32(seg)
        local_max = np.max(seg)

        precomputed_path = os.path.join(output_dir,
                                        get_name(os.path.basename(seg_path)))
        seg_map[i] = {
            'bbox': bbox,
            'input': os.path.abspath(seg_path),
            'output': os.path.abspath(precomputed_path),
            'resolution': resolution,
            'chunk_size': chunk_size,
            'local_max': local_max
        }
    return seg_map

def get_seg_map_cv(input_dir, output_dir, sub_indices, post_clean_up=False,
                   verbose=False):
    if not sub_indices.size:
        return {}
    seg_list = glob.glob(os.path.join(input_dir, 'precomputed*'))
    seg_list.sort()
    seg_map = dict()
    if verbose:
        pbar = tqdm(sub_indices, desc='Generating seg map')
    else:
        pbar = sub_indices
    for i in pbar:
        seg_path = seg_list[i]
        seg_cv = cloudvolume.CloudVolume('file://%s' % seg_path, mip=0,
                                         progress=False, parallel=False)
        bbox = seg_cv.meta.bounds(0)
        seg = np.array(seg_cv[bbox][..., 0])
        resolution = seg_cv.meta.resolution(0)
        chunk_size = seg_cv.meta.chunk_size(0)

        if post_clean_up:
            min_particle = 1000
            clean_up(seg, min_particle)

        # Make labels contiguous within this block; the global offset is
        # applied later in update_ids_and_write_cv.
        seg = np.uint32(seg)
        seg, _ = make_labels_contiguous(seg)
        seg = np.uint32(seg)
        local_max = np.max(seg)

        precomputed_path = os.path.join(output_dir, os.path.basename(seg_path))
        seg_map[i] = {
            'bbox': bbox,
            'input': os.path.abspath(seg_path),
            'output': os.path.abspath(precomputed_path),
            'resolution': resolution,
            'chunk_size': chunk_size,
            'local_max': local_max
        }
    return seg_map

def update_ids_and_write_cv(seg_map, sub_indices, verbose=False):
    if verbose:
        pbar = tqdm(sub_indices, desc='Update ids to be globally unique')
    else:
        pbar = sub_indices
    for k in pbar:
        if not seg_map[k]:
            continue
        seg_path = seg_map[k]['input']
        seg_cv = cloudvolume.CloudVolume('file://%s' % seg_path, mip=0,
                                         progress=False, parallel=False)
        bbox = seg_cv.meta.bounds(0)
        seg = np.array(seg_cv[bbox][..., 0])

        # Make labels contiguous and globally unique, but keep 0 (background)
        # intact across the shift.
        seg = np.uint32(seg)
        zeros = seg == 0
        seg, _ = make_labels_contiguous(seg)
        seg = np.uint32(seg)
        seg += seg_map[k]['global_offset']
        seg[zeros] = 0

        # Write to a new precomputed volume that reuses the input's metadata.
        # (The original wrote via `out_cv[...]`; indexing with the full bounds
        # makes the intended whole-volume write explicit.)
        precomputed_path = seg_map[k]['output']
        out_info = seg_cv.info.copy()
        out_cv = cloudvolume.CloudVolume('file://%s' % precomputed_path,
                                         info=out_info, mip=0)
        out_cv.commit_info()
        out_cv[bbox] = seg

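# An end-to-end sketch for the CloudVolume-backed variant, under the same
# assumption as above that 'global_offset' is the running sum of each
# block's 'local_max'. Directories are hypothetical placeholders.
def _example_cv_pipeline():
    input_dir = '/tmp/precomputed_blocks'  # hypothetical
    output_dir = '/tmp/unique_ids'         # hypothetical
    n_blocks = len(glob.glob(os.path.join(input_dir, 'precomputed*')))
    sub_indices = np.arange(n_blocks)
    seg_map = get_seg_map_cv(input_dir, output_dir, sub_indices, verbose=True)
    offset = 0
    for i in sorted(seg_map):
        if not seg_map[i]:
            continue
        seg_map[i]['global_offset'] = offset
        offset += int(seg_map[i]['local_max'])
    update_ids_and_write_cv(seg_map, sub_indices, verbose=True)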