def test_append_edges_for_focused_merges(labelmap_setup):
    dvid_server, dvid_repo, merge_table_path, _mapping_path, _supervoxel_vol = labelmap_setup

    decision_instance = 'segmentation_merged_TEST'
    create_instance(dvid_server, dvid_repo, decision_instance, 'keyvalue')

    # Post a new 'decision' between supervoxels 1 and 5
    post_key(dvid_server, dvid_repo, decision_instance, '1+5',
             json={'supervoxel ID 1': 1,
                   'supervoxel ID 2': 5,
                   'body ID 1': 1,
                   'body ID 2': 1,
                   'result': 'merge',
                   'supervoxel point 1': [0, 0, 0],    # xyz
                   'supervoxel point 2': [12, 0, 0]})  # xyz

    merge_graph = LabelmapMergeGraph(merge_table_path)
    merge_graph.append_edges_for_focused_merges(dvid_server, dvid_repo, decision_instance)

    assert len(merge_graph.merge_table_df.query('id_a == 1 and id_b == 5')) == 1

def create_body_tarball_from_sv_tarball(instance_info, body_id):
    """
    Download a supervoxel mesh tarball from the given key-value instance,
    concatenate together the component meshes into a single body tarball,
    and upload it.
    """
    keyEncodeLevel0 = 10000000000000
    keyEncodeLevel1 = 10100000000000

    encoded_sv = str(body_id + keyEncodeLevel0)
    sv_tarball_path = f'/tmp/{encoded_sv}.tar'

    logger.info(f'Fetching {encoded_sv}.tar')
    tarball_contents = fetch_key(*instance_info, f'{encoded_sv}.tar')
    with open(sv_tarball_path, 'wb') as f:
        f.write(tarball_contents)

    logger.info(f'Unpacking {encoded_sv}.tar')
    sv_dir = f'/tmp/{encoded_sv}'
    os.makedirs(sv_dir, exist_ok=True)
    os.chdir(sv_dir)
    subprocess.check_call(f'tar -xf {sv_tarball_path}', shell=True)

    encoded_body = str(body_id + keyEncodeLevel1)
    body_tarball_path = f'/tmp/{encoded_body}.tar'

    logger.info(f"Constructing {encoded_body}.drc")
    mesh = Mesh.from_directory(sv_dir)
    mesh.serialize(f'/tmp/{encoded_body}.drc')
    subprocess.check_call(f'tar -cf {body_tarball_path} /tmp/{encoded_body}.drc', shell=True)

    with open(body_tarball_path, 'rb') as f:
        logger.info(f'Posting {encoded_body}.tar')
        post_key(*instance_info, f'{encoded_body}.tar', f)

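# A convenience sketch (not part of the original module): a batch wrapper around
# create_body_tarball_from_sv_tarball() for a list of bodies. It relies only on
# the function and logger defined in this module; the example values in the
# docstring are hypothetical placeholders.
def create_body_tarballs_from_sv_tarballs(instance_info, body_ids):
    """
    Example:
        instance_info = ('emdata:8900', 'abc123', 'segmentation_sv_meshes')  # hypothetical
        failed = create_body_tarballs_from_sv_tarballs(instance_info, [17, 42])
    """
    failed = []
    for body_id in body_ids:
        try:
            create_body_tarball_from_sv_tarball(instance_info, body_id)
        except Exception:
            logger.exception(f"Failed to construct body tarball for body {body_id}")
            failed.append(body_id)
    return failed
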
def copy_meshes_to_keyvalue(src_info, dest_info, body_list):
    failed_bodies = []
    for body_id in tqdm(body_list):
        try:
            tar_bytes = fetch_tarfile(*src_info, body_id)
        except Exception:
            logger.error(f"Failed to copy {body_id}")
            failed_bodies.append(body_id)
            continue

        # Note: keyEncodeLevel0 is assumed to be defined at module scope
        # (e.g. keyEncodeLevel0 = 10000000000000, as in the function above).
        encoded_body = np.uint64(keyEncodeLevel0 + body_id)
        assert isinstance(encoded_body, np.uint64)

        post_key(*dest_info, f'{encoded_body}.tar', tar_bytes)

    return failed_bodies

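# Usage sketch for copy_meshes_to_keyvalue(). The servers, uuids, instance
# names, and body IDs below are hypothetical placeholders, not values from
# this repo.
def _example_copy_meshes():
    src_info = ('emdata1:8900', 'abc123', 'segmentation_meshes_tars')
    dest_info = ('emdata2:8900', 'def456', 'segmentation_meshes_tars')
    failed = copy_meshes_to_keyvalue(src_info, dest_info, [1000, 1001, 1002])
    if failed:
        logger.warning(f"Failed to copy {len(failed)} bodies: {failed}")
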
def process_point(seg_src, seg_dst, point, radius, src_body, dst_body):
    """
    Generate a neighborhood segment around a particular point.
    Upload the voxels for the segment and the corresponding mesh.
    """
    r = radius
    src_box = np.asarray((point - r, point + r + 1))
    src_vol = fetch_labelmap_voxels(*seg_src, src_box)

    if src_body is None:
        src_body = src_vol[r, r, r]

    if dst_body is None:
        # Generate a neighborhood segment ID from the coordinate.
        # Divide by 4 to ensure the coordinates fit within 2^53.
        # (The segment ID will not retain the full resolution of
        # the coordinate, but that's usually OK for our purposes.)
        dst_body = encode_point_to_uint64(point // 4, 17)

    mask = (src_vol == src_body) & sphere_mask(r)

    dst_box = round_box(src_box, 64, 'out')
    dst_vol = fetch_labelmap_voxels(*seg_dst, dst_box)

    dst_view = dst_vol[b2s(*(src_box - dst_box[0]))]
    dst_view[mask] = dst_body

    post_labelmap_voxels(*seg_dst, dst_box[0], dst_vol, downres=True)

    # Mesh needs to be written in nm, hence 8x
    mesh = Mesh.from_binary_vol(mask, 8 * src_box, smoothing_rounds=2)
    mesh.simplify(0.05, in_memory=True)
    post_key(*seg_dst[:2], f'{seg_dst[2]}_meshes', f'{dst_body}.ngmesh',
             mesh.serialize(fmt='ngmesh'))

    centroid = src_box[0] + mask_centroid(mask, True)

    top_z = mask.sum(axis=(1, 2)).nonzero()[0][0]
    top_coords = np.transpose(mask[top_z].nonzero())
    top_point = src_box[0] + (top_z, *top_coords[len(top_coords) // 2])

    return point, centroid, top_point, src_body, dst_body, mask.sum()

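# Driver sketch (an assumption for illustration, not part of the original code):
# run process_point() over many points and tabulate the results with pandas.
# The column names are chosen here to match the return tuple above.
def process_points(seg_src, seg_dst, points_zyx, radius):
    import pandas as pd
    results = [process_point(seg_src, seg_dst, np.asarray(p), radius, None, None)
               for p in points_zyx]
    cols = ['point', 'centroid', 'top_point', 'src_body', 'dst_body', 'voxel_count']
    return pd.DataFrame(results, columns=cols)
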
def _generate_and_store_mesh():
    try:
        dvid = request.args['dvid']
        body = request.args['body']
    except KeyError as ex:
        return Response(f"Missing required parameter: {ex.args[0]}", 400)

    segmentation = request.args.get('segmentation', 'segmentation')
    mesh_kv = request.args.get('mesh_kv', f'{segmentation}_meshes')

    uuid = request.args.get('uuid') or find_master(dvid)

    scale = request.args.get('scale')
    if scale is not None:
        scale = int(scale)

    smoothing = int(request.args.get('smoothing', 2))

    # Note: This is just the effective desired decimation assuming scale-1 data.
    # If we're forced to select a higher scale than scale-1, then we'll increase
    # this number to compensate.
    decimation = float(request.args.get('decimation', 0.1))

    user = request.args.get('u') or request.args.get('user', "UNKNOWN")

    # TODO: The global cache of DVID sessions should store authentication info
    #       and use it as part of the key lookup, to avoid creating a new dvid
    #       session for every single cloud call!
    dvid_session = default_dvid_session('cloud-meshgen', user)
    auth = request.headers.get('Authorization')
    if auth:
        dvid_session = copy.deepcopy(dvid_session)
        dvid_session.headers['Authorization'] = auth

    with Timer(f"Body {body}: Fetching coarse sparsevol"):
        svc_ranges = fetch_sparsevol_coarse(dvid, uuid, segmentation, body, format='ranges', session=dvid_session)

    #svc_mask, _svc_box = fetch_sparsevol_coarse(dvid, uuid, segmentation, body, format='mask', session=dvid_session)
    #np.save(f'mask-{body}-svc.npy', svc_mask)

    box_s6 = rle_ranges_box(svc_ranges)
    box_s0 = box_s6 * (2**6)
    logger.info(f"Body {body}: Bounding box: {box_s0[:, ::-1].tolist()}")

    if scale is None:
        # Use scale 1 if possible or a higher scale
        # if necessary due to bounding-box RAM usage.
        scale = max(1, select_scale(box_s0))

    if scale > 1:
        # If we chose a low-res scale, then we
        # can reduce the decimation as needed.
        decimation = min(1.0, decimation * 4**(scale - 1))

    with Timer(f"Body {body}: Fetching scale-{scale} sparsevol"):
        mask, mask_box = fetch_sparsevol(dvid, uuid, segmentation, body, scale=scale, format='mask', session=dvid_session)
        # np.save(f'mask-{body}-s{scale}.npy', mask)

        # Pad with a thin halo of zeros to avoid holes in the mesh at the box boundary.
        mask = np.pad(mask, 1)
        mask_box += [(-1, -1, -1), (1, 1, 1)]

    with Timer(f"Body {body}: Computing mesh"):
        # The 'ilastik' marching cubes implementation supports smoothing during mesh construction.
        mesh = Mesh.from_binary_vol(mask, mask_box * VOXEL_NM * (2**scale), smoothing_rounds=smoothing)

        logger.info(f"Body {body}: Decimating mesh at fraction {decimation}")
        mesh.simplify(decimation)

        logger.info(f"Body {body}: Preparing ngmesh")
        mesh_bytes = mesh.serialize(fmt='ngmesh')

    if scale > 2:
        logger.info(f"Body {body}: Not storing to dvid (scale > 2)")
    else:
        with Timer(f"Body {body}: Storing {body}.ngmesh in DVID ({len(mesh_bytes)/MB:.1f} MB)"):
            try:
                post_key(dvid, uuid, mesh_kv, f"{body}.ngmesh", mesh_bytes, session=dvid_session)
            except HTTPError as ex:
                err = ex.response.content.decode('utf-8')
                if 'locked node' in err:
                    logger.info(f"Body {body}: Not storing to dvid (uuid {uuid[:4]} is locked).")
                else:
                    logger.warning(f"Mesh could not be cached to dvid:\n{err}")

    r = make_response(mesh_bytes)
    r.headers.set('Content-Type', 'application/octet-stream')
    return r

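# Client-side sketch for the endpoint above. The route path ('/mesh'), host,
# and parameter values are hypothetical; use whatever URL this handler is
# actually registered under.
def _example_request_mesh():
    import requests
    params = {'dvid': 'emdata:8900', 'body': 12345, 'decimation': 0.05, 'u': 'me'}
    r = requests.get('http://localhost:5000/mesh', params=params)
    r.raise_for_status()
    with open('12345.ngmesh', 'wb') as f:
        f.write(r.content)  # raw ngmesh bytes, as returned by the handler
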
def decimate_existing_mesh(server, uuid, instance, body_id, fraction, max_vertices=1e9, rescale=1.0,
                           output_format=None, output_path=None, output_dvid=None, tar_bytes=None):
    """
    Fetch all supervoxel meshes for the given body, combine them into a
    single mesh, and then decimate that mesh at the specified fraction.
    The output will be written to a file, or to a dvid instance (or both).

    Args:
        tar_bytes:
            Optional. You can provide the tarfile contents (as bytes) directly,
            in which case the input server will not be used.
    """
    if output_path is not None:
        fmt = os.path.splitext(output_path)[1][1:]
        if output_format is not None and output_format != fmt:
            raise RuntimeError(f"Mismatch between output format '{output_format}'"
                               f" and output file extension in '{output_path}'")
        output_format = fmt

    if output_format is None:
        raise RuntimeError("You must specify an output format (or an output path with a file extension)")

    assert output_format in Mesh.MESH_FORMATS, \
        f"Unknown output format: {output_format}"

    assert output_path is not None or output_dvid is not None, \
        "No output location specified"

    if tar_bytes is None:
        with Timer(f"Body: {body_id} Fetching tarfile", logger):
            tar_bytes = fetch_tarfile(server, uuid, instance, body_id)

    with Timer(f"Body: {body_id}: Loading mesh for body {body_id}", logger):
        mesh = Mesh.from_tarfile(tar_bytes, keep_normals=False)

    mesh_mb = mesh.uncompressed_size() / 1e6
    orig_vertices = len(mesh.vertices_zyx)
    logger.info(f"Body: {body_id}: Original mesh has {orig_vertices} vertices and {len(mesh.faces)} faces ({mesh_mb:.1f} MB)")

    fraction = min(fraction, max_vertices / len(mesh.vertices_zyx))
    with Timer(f"Body: {body_id}: Decimating at {fraction:.2f}", logger):
        mesh.simplify(fraction, in_memory=True)

    mesh_mb = mesh.uncompressed_size() / 1e6
    logger.info(f"Body: {body_id}: Final mesh has {len(mesh.vertices_zyx)} vertices and {len(mesh.faces)} faces ({mesh_mb:.1f} MB)")

    if not isinstance(rescale, Iterable):
        rescale = 3*[rescale]

    rescale = np.asarray(rescale)
    if not (rescale == 1.0).all():
        mesh.vertices_zyx[:] *= rescale

    with Timer(f"Body: {body_id}: Serializing", logger):
        mesh_bytes = None
        if output_dvid is not None:
            assert len(output_dvid) == 3
            mesh_bytes = mesh.serialize(fmt=output_format)
            post_key(*output_dvid, f"{body_id}.{output_format}", mesh_bytes)

        if output_path:
            if mesh_bytes is None:
                mesh.serialize(output_path)
            else:
                with open(output_path, 'wb') as f:
                    f.write(mesh_bytes)

    return len(mesh.vertices_zyx), fraction, orig_vertices

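# Usage sketch (hypothetical server/uuid/instance/body values): decimate a
# body's combined mesh to 10% of its vertices and write it both to a local
# .obj file and to a DVID keyvalue instance.
def _example_decimate():
    final_vertices, final_fraction, orig_vertices = decimate_existing_mesh(
        'emdata:8900', 'abc123', 'segmentation_sv_meshes', 12345,
        fraction=0.1,
        output_path='/tmp/12345.obj',
        output_dvid=('emdata:8900', 'abc123', 'segmentation_meshes'))
    logger.info(f"Reduced {orig_vertices} vertices to {final_vertices} (fraction {final_fraction:.2f})")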