Code example #1
def setup_tsv_input(setup_dvid_repo):
    dvid_address, repo_uuid = setup_dvid_repo

    input_segmentation_name = 'segmentation-decimatemeshes-input'
    test_volume, object_boxes, object_sizes = create_test_segmentation()

    create_labelmap_instance(dvid_address, repo_uuid, input_segmentation_name, max_scale=3)
    post_labelmap_voxels(dvid_address, repo_uuid, input_segmentation_name, (0,0,0), test_volume, downres=True, noindexing=False)

    tsv_name = 'segmentation-decimatemeshes-tsv'
    create_tarsupervoxel_instance(dvid_address, repo_uuid, tsv_name, input_segmentation_name, '.drc')
 
    # Post supervoxel meshes
    meshes = Mesh.from_label_volume(test_volume, progress=False)
    meshes_data = {f"{label}.drc": mesh.serialize(fmt='drc') for label, mesh in meshes.items()}
    post_load(dvid_address, repo_uuid, tsv_name, meshes_data)
    
    # Merge two of the objects (100 and 300)
    post_merge(dvid_address, repo_uuid, input_segmentation_name, 100, [300])
    object_boxes[100] = box_union(object_boxes[100], object_boxes[300])
    del object_boxes[300]
    
    object_sizes[100] += object_sizes[300]
    del object_sizes[300]
    
    meshes[100] = Mesh.concatenate_meshes((meshes[100], meshes[300]))
    del meshes[300]
    
    return dvid_address, repo_uuid, tsv_name, object_boxes, object_sizes, meshes
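
A downstream test simply unpacks the fixture's return tuple. A minimal sketch of such a consumer (the test body here is illustrative, not from the original suite):

def test_tsv_input_merged(setup_tsv_input):
    dvid_address, repo_uuid, tsv_name, object_boxes, object_sizes, meshes = setup_tsv_input
    # After the merge above, body 300 should have been folded into body 100.
    assert 300 not in meshes and 100 in meshes
    assert 300 not in object_sizes and 300 not in object_boxes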
Code example #2
def test_stitchedmeshes_with_badobject(setup_stitchedmeshes_config):
    template_dir, config, _dvid_address, _repo_uuid, object_boxes = setup_stitchedmeshes_config

    # Body 250 doesn't exist, but it shouldn't fail the whole workflow.
    config["stitchedmeshes"]["bodies"] = [250,300]
    dump_config(config, f"{template_dir}/workflow.yaml")

    execution_dir, _workflow = launch_flow(template_dir, 1)
    #final_config = workflow.config

    assert os.path.exists(f"{execution_dir}/meshes/300.obj")
    
    mesh300 = Mesh.from_file(f"{execution_dir}/meshes/300.obj")
    assert (mesh300.vertices_zyx[:] >= object_boxes[2][0]).all()
    assert (mesh300.vertices_zyx[:] <= object_boxes[2][1]).all()
    
    # Here's where our test meshes ended up:
    #print(f"{execution_dir}/meshes/300.obj")
    #print(f'{execution_dir}/mesh-stats.csv')
    
    df = pd.read_csv(f'{execution_dir}/mesh-stats.csv')
    assert len(df) == 2  # one stats row per requested body (250 and 300)
    df.set_index('body', inplace=True)

    assert df.loc[250, 'result'] == 'error'  # intentional error
    assert df.loc[300, 'result'] == 'success'
Code example #3
def mesh_from_dvid_tarfile(server,
                           uuid,
                           tsv_instance,
                           bodies,
                           simplify=1.0,
                           drop_normals=False,
                           rescale_factor=1.0,
                           output_path='{body}.obj'):
    from neuclease.dvid import fetch_tarfile

    for body in bodies:
        logger.info(f"Body {body}: Fetching tarfile")
        tar_bytes = fetch_tarfile(server, uuid, tsv_instance, body)

        logger.info(f"Body {body}: Loading mesh")
        mesh = Mesh.from_tarfile(tar_bytes)

        if simplify != 1.0:
            logger.info(f"Body {body}: Simplifying")
            mesh.simplify(simplify, in_memory=True)

        if drop_normals:
            mesh.drop_normals()

        if rescale_factor != 1.0:
            logger.info(f"Body {body}: Scaling by {rescale_factor}x")
            mesh.vertices_zyx[:] *= rescale_factor

        p = output_path.format(body=body)
        logger.info(f"Body {body}: Serializing to {p}")
        mesh.serialize(p)
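
A typical invocation might look like this sketch (the server, uuid, and instance names are placeholders):

mesh_from_dvid_tarfile(
    'http://emdata.example.org:8000',  # placeholder server
    'abc123',                          # placeholder uuid
    'segmentation_sv_meshes',          # placeholder tarsupervoxels instance
    bodies=[100, 200],
    simplify=0.2,                      # decimate each mesh to ~20% of its faces
    output_path='meshes/{body}.obj')   # '{body}' is filled in per body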
Code example #4
def create_body_tarball_from_sv_tarball(instance_info, body_id):
    """
    Download a supervoxel mesh tarball from the given key-value instance,
    concatenate together the component meshes into a single body tarball,
    and upload it.
    """
    keyEncodeLevel0 = 10000000000000  # offset for supervoxel-level tarball keys
    keyEncodeLevel1 = 10100000000000  # offset for body-level tarball keys
    
    encoded_sv = str(body_id + keyEncodeLevel0)
    sv_tarball_path = f'/tmp/{encoded_sv}.tar'
    
    logger.info(f'Fetching {encoded_sv}.tar')
    tarball_contents = fetch_key(*instance_info, f'{encoded_sv}.tar')
    with open(sv_tarball_path, 'wb') as f:
        f.write(tarball_contents)
    
    logger.info(f'Unpacking {encoded_sv}.tar')
    sv_dir = f'/tmp/{encoded_sv}'
    os.makedirs(sv_dir, exist_ok=True)
    os.chdir(sv_dir)
    subprocess.check_call(f'tar -xf {sv_tarball_path}', shell=True)

    encoded_body = str(body_id + keyEncodeLevel1)
    body_tarball_path = f'/tmp/{encoded_body}.tar'
    
    logger.info(f"Constructing {encoded_body}.drc")
    mesh = Mesh.from_directory(sv_dir)
    mesh.serialize(f'/tmp/{encoded_body}.drc')
    # Use -C so the archive member is named '{encoded_body}.drc', not 'tmp/{encoded_body}.drc'.
    subprocess.check_call(f'tar -cf {body_tarball_path} -C /tmp {encoded_body}.drc', shell=True)
    
    with open(body_tarball_path, 'rb') as f:
        logger.info(f'Posting {encoded_body}.tar')
        post_key(*instance_info, f'{encoded_body}.tar', f)
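
The two offsets place supervoxel-level and body-level tarballs in separate fixed-width key ranges, so the keys sort cleanly and never collide. The arithmetic, worked for a sample body:

body_id = 12345
str(body_id + 10000000000000)  # '10000000012345' -- the supervoxel tarball key (.tar)
str(body_id + 10100000000000)  # '10100000012345' -- the body tarball key (.tar)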
Code example #5
def check_outputs(execution_dir, object_boxes, subset_labels=None, skipped_labels=(), stats_dir=None, max_vertices=None):
    """
    Basic checks to make sure the meshes for the given labels were
    generated and not terribly wrong.
    """
    stats_dir = stats_dir or execution_dir
    
    # Check all test objects by default.
    if subset_labels is None:
        subset_labels = sorted(object_boxes.keys())

    df = pd.DataFrame( np.load(f'{stats_dir}/mesh-stats.npy', allow_pickle=True) )
    
    assert len(df) == (len(subset_labels) + len(skipped_labels))
    df.set_index('body', inplace=True)

    for label in subset_labels:
        assert df.loc[label, 'result'] == 'success'

        # Here's where our test meshes ended up:
        #print(f"{execution_dir}/meshes/{label}.obj")
        assert os.path.exists(f"{execution_dir}/meshes/{label}.ngmesh")

        # Make sure the mesh vertices appeared in the right place.
        # (If they weren't rescaled, this won't work.)
        mesh = Mesh.from_file(f"{execution_dir}/meshes/{label}.ngmesh")
        assert np.allclose(mesh.vertices_zyx.min(axis=0), object_boxes[label][0], 1)
        assert np.allclose(mesh.vertices_zyx.max(axis=0), object_boxes[label][1], 1)

        if max_vertices is not None:
            assert len(mesh.vertices_zyx) <= 1.1*max_vertices
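
A typical call, as a workflow test might use it (the labels and paths come from the test fixtures):

check_outputs(execution_dir, object_boxes,
              subset_labels=[100, 200],  # expect these meshes to exist and be well-placed
              skipped_labels=[300])      # counted in the stats file, but not checked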
Code example #6
File: sv_to_mesh.py  Project: fegonda/vol2mesh
def sv_to_mesh(server,
               uuid,
               instance,
               sv,
               smoothing_iterations=0,
               simplification_fraction=1.0,
               max_box_volume=DEFAULT_MAX_BOUNDING_BOX_VOL):
    """
    Download a mask for the given supervoxel and generate a mesh from it.
    If the mask bounding box would be large at scale 0, a smaller scale will be used.
    The returned mesh will always use scale-0 coordinates, though.
    """
    with Timer("Fetching supervoxel mask", logger):
        mask, scale, scaled_box = fetch_supervoxel_mask(
            server, uuid, instance, sv, max_box_volume)
        fullres_box = scaled_box * (2**scale)

    with Timer(f"Generating mesh from scale {scale}", logger):
        mesh = Mesh.from_binary_vol(mask, fullres_box)

    with Timer(f"Smoothing ({smoothing_iterations})", logger):
        mesh.laplacian_smooth(smoothing_iterations)

    # If we chose a scale other than 0, automatically reduce the
    # amount of decimation, since there will already be fewer vertices at lower resolution.
    simplification_fraction *= (2**scale)**2
    simplification_fraction = min(1.0, simplification_fraction)

    with Timer(f"Decimating ({simplification_fraction})", logger):
        mesh.simplify(simplification_fraction, in_memory=True)

    logger.info(
        f"Mesh has {len(mesh.vertices_zyx)} vertices and {len(mesh.faces)} faces"
    )
    return mesh
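
The compensation step rests on the fact that a surface mesh's vertex count grows roughly with the square of linear resolution, so each scale step changes it by about 4x. Worked through for one case:

scale = 2                                    # mask fetched at 1/4 linear resolution
simplification_fraction = 0.1                # user requested 10%, assuming scale-0 data
simplification_fraction *= (2**scale)**2     # 0.1 * 16 = 1.6
simplification_fraction = min(1.0, simplification_fraction)  # capped: no decimation needed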
Code example #7
def test_stitchedmeshes(setup_stitchedmeshes_config, disable_auto_retry):
    template_dir, _config, _dvid_address, _repo_uuid, object_boxes = setup_stitchedmeshes_config
    
    execution_dir, _workflow = launch_flow(template_dir, 1)
    #final_config = workflow.config

    assert os.path.exists(f"{execution_dir}/meshes/100.obj")
    assert os.path.exists(f"{execution_dir}/meshes/200.obj")
    assert os.path.exists(f"{execution_dir}/meshes/300.obj")
    
    # Make sure the mesh vertices appeared in the right place.
    # (If they weren't rescaled, this won't work.)
    mesh100 = Mesh.from_file(f"{execution_dir}/meshes/100.obj")
    assert (mesh100.vertices_zyx[:] >= object_boxes[0][0]).all()
    assert (mesh100.vertices_zyx[:] <= object_boxes[0][1]).all()
    
    mesh200 = Mesh.from_file(f"{execution_dir}/meshes/200.obj")
    assert (mesh200.vertices_zyx[:] >= object_boxes[1][0]).all()
    assert (mesh200.vertices_zyx[:] <= object_boxes[1][1]).all()
    
    mesh300 = Mesh.from_file(f"{execution_dir}/meshes/300.obj")
    assert (mesh300.vertices_zyx[:] >= object_boxes[2][0]).all()
    assert (mesh300.vertices_zyx[:] <= object_boxes[2][1]).all()
    
    # Here's where our test meshes ended up:
    #print(f"{execution_dir}/meshes/100.obj")
    #print(f"{execution_dir}/meshes/200.obj")
    #print(f"{execution_dir}/meshes/300.obj")
    #print(f'{execution_dir}/mesh-stats.csv')
    
    df = pd.read_csv(f'{execution_dir}/mesh-stats.csv')
    assert len(df) == 3
    df.set_index('body', inplace=True)

    assert df.loc[100, 'result'] == 'success'
    assert df.loc[200, 'result'] == 'success'
    assert df.loc[300, 'result'] == 'success'
Code example #8
def process_point(seg_src, seg_dst, point, radius, src_body, dst_body):
    """
    Generate a neighborhood segment around a particular point.
    Upload the voxels for the segment and the corresponding mesh.
    """
    r = radius
    src_box = np.asarray((point - r, point + r + 1))
    src_vol = fetch_labelmap_voxels(*seg_src, src_box)

    if src_body is None:
        src_body = src_vol[r, r, r]

    if dst_body is None:
        # Generate a neighborhood segment ID from the coordinate.
        # Divide by 4 to ensure the coordinates fit within 2^53.
        # (The segment ID will not retain the full resolution of
        # the coordinate, but that's usually OK for our purposes.)
        dst_body = encode_point_to_uint64(point // 4, 17)

    mask = (src_vol == src_body) & sphere_mask(r)

    dst_box = round_box(src_box, 64, 'out')
    dst_vol = fetch_labelmap_voxels(*seg_dst, dst_box)

    dst_view = dst_vol[b2s(*(src_box - dst_box[0]))]
    dst_view[mask] = dst_body

    post_labelmap_voxels(*seg_dst, dst_box[0], dst_vol, downres=True)

    # Mesh needs to be written in nm, hence 8x
    mesh = Mesh.from_binary_vol(mask, 8 * src_box, smoothing_rounds=2)
    mesh.simplify(0.05, in_memory=True)
    post_key(*seg_dst[:2], f'{seg_dst[2]}_meshes', f'{dst_body}.ngmesh',
             mesh.serialize(fmt='ngmesh'))

    centroid = src_box[0] + mask_centroid(mask, True)
    top_z = mask.sum(axis=(1, 2)).nonzero()[0][0]
    top_coords = np.transpose(mask[top_z].nonzero())
    top_point = src_box[0] + (top_z, *top_coords[len(top_coords) // 2])

    return point, centroid, top_point, src_body, dst_body, mask.sum()
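
The comment about fitting within 2**53 works out because three 17-bit fields total 51 bits, still exactly representable in a float64. A simplified sketch of that packing scheme, assuming this is roughly what encode_point_to_uint64 does (this stand-in is illustrative, not neuclease's implementation):

def encode_point_to_uint64_sketch(point_zyx, bitwidth=17):
    # Pack (z, y, x) into one integer, 'bitwidth' bits per coordinate.
    z, y, x = (int(c) & ((1 << bitwidth) - 1) for c in point_zyx)
    return (z << (2 * bitwidth)) | (y << bitwidth) | x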
Code example #9
def create_precomputed_ngmeshes(vol, vol_fullres_box, names, bucket_name, bucket_path, localdir=None, decimation=0.01):
    """
    Create meshes for the given labelvolume and upload them to a google bucket in
    neuroglancer legacy mesh format (i.e. what flyem calls "ngmesh" format).
    """
    from vol2mesh import Mesh
    if not bucket_name.startswith('gs://'):
        bucket_name = 'gs://' + bucket_name

    if localdir is None:
        localdir = bucket_path.split('/')[-1]

    os.makedirs(f"{localdir}/mesh", exist_ok=True)
    dump_json({"@type": "neuroglancer_legacy_mesh"}, f"{localdir}/mesh/info")

    logger.info("Generating meshes")
    meshes = Mesh.from_label_volume(vol, vol_fullres_box, smoothing_rounds=2)

    logger.info("Simplifying meshes")
    for mesh in meshes.values():
        mesh.simplify(decimation)

    logger.info("Serializing meshes")
    for label, mesh in meshes.items():
        name = names.get(label, str(label))
        mesh.serialize(f"{localdir}/mesh/{name}.ngmesh")
        dump_json({"fragments": [f"{name}.ngmesh"]}, f"{localdir}/mesh/{label}:0")

    subprocess.run(f"gsutil cp {bucket_name}/{bucket_path}/info {localdir}/info", shell=True)
    with open(f"{localdir}/info", 'r') as f:
        info = json.load(f)

    info["mesh"] = "mesh"
    dump_json(info, f"{localdir}/info", unsplit_int_lists=True)

    logger.info("Uploading")
    subprocess.run(f"gsutil cp {localdir}/info {bucket_name}/{bucket_path}/info", shell=True)
    subprocess.run(f"gsutil cp -R {localdir}/mesh {bucket_name}/{bucket_path}/mesh", shell=True)
Code example #10
def test_createmeshes_to_keyvalue(setup_createmeshes_config,
                                  disable_auto_retry):
    template_dir, config, _dvid_address, _repo_uuid, object_boxes, _object_sizes = setup_createmeshes_config

    kv_instance = 'test_createmeshes_to_keyvalue'
    config['output'] = {
        'keyvalue': {
            'instance': kv_instance,
            'create-if-necessary': True
        }
    }
    with open(f"{template_dir}/workflow.yaml", 'w') as f:
        YAML().dump(config, f)

    _execution_dir, _workflow = launch_flow(template_dir, 1)

    server = config['input']['dvid']['server']
    uuid = config['input']['dvid']['uuid']
    for sv in [100, 200, 300]:
        mesh_bytes = fetch_key(server, uuid, kv_instance, f'{sv}.obj')
        mesh = Mesh.from_buffer(mesh_bytes, fmt='obj')
        assert np.allclose(mesh.vertices_zyx.min(axis=0), object_boxes[sv][0], 1)
        assert np.allclose(mesh.vertices_zyx.max(axis=0), object_boxes[sv][1], 1)
Code example #11
def _generate_and_store_mesh():
    try:
        dvid = request.args['dvid']
        body = request.args['body']
    except KeyError as ex:
        return Response(f"Missing required parameter: {ex.args[0]}", 400)

    segmentation = request.args.get('segmentation', 'segmentation')
    mesh_kv = request.args.get('mesh_kv', f'{segmentation}_meshes')

    # Default to the repo's current master node if no uuid was given.
    uuid = request.args.get('uuid') or find_master(dvid)

    scale = request.args.get('scale')
    if scale is not None:
        scale = int(scale)

    smoothing = int(request.args.get('smoothing', 2))

    # Note: This is just the effective desired decimation assuming scale-1 data.
    # If we're forced to select a higher scale than scale-1, then we'll increase
    # this number to compensate.
    decimation = float(request.args.get('decimation', 0.1))

    user = request.args.get('u')
    user = user or request.args.get('user', "UNKNOWN")

    # TODO: The global cache of DVID sessions should store authentication info
    #       and use it as part of the key lookup, to avoid creating a new dvid
    #       session for every single cloud call!
    dvid_session = default_dvid_session('cloud-meshgen', user)
    auth = request.headers.get('Authorization')
    if auth:
        dvid_session = copy.deepcopy(dvid_session)
        dvid_session.headers['Authorization'] = auth

    with Timer(f"Body {body}: Fetching coarse sparsevol"):
        svc_ranges = fetch_sparsevol_coarse(dvid,
                                            uuid,
                                            segmentation,
                                            body,
                                            format='ranges',
                                            session=dvid_session)

    #svc_mask, _svc_box = fetch_sparsevol_coarse(dvid, uuid, segmentation, body, format='mask', session=dvid_session)
    #np.save(f'mask-{body}-svc.npy', svc_mask)

    box_s6 = rle_ranges_box(svc_ranges)
    box_s0 = box_s6 * (2**6)
    logger.info(f"Body {body}: Bounding box: {box_s0[:, ::-1].tolist()}")

    if scale is None:
        # Use scale 1 if possible or a higher scale
        # if necessary due to bounding-box RAM usage.
        scale = max(1, select_scale(box_s0))

    if scale > 1:
        # If we chose a low-res scale, then we
        # can reduce the decimation as needed.
        decimation = min(1.0, decimation * 4**(scale - 1))

    with Timer(f"Body {body}: Fetching scale-{scale} sparsevol"):
        mask, mask_box = fetch_sparsevol(dvid,
                                         uuid,
                                         segmentation,
                                         body,
                                         scale=scale,
                                         format='mask',
                                         session=dvid_session)
        # np.save(f'mask-{body}-s{scale}.npy', mask)

        # Pad with a thin halo of zeros to avoid holes in the mesh at the box boundary
        mask = np.pad(mask, 1)
        mask_box += [(-1, -1, -1), (1, 1, 1)]

    with Timer(f"Body {body}: Computing mesh"):
        # The 'ilastik' marching cubes implementation supports smoothing during mesh construction.
        mesh = Mesh.from_binary_vol(mask,
                                    mask_box * VOXEL_NM * (2**scale),
                                    smoothing_rounds=smoothing)

        logger.info(f"Body {body}: Decimating mesh at fraction {decimation}")
        mesh.simplify(decimation)

        logger.info(f"Body {body}: Preparing ngmesh")
        mesh_bytes = mesh.serialize(fmt='ngmesh')

    if scale > 2:
        logger.info(f"Body {body}: Not storing to dvid (scale > 2)")
    else:
        with Timer(
                f"Body {body}: Storing {body}.ngmesh in DVID ({len(mesh_bytes)/MB:.1f} MB)"
        ):
            try:
                post_key(dvid,
                         uuid,
                         mesh_kv,
                         f"{body}.ngmesh",
                         mesh_bytes,
                         session=dvid_session)
            except HTTPError as ex:
                err = ex.response.content.decode('utf-8')
                if 'locked node' in err:
                    logger.info(
                        f"Body {body}: Not storing to dvid (uuid {uuid[:4]} is locked)."
                    )
                else:
                    logger.warning(f"Mesh could not be cached to dvid:\n{err}")

    r = make_response(mesh_bytes)
    r.headers.set('Content-Type', 'application/octet-stream')
    return r
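
The decimation adjustment follows the same surface-area logic used elsewhere in this codebase: each scale step above the scale-1 baseline cuts the vertex count by roughly 4x, so the fraction to keep can grow by 4x per step. For example:

decimation = 0.1
scale = 3                                            # two steps coarser than scale 1
decimation = min(1.0, decimation * 4**(scale - 1))   # 0.1 * 16 = 1.6, capped to 1.0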
Code example #12
        def compute_mesh_and_write(body):
            with Timer() as timer:
                # Fetch the sparsevol to determine the bounding-box size (in scale-0 voxels)
                try:
                    with mgr_client.access_context(server, True, 1, 0):
                        # sparsevol-coarse is at scale-6
                        coords_s6 = fetch_sparsevol_coarse(
                            server, uuid, instance, body, is_supervoxels)
                except Exception:
                    return (body, 0, 0, 0, 0.0, timer.seconds,
                            'error-sparsevol-coarse')

                box_s6 = np.array(
                    [coords_s6.min(axis=0), 1 + coords_s6.max(axis=0)])
                box_s0 = (2**6) * box_s6
                shape_s0 = (box_s0[1] - box_s0[0])
                box_voxels_s0 = np.prod(shape_s0.astype(float))

                # Determine the scale we'll use.
                # Solve for 'scale' in the following relationship:
                #
                #   box_voxels_s0/((2^scale)^3) <= max_box_voxels
                #
                scale = log2(pow(box_voxels_s0 / max_box_voxels, 1 / 3))
                scale = max(ceil(scale), min_scale)

                if scale > max_scale:
                    raise RuntimeError(
                        f"Can't compute mesh for body {body}. Bounding box is {box_s0[:, ::-1].tolist()}, "
                        f"which is too large to fit in desired RAM, even at scale {max_scale}"
                    )

                try:
                    with mgr_client.access_context(server, True, 1, 0):
                        coords = fetch_sparsevol(server,
                                                 uuid,
                                                 instance,
                                                 body,
                                                 is_supervoxels,
                                                 scale,
                                                 dtype=np.int16)
                except Exception:
                    return (body, 0, 0, 0, 0.0, timer.seconds,
                            'error-sparsevol')

                box = box_s0 // (2**scale)
                coords -= box[0]
                num_voxels = len(coords)

                shape = box[1] - box[0]
                vol = np.zeros(shape, np.uint8)
                vol[(*coords.transpose(), )] = 1
                del coords

                try:
                    mesh = Mesh.from_binary_vol(vol, box_s0)
                except Exception:
                    return (body, scale, num_voxels, 0, 0.0, timer.seconds,
                            'error-construction')

                del vol
                try:
                    mesh.laplacian_smooth(smoothing_iterations)
                except Exception:
                    return (body, scale, num_voxels, 0.0,
                            len(mesh.vertices_zyx), timer.seconds,
                            'error-smoothing')

                fraction = decimation_fraction
                if scale > min_scale:
                    # Since we're starting from a lower resolution than the user requested,
                    # reduce the decimation we're applying accordingly.
                    # Since meshes are 2D surfaces, we approximate the difference in
                    # vertex count as the SQUARE of the difference in resolution.
                    fraction *= (2**(scale - min_scale))**2
                    fraction = min(fraction, 1.0)

                try:
                    mesh.simplify(fraction, in_memory=True)
                except Exception:
                    return (body, scale, num_voxels, 0.0,
                            len(mesh.vertices_zyx), timer.seconds,
                            'error-decimation')

                output_path = f'{options["output-directory"]}/{body}.{options["format"]}'
                mesh.serialize(output_path)

                return (body, scale, num_voxels, fraction,
                        len(mesh.vertices_zyx), timer.seconds, 'success')
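
Solving the comment's inequality for the smallest adequate scale, with concrete numbers:

from math import ceil, log2

box_voxels_s0 = 8e12    # e.g. a bounding box of roughly 20000**3 voxels
max_box_voxels = 1e9
# box_voxels_s0 / (2**scale)**3 <= max_box_voxels
scale = ceil(log2((box_voxels_s0 / max_box_voxels) ** (1/3)))
print(scale)  # 5 -- at scale 5 the box holds ~2.4e8 voxels, under the 1e9 budget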
Code example #13
def decimate_existing_mesh(server, uuid, instance, body_id, fraction, max_vertices=1e9, rescale=1.0, output_format=None, output_path=None, output_dvid=None, tar_bytes=None):
    """
    Fetch all supervoxel meshes for the given body, combine them into a
    single mesh, and then decimate that mesh at the specified fraction.
    The output will be written to a file, or to a dvid instance (or both).
    
    Args:
        tar_bytes:
            Optional. You can provide the tarfile contents (as bytes) directly,
            in which case the input server will not be used.
    """
    if output_path is not None:
        fmt = os.path.splitext(output_path)[1][1:]
        if output_format is not None and output_format != fmt:
            raise RuntimeError(f"Mismatch between output format '{output_format}'"
                               f" and output file extension in '{output_path}'")
        output_format = fmt
    
    if output_format is None:
        raise RuntimeError("You must specify an output format (or an output path with a file extension)")

    assert output_format in Mesh.MESH_FORMATS, \
        f"Unknown output format: {output_format}"

    assert output_path is not None or output_dvid is not None, \
        "No output location specified"

    if tar_bytes is None:
        with Timer(f"Body: {body_id} Fetching tarfile", logger):
            tar_bytes = fetch_tarfile(server, uuid, instance, body_id)
    
    with Timer(f"Body: {body_id}: Loading mesh for body {body_id}", logger):
        mesh = Mesh.from_tarfile(tar_bytes, keep_normals=False)

    mesh_mb = mesh.uncompressed_size() / 1e6
    orig_vertices = len(mesh.vertices_zyx)
    logger.info(f"Body: {body_id}: Original mesh has {orig_vertices} vertices and {len(mesh.faces)} faces ({mesh_mb:.1f} MB)")

    fraction = min(fraction, max_vertices / len(mesh.vertices_zyx))    
    with Timer(f"Body: {body_id}: Decimating at {fraction:.2f}", logger):
        mesh.simplify(fraction, in_memory=True)

    mesh_mb = mesh.uncompressed_size() / 1e6
    logger.info(f"Body: {body_id}: Final mesh has {len(mesh.vertices_zyx)} vertices and {len(mesh.faces)} faces ({mesh_mb:.1f} MB)")

    if not isinstance(rescale, Iterable):
        rescale = 3*[rescale]
    
    rescale = np.asarray(rescale)
    if not (rescale == 1.0).all():
        mesh.vertices_zyx[:] *= rescale

    with Timer(f"Body: {body_id}: Serializing", logger):
        mesh_bytes = None
        if output_dvid is not None:
            assert len(output_dvid) == 3
            mesh_bytes = mesh.serialize(fmt=output_format)
            post_key(*output_dvid, f"{body_id}.{output_format}", mesh_bytes)
            
        if output_path:
            if mesh_bytes is None:
                mesh.serialize(output_path)
            else:
                with open(output_path, 'wb') as f:
                    f.write(mesh_bytes)
    
    return len(mesh.vertices_zyx), fraction, orig_vertices
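
A call might look like this sketch (server, uuid, and instance are placeholders):

n_verts, used_fraction, orig_verts = decimate_existing_mesh(
    'http://emdata.example.org:8000', 'abc123', 'segmentation_sv_meshes',
    body_id=100,
    fraction=0.1,              # keep ~10% of the vertices
    rescale=8.0,               # e.g. scale 8nm voxel coordinates to nanometers
    output_path='100.ngmesh')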
Code example #14
    def create_brick_mesh(brick):
        mesh = Mesh.from_binary_vol(brick.volume, brick.physical_box)
        if rescale != 1.0:
            mesh.vertices_zyx *= rescale
        return mesh
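
This helper is a closure: rescale comes from its enclosing scope. A minimal sketch of the assumed surrounding context (the enclosing function is hypothetical; only the inner function appears in the original):

def create_brick_meshes(bricks, rescale=1.0):
    # 'rescale' is captured by the nested function, as in the snippet above.
    def create_brick_mesh(brick):
        mesh = Mesh.from_binary_vol(brick.volume, brick.physical_box)
        if rescale != 1.0:
            mesh.vertices_zyx *= rescale
        return mesh
    return [create_brick_mesh(brick) for brick in bricks]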