# Assumed imports for this snippet (module paths are best guesses, hedged here as
# comments; create_test_segmentation() is a local helper in the original test module):
#
#   from vol2mesh import Mesh
#   from neuclease.dvid import (create_labelmap_instance, post_labelmap_voxels,
#                               create_tarsupervoxel_instance, post_load, post_merge)
#   from neuclease.util import box_union  # assumed location

def setup_tsv_input(setup_dvid_repo):
    # Pytest-style fixture: populates a DVID repo with a small test segmentation and
    # per-supervoxel meshes, then merges two objects so tests can check results
    # against known object boxes, sizes, and meshes.
    dvid_address, repo_uuid = setup_dvid_repo

    input_segmentation_name = 'segmentation-decimatemeshes-input'
    test_volume, object_boxes, object_sizes = create_test_segmentation()

    # Create the labelmap instance (with downres pyramids) and upload the test volume.
    create_labelmap_instance(dvid_address, repo_uuid, input_segmentation_name, max_scale=3)
    post_labelmap_voxels(dvid_address, repo_uuid, input_segmentation_name, (0,0,0), test_volume, downres=True, noindexing=False)

    # Create a tarsupervoxels instance that stores Draco (.drc) meshes,
    # keyed to the segmentation instance above.
    tsv_name = 'segmentation-decimatemeshes-tsv'
    create_tarsupervoxel_instance(dvid_address, repo_uuid, tsv_name, input_segmentation_name, '.drc')

    # Post supervoxel meshes
    meshes = Mesh.from_label_volume(test_volume, progress=False)
    meshes_data = {f"{label}.drc": mesh.serialize(fmt='drc') for label, mesh in meshes.items()}
    post_load(dvid_address, repo_uuid, tsv_name, meshes_data)
    
    # Merge object 300 into object 100 on the DVID server...
    post_merge(dvid_address, repo_uuid, input_segmentation_name, 100, [300])

    # ...and update the expected results to match: object 100's box, size, and mesh
    # now include 300's, and 300 no longer exists as a separate object.
    object_boxes[100] = box_union(object_boxes[100], object_boxes[300])
    del object_boxes[300]

    object_sizes[100] += object_sizes[300]
    del object_sizes[300]

    meshes[100] = Mesh.concatenate_meshes((meshes[100], meshes[300]))
    del meshes[300]
    
    return dvid_address, repo_uuid, tsv_name, object_boxes, object_sizes, meshes
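
# A minimal usage sketch (an assumption, not part of the original source): in the
# original project, setup_tsv_input is presumably registered as a pytest fixture,
# so a test would consume it like the hypothetical example below.
def test_setup_tsv_input_merge(setup_tsv_input):
    dvid_address, repo_uuid, tsv_name, object_boxes, object_sizes, meshes = setup_tsv_input

    # After the merge, object 300 has been folded into object 100,
    # so it should no longer appear in any of the expected-result dicts.
    assert 300 not in object_boxes
    assert 300 not in object_sizes
    assert 300 not in meshes
    assert set(object_boxes) == set(object_sizes) == set(meshes)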
# Example #2
import os
import json
import logging
import subprocess

from neuclease.util import dump_json  # assumed source of dump_json()

logger = logging.getLogger(__name__)


def create_precomputed_ngmeshes(vol, vol_fullres_box, names, bucket_name, bucket_path, localdir=None, decimation=0.01):
    """
    Create meshes for the given label volume and upload them to a Google Storage
    bucket in neuroglancer legacy mesh format (i.e. what FlyEM calls "ngmesh" format).
    """
    from vol2mesh import Mesh
    if not bucket_name.startswith('gs://'):
        bucket_name = 'gs://' + bucket_name

    if localdir is None:
        localdir = bucket_path.split('/')[-1]

    # Lay out the local directory structure expected by neuroglancer's legacy mesh
    # format: a 'mesh' subdirectory with its own info file declaring the mesh type.
    os.makedirs(f"{localdir}/mesh", exist_ok=True)
    dump_json({"@type": "neuroglancer_legacy_mesh"}, f"{localdir}/mesh/info")

    logger.info("Generating meshes")
    meshes = Mesh.from_label_volume(vol, vol_fullres_box, smoothing_rounds=2)

    logger.info("Simplifying meshes")
    for mesh in meshes.values():
        mesh.simplify(decimation)

    logger.info("Serializing meshes")
    for label, mesh in meshes.items():
        # Write each mesh as '<name>.ngmesh', plus a '<label>:0' JSON fragment index
        # that tells neuroglancer which fragment file(s) belong to that segment.
        name = names.get(label, str(label))
        mesh.serialize(f"{localdir}/mesh/{name}.ngmesh")
        dump_json({"fragments": [f"{name}.ngmesh"]}, f"{localdir}/mesh/{label}:0")

    subprocess.run(f"gsutil cp {bucket_name}/{bucket_path}/info {localdir}/info", shell=True)
    with open(f"{localdir}/info", 'r') as f:
        info = json.load(f)

    info["mesh"] = "mesh"
    dump_json(info, f"{localdir}/info", unsplit_int_lists=True)

    logger.info("Uploading")
    subprocess.run(f"gsutil cp {localdir}/info {bucket_name}/{bucket_path}/info", shell=True)
    subprocess.run(f"gsutil cp -R {localdir}/mesh {bucket_name}/{bucket_path}/mesh", shell=True)