Example #1
0
def process_skeleton_from_mesh(mesh,
                               root_loc=None,
                               root_is_soma=False,
                               close_mean=10000,
                               sk_kwargs=None):
    """Skeletonize a mesh and prune small components near the soma.

    Parameters
    ----------
    mesh : mesh object accepted by skeletonize.skeletonize_mesh
        Mesh to skeletonize.
    root_loc : array-like, optional
        Location used to root the skeleton, by default None.
    root_is_soma : bool, optional
        If True, `root_loc` is treated as a soma point and passed to the
        skeletonizer; otherwise the skeleton is re-rooted at the vertex
        closest to `root_loc` after skeletonization. By default False.
    close_mean : float, optional
        Distance threshold: small connected components whose mean vertex
        distance to the soma is below this value are removed, by default 10000.
    sk_kwargs : dict, optional
        Extra keyword arguments forwarded to skeletonize_mesh, by default None.

    Returns
    -------
    Skeleton
        Skeleton object, masked to remove small soma-adjacent components
        when a soma location is given.
    """
    # Avoid a shared mutable default for the kwargs dict.
    sk_kwargs = {} if sk_kwargs is None else sk_kwargs

    soma_loc = root_loc if root_is_soma else None
    sk = skeletonize.skeletonize_mesh(mesh, soma_pt=soma_loc, **sk_kwargs)

    # If the root is not a soma, re-root at the skeleton vertex nearest root_loc.
    if not root_is_soma and root_loc is not None:
        _, skind = sk.kdtree.query(root_loc)
        sk.reroot(skind)

    _, lbls = sparse.csgraph.connected_components(sk.csgraph)
    _, cnt = np.unique(lbls, return_counts=True)

    if soma_loc is not None:
        # Small components close to the soma are likely skeletonization
        # artifacts; drop any whose mean distance to the soma is small.
        modest_ccs = np.flatnonzero(cnt < MIN_CC_SIZE)
        remove_ccs = []
        for cc in modest_ccs:
            if np.mean(
                    np.linalg.norm(sk.vertices[lbls == cc] - soma_loc,
                                   axis=1)) < close_mean:
                remove_ccs.append(cc)

        keep_mask = ~np.isin(lbls, remove_ccs)
        skout = sk.apply_mask(keep_mask)
    else:
        skout = sk
    return skout
Example #2
0
def skeletonize_lvl2_graph(mesh_chunk,
                           root_pt=None,
                           cv=None,
                           client=None,
                           voxel_resolution=None,
                           mip_scaling=None,
                           invalidation_d=3,
                           point_radius=200):
    """Skeletonize a chunk-graph mesh with fixed TEASAR options.

    Runs the skeletonizer with soma collapse, radius computation, and
    zero-length-edge removal all disabled. Only `mesh_chunk` and
    `invalidation_d` affect the result; the remaining parameters are
    accepted for interface compatibility but are not used here.

    Parameters
    ----------
    mesh_chunk : mesh object
        Mesh in chunk-index space to skeletonize.
    invalidation_d : int, optional
        TEASAR invalidation radius, by default 3.

    Returns
    -------
    Skeleton
        Skeleton of the chunk mesh.
    """
    skel_options = dict(
        invalidation_d=invalidation_d,
        collapse_soma=False,
        compute_radius=False,
        remove_zero_length_edges=False,
    )
    return skeletonize.skeletonize_mesh(mesh_chunk, **skel_options)
Example #3
0
def test_link_edges(full_cell_mesh, full_cell_merge_log, full_cell_soma_pt,
                    monkeypatch):
    """Check that adding link edges yields the expected branch-point count.

    Patches the chunked-graph client with a stub that returns the fixture
    merge log, adds link edges to the mesh, masks to the largest component,
    skeletonizes, and asserts the branch-point count.
    """
    class MyChunkedGraph(object):
        # Stub client: accepts any constructor arguments and always returns
        # the fixture merge log regardless of the atomic id requested.
        def __init__(self, **kwargs):
            pass

        def get_merge_log(self, atomic_id):
            return full_cell_merge_log

    monkeypatch.setattr(trimesh_io.trimesh_repair.chunkedgraph,
                        'ChunkedGraphClient', MyChunkedGraph)

    full_cell_mesh.add_link_edges('test', 5)

    # Keep only the largest connected component before skeletonizing.
    out = mesh_filters.filter_largest_component(full_cell_mesh)
    mesh_filter = full_cell_mesh.apply_mask(out)
    skel = skeletonize.skeletonize_mesh(mesh_filter,
                                        invalidation_d=10000,
                                        soma_pt=full_cell_soma_pt)
    assert skel.n_branch_points == 83
Example #4
0
def chunk_index_skeleton(
    root_id,
    client=None,
    datastack_name=None,
    cv=None,
    root_point=None,
    invalidation_d=3,
    return_mesh=False,
    return_l2dict=False,
    return_mesh_l2dict=False,
    root_point_resolution=None,
    root_point_search_radius=300,
    n_parallel=1,
):
    """Generate a basic skeleton with chunked-graph index vertices.

    Parameters
    ----------
    root_id : np.uint64
        Neuron root id
    client : caveclient.CAVEclient, optional
        CAVEclient for a datastack, by default None. If None, you must specify a datastack name.
    datastack_name : str, optional
        Datastack name to create a CAVEclient, by default None. Only used if client is None.
    cv : cloudvolume.CloudVolume, optional
        CloudVolume associated with the object, by default None. If None, one is created based on the client info.
    root_point : array, optional
        Point in voxel space to set the root vertex. By default None, which makes a random tip root.
    invalidation_d : int, optional
        TEASAR invalidation radius in chunk space, by default 3
    return_mesh : bool, optional
        If True, returns the pre-skeletonization mesh with vertices in chunk index space, by default False
    return_l2dict : bool, optional
        If True, returns the skeleton's level 2 id to vertex index dict. By default False
    return_mesh_l2dict : bool, optional
        If True, returns the mesh's level 2 id to vertex index dict as well, by default False
    root_point_resolution : array-like, optional
        Resolution of the units of root_point, by default None, in which case
        the mip-0 resolution of the cloudvolume is used.
    root_point_search_radius : int, optional
        Search radius used when matching root_point to the closest level 2
        chunk, by default 300. Presumably in the same units as root_point —
        TODO confirm against chunk_tools.get_closest_lvl2_chunk.
    n_parallel : int, optional
        Sets number of parallel threads for cloudvolume, by default 1

    Returns
    -------
    sk : meshparty.skeleton.Skeleton
        Skeleton object
    mesh : meshparty.trimesh_io.Mesh
        Mesh object, only if return_mesh is True
    level2_dict : dict
        Level 2 id to vertex map, only if return_l2dict is True.
    """
    # A client is required downstream; build one from the datastack name if needed.
    if client is None:
        client = CAVEclient(datastack_name)
    if n_parallel is None:
        n_parallel = 1
    if cv is None:
        cv = cloudvolume.CloudVolume(
            client.info.segmentation_source(),
            parallel=n_parallel,
            use_https=True,
            progress=False,
            bounded=False,
            fill_missing=True,
            secrets={"token": client.auth.token},
        )

    # Default the root point's resolution to the volume's mip-0 resolution.
    if root_point_resolution is None:
        root_point_resolution = cv.mip_resolution(0)

    # lvl2_eg = client.chunkedgraph.level2_chunk_graph(root_id)

    # eg, l2dict_mesh, l2dict_r_mesh, x_ch = build_spatial_graph(lvl2_eg, cv)
    # mesh_chunk = trimesh_io.Mesh(vertices=x_ch, faces=[], link_edges=eg)

    # Build the chunk-index mesh and its level-2-id <-> vertex maps.
    mesh_chunk, l2dict_mesh, l2dict_r_mesh = chunk_index_mesh(
        root_id, client=client, cv=cv, return_l2dict=True)

    # Map the requested root point to its closest level 2 chunk, then to a
    # mesh vertex index to use as the skeleton root.
    if root_point is not None:
        lvl2_root_chid, lvl2_root_loc = chunk_tools.get_closest_lvl2_chunk(
            root_point,
            root_id,
            client=client,
            cv=None,
            radius=root_point_search_radius,
            voxel_resolution=root_point_resolution,
            return_point=True,
        )  # Need to have cv=None because of a cloudvolume inconsistency
        root_mesh_index = l2dict_mesh[lvl2_root_chid]
    else:
        root_mesh_index = None

    metameta = {"space": "chunk", "datastack": client.datastack_name}
    sk_ch = skeletonize.skeletonize_mesh(
        mesh_chunk,
        invalidation_d=invalidation_d,
        collapse_soma=False,
        compute_radius=False,
        cc_vertex_thresh=0,
        root_index=root_mesh_index,
        remove_zero_length_edges=False,
        meta={
            "root_id": root_id,
            "skeleton_type": skeleton_type,
            "meta": metameta,
        },
    )

    # Restrict the mesh-level l2 dicts to vertices present in the skeleton.
    l2dict, l2dict_r = sk_utils.filter_l2dict(sk_ch, l2dict_r_mesh)

    # Assemble the return tuple based on the requested outputs.
    out_list = [sk_ch]
    if return_mesh:
        out_list.append(mesh_chunk)
    if return_l2dict:
        out_list.append((l2dict, l2dict_r))
    if return_mesh_l2dict:
        out_list.append((l2dict_mesh, l2dict_r_mesh))
    if len(out_list) == 1:
        return out_list[0]
    else:
        return tuple(out_list)
Example #5
0
def get_lvl2_skeleton(client,
                      root_id,
                      convert_to_nm=False,
                      refine_branch_points=False,
                      root_point=None,
                      point_radius=200,
                      invalidation_d=3,
                      verbose=False,
                      auto_remesh=False):
    """Build a level 2 chunk-graph skeleton for a root id.

    Parameters
    ----------
    client : CAVEclient
        Client for the datastack; used for the chunked graph and to build
        the segmentation cloudvolume.
    root_id : int
        Root id of object to skeletonize
    convert_to_nm : bool, optional
        Passed to refine_skeleton when refine_branch_points is True,
        by default False. Presumably converts vertices from chunk space to
        nanometers — TODO confirm against refine_skeleton.
    refine_branch_points : bool, optional
        If True, refine the skeleton via refine_skeleton (which may raise
        if level 2 meshes are missing), by default False.
    root_point : array, optional
        Point used to pick the root vertex via the closest level 2 chunk,
        by default None (no explicit root is set).
    point_radius : int, optional
        Search radius passed to get_closest_lvl2_chunk, by default 200.
    invalidation_d : int, optional
        Invalidation distance in chunk increments
    verbose : bool, optional
        If True, print timing information for each stage, by default False.
    auto_remesh : bool, optional
        If True and level 2 meshes are missing during refinement, request
        remeshing of the missing chunks before raising, by default False.

    Returns
    -------
    sk_ch : skeleton
        Skeleton of the level 2 graph.
    l2dict_reversed : dict
        Reverse map from skeleton/mesh vertex index to level 2 id.

    Raises
    ------
    ValueError
        If refinement finds level 2 ids with no mesh (after optionally
        triggering a remesh when auto_remesh is True).
    """
    if verbose:
        import time
        t0 = time.time()

    cv = cloudvolume.CloudVolume(client.info.segmentation_source(),
                                 use_https=True,
                                 progress=False,
                                 bounded=False)

    # Fetch the level 2 chunk graph edges for this root id.
    lvl2_eg = get_lvl2_graph(root_id, client)
    if verbose:
        t1 = time.time()
        print('\nTime to return graph: ', t1 - t0)

    # Build a spatial graph: edges, l2 id <-> vertex maps, chunk coordinates.
    eg, l2dict, l2dict_reversed, x_ch = build_spatial_graph(lvl2_eg, cv)

    # Faceless mesh whose connectivity comes entirely from link edges.
    mesh_chunk = trimesh_io.Mesh(vertices=x_ch, faces=[], link_edges=eg)

    # Resolve the requested root point to a mesh vertex index.
    if root_point is not None:
        if verbose:
            t2 = time.time()
        lvl2_root_chid, lvl2_root_loc = get_closest_lvl2_chunk(
            root_point,
            root_id,
            client=client,
            cv=cv,
            radius=point_radius,
            return_point=True)
        root_mesh_index = l2dict[lvl2_root_chid]
        if verbose:
            print('\n Time to get root index: ', time.time() - t2)
    else:
        root_mesh_index = None
        lvl2_root_loc = None

    if verbose:
        t3 = time.time()
    sk_ch = skeletonize.skeletonize_mesh(mesh_chunk,
                                         invalidation_d=invalidation_d,
                                         collapse_soma=False,
                                         compute_radius=False,
                                         root_index=root_mesh_index,
                                         remove_zero_length_edges=False)
    if verbose:
        print('\n Time to skeletonize: ', time.time() - t3)

    if refine_branch_points:
        if verbose:
            t4 = time.time()
        sk_ch, missing_ids = refine_skeleton(sk_ch,
                                             l2dict_reversed,
                                             cv,
                                             convert_to_nm,
                                             root_location=lvl2_root_loc)
        if verbose:
            print('\n Time to refine branch points, ', time.time() - t4)
        # Missing level 2 meshes make refinement incomplete; fail loudly,
        # optionally kicking off a remesh first so a retry can succeed.
        if len(missing_ids) > 0:
            if auto_remesh:
                client.chunkedgraph.remesh_level2_chunks(missing_ids)
                raise ValueError(
                    f'Regenerating mesh for level 2 ids: {missing_ids}. Try again in a few minutes.'
                )
            else:
                raise ValueError(
                    f'No mesh found for level 2 ids: {missing_ids}')

    return sk_ch, l2dict_reversed
Example #6
0
        root_id,
        client=client,
        return_l2dict=True,
        nan_rounds=nan_rounds,
        require_complete=require_complete,
    )

    metameta = {"space": "l2cache", "datastack": client.datastack_name}
    sk = skeletonize.skeletonize_mesh(
        mesh,
        invalidation_d=invalidation_d,
        soma_pt=root_point,
        collapse_soma=collapse_soma,
        soma_radius=collapse_radius,
        compute_radius=False,
        cc_vertex_thresh=0,
        remove_zero_length_edges=True,
        meta={
            "root_id": root_id,
            "skeleton_type": skeleton_type,
            "meta": metameta,
        },
    )

    l2dict, l2dict_r = sk_utils.filter_l2dict(sk, l2dict_r_mesh)

    out_list = [sk]
    if return_mesh:
        out_list.append(mesh)
    if return_l2dict:
        out_list.append((l2dict, l2dict_r))