Пример #1
0
def test_skeleton_creation(simple_skeleton):
    """Construction picks a root when none is given, honors an explicit
    root, and rejects an out-of-range root index."""
    base = simple_skeleton

    # With root=None, some valid root is chosen automatically.
    rebuilt = skeleton.Skeleton(base.vertices, base.edges, root=None)
    assert rebuilt.root is not None

    # An explicitly requested root is kept as-is.
    rebuilt = skeleton.Skeleton(base.vertices, base.edges, root=3)
    assert rebuilt.root == 3

    # A root index beyond the last vertex must raise.
    with pytest.raises(ValueError):
        skeleton.Skeleton(base.vertices, base.edges,
                          root=len(base.vertices) + 1)
Пример #2
0
def read_skeleton_h5(filename, remove_zero_length_edges=False):
    '''
    Reads a skeleton and its properties from an hdf5 file.

    Parameters
    ----------
    filename: str
        path to skeleton file
    remove_zero_length_edges: bool, optional
        If True, post-processes the skeleton data to removes any zero
        length edges. Default is False.
    Returns
    -------
    :obj:`meshparty.skeleton.Skeleton`
        skeleton object loaded from the h5 file

    '''
    # Unpack every stored component of the skeleton from the h5 file.
    (vertices,
     edges,
     mesh_to_skel_map,
     vertex_properties,
     creation_parameters,
     root,
     seg_id) = read_skeleton_h5_by_part(filename)

    return skeleton.Skeleton(
        vertices=vertices,
        edges=edges,
        mesh_to_skel_map=mesh_to_skel_map,
        vertex_properties=vertex_properties,
        creation_parameters=creation_parameters,
        root=root,
        seg_id=seg_id,
        remove_zero_length_edges=remove_zero_length_edges,
    )
Пример #3
0
def refine_skeleton(l2_sk,
                    l2dict_reversed,
                    cv,
                    convert_to_nm=True,
                    root_location=None):
    """Refine a chunk-index-space level-2 skeleton toward Euclidean space.

    Branch points whose mesh fragments were located are moved to those
    fragment locations; the remaining vertices are optionally converted from
    chunk index space to nanometers (shifted to the chunk center), and the
    root may be pinned to a known location.

    Parameters
    ----------
    l2_sk : meshparty.skeleton.Skeleton
        Skeleton in chunk index space.
    l2dict_reversed : dict
        Mapping from skeleton mesh index to level 2 id.
    cv : cloudvolume.CloudVolume
        Associated cloudvolume.
    convert_to_nm : bool, optional
        If True, converts non-branch-point vertices from chunk index space
        to nanometers. Default is True.
    root_location : array-like, optional
        3-element location assigned to the root vertex. Default is None.

    Returns
    -------
    tuple
        (refined meshparty.skeleton.Skeleton, missing ids reported by
        lvl2_branch_fragment_locs — presumably level 2 ids with no mesh).
    """
    # Copy first: l2_sk.vertices may be the skeleton's own array, and the
    # original code mutated the input skeleton in place.
    verts = l2_sk.vertices.copy()

    l2br_locs, missing_ids = lvl2_branch_fragment_locs(l2_sk, l2dict_reversed,
                                                       cv)

    # Branch points with a located fragment (no NaN row) take the fragment
    # location; branch points with missing meshes are left untouched.
    missing_brinds = np.any(np.isnan(l2br_locs), axis=1)
    found_bps = l2_sk.branch_points_undirected[~missing_brinds]
    verts[found_bps] = l2br_locs[~missing_brinds]

    if convert_to_nm:
        # Every vertex not already placed from a mesh fragment is converted
        # from chunk index to nm and moved to the center of its chunk.
        other_inds = np.full(len(verts), True)
        other_inds[found_bps] = False
        verts[other_inds] = chunk_to_nm(
            verts[other_inds],
            cv) + chunk_dims(cv) // 2  # Move to center of chunks

    if root_location is not None:
        verts[l2_sk.root] = root_location

    return skeleton.Skeleton(vertices=verts,
                             edges=l2_sk.edges,
                             root=l2_sk.root,
                             remove_zero_length_edges=False), missing_ids
Пример #4
0
def test_segments(simple_skeleton):
    """A skeleton rooted at vertex 0 decomposes into the expected segments."""
    sk = skeleton.Skeleton(simple_skeleton.vertices,
                           simple_skeleton.edges,
                           root=0)
    segs = sk.segments
    # Three unbranched segments that together cover every vertex once.
    assert len(segs) == 3
    covered = np.unique(np.concatenate(segs))
    assert len(covered) == len(sk.vertices)
    # Each vertex maps to its expected segment id.
    expected = np.array([0, 0, 0, 1, 1, 2, 2])
    assert np.all(sk.segment_map == expected)
Пример #5
0
def collapse_pcg_skeleton(soma_pt, sk, soma_r):
    """Use soma point vertex and collapse soma as sphere
    Parameters
    ----------
    soma_pt : array
        3-element location of soma center (in nm)
    sk: skeleton.Skeleton
        Coarse skeleton
    soma_r : float
        Soma collapse radius (in nm)
    Returns
    -------
    skeleton
        New skeleton with updated properties
    """
    # Find vertices inside the soma sphere; the one closest to the soma
    # center becomes the collapse target / root.
    soma_verts, _ = skeletonize.soma_via_sphere(
        soma_pt, sk.vertices, sk.edges, soma_r)
    dists_to_center = np.linalg.norm(
        sk.vertices[soma_verts] - soma_pt, axis=1)
    root_vert = soma_verts[np.argmin(dists_to_center)]

    collapse_result = skeletonize.collapse_soma_skeleton(
        soma_verts[soma_verts != root_vert],
        soma_pt,
        sk.vertices,
        sk.edges,
        sk.mesh_to_skel_map,
        collapse_index=root_vert,
        return_soma_ind=True,
        return_filter=True,
    )
    new_v, new_e, new_skel_map, vert_filter, root_ind = collapse_result

    new_skeleton = skeleton.Skeleton(
        new_v,
        new_e,
        root=root_ind,
        mesh_to_skel_map=new_skel_map,
        mesh_index=sk.mesh_index[vert_filter],
        remove_zero_length_edges=False,
        meta=sk.meta,
    )

    # Record how the collapse was performed in the skeleton metadata.
    meta = new_skeleton.meta
    meta.soma_pt_x = soma_pt[0]
    meta.soma_pt_y = soma_pt[1]
    meta.soma_pt_z = soma_pt[2]
    meta.soma_radius = soma_r
    meta.collapse_soma = True
    meta.collapse_function = "sphere"

    return new_skeleton
Пример #6
0
def simple_skeleton_with_properties():
    """Fixture: simple skeleton carrying a per-vertex 'test' property and a
    mesh-to-skeleton map (every tenth mesh index)."""
    verts = simple_verts
    edges = simple_edges
    n_verts = len(verts)
    vertex_props = {'test': np.arange(n_verts)}
    m2s_map = 10 * np.arange(n_verts)
    yield skeleton.Skeleton(verts,
                            edges,
                            mesh_to_skel_map=m2s_map,
                            vertex_properties=vertex_props,
                            root=0)
Пример #7
0
def simple_skeleton_with_properties():
    """Fixture: simple skeleton with a mesh index (every tenth value), a
    mesh-to-skeleton map with three mesh vertices per skeleton vertex, and a
    'test' vertex property mirroring the mesh index."""
    verts = simple_verts
    edges = simple_edges
    n_verts = len(verts)
    mesh_index = 10 * np.arange(n_verts)
    mesh_to_skel_map = np.repeat(np.arange(n_verts), 3)
    yield skeleton.Skeleton(
        verts,
        edges,
        mesh_index=mesh_index,
        mesh_to_skel_map=mesh_to_skel_map,
        vertex_properties={'test': mesh_index.copy()},
        root=0,
    )
Пример #8
0
def read_skeleton_h5(filename):
    '''
    Reads a skeleton and its properties from an hdf5 file.

    Parameters
    ----------
    filename: str
        path to skeleton file

    Returns
    -------
    :obj:`meshparty.skeleton.Skeleton`
        skeleton object loaded from the h5 file

    '''
    # Unpack the stored skeleton components and rebuild the object.
    parts = read_skeleton_h5_by_part(filename)
    vertices, edges, mesh_to_skel_map, vertex_properties, root = parts
    return skeleton.Skeleton(
        vertices=vertices,
        edges=edges,
        mesh_to_skel_map=mesh_to_skel_map,
        vertex_properties=vertex_properties,
        root=root,
    )
def viz_3d_network(positions, edgelist):
    """
    Makes an interactive, 3d visualization of a spatial network.

    Parameters
    ----------
    positions: n_nodes x 3
        The 3d coordinates of each node in the network.

    edgelist: n_edges x 2
        List of the edges. If edges are labeled by node
        names (as opposed to node indexes) then positions
        must be a pd.DataFrame indexed by the node names.
    """
    # Only when BOTH inputs are DataFrames do we treat edge entries as node
    # names and remap them to positional indexes.
    named_nodes = (isinstance(positions, pd.DataFrame)
                   and isinstance(edgelist, pd.DataFrame))
    if named_nodes:
        # e.g. an edge (idx_0, idx_9) becomes (0, 9)
        name_to_index = {name: i for i, name in enumerate(positions.index)}
        edgelist = np.vectorize(lambda name: name_to_index[name])(edgelist)

    positions = np.array(positions)
    edgelist = np.array(edgelist)

    net_sk = skeleton.Skeleton(positions, edgelist)
    edge_actor = trimesh_vtk.skeleton_actor(net_sk,
                                            color=(0, 0, 0),
                                            opacity=.7,
                                            line_width=1)
    vert_actor = trimesh_vtk.point_cloud_actor(positions,
                                               size=3000,
                                               color=(1, 0, 0))
    trimesh_vtk.render_actors([edge_actor, vert_actor])
Пример #10
0
def simple_skeleton():
    """Fixture: minimal skeleton built from the module-level vertices and
    edges, rooted at vertex 0."""
    yield skeleton.Skeleton(simple_verts, simple_edges, root=0)
Пример #11
0
def refine_chunk_index_skeleton(
    sk_ch,
    l2dict_reversed,
    cv,
    refine_inds="all",
    scale_chunk_index=True,
    root_location=None,
    nan_rounds=20,
    return_missing_ids=False,
    segmentation_fallback=False,
    fallback_mip=2,
    cache=None,
    save_to_cache=False,
):
    """Refine skeletons in chunk index space to Euclidean space.

    Parameters
    ----------
    sk_ch : meshparty.skeleton.Skeleton
        Skeleton in chunk index space
    l2dict_reversed : dict
        Mapping between skeleton vertex index and level 2 id.
    cv : cloudvolume.CloudVolume
        Associated cloudvolume
    refine_inds : str, None or list-like, optional
        Skeleton indices to refine, 'all', or None. If 'all', does all skeleton indices.
        If None, downloads no index but can use other options.
        By default 'all'.
    scale_chunk_index : bool, optional
        If True, maps unrefined chunk index locations to the center of the chunk in
        Euclidean space, by default True
    root_location : list-like, optional
        3-element euclidean space location to which to map the root vertex location, by default None
    nan_rounds : int, optional
        Number of passes to smooth over any missing values by averaging proximate vertex locations.
        Only used if refine_inds is 'all'. Default is 20.
    return_missing_ids : bool, optional
        If True, returns ids of any missing level 2 meshes. Default is False
    segmentation_fallback : bool, optional
        If True, downloads the segmentation at mip level fallback_mip to get
        a location. Default is False.
    fallback_mip : int, optional
        The mip level used in segmentation fallback. Default is 2.
    cache : str, optional
        Filename for a sqlite database storing locations associated with level 2 ids.
    save_to_cache : bool, optional
        If using a sqlite cache, True saves newly downloaded values into it.
        Default is False.

    Returns
    -------
    meshparty.skeleton.Skeleton
        Skeleton with remapped vertex locations
    """
    # With no smoothing rounds available, missing locations are converted
    # immediately instead of being left as NaN for later interpolation.
    if nan_rounds is None:
        convert_missing = True
    else:
        convert_missing = False

    refine_out = chunk_tools.refine_vertices(
        sk_ch.vertices,
        l2dict_reversed=l2dict_reversed,
        cv=cv,
        refine_inds=refine_inds,
        scale_chunk_index=scale_chunk_index,
        convert_missing=convert_missing,
        return_missing_ids=return_missing_ids,
        segmentation_fallback=segmentation_fallback,
        fallback_mip=fallback_mip,
        cache=cache,
        save_to_cache=save_to_cache,
    )
    if return_missing_ids:
        new_verts, missing_ids = refine_out
    else:
        new_verts = refine_out

    if root_location is not None:
        new_verts[sk_ch.root] = root_location

    l2_sk = skeleton.Skeleton(
        vertices=new_verts,
        edges=sk_ch.edges,
        root=sk_ch.root,
        remove_zero_length_edges=False,
        mesh_index=sk_ch.mesh_index,
        mesh_to_skel_map=sk_ch.mesh_to_skel_map,
        meta=sk_ch.meta,
    )
    metameta = {
        "space": "euclidean",
    }
    # Best-effort: older meta objects may not support update_metameta.
    try:
        l2_sk.meta.update_metameta(metameta)
    except Exception:
        pass

    # Guard with isinstance: refine_inds may be list-like, and comparing an
    # array against a string would not produce a plain boolean.
    if isinstance(refine_inds, str) and refine_inds == "all":
        sk_utils.fix_nan_verts(l2_sk, num_rounds=nan_rounds)

    if return_missing_ids:
        return l2_sk, missing_ids
    else:
        return l2_sk
Пример #12
0
def update_lvl2_skeleton(l2_sk, l2br_locs):
    """Return a new skeleton with undirected branch-point vertices moved to
    the given locations.

    Parameters
    ----------
    l2_sk : meshparty.skeleton.Skeleton
        Input level-2 skeleton.
    l2br_locs : array
        Replacement locations, one row per undirected branch point of l2_sk.

    Returns
    -------
    meshparty.skeleton.Skeleton
        New skeleton with updated branch-point locations.
    """
    # Copy first: l2_sk.vertices may be the skeleton's own array, and the
    # original code wrote into it, silently mutating the input skeleton.
    verts = l2_sk.vertices.copy()
    verts[l2_sk.branch_points_undirected] = l2br_locs
    return skeleton.Skeleton(vertices=verts,
                             edges=l2_sk.edges,
                             remove_zero_length_edges=False)
Пример #13
0
def refine_chunk_index_skeleton(
    sk_ch,
    l2dict_reversed,
    cv,
    refine_inds="all",
    scale_chunk_index=True,
    root_location=None,
    nan_rounds=20,
    return_missing_ids=False,
    segmentation_fallback=False,
    fallback_mip=2,
    cache=None,
    save_to_cache=False,
    client=None,
    l2cache=False,
):
    """Refine skeletons in chunk index space to Euclidean space.

    Parameters
    ----------
    sk_ch : meshparty.skeleton.Skeleton
        Skeleton in chunk index space
    l2dict_reversed : dict
        Mapping between skeleton vertex index and level 2 id.
    cv : cloudvolume.CloudVolume
        Associated cloudvolume
    refine_inds : str, None or list-like, optional
        Skeleton indices to refine, 'all', or None. If 'all', does all skeleton indices.
        If None, downloads no index but can use other options.
        By default 'all'.
    scale_chunk_index : bool, optional
        If True, maps unrefined chunk index locations to the center of the chunk in
        Euclidean space, by default True
    root_location : list-like, optional
        3-element euclidean space location to which to map the root vertex location, by default None
    nan_rounds : int, optional
        Number of passes to smooth over any missing values by averaging proximate vertex locations.
        Only used if refine_inds is 'all'. Default is 20.
    return_missing_ids : bool, optional
        If True, returns ids of any missing level 2 meshes. Default is False
    segmentation_fallback : bool, optional
        If True, downloads the segmentation at mip level in fallback_mip to get a location. Very slow. Default is False.
    fallback_mip : int, optional
        The mip level used in segmentation fallback. Default is 2.
    cache : str, optional
        If set to 'service', uses the l2cache service if available available. Otherwise, a filename for a sqlite database storing locations associated with level 2 ids. Default is None.
    save_to_cache : bool, optional
        If using a sqlite database, setting this to True will add values to the cache as downloads occur.
    client : CAVEclient, optional
        If using the l2cache service, provides a client that can access it.
    l2cache : bool, optional,
        Set to True if using a l2cache to localize vertices. Same as setting cache to 'service'. Default is False.
    Returns
    -------
    meshparty.skeleton.Skeleton
        Skeleton with remapped vertex locations
    """
    # With no smoothing rounds available, missing locations are converted
    # immediately instead of being left as NaN for later interpolation.
    if nan_rounds is None:
        convert_missing = True
    else:
        convert_missing = False

    # l2cache=True is shorthand for cache='service'.
    if l2cache:
        cache = "service"

    refine_out = chunk_tools.refine_vertices(
        sk_ch.vertices,
        l2dict_reversed=l2dict_reversed,
        cv=cv,
        refine_inds=refine_inds,
        scale_chunk_index=scale_chunk_index,
        convert_missing=convert_missing,
        return_missing_ids=return_missing_ids,
        segmentation_fallback=segmentation_fallback,
        fallback_mip=fallback_mip,
        cache=cache,
        save_to_cache=save_to_cache,
        client=client,
    )
    if return_missing_ids:
        new_verts, missing_ids = refine_out
    else:
        new_verts = refine_out

    if root_location is not None:
        new_verts[sk_ch.root] = root_location

    l2_sk = skeleton.Skeleton(
        vertices=new_verts,
        edges=sk_ch.edges,
        root=sk_ch.root,
        remove_zero_length_edges=False,
        mesh_index=sk_ch.mesh_index,
        mesh_to_skel_map=sk_ch.mesh_to_skel_map,
        meta=sk_ch.meta,
    )
    metameta = {
        "space": "euclidean",
    }
    # Best-effort: older meta objects may not support update_metameta.
    # Narrowed from a bare except, which would also swallow
    # KeyboardInterrupt/SystemExit.
    try:
        l2_sk.meta.update_metameta(metameta)
    except Exception:
        pass

    # isinstance guard: refine_inds may be list-like, and comparing an array
    # against a string would not produce a plain boolean.
    if isinstance(refine_inds, str) and refine_inds == "all":
        sk_utils.fix_nan_verts(l2_sk, num_rounds=nan_rounds)

    if return_missing_ids:
        return l2_sk, missing_ids
    else:
        return l2_sk