Example #1
def test_meta_mesh(cv_path, basic_mesh_id, full_cell_mesh_id, tmpdir):
    mm = trimesh_io.MeshMeta(cv_path=cv_path)
    mesh = mm.mesh(seg_id=basic_mesh_id)
    full_cell_mesh = mm.mesh(seg_id=full_cell_mesh_id,
                             merge_large_components=False)
    assert (mesh is not None)
    assert (full_cell_mesh is not None)
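The test above shows the core MeshMeta pattern: construct it once against a CloudVolume path, then request meshes by segment id. A minimal standalone sketch, assuming a hypothetical cv_path and segment id (placeholders, not real data):

from meshparty import trimesh_io

# Placeholder cloud path and segment id, for illustration only.
mm = trimesh_io.MeshMeta(cv_path='precomputed://gs://example-bucket/seg',
                         disk_cache_path='meshes',  # cache downloads on disk
                         cache_size=1)              # keep one mesh in memory
mesh = mm.mesh(seg_id=123456789)
print(mesh.n_vertices, len(mesh.faces))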
Example #2
def generate_proofreading_state(
    datastack,
    root_id,
    branch_points=True,
    end_points=True,
    root_loc=None,
    root_is_soma=False,
    auth_token=None,
    return_as='html',
    min_mesh_component_size=1000,
):
    """Go through the steps to generate a proofreading state from the root id"""
    client = FrameworkClient(datastack, auth_token=auth_token)
    mm = trimesh_io.MeshMeta(cache_size=0,
                             cv_path=client.info.segmentation_source())
    mesh = process_mesh(root_id,
                        client,
                        mm,
                        min_component_size=min_mesh_component_size)
    skf = process_skeleton_from_mesh(mesh,
                                     root_loc=root_loc,
                                     root_is_soma=root_is_soma,
                                     sk_kwargs=SK_KWARGS)
    pf_sb, sb_dfs = process_points_from_skeleton(root_id, skf, client)

    state = pf_sb.render_state(sb_dfs,
                               return_as=return_as,
                               url_prefix=client.info.viewer_site())

    return state
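A hedged usage sketch for the function above; the datastack name, root id, and soma location are all placeholders, and the set of valid return_as values comes from the underlying state builder:

# Hypothetical datastack, root id, and soma location; replace with real values.
state = generate_proofreading_state('example_datastack',
                                    864691135463611454,
                                    root_loc=[175417, 121264, 21313],
                                    root_is_soma=True,
                                    return_as='html')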
Example #3
def test_mesh_meta_rescale(cv_path, full_cell_mesh_id, tmpdir):
    mm = trimesh_io.MeshMeta(cv_path=cv_path,
                             cache_size=0,
                             disk_cache_path=os.path.join(
                                 tmpdir, 'mesh_cache'),
                             voxel_scaling=[2, 2, 1])
    mmesh = mm.mesh(seg_id=full_cell_mesh_id)
    assert np.isclose(51309567968.120735, mmesh.area, atol=1)
Example #4
def test_masked_mesh(cv_path, full_cell_mesh_id, full_cell_soma_pt, tmpdir):
    mm = trimesh_io.MeshMeta(cv_path=cv_path,
                             cache_size=0,
                             disk_cache_path=os.path.join(
                                 tmpdir, 'mesh_cache'))
    mmesh = mm.mesh(seg_id=full_cell_mesh_id)

    assert (mmesh is not None)
    # read again to test file caching with memory caching on

    mmesh_cache = mm.mesh(seg_id=full_cell_mesh_id)

    # now set it up with memory caching enabled
    mm = trimesh_io.MeshMeta(cv_path=cv_path, cache_size=1)
    # read it again with memory caching enabled
    mmesh = mm.mesh(seg_id=full_cell_mesh_id)
    # read it again to use memory cache
    mmesh_mem_cache = mm.mesh(seg_id=full_cell_mesh_id)

    ds = np.linalg.norm(mmesh.vertices - full_cell_soma_pt, axis=1)
    soma_mesh = mmesh.apply_mask(ds < 15000)

    is_big = mesh_filters.filter_largest_component(soma_mesh)
    soma_mesh = soma_mesh.apply_mask(is_big)

    ds = np.linalg.norm(soma_mesh.vertices - full_cell_soma_pt, axis=1)
    double_soma_mesh = soma_mesh.apply_mask(ds < 10000)

    with pytest.raises(ValueError):
        bad_mask = mmesh.apply_mask([True, True])

    random_indices = np.array([0, 500, 1500])
    orig_indices = double_soma_mesh.map_indices_to_unmasked(random_indices)
    back_indices = double_soma_mesh.filter_unmasked_indices(orig_indices)

    assert np.all(random_indices == back_indices)

    fname = os.path.join(tmpdir, 'test_mask_mesh.h5')
    double_soma_mesh.write_to_file(fname)

    double_soma_read = mm.mesh(filename=fname)
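The last assertions exercise the masking index round trip: map_indices_to_unmasked lifts indices on a masked mesh to indices on the original mesh, and filter_unmasked_indices maps them back. A minimal sketch of the invariant, reusing double_soma_mesh from the test:

# Any valid indices on the masked mesh survive the round trip unchanged.
inds = np.arange(min(100, double_soma_mesh.n_vertices))
lifted = double_soma_mesh.map_indices_to_unmasked(inds)
assert np.all(double_soma_mesh.filter_unmasked_indices(lifted) == inds)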
Example #5
    def set_vtk_window(self):
        self.curId = self.allids[self.curIndex]
        filename = self.meshdirectoryline.text() + '/' + self.curId + '.h5'
        label_dir = self.labelline.text()
        if len(label_dir) > 0:
            label_file = os.path.join(label_dir, self.curId + ".h5")
            with h5py.File(label_file, "r") as f:
                labels = np.array(f['mask'])
        else:
            labels = None
        if self.curId in self.LABELS:
            labeltext = "Current Label = %s" % self.LABELS[self.curId]
        else:
            labeltext = "No Current Label"
        self.filenameline.setText(filename + ", " + labeltext)
        print("this is filename")
        print(filename)
        cvpath = 'https://storage.googleapis.com/neuroglancer/basil_v0/basil_full/seg-aug'
        mm = trimesh_io.MeshMeta()
        mesh = mm.mesh(filename=filename)
        print(mesh.vertices.shape)
        print(labels.dtype)
        #mesh_poly = trimesh_vtk.trimesh_to_vtk(mesh.vertices, mesh.faces, mesh.mesh_edges)
        #reader = vtk.vtkDataReader()
        #reader.SetFileName(filename)
        #mapper = vtk.vtkPolyDataMapper()
        #mapper.SetInputConnection(mesh_poly.GetOutputPort())

        # Create an actor
        # actor = vtk.vtkActor()
        actor = trimesh_vtk.mesh_actor(mesh,
                                       vertex_colors=labels.astype(np.float64),
                                       opacity=float(self.opacityline.text()))
        if self.curId in self.LABELS.keys():
            actor.GetProperty().SetColor(0.5, 0.5, 0.0)
        else:
            actor.GetProperty().SetColor(0.0, 0.5, 0.5)
        # actor.GetProperty().SetOpacity(float(self.opacityline.text()))
        # actor.SetMapper(mapper)

        #render
        self.ren.RemoveActor(self.curActor)
        self.curActor = actor
        self.ren.AddActor(actor)
        self.ren.ResetCamera()
        cam1 = self.ren.GetActiveCamera()
        cam1.Zoom(1.4)
        self.ren.SetBackground((1.0, 1.0, 1.0))
        self.iren.Initialize()
        self.iren.Start()
Example #6
def get_topo_points(client,
                    segs,
                    annos,
                    min_size=1000,
                    voxel_resolution=np.array([4, 4, 40])):
    mm = trimesh_io.MeshMeta(cv_path=client.info.segmentation_source(),
                             map_gs_to_https=True,
                             disk_cache_path='meshes')

    all_bps = []
    all_eps = []
    for oid in segs:
        mesh = mm.mesh(seg_id=oid)

        lcs = mesh_filters.filter_largest_component(mesh)
        meshf = mesh.apply_mask(lcs)

        nrn = meshwork.Meshwork(meshf)
        if len(annos['center_point']) > 0:
            ctr_pt = np.array(annos['center_point']) * voxel_resolution
            nrn.skeletonize_mesh(soma_pt=ctr_pt,
                                 soma_thresh_distance=15000,
                                 compute_radius=False,
                                 collapse_function='sphere')
        else:
            nrn.skeletonize_mesh(compute_radius=False)

        if len(annos['ignore']) > 0:
            anno_df = pd.DataFrame({'pts': annos['ignore']})
            nrn.add_annotations('ignore_beyond', anno_df, point_column='pts')
            ignore_masks = []
            for anno_ind in nrn.anno['ignore_beyond'].mesh_index:
                ignore_masks.append(
                    np.invert(nrn.downstream_of(anno_ind).to_mesh_mask_base))
            for mask in ignore_masks:
                nrn.apply_mask(mask)

        bps, eps = _branch_and_end_points_ordered(nrn)
        all_bps.append([bp / voxel_resolution for bp in bps])
        all_eps.append(eps / voxel_resolution)

    return [all_bps, np.vstack(all_eps), np.vstack(annos['ignore'])]
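A hedged sketch of calling get_topo_points; the datastack and segment id are placeholders, and the annos layout (center_point and ignore keys holding voxel coordinates) is inferred from the function body. Note that the final np.vstack requires at least one ignore point:

# Hypothetical inputs, matching the keys the function reads from annos.
client = FrameworkClient('example_datastack')
annos = {'center_point': [[175417, 121264, 21313]],   # soma location, in voxels
         'ignore': [[180000, 125000, 21500]]}         # cut skeleton past this point
bps, eps, ignore_pts = get_topo_points(client, [864691135463611454], annos)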
Example #7
def sk_dist(neuronID,
            pts,
            data_root,
            dataset_name='pinky100',
            filt=True,
            max_dist=2000):
    '''
    Calculates the distance from any point(s) in space to the closest point on
    the skeleton of the neuron of interest.

    INPUTS:
        neuronID                  = string, id associated with the neuron of interest
        pts                       = np.array, Nx3, N: number of points
        data_root                 = location of the dataset
        dataset_name (optional)   = string (default 'pinky100')
        filt (optional)           = bool, filter the mesh based on segment size (default True)
        max_dist (optional)       = float, maximum expected distance from the neuron in nm (default 2000 nm)

    RETURNS:
        dists                     = Nx1 np.array, distance of each point to the closest skeleton point in nm
    '''

    # set values
    voxel_size = [4, 4, 40]
    # Folders for the mesh and skeleton
    mesh_folder = os.path.join(data_root, 'meshes')
    skeleton_folder = os.path.join(data_root, 'skeletons')
    # Mesh meta data
    mm = trimesh_io.MeshMeta(
        cv_path=
        'graphene://https://swdb.dynamicannotationframework.com/segmentation/1.0/pinky100_sv16',
        disk_cache_path=mesh_folder,
        cache_size=2)

    # load the mesh for the neuron
    mesh = mm.mesh(seg_id=neuronID)
    #load the skeleton for the neuron
    sk = skeleton_io.read_skeleton_h5(skeleton_folder + '/' + str(neuronID) +
                                      '.h5')

    if filt:
        # filter out the segmented portions of the mesh
        mask = mesh_filters.filter_largest_component(mesh)
        neuron_mesh = mesh.apply_mask(mask)
    else:
        neuron_mesh = mesh

    # convert vertices to nm
    pt_nm = np.vstack(pts) * np.array(voxel_size)

    # use the kdtree to find the shortest distance from each point to the mesh
    # and the mesh vertex index associated with it
    dist, ind = neuron_mesh.kdtree.query(pt_nm, distance_upper_bound=max_dist)
    if filt:
        # map the masked indices back onto the original mesh
        ind = neuron_mesh.map_indices_to_unmasked(ind)
        neuron_mesh = mesh

    # find the skeleton vertex of each point on the mesh
    syn_sk_ind = sk.mesh_to_skel_map[ind]
    syn_sk_mesh_ind = np.array(sk.vertex_properties['mesh_index'])[syn_sk_ind]

    # graph distance along the mesh from each query vertex to its skeleton vertex
    dd = scipy.sparse.csgraph.dijkstra(neuron_mesh.csgraph,
                                       directed=False,
                                       indices=ind)

    dists = np.array([dd[i, mesh_ind]
                      for i, mesh_ind in enumerate(syn_sk_mesh_ind)])
    return dists
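A hedged usage sketch; the neuron id, points, and data_root are placeholders, with pts given as Nx3 voxel coordinates as the function expects:

# Hypothetical inputs; distances come back in nm, one per point.
pts = np.array([[104000, 46000, 800],
                [105000, 47000, 820]])
dists = sk_dist('648518346349471454', pts, '/data/example_root')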
Example #8
def soma_dist(neuronID,
              pts,
              data_root,
              dataset_name='pinky100',
              filt=True,
              max_dist=500):
    '''
    Calculates the distance from any point(s) in space to the center of the
    soma of the neuron of interest.

    INPUTS:
        neuronID                  = string, id associated with the neuron of interest
        pts                       = np.array, Nx3, N: number of points
        data_root                 = location of the dataset
        dataset_name (optional)   = string (default 'pinky100')
        filt (optional)           = bool, filter the mesh based on segment size (default True)
        max_dist (optional)       = float, maximum expected distance from the neuron in nm (default 500 nm)

    RETURNS:
        pt_soma_dist              = Nx1 np.array, distance of each point to the soma in nm
    '''

    # set values
    voxel_size = [4, 4, 40]
    # Folders for the mesh and skeleton
    mesh_folder = os.path.join(data_root, 'meshes')
    skeleton_folder = os.path.join(data_root, 'skeletons')
    # Mesh meta data
    mm = trimesh_io.MeshMeta(
        cv_path=
        'graphene://https://swdb.dynamicannotationframework.com/segmentation/1.0/pinky100_sv16',
        disk_cache_path=mesh_folder,
        cache_size=2)

    # load the mesh for the neuron
    mesh = mm.mesh(seg_id=neuronID)
    #load the skeleton for the neuron
    sk = skeleton_io.read_skeleton_h5(skeleton_folder + '/' + str(neuronID) +
                                      '.h5')

    if filt:
        # filter out the segmented portions of the mesh
        mask = mesh_filters.filter_largest_component(mesh)
        neuron_mesh = mesh.apply_mask(mask)
    else:
        neuron_mesh = mesh

    # convert vertices to nm
    pt_nm = np.vstack(pts) * np.array(voxel_size)

    # use the kdtree to find the shortest distance from each point to the mesh
    # and the mesh vertex index associated with it
    dist, ind = neuron_mesh.kdtree.query(pt_nm, distance_upper_bound=max_dist)

    # find the skeleton vertex of each point on the mesh
    if filt:
        ind_orig = neuron_mesh.map_indices_to_unmasked(ind)
        pt_sk_vert = sk.mesh_to_skel_map[ind_orig]
    else:
        pt_sk_vert = sk.mesh_to_skel_map[ind]

    # total distance: along-skeleton distance to the root, plus the
    # point-to-mesh gap, plus the along-mesh distance from each mesh point
    # to its skeleton vertex
    pt_soma_dist = (sk.distance_to_root[pt_sk_vert] + dist +
                    sk_dist(neuronID,
                            pts,
                            data_root,
                            dataset_name='pinky100',
                            filt=True,
                            max_dist=2000))
    return pt_soma_dist
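The same call shape works for soma_dist, which additionally accumulates the along-skeleton distance to the root; again, every value here is a placeholder:

# Hypothetical inputs; one distance in nm per query point.
pts = np.array([[104000, 46000, 800]])
soma_dists = soma_dist('648518346349471454', pts, '/data/example_root',
                       max_dist=500)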
Example #9
def test_get_mesh(cv_path, basic_mesh_id, tmpdir):
    mm = trimesh_io.MeshMeta(cv_path=cv_path, disk_cache_path=tmpdir)
    basic_mesh = mm.mesh(seg_id=basic_mesh_id)
    assert (basic_mesh.n_vertices == 5)
Example #10
import os
import operator
from collections import Counter

import numpy as np
from scipy import sparse as sp
from scipy.sparse import csgraph
from scipy.spatial import cKDTree
from meshparty import trimesh_io
import networkx as nx
import h5py

from . import ngl

MESHMETA = trimesh_io.MeshMeta()
MITOMESHDIR = "data/mitomeshes"
CELLMESHDIR = "../data/neuron_meshes_v185"


def download_meshes(segids,
                    overwrite=False,
                    parallel=1,
                    meshdir=MITOMESHDIR,
                    cvpath=ngl.MITO_CVPATH,
                    **kwargs):
    trimesh_io.download_meshes(segids,
                               meshdir,
                               cvpath,
                               overwrite=overwrite,
                               n_threads=parallel,
                               **kwargs)
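A quick sketch of the wrapper above; the segment ids are placeholders, and meshes land in MITOMESHDIR unless meshdir is overridden:

# Hypothetical mitochondria segment ids, fetched with four download threads.
download_meshes([123456789, 123456790], parallel=4)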
Example #11
import os

PACKAGE_PATH = "%s/../" % os.path.dirname(os.path.abspath(__file__))
EXAMPLE_PATH = "%s/example/" % PACKAGE_PATH

if not os.path.exists(EXAMPLE_PATH):
    os.makedirs(EXAMPLE_PATH)


def save_local_views(mesh, n_points, n_samples, save_name, pc_align=True):
    save_folder = "%s/%s/" % (EXAMPLE_PATH, save_name)

    if not os.path.exists(save_folder):
        os.makedirs(save_folder)

    for i_sample in range(n_samples):
        local_mesh = mesh.get_local_mesh(n_points=n_points,
                                         pc_align=pc_align)

        save_path = "%s/sample%d.obj" % (save_folder, i_sample)

        local_mesh.write_to_file(save_path)



if __name__ == "__main__":
    from meshparty import trimesh_io

    # HOME is assumed here to be the user's home directory; the original
    # snippet defines it elsewhere.
    HOME = os.path.expanduser("~")
    mm = trimesh_io.MeshMeta()
    mesh = mm.mesh("%s/MeshParty/example/3205058_m.obj" % HOME)

    save_local_views(mesh, n_points=2000, n_samples=10,
                     save_name="3205058_m_local_500")
Example #12
                      verbose=False)

mesh_folder = "/data/dynamic_brain_workshop/electron_microscopy/2019/"
voxel_resolution = np.array([4, 4, 40])

# Find all synapses.
all_man_syn = dl.query_synapses("gsynapse_ai_manual_v2")

# Count manually annotated synapses per postsynaptic cell and pick a cell with 34 of them.
all_man_syn["syn_num"] = all_man_syn.groupby(
    "post_pt_root_id")["id"].transform(len)
cellid = all_man_syn[all_man_syn.syn_num == 34]["post_pt_root_id"].values[0]
print(cellid)

# Visualize the cell.
mm = trimesh_io.MeshMeta(disk_cache_path="test/test_files")
mesh = mm.mesh(filename=mesh_folder + "meshes/%d.h5" % cellid)
mesh_poly = trimesh_vtk.trimesh_to_vtk(mesh.vertices, mesh.faces, None)
plt_actor = vtkplotter.Actor(mesh_poly)
vtkplotter.embedWindow(backend="k3d")
vp = vtkplotter.Plotter(bg="b")
myactor = vtkplotter.Actor(plt_actor, c="r")
myactor.GetMapper().Update()
vp += myactor
vp.show()

# Find 10 largest synapses automatically extracted on this cell.
post_synapse_df = dl.query_synapses("pni_synapses_i3",