Example #1
def meshPartitionGeodesics(mesh, writePath, pointsPulmonary, pointsTricuspid,
                           apexId):
    """
    Creates a mesh in PLY format (needed for the tetrahedralization) and another one in VTK with the distance fields, because PLY doesn't store the fields. Space is cheap, anyway.
    
    writePath needs to be the file path without extension.
    """

    points, faces = utilities.vtk_to_numpy(mesh,
                                           returnFaces=True,
                                           flatten=False)
    faces = faces.astype(np.int32)
    distancePulmonary = gdist.compute_gdist(points, faces,
                                            pointsPulmonary.astype(np.int32))
    distanceTricuspid = gdist.compute_gdist(points, faces,
                                            pointsTricuspid.astype(np.int32))
    distanceApex = gdist.compute_gdist(points, faces,
                                       np.array([apexId], dtype=np.int32))

    utilities.add_scalar(mesh, distancePulmonary, 'distancePulmonary')
    utilities.add_scalar(mesh, distanceApex, 'distanceApex')
    utilities.add_scalar(mesh, distanceTricuspid, 'distanceTricuspid')

    if writePath:
        utilities.write_poly(
            mesh,
            writePath + '.vtk',
            scalarFields=[distancePulmonary, distanceApex, distanceTricuspid],
            scalarFieldNames=[
                'distancePulmonary', 'distanceApex', 'distanceTricuspid'
            ],
            format='vtk')
        utilities.write_poly(mesh, writePath, format='ply')

    return mesh
Example #2
    def geodesic_distance(self, sources, max_dist=None, targets=None):
        """
        Calculate the geodesic distance between vertices of the surface.

        ``sources``: one or more indices into vertices. These are required and
            specify the vertices from which the distance is calculated.
            NOTE: if multiple sources are provided, the distance returned for
            each vertex is the distance from its closest source.
        ``max_dist``: find the distance to vertices out as far as max_dist.
        ``targets``: one or more indices into vertices; distances are computed
            only to these target vertices.

        NOTE: Either ``targets`` or ``max_dist`` should be specified, but not
            both; specifying neither is equivalent to max_dist=1e100.

        NOTE: when max_dist is specified, distances > max_dist are returned as
            numpy.inf

        If ``targets`` is omitted, the distance from the sources to all
        vertices within max_dist is returned; if max_dist is also omitted,
        the distance to every vertex on the surface is returned.

        """
        #TODO: Probably should check that targets and start_vertex are less than
        #      number of vertices, etc...
        #if NO_GEODESIC_DISTANCE:
        #    LOG.error("%s: The geodesic distance library didn't load" % repr(self))
        #    return

        if (max_dist is None) and (targets is None):
            dist = gdist.compute_gdist(self.vertices.astype(numpy.float64),
                                       self.triangles.astype(numpy.int32),
                                       source_indices=sources.astype(
                                           numpy.int32))
        elif (max_dist is None) and (targets is not None):
            dist = gdist.compute_gdist(
                self.vertices.astype(numpy.float64),
                self.triangles.astype(numpy.int32),
                source_indices=sources.astype(numpy.int32),
                target_indices=targets.astype(numpy.int32))
        elif (max_dist is not None) and (targets is None):
            dist = gdist.compute_gdist(self.vertices.astype(numpy.float64),
                                       self.triangles.astype(numpy.int32),
                                       source_indices=sources.astype(
                                           numpy.int32),
                                       max_distance=max_dist)
        else:
            LOG.error(
                "%s: Specifying both targets and max_dist doesn't work." %
                str(self))
            dist = None

        return dist
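A minimal usage sketch of the three supported call modes, assuming `surf` is an instance of a class that provides the method above (the object and the index values are hypothetical):

import numpy

sources = numpy.array([0], dtype=numpy.int32)

# 1) No targets, no max_dist: distance from the sources to every vertex.
all_dists = surf.geodesic_distance(sources)

# 2) Targets only: distances are reported for the listed vertices.
targets = numpy.array([10, 42], dtype=numpy.int32)
target_dists = surf.geodesic_distance(sources, targets=targets)

# 3) max_dist only: vertices farther than the cutoff come back as numpy.inf.
local_dists = surf.geodesic_distance(sources, max_dist=25.0)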
Example #3
 def distance(self, src, trg=None, max_distance=None):
   """Compute minimum geodesic distance between source vertices and target
   vertices using the gdist library.
   """
   src = np.array(src, dtype=np.int32)
   if trg is not None:
     trg = np.array(trg, dtype=np.int32)
   if max_distance is not None:
     max_distance = np.float64(max_distance)
     distances = gdist.compute_gdist(self.coords, self.faces, src, trg, max_distance)
   else:
     distances = gdist.compute_gdist(self.coords, self.faces, src, trg)
   return distances
Example #4
def run_svg_results(species, paint, labels_dict, vertices, triangles,
                    pir_regions, hip_regions):

    import gdist
    from scipy import stats

    pir = np.array(np.where(np.in1d(paint, pir_regions))[0], dtype=np.int32)
    hip = np.array(np.where(np.in1d(paint, hip_regions))[0], dtype=np.int32)

    data_dist_pir = gdist.compute_gdist(vertices,
                                        triangles,
                                        source_indices=pir)
    data_dist_hip = gdist.compute_gdist(vertices,
                                        triangles,
                                        source_indices=hip)

    dist_labels_pir = []
    dist_labels_hip = []
    ks_stat = []
    for i in labels_dict:
        labs = np.where(paint == i)[0]
        dist_labels_pir.append(np.mean(data_dist_pir[labs]))
        dist_labels_hip.append(np.mean(data_dist_hip[labs]))
        ks_raw, p = stats.ks_2samp(data_dist_pir[labs], data_dist_hip[labs])
        ks_stat.append(ks_raw)

    dist_labels = np.min(np.vstack((dist_labels_pir, dist_labels_hip)), axis=0)

    lab_pir = np.where(
        np.argmin(np.vstack((dist_labels_pir,
                             dist_labels_hip)), axis=0) == 0)[0]
    lab_hip = np.where(
        np.argmin(np.vstack((dist_labels_pir,
                             dist_labels_hip)), axis=0) == 1)[0]

    #dist_labels[lab_pir] = ((dist_labels[lab_pir] / np.max(dist_labels[lab_pir])) * -1 ) + 1
    #dist_labels[lab_hip] = (dist_labels[lab_hip] / np.max(dist_labels[lab_hip])) * -1

    # for wta labeling
    dist_labels[lab_pir] = 1
    dist_labels[lab_hip] = -1

    parse_xml(species, './fig.dist.%s.hip.svg' % species,
              np.array(dist_labels_hip), 'archi')
    parse_xml(species, './fig.dist.%s.pir.svg' % species,
              np.array(dist_labels_pir), 'paleo')
    parse_xml(species, './fig.dist.%s.wta.svg' % species,
              np.array(dist_labels), 'wta')
Example #5
    def Length(self,
               vertices,
               triangles,
               source_indices,
               target_indices,
               FindMax=True):
        RawArray = gdist.compute_gdist(vertices, triangles, source_indices,
                                       target_indices)
        cleaned = RawArray[~np.isinf(RawArray)]
        source = source_indices[0]
        if FindMax:
            func = max
        else:
            func = min
        if len(cleaned) != 0:
            V = func(cleaned)
            target = target_indices[RawArray == V][0]
            # Note: if two target points share the same extreme length,
            # the first one is used.

        else:

            V = 0
            target = source

        return {'ShorestDist': V, 'SourceIndex': source, 'TargetIndex': target}
Example #6
def _get_geo_dist(vertices, faces, lib, source: int):
    """
    Compute exact geodesic distances from the source vertex to every vertex
    with index >= source (enough to build a symmetric pairwise matrix).

    Parameters:
    vertices, faces: mesh geometry
    lib: 'gdist' or 'igl' - backend used for the computation
    source: int - index of the source vertex

    Returns:
    distances: np.ndarray - geodesic distances to the targets
    """

    assert lib in ('gdist', 'igl')

    targets = np.arange(source, len(vertices), dtype=np.int32)
    source = np.array([source], dtype=np.int32)

    if lib == 'gdist':
        distances = gdist.compute_gdist(vertices,
                                        faces,
                                        source_indices=source,
                                        target_indices=targets)
    else:
        distances = igl.exact_geodesic(vertices,
                                       faces,
                                       vs=source,
                                       vt=targets)

    return distances
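Since `_get_geo_dist` returns distances only to vertices with index >= source, a full pairwise matrix can be assembled one source at a time. A sketch under that assumption (the upper triangle is computed, then mirrored):

import numpy as np

def pairwise_geodesic_matrix(vertices, faces, lib='gdist'):
    # n x n symmetric matrix of exact geodesic distances (illustrative helper).
    n = len(vertices)
    dist = np.zeros((n, n), dtype=np.float64)
    for source in range(n):
        # Row `source`: distances to vertices source, source+1, ..., n-1.
        dist[source, source:] = _get_geo_dist(vertices, faces, lib, source)
    # Mirror the upper triangle into the lower triangle.
    return np.maximum(dist, dist.T)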
Example #7
def compute_geodesic_scalars(src_idx, mesh):
    source_indices = np.array([src_idx], dtype=np.int32)
    target_indices = np.array(range(len(mesh.v)), dtype=np.int32)
    scalar_geodesic_func = gdist.compute_gdist(mesh.v.astype(np.float64),
                                               mesh.f, source_indices,
                                               target_indices)
    return scalar_geodesic_func
Example #9
def faster_gdists(node, cort, vertices, triangles, surf, return_dict):
    idx = np.where(cort == node)[0][0]
    src = utils.translate_src(node, cort)
    trgt = utils.translate_src(cort[idx:], cort)
    temp = np.zeros(len(cort))
    temp[idx:] = gdist.compute_gdist(vertices,
                                     triangles,
                                     source_indices=src,
                                     target_indices=trgt)
    return_dict[node] = utils.recort(temp, surf, cort)
Example #10
    def _DM_operation(self, iteration):

        OneSourceDistance = gdist.compute_gdist(
            self.vertices_simple,
            self.faces_simple,
            source_indices=np.array([iteration], dtype=np.int32),
            target_indices=np.array(range(iteration + 1), dtype=np.int32)
        )  # Distance from node `iteration` to nodes 0..iteration (around 0.8 s for 43350 nodes)
        vector = np.float32(OneSourceDistance)
        return vector
Example #11
 def test_flat_triangular_mesh(self):
     data = np.loadtxt("data/flat_triangular_mesh.txt", skiprows=1)
     vertices = data[0:121].astype(np.float64)
     triangles = data[121:].astype(np.int32)
     source = np.array([1], dtype=np.int32)
     target = np.array([2], dtype=np.int32)
     distance = gdist.compute_gdist(vertices,
                                    triangles,
                                    source_indices=source,
                                    target_indices=target)
     np.testing.assert_array_almost_equal(distance, [0.2])
Example #12
 def test_hedgehog_mesh(self):
     data = np.loadtxt("data/hedgehog_mesh.txt", skiprows=1)
     vertices = data[0:300].astype(np.float64)
     triangles = data[300:].astype(np.int32)
     source = np.array([0], dtype=np.int32)
     target = np.array([1], dtype=np.int32)
     distance = gdist.compute_gdist(vertices,
                                    triangles,
                                    source_indices=source,
                                    target_indices=target)
     np.testing.assert_array_almost_equal(distance, [1.40522])
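The two tests above rely on data files from the gdist repository. A self-contained variant on a tiny synthetic mesh (a flat unit square split into two triangles, where the exact geodesic equals the straight-line diagonal) could look like this:

import numpy as np
import gdist

def test_flat_square_mesh():
    # Unit square in the z = 0 plane, split into two triangles.
    vertices = np.array([[0.0, 0.0, 0.0],
                         [1.0, 0.0, 0.0],
                         [0.0, 1.0, 0.0],
                         [1.0, 1.0, 0.0]], dtype=np.float64)
    triangles = np.array([[0, 1, 2],
                          [1, 3, 2]], dtype=np.int32)
    source = np.array([0], dtype=np.int32)
    target = np.array([3], dtype=np.int32)
    distance = gdist.compute_gdist(vertices,
                                   triangles,
                                   source_indices=source,
                                   target_indices=target)
    # On a flat surface the exact geodesic is the Euclidean diagonal, sqrt(2).
    np.testing.assert_array_almost_equal(distance, [np.sqrt(2.0)])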
Example #13
def dist_calc(surf, source, target):
    """
    source and target are arrays of vertex indices; surf is the brain surface
    as (vertices, triangles). Returns the minimum geodesic distance between
    the two sets.
    """
    vertices, triangles = surf
    new_triangles = triangles.astype('<i4')

    distance = gdist.compute_gdist(vertices,
                                   new_triangles,
                                   source_indices=np.array(source, ndmin=1),
                                   target_indices=np.array(target, ndmin=1))
    return np.min(distance)
Example #14
 def test_flat_triangular_mesh_no_target(self):
     data = np.loadtxt("data/flat_triangular_mesh.txt", skiprows=1)
     vertices = data[0:121].astype(np.float64)
     triangles = data[121:].astype(np.int32)
     source = None
     target = None
     distance = gdist.compute_gdist(
         vertices,
         triangles,
         source,
         target,
     )
     expected = np.loadtxt("data/flat_triangular_mesh_no_target.txt")
     np.testing.assert_array_almost_equal(distance, expected)
Example #15
def dist_calc(surf, cortex, src):
    """
    Calculate exact geodesic distance along cortical surface from set of source nodes.
    """
    import gdist
    from utils import surf_keep_cortex, translate_src, recort

    vertices, triangles = surf_keep_cortex(surf, cortex)
    src_new = translate_src(src, cortex)
    data = gdist.compute_gdist(vertices, triangles, source_indices=src_new)
    dist = recort(data, surf, cortex)
    del data

    return dist
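The helpers imported from `utils` come from the surfdist package. A rough, from-scratch sketch of what they do (the real implementations may differ in detail) clarifies the index bookkeeping between the full surface and the cortex-only mesh:

import numpy as np

def surf_keep_cortex(surf, cortex):
    # Keep only cortical vertices and remap triangle indices accordingly.
    vertices, triangles = surf
    cortex_vertices = np.array(vertices[cortex], dtype=np.float64)
    keep = np.all(np.isin(triangles, cortex), axis=1)   # fully cortical triangles
    remap = -np.ones(len(vertices), dtype=np.int32)     # old index -> new index
    remap[cortex] = np.arange(len(cortex), dtype=np.int32)
    cortex_triangles = remap[triangles[keep]].astype(np.int32)
    return cortex_vertices, cortex_triangles

def translate_src(src, cortex):
    # Translate full-surface vertex indices into cortex-only indices.
    return np.where(np.isin(cortex, src))[0].astype(np.int32)

def recort(data, surf, cortex):
    # Scatter cortex-only values back onto the full surface (zeros elsewhere).
    full = np.zeros(len(surf[0]))
    full[cortex] = data
    return full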
Example #16
def compute_gdist(mesh):
    """
    Geodesic distance extraction on a mesh with a large number of source
    points (roughly every 20th vertex is used as a source).
    :param mesh: trimesh object
    :return: geodesic distance from the nearest source to each vertex
    """
    vert = mesh.vertices
    poly = mesh.faces.astype(np.int32)

    source_index = np.linspace(0,
                               len(vert) - 1,
                               len(vert) // 20).astype(np.int32)
    target_index = np.linspace(0, len(vert) - 1, len(vert)).astype(np.int32)

    return gdist.compute_gdist(vert, poly, source_index, target_index)
Example #17
def dist_calc(surf, cortex, source_nodes):

    """
    Calculate exact geodesic distance along the cortical surface from a set of
    source nodes.
    """

    cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)
    translated_source_nodes = translate_src(source_nodes, cortex)
    data = gdist.compute_gdist(cortex_vertices, cortex_triangles, source_indices = translated_source_nodes)
    dist = recort(data, surf, cortex)
    del data

    return dist
Example #18
def calc_dist_matrix_labels(surf, source_nodes, dist_type='min', nv=1):
    '''
       extract all the necessary information from the given brain surface and
       labels and calculate the distance
       source_nodes : list of labels
       nv : use every nv-th vertex of each label (useful when labels contain
            many vertices); nv=1 uses all vertices
       returns distance matrix, pandas dataframe
    '''

    vertices, triangles = surf
    new_triangles = triangles.astype('<i4')
    cn = [label.name for label in source_nodes]
    dist_matrix = pd.DataFrame(columns=cn, index=cn)
    np.fill_diagonal(dist_matrix.values, 0)

    # TODO: parallel?
    # NOTE: very slow
    for i in range(len(source_nodes) - 1):
        prev_source = source_nodes[i].vertices.astype('<i4')
        prev_name = source_nodes[i].name

        for j in range(i + 1, len(source_nodes)):
            loading = ("i: " + str(i) + '/' + str(len(source_nodes)) + ':' +
                       "." * j + ' ' * (len(source_nodes) - j - 1) + '|')
            print(loading, end="\r")

            # computes the distance between the targets and the source
            # (gives as many values as targets)
            next_source = source_nodes[j].vertices.astype('<i4')
            next_name = source_nodes[j].name
            distance = gdist.compute_gdist(
                vertices,
                new_triangles,
                source_indices=np.array(prev_source, ndmin=1)[::nv],
                target_indices=np.array(next_source, ndmin=1)[::nv])
            if dist_type == 'min':
                dist = np.min(distance)
            elif dist_type == 'mean':
                dist = np.mean(distance)
            else:
                raise ValueError("dist_type must be 'min' or 'mean'")

            dist_matrix.loc[prev_name, next_name] = dist
            dist_matrix.loc[next_name, prev_name] = dist

    # import seaborn as sns
    # sns.heatmap(dist_matrix, annot=True)
    return dist_matrix
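A possible way to call this function, assuming the surface is read with nibabel and the labels with MNE (both are illustrative choices; any label objects exposing `.name` and `.vertices` work). Paths are placeholders:

import nibabel as nib
import mne

subjects_dir = '/path/to/subjects'  # placeholder
vertices, triangles = nib.freesurfer.read_geometry(
    subjects_dir + '/fsaverage/surf/lh.pial')
vertices = vertices.astype('float64')  # gdist expects float64 coordinates

labels = mne.read_labels_from_annot('fsaverage', parc='aparc', hemi='lh',
                                    subjects_dir=subjects_dir)

# Use every 10th vertex of each label to keep the computation manageable.
dist_matrix = calc_dist_matrix_labels((vertices, triangles), labels,
                                      dist_type='min', nv=10)
print(dist_matrix)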
Example #19
def compute_gdist(mesh, vert_id):
    """
    Compute the geodesic distance from one vertex to all vertices of the mesh
    using gdist.compute_gdist().
    Other distance sets can be obtained by changing the source and target
    vertex sets.
    :param mesh: trimesh object
    :param vert_id: index of the source vertex
    :return: geodesic distances from vert_id to every vertex
    """
    vert = mesh.vertices
    poly = mesh.faces.astype(np.int32)

    source_index = np.array([vert_id], dtype=np.int32)
    target_index = np.linspace(0, len(vert) - 1, len(vert)).astype(np.int32)

    return gdist.compute_gdist(vert, poly, source_index, target_index)
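For example, on a trimesh icosphere the output can be sanity-checked against the known geodesic range on a unit sphere (a sketch; assumes the trimesh package is installed):

import trimesh

# Unit sphere approximated by an icosphere.
sphere = trimesh.creation.icosphere(subdivisions=3, radius=1.0)
dists = compute_gdist(sphere, vert_id=0)

# Geodesic distances on a unit sphere lie in [0, pi]; the faceted mesh
# should give values close to that range.
print(dists.min(), dists.max())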
Example #20
def dist_calc_matrix(surf,
                     cortex,
                     labels,
                     exceptions=['Unknown', 'Medial_wall'],
                     verbose=True):
    """
    Calculate exact geodesic distance along the cortical surface from a set of source nodes.
    "labels" specifies the FreeSurfer annotation file to use. All labels are used except those
    listed in "exceptions" (default: 'Unknown' and 'Medial_wall').

    returns:
      dist_mat: symmetrical nxn matrix of minimum distance between pairs of labels
      rois: label names in order of n
    """

    cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)

    # remove exceptions from label list:
    label_list = sd.load.get_freesurfer_label(labels, verbose=False)
    rs = np.where([a not in exceptions for a in label_list])[0]
    rois = [label_list[r] for r in rs]
    if verbose:
        print("# of regions: " + str(len(rois)))

    # calculate distance from each region to all nodes:
    dist_roi = []
    for roi in rois:
        source_nodes = sd.load.load_freesurfer_label(labels, roi)
        translated_source_nodes = translate_src(source_nodes, cortex)
        dist_roi.append(
            gdist.compute_gdist(cortex_vertices,
                                cortex_triangles,
                                source_indices=translated_source_nodes))
        if verbose:
            print(roi)
    dist_roi = np.array(dist_roi)

    # Calculate min distance per region:
    dist_mat = []
    for roi in rois:
        source_nodes = sd.load.load_freesurfer_label(labels, roi)
        translated_source_nodes = translate_src(source_nodes, cortex)
        dist_mat.append(np.min(dist_roi[:, translated_source_nodes], axis=1))
    dist_mat = np.array(dist_mat)

    return dist_mat, rois
Example #21
    def geodesic_distance(self, sources, max_dist=None, targets=None):
        """
        Calculate the geodesic distance between vertices of the surface.

        ``sources``: one or more indices into vertices. These are required and
            specify the vertices from which the distance is calculated.
            NOTE: if multiple sources are provided, the distance returned for
            each vertex is the distance from its closest source.
        ``max_dist``: find the distance to vertices out as far as max_dist.
        ``targets``: one or more indices into vertices; distances are computed
            only to these target vertices.

        NOTE: Either ``targets`` or ``max_dist`` should be specified, but not
            both; specifying neither is equivalent to max_dist=1e100.

        NOTE: when max_dist is specified, distances > max_dist are returned as
            numpy.inf

        If ``targets`` is omitted, the distance from the sources to all
        vertices within max_dist is returned; if max_dist is also omitted,
        the distance to every vertex on the surface is returned.

        """
        if max_dist is not None and targets is not None:
            raise ValueError(
                "Specifying both targets and max_dist doesn't work.")

        # Cython expects data with specific dtype
        verts = self.vertices.astype(numpy.float64)
        tris = self.triangles.astype(numpy.int32)
        srcs = sources.astype(numpy.int32)
        kwd = {}

        # handle custom args
        if targets is not None:
            kwd['target_indices'] = targets.astype(numpy.int32)

        if max_dist is not None:
            kwd['max_distance'] = max_dist

        dist = gdist.compute_gdist(verts, tris, source_indices=srcs, **kwd)
        return dist
Example #22
def zone_calc(surf, cortex, src):
    """
    Calculate closest nodes to each source node using exact geodesic distance along the cortical surface.
    """

    cortex_vertices, cortex_triangles = surf_keep_cortex(surf, cortex)

    dist_vals = np.zeros((len(src), len(cortex_vertices)))

    for x in range(len(src)):

        translated_source_nodes = translate_src(src[x], cortex)
        dist_vals[x, :] = gdist.compute_gdist(cortex_vertices, cortex_triangles, source_indices=translated_source_nodes)

    data = np.argsort(dist_vals, axis=0)[0, :] + 1

    zone = recort(data, surf, cortex)

    del data

    return zone
Example #23
def geodesic_distances(pos, triangles):
    """geodesic distances using continuous dijkstra method 

    Parameters
    ----------
    pos : array
        Point positions
    triangles : array | None
        Index of points that form triangles

    Returns
    -------
    dist : array
        distances between points
    -------
    Author : Alexandre Fabre
    """

    pos_length = len(pos)

    # convert data in appropriate format
    pos = np.array(pos, dtype=np.float64)
    triangles = np.array(triangles, dtype=np.int32)  

    # index for each point
    index = np.arange(pos_length, dtype=np.int32)

    distance = []

    for i in range(pos_length):
        start = np.array([index[i]], dtype=np.int32)
        targets = index[:i]
        distance.append(gdist.compute_gdist(pos, triangles, start, targets))

    distance = get_full_matrix(distance)
    
    return distance
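`get_full_matrix` is not shown here. Assuming it mirrors the lower-triangular rows built in the loop into a symmetric matrix with a zero diagonal, it could be written as:

import numpy as np

def get_full_matrix(distance):
    # distance[i] holds the geodesic distances from point i to points 0..i-1.
    n = len(distance)
    full = np.zeros((n, n), dtype=np.float64)
    for i, row in enumerate(distance):
        full[i, :i] = row
    # Copy the lower triangle into the upper triangle.
    return full + full.T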
Example #24
def zone_calc(surf, cortex, src):
    """
    Calculate closest nodes to each source node using exact geodesic distance along the cortical surface.
    """
    import gdist
    from utils import surf_keep_cortex, translate_src, recort

    vertices, triangles = surf_keep_cortex(surf, cortex)

    dist_vals = np.zeros((len(src), len(vertices)))

    for x in range(len(src)):
        src_new = translate_src(src[x], cortex)
        dist_vals[x, :] = gdist.compute_gdist(vertices,
                                              triangles,
                                              source_indices=src_new)

    data = np.argsort(dist_vals, axis=0)[0, :] + 1

    zone = recort(data, surf, cortex)

    del data

    return zone
Example #25
def run_macaque():

    species_dir = '../templates/macaque/'

    macaque_surf = nib.load(
        species_dir + 'MacaqueYerkes19.L.midthickness.32k_fs_LR.surf.gii')
    vertices = np.array(macaque_surf.darrays[0].data, dtype=np.float64)
    triangles = np.array(macaque_surf.darrays[1].data, dtype=np.int32)

    labels = np.genfromtxt(species_dir +
                           'MarkovCC12_M132_29-injected-areas.32k_fs_LR.txt',
                           dtype=str,
                           delimiter='\n')

    label_indices = np.array(
        nib.load(species_dir +
                 'MarkovCC12_M132_29-injected-areas.32k_fs_LR.dlabel.nii').
        get_header().get_index_map(1)[0].vertex_indices)
    parcels_indices = np.array(
        nib.load(species_dir + 'MarkovCC12_M132_91-area.32k_fs_LR.dlabel.nii').
        get_header().get_index_map(1)[0].vertex_indices)
    paint = nib.load(species_dir +
                     'MarkovCC12_M132_29-injected-areas.32k_fs_LR.dlabel.nii'
                     ).get_data().squeeze()[range(len(label_indices))]
    parcels = nib.load(species_dir +
                       'MarkovCC12_M132_91-area.32k_fs_LR.dlabel.nii'
                       ).get_data().squeeze()[range(len(parcels_indices))]

    medial = nib.load(species_dir + 'Macaque.MedialWall.32k_fs_LR.dlabel.nii'
                      ).get_data().squeeze()[range(32492)]
    wall = np.where(medial == 1)[0]
    cortex = np.where(medial != 1)[0]
    labels_dict = labels

    import gdist
    from surfdist import surfdist, utils

    surf = []
    surf.append(vertices)
    surf.append(triangles)
    vertices, triangles = utils.surf_keep_cortex(surf, cortex)

    pir = np.array(parcels_indices[np.where(np.in1d(parcels, [58, 57]))[0]],
                   dtype=np.int32)  # Piriform, INSULA, OPRO , 48, 57
    hip = np.array(parcels_indices[np.where(np.in1d(parcels, [16, 20]))[0]],
                   dtype=np.int32)  # 71,  SUBICULUM, 24a, 29/30, 16, 20,
    #,57,65,
    data_dist_pir = utils.recort(
        gdist.compute_gdist(vertices,
                            triangles,
                            source_indices=utils.translate_src(pir, cortex)),
        surf, cortex)
    data_dist_hip = utils.recort(
        gdist.compute_gdist(vertices,
                            triangles,
                            source_indices=utils.translate_src(hip, cortex)),
        surf, cortex)

    dist_labels_pir = []
    dist_labels_hip = []
    for i in np.unique(paint):
        if i > 0.0:
            labs = np.where(paint == i)[0]
            # set values for mask as mean, median, or min
            dist_labels_pir.append(
                np.mean(data_dist_pir[parcels_indices[labs]]))
            dist_labels_hip.append(
                np.mean(data_dist_hip[parcels_indices[labs]]))

    dist_labels = np.min(np.vstack((dist_labels_pir, dist_labels_hip)), axis=0)

    lab_pir = np.where(
        np.argmin(np.vstack((dist_labels_pir,
                             dist_labels_hip)), axis=0) == 0)[0]
    lab_hip = np.where(
        np.argmin(np.vstack((dist_labels_pir,
                             dist_labels_hip)), axis=0) == 1)[0]

    #dist_labels[lab_pir] = ((dist_labels[lab_pir] / np.max(dist_labels[lab_pir])) * -1 ) + 1
    #dist_labels[lab_hip] = (dist_labels[lab_hip] / np.max(dist_labels[lab_hip])) * -1

    # for binary labeling
    dist_labels[lab_pir] = 1
    dist_labels[lab_hip] = -1

    parse_xml('macaque',
              './fig.dist.macaque.hip.svg',
              np.array(dist_labels_hip),
              cb='archi')
    parse_xml('macaque',
              './fig.dist.macaque.pir.svg',
              np.array(dist_labels_pir),
              cb='paleo')
    parse_xml('macaque',
              './fig.dist.macaque.wta.svg',
              np.array(dist_labels),
              cb='wta')
Example #26
def geodesic_distance(pos,
                      face,
                      src=None,
                      dest=None,
                      norm=True,
                      max_distance=None):
    r"""Computes (normalized) geodesic distances of a mesh given by :obj:`pos`
    and :obj:`face`. If :obj:`src` and :obj:`dest` are given, this method only
    computes the geodesic distances for the respective source and target
    node-pairs.

    .. note::

        This function requires the :obj:`gdist` package.
        To install, run :obj:`pip install cython && pip install gdist`.

    Args:
        pos (Tensor): The node positions.
        face (LongTensor): The face indices.
        src (LongTensor, optional): If given, only compute geodesic distances
            for the specified source indices. (default: :obj:`None`)
        dest (LongTensor, optional): If given, only compute geodesic distances
            for the specified target indices. (default: :obj:`None`)
        norm (bool, optional): Normalizes geodesic distances by
            :math:`\sqrt{\textrm{area}(\mathcal{M})}`. (default: :obj:`True`)
        max_distance (float, optional): If given, only yields results for
            geodesic distances less than :obj:`max_distance`. This will speed
            up runtime dramatically. (default: :obj:`None`)

    :rtype: Tensor
    """

    if gdist is None:
        raise ImportError('Package `gdist` could not be found.')

    max_distance = float('inf') if max_distance is None else max_distance

    if norm:
        area = (pos[face[1]] - pos[face[0]]).cross(pos[face[2]] - pos[face[0]])
        norm = (area.norm(p=2, dim=1) / 2).sum().sqrt().item()
    else:  # pragma: no cover
        norm = 1.0

    if src is None:
        src = np.arange(pos.size(0), dtype=np.int32)
    else:
        src = src.detach().cpu().to(torch.int).numpy()

    dest = None if dest is None else dest.detach().cpu().to(torch.int).numpy()

    dtype = pos.dtype
    pos = pos.detach().cpu().to(torch.double).numpy()
    face = face.detach().t().cpu().to(torch.int).numpy()

    outs = []
    for i in range(len(src)):
        s = src[i:i + 1]
        d = None if dest is None else dest[i:i + 1]

        out = gdist.compute_gdist(pos, face, s, d, max_distance * norm) / norm
        out = torch.from_numpy(out).to(dtype)
        outs.append(out)

    out = torch.cat(outs, dim=0)

    if dest is None:
        out = out.view(-1, pos.shape[0])

    return out
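A small usage sketch on a two-triangle square, assuming torch and gdist are installed (in this layout each column of `face` is one triangle):

import torch

pos = torch.tensor([[0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0],
                    [0.0, 1.0, 0.0],
                    [1.0, 1.0, 0.0]])
# Columns are the triangles [0, 1, 2] and [1, 3, 2].
face = torch.tensor([[0, 1],
                     [1, 3],
                     [2, 2]])

# Full pairwise matrix without area normalization.
out = geodesic_distance(pos, face, norm=False)
print(out)  # 4 x 4 tensor; out[0, 3] is approximately sqrt(2)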
Example #27
def run_human():

    data, header, labels = load_human()
    species = 'human'
    species_dir = '../templates/' + species + '/'

    human_surf = nib.load(species_dir +
                          'S900.L.midthickness_MSMAll.32k_fs_LR.surf.gii')
    vertices = np.array(human_surf.darrays[0].data, dtype=np.float64)
    triangles = np.array(human_surf.darrays[1].data, dtype=np.int32)

    label_indices = np.array(
        nib.load(species_dir +
                 'economo.dlabel.nii').get_header().get_index_map(1)
        [0].vertex_indices)
    paint = nib.load(species_dir +
                     'economo.dlabel.nii').get_data().squeeze()[range(
                         len(label_indices))]

    medial = nib.load(species_dir +
                      'Human.MedialWall_Conte69.32k_fs_LR.dlabel.nii'
                      ).get_data().squeeze()[range(32492)]
    wall = np.where(medial == 1)[0]
    cortex = np.where(medial != 1)[0]

    import gdist
    from surfdist import surfdist, utils

    surf = []
    surf.append(vertices)
    surf.append(triangles)
    vertices, triangles = utils.surf_keep_cortex(surf, cortex)

    pir = np.array(label_indices[np.where(np.in1d(paint, [13]))[0]],
                   dtype=np.int32)  # PARAINSULA 58, 24a, #16,
    hip = np.array(label_indices[np.where(np.in1d(paint, [15, 21]))[0]],
                   dtype=np.int32)  # SUBICULUM, ProM # , 65
    #,57,65,
    data_dist_pir = utils.recort(
        gdist.compute_gdist(vertices,
                            triangles,
                            source_indices=utils.translate_src(pir, cortex)),
        surf, cortex)
    data_dist_hip = utils.recort(
        gdist.compute_gdist(vertices,
                            triangles,
                            source_indices=utils.translate_src(hip, cortex)),
        surf, cortex)

    dist_labels_pir = []
    dist_labels_hip = []
    label_dict = np.genfromtxt(species_dir + 'economo_dictionary.csv',
                               dtype=int,
                               delimiter='\n')
    for i in label_dict:
        #if i > 0.0:
        labs = np.where(paint == i)[0]
        # set values for mask as mean, median, or min
        dist_labels_pir.append(np.mean(data_dist_pir[label_indices[labs]]))
        dist_labels_hip.append(np.mean(data_dist_hip[label_indices[labs]]))

    dist_labels = np.min(np.vstack((dist_labels_pir, dist_labels_hip)), axis=0)

    lab_pir = np.where(
        np.argmin(np.vstack((dist_labels_pir,
                             dist_labels_hip)), axis=0) == 0)[0]
    lab_hip = np.where(
        np.argmin(np.vstack((dist_labels_pir,
                             dist_labels_hip)), axis=0) == 1)[0]

    dist_labels[lab_pir] = (
        (dist_labels[lab_pir] / np.max(dist_labels[lab_pir])) * -1) + 1
    dist_labels[lab_hip] = (dist_labels[lab_hip] /
                            np.max(dist_labels[lab_hip])) * -1

    # for binary labeling
    dist_labels[lab_pir] = 1
    dist_labels[lab_hip] = -1

    def reduce_human(d_in):
        return np.concatenate((d_in[0:17], [np.mean(d_in[17:19])], d_in[19::]))

    for side in ['lateral', 'medial']:
        parse_xml('human',
                  './fig.dist.%s.%s.hip.svg' % (species, side),
                  reduce_human(dist_labels_hip),
                  cb='archi',
                  split=side)
        parse_xml('human',
                  './fig.dist.%s.%s.pir.svg' % (species, side),
                  reduce_human(dist_labels_pir),
                  cb='paleo',
                  split=side)
        parse_xml('human',
                  './fig.dist.%s.%s.wta.svg' % (species, side),
                  reduce_human(dist_labels),
                  cb='wta',
                  split=side)
Example #28
    print("Centroid:\n")
    print(mesh.centroid)

    print("Principal inertia vectors:\n")
    print(mesh.principal_inertia_vectors)

    vert = mesh.vertices
    poly = mesh.faces.astype(np.int32)

    # Compute longitudinal geodesics on the organ surfaces: compute the geodesic distance from one point to all vertices on mesh

    source_index = np.array([vert_id], dtype=np.int32)

    target_index = np.linspace(0, len(vert) - 1, len(vert)).astype(np.int32)
    long_geodesics = gdist.compute_gdist(vert, poly, source_index,
                                         target_index)

    # Compute transverse geodesics on the organ surfaces
    source_index1 = np.array([vert_id1], dtype=np.int32)
    #source_index1 = np.linspace(0, len(vert)-1, len(vert)//1000).astype(np.int32) # Example of geodesic extraction on a mesh with a high number of starting points
    trans_geodesics = gdist.compute_gdist(vert, poly, source_index1,
                                          target_index)

    texture_gifti_to_nifti(args.output,
                           args.input,
                           args.reference,
                           long_geodesics,
                           trans_geodesics,
                           niiname=None)

    print("Mesh vertices shape:\n")
Example #29
import numpy as np
import gdist
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='')
    parser.add_argument("-idx", dest="idx", required=True)
    args = parser.parse_args()

    idx = int(args.idx)

    data_dir = '/home/julia/data/gradients/'
    cortex = np.load(data_dir + 'results/null_models/surface/cortex_mask.npy')
    points = np.load(data_dir + 'results/null_models/surface/points.npy')
    faces = np.load(data_dir + 'results/null_models/surface/faces.npy')

    dist = gdist.compute_gdist(np.array(points, dtype=np.float64),
                               np.array(faces, dtype=np.int32),
                               source_indices=np.array([cortex[idx]],
                                                       dtype=np.int32),
                               target_indices=np.array(cortex[idx + 1:],
                                                       dtype=np.int32))

    np.save(
        data_dir + 'results/null_models/surface/iter/idx_{}.npy'.format(idx),
        dist)
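Each run of this script saves one upper-triangular row (the distances from `cortex[idx]` to `cortex[idx + 1:]`). A follow-up sketch, assuming the same file layout (the output filename is illustrative), assembles the rows into a full symmetric matrix:

import numpy as np

data_dir = '/home/julia/data/gradients/'
cortex = np.load(data_dir + 'results/null_models/surface/cortex_mask.npy')

n = len(cortex)
full = np.zeros((n, n))
for i in range(n - 1):
    row = np.load(data_dir +
                  'results/null_models/surface/iter/idx_{}.npy'.format(i))
    full[i, i + 1:] = row
full = full + full.T  # symmetrize; the diagonal stays zero

np.save(data_dir + 'results/null_models/surface/gdist_full.npy', full)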
Example #30
File: sim.py, Project: ins-amu/taa-pattern
def compute_gdist_wrapper(verts, triangles, source, queue):
    dist = gdist.compute_gdist(verts, triangles, source)
    queue.put(dist)
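The queue suggests this wrapper is meant to run in a child process. One way to drive it (a sketch, not the project's actual driver):

import multiprocessing as mp

def run_in_subprocess(verts, triangles, source):
    # Run the geodesic computation in a separate process and collect the result.
    queue = mp.Queue()
    proc = mp.Process(target=compute_gdist_wrapper,
                      args=(verts, triangles, source, queue))
    proc.start()
    dist = queue.get()  # drain the queue before joining to avoid a deadlock
    proc.join()
    return dist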
Example #31
def calc_gdist(method='peak'):
    """Calculate geodesic distance between each two ROIs.

    Args:
        method (str, optional): 'peak' or 'min'
            If 'peak', use the distance between two vertices
            with peak activation values in two ROIs respectively.
            If 'min', use the minimum distance of pair-wise
            vertices between the two ROIs.
            Defaults to 'peak'.
    """
    import os
    import time
    import gdist
    import numpy as np
    import pandas as pd
    import nibabel as nib
    from cxy_hcp_ffa.lib.predefine import roi2label, hemi2stru
    from magicbox.io.io import CiftiReader

    # inputs
    rois = ('IOG-face', 'pFus-face', 'mFus-face')
    hemis = ('lh', 'rh')
    hemi2Hemi = {'lh': 'L', 'rh': 'R'}
    subj_file = pjoin(proj_dir, 'analysis/s2/subject_id')
    roi_file = pjoin(work_dir, 'rois_v3_{}.nii.gz')
    geo_file = '/nfs/m1/hcp/{sid}/T1w/fsaverage_LR32k/' \
               '{sid}.{Hemi}.midthickness_MSMAll.32k_fs_LR.surf.gii'
    activ_file = pjoin(proj_dir, 'analysis/s2/activation.dscalar.nii')

    # outputs
    log_file = pjoin(work_dir, f'gdist_{method}_log')
    out_file = pjoin(work_dir, f'gdist_{method}.csv')

    # preparation
    subj_ids = open(subj_file).read().splitlines()
    n_subj = len(subj_ids)
    activ_reader = CiftiReader(activ_file)
    out_dict = {}
    for hemi in hemis:
        for roi1_idx, roi1 in enumerate(rois[:-1]):
            for roi2 in rois[roi1_idx + 1:]:
                k = f"{hemi}_{roi1.split('-')[0]}-{roi2.split('-')[0]}"
                out_dict[k] = np.ones(n_subj, dtype=np.float64) * np.nan
    log_lines = []

    # calculation
    for hemi in hemis:
        roi_maps = nib.load(roi_file.format(hemi)).get_fdata().squeeze().T
        activ_maps = activ_reader.get_data(hemi2stru[hemi], True)
        assert roi_maps.shape == activ_maps.shape
        for subj_idx, subj_id in enumerate(subj_ids):
            time1 = time.time()
            roi_map = roi_maps[subj_idx]
            activ_map = activ_maps[subj_idx]
            g_file = geo_file.format(sid=subj_id, Hemi=hemi2Hemi[hemi])
            if not os.path.exists(g_file):
                log_lines.append(f'{g_file} does not exist.')
                continue
            geo = nib.load(g_file)
            coords = geo.get_arrays_from_intent('NIFTI_INTENT_POINTSET')[0]
            coords = coords.data.astype(np.float64)
            faces = geo.get_arrays_from_intent('NIFTI_INTENT_TRIANGLE')[0]
            faces = faces.data.astype(np.int32)
            for roi1_idx, roi1 in enumerate(rois[:-1]):
                roi1_idx_map = roi_map == roi2label[roi1]
                if np.any(roi1_idx_map):
                    for roi2 in rois[roi1_idx + 1:]:
                        roi2_idx_map = roi_map == roi2label[roi2]
                        if np.any(roi2_idx_map):
                            k = f"{hemi}_{roi1.split('-')[0]}-"\
                                f"{roi2.split('-')[0]}"
                            if method == 'peak':
                                roi1_max = np.max(activ_map[roi1_idx_map])
                                roi2_max = np.max(activ_map[roi2_idx_map])
                                roi1_idx_map =\
                                    np.logical_and(roi1_idx_map,
                                                   activ_map == roi1_max)
                                roi2_idx_map =\
                                    np.logical_and(roi2_idx_map,
                                                   activ_map == roi2_max)
                                roi1_vertices = np.where(roi1_idx_map)[0]
                                roi1_vertices = roi1_vertices.astype(np.int32)
                                n_vtx1 = len(roi1_vertices)
                                roi2_vertices = np.where(roi2_idx_map)[0]
                                roi2_vertices = roi2_vertices.astype(np.int32)
                                n_vtx2 = len(roi2_vertices)
                                if n_vtx1 > 1 or n_vtx2 > 1:
                                    msg = f'{subj_id}: {roi1} vs {roi2} '\
                                          f'has multiple peaks.'
                                    log_lines.append(msg)
                                    ds = []
                                    for src_vtx in roi1_vertices:
                                        src_vtx = np.array([src_vtx], np.int32)
                                        ds_tmp = \
                                            gdist.compute_gdist(coords, faces,
                                                                src_vtx,
                                                                roi2_vertices)
                                        ds.extend(ds_tmp)
                                    out_dict[k][subj_idx] = np.mean(ds)
                                elif n_vtx1 == 1 and n_vtx2 == 1:
                                    ds = gdist.compute_gdist(
                                        coords, faces, roi1_vertices,
                                        roi2_vertices)
                                    assert len(ds) == 1
                                    out_dict[k][subj_idx] = ds[0]
                                else:
                                    raise RuntimeError("Impossible!")
                            elif method == 'min':
                                roi1_vertices = np.where(roi1_idx_map)[0]
                                roi1_vertices = roi1_vertices.astype(np.int32)
                                roi2_vertices = np.where(roi2_idx_map)[0]
                                roi2_vertices = roi2_vertices.astype(np.int32)
                                ds = gdist.compute_gdist(
                                    coords, faces, roi1_vertices,
                                    roi2_vertices)
                                out_dict[k][subj_idx] = np.min(ds)
                            else:
                                raise ValueError(f'Not supported method: '
                                                 f'{method}')
            print(f'Finished: {subj_idx+1}/{n_subj}, '
                  f'cost {time.time()-time1} seconds.')

    # save out
    out_df = pd.DataFrame(out_dict)
    out_df.to_csv(out_file, index=False)
    out_log = '\n'.join(log_lines)
    open(log_file, 'w').write(out_log)
Example #32
 def _parallel_loop(pos, face, src, dest, max_distance, norm, i, dtype):
     s = src[i:i + 1]
     d = None if dest is None else dest[i:i + 1]
     out = gdist.compute_gdist(pos, face, s, d, max_distance * norm) / norm
     return torch.from_numpy(out).to(dtype)
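A sketch of how a per-source helper like this might be fanned out with joblib (an assumption about the surrounding driver, which is not shown here):

import torch
from joblib import Parallel, delayed

def parallel_geodesic(pos, face, src, dest, max_distance, norm, dtype,
                      num_workers=2):
    # One row per source index, computed in parallel, then concatenated.
    outs = Parallel(n_jobs=num_workers)(
        delayed(_parallel_loop)(pos, face, src, dest, max_distance, norm, i,
                                dtype)
        for i in range(len(src)))
    return torch.cat(outs, dim=0)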