Code example #1
def display(fun_dir, fs_dir, contrast):
    mlab.figure(bgcolor=(1, 1, 1))
    left_mesh = freesurfer.read_geometry(os.path.join(fs_dir, 'lh.inflated'))
    right_mesh = freesurfer.read_geometry(os.path.join(fs_dir, 'rh.inflated'))
    left_curv = os.path.join(fs_dir, 'lh.curv')
    right_curv = os.path.join(fs_dir, 'rh.curv')
    meshes = [left_mesh, right_mesh]
    curves = [left_curv, right_curv]
    for hemisphere, mesh_file, curv_file in zip(['lh', 'rh'], meshes, curves):
        fun_file = os.path.join(fun_dir, '%s_z_map_%s.gii' % (
                contrast, hemisphere))
        coords, triangles = mesh_file
        x, y, z = coords.T

        if hemisphere == 'lh':
            x -= 50
        else:
            x += 50

        curv = freesurfer.read_morph_data(curv_file).astype(float)
        tex = np.array([darray.data for darray in
                        read(fun_file).darrays]).ravel()
        print(fun_file, tex.min(), tex.max())
        name = ''
        cmin = -1
        cmax = 1
        mlab.triangular_mesh(x, y, z, triangles, transparent=True, opacity=1.,
                             name=name, scalars=curv, colormap="bone",
                             vmin=cmin, vmax=cmax)
        func_mesh = mlab.pipeline.triangular_mesh_source(
            x, y, z, triangles, scalars=tex)
        thresh = mlab.pipeline.threshold(func_mesh, low=THRESHOLD)
        mlab.pipeline.surface(thresh, colormap="hot", vmin=THRESHOLD, vmax=7)
Code example #2
def loadSurface(pathToSurface):
    '''
    This method loads the surface from a mgh file and returns it. Pretty simple
    stuff
    '''
    print('Loading %s' % (pathToSurface))
    surface = nfs.read_geometry(pathToSurface)

    return surface
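For context, `read_geometry` returns a two-element tuple of vertex coordinates and triangle indices, so the helper above can be unpacked directly. A minimal hedged sketch (the surface path below is a placeholder, not from the original project):

import nibabel.freesurfer as nfs

# Placeholder path; point this at a real FreeSurfer surface file.
vertices, faces = nfs.read_geometry('/path/to/subject/surf/lh.white')
print(vertices.shape)  # (n_vertices, 3) float xyz coordinates
print(faces.shape)     # (n_triangles, 3) integer vertex indices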
Code example #3
File: surftools.py Project: dgoyard/pyfreesurfer
    def load(self, meshfile, inflatedmeshpath=None, annotfile=None):
        """ Load a FreeSurfer surface.

        Parameters
        ----------
        meshfile: str (mandatory)
            the location of the file containing the FreeSurfer mesh to be
            loaded.
        inflatedmeshpath: str (optional, default None)
            the location of the file containing the FreeSurfer inflated mesh
            to be loaded.
        annotfile: str (optional, default None)
            the location of the file containing the FreeSurfer annotations to
            be loaded.

        Returns
        -------
        surf: TriSurface
            a triangular surface representation.
        """
        vertices, triangles = freesurfer.read_geometry(meshfile)
        if inflatedmeshpath is not None:
            inflated_vertices, _triangles = freesurfer.read_geometry(
                inflatedmeshpath)
            if not numpy.allclose(triangles, _triangles):
                raise ValueError("'{0}' and '{1}' do not represent the same "
                                 "surface.".format(meshfile, inflatedmeshpath))
        else:
            inflated_vertices = None
        if annotfile is not None:
            labels, ctab, regions = freesurfer.read_annot(
                annotfile, orig_ids=False)
            meta = dict(
                (index, {"region": item[0], "color": item[1][:4].tolist()})
                for index, item in enumerate(zip(regions, ctab)))
        else:
            labels = None
            meta = None

        return TriSurface(vertices=vertices, triangles=triangles,
                          labels=labels, metadata=meta,
                          inflated_vertices=inflated_vertices)
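As a side note, the annotation handling in `load` relies on nibabel's `read_annot`, which returns per-vertex label indices, a color table, and region names. A small hedged sketch of building the same metadata dictionary outside the class (the annotation path is a placeholder):

from nibabel import freesurfer

# Placeholder path; use a real FreeSurfer .annot file.
labels, ctab, regions = freesurfer.read_annot(
    '/path/to/subject/label/lh.aparc.annot', orig_ids=False)
meta = dict(
    (index, {"region": item[0], "color": item[1][:4].tolist()})
    for index, item in enumerate(zip(regions, ctab)))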
Code example #4
File: freesurfer.py Project: gareaut/caps-clindmri
def surf_convert(fsdir, t1files, surffiles, output_directory=None,
                 rm_orig=False,
                 fsconfig="/i2bm/local/freesurfer/SetUpFreeSurfer.sh"):
    """ Export FreeSurfer surfaces to the native space.

    Note that all the vertices are given in the index coordinate system.
    The subject id in the t1 and surf files must appear in the -3 position:
        xxx/subject_id/convert/t1.nii.gz

    <unit>
        <input name="fsdir" type="Directory" description="The
            freesurfer working directory with all the subjects."/>
        <input name="output_directory" type="Directory" description="The
            conversion destination folder."/>
        <input name="t1files" type="List_File" description="The t1 nifti
            files."/>
        <input name="surffiles" type="List_File" description="The surface
            to be converted."/>
        <input name="rm_orig" type="Bool" description="If true remove
            the input surfaces."/>
        <input name="fsconfig" type="File" description="The freesurfer
            configuration batch."/>
        <output name="csurffiles" type="List_File" description="The converted
            surfaces in the native space."/>
    </unit>
    """
    # Create a t1 subject map
    t1map = {}
    for fname in t1files:
        subject_id = fname.split("/")[-3]
        if subject_id in t1map:
            raise ("Can't map two t1 for subject '{0}'.".format(subject_id))
        t1map[subject_id] = fname

    # Convert all the surfaces
    csurffiles = []
    for fname in surffiles:

        # Get the t1 reference image
        subject_id = fname.split("/")[-3]
        t1file = t1map[subject_id]
        t1_image = nibabel.load(t1file)

        # Compute the conformed space to the native anatomical deformation
        asegfile = os.path.join(fsdir, subject_id, "mri", "aseg.mgz")
        physical_to_index = numpy.linalg.inv(t1_image.get_affine())
        translation = tkregister_translation(asegfile, fsconfig)
        deformation = numpy.dot(physical_to_index, translation)

        # Load and warp the mesh
        # The mesh: a 2-uplet with vertex (x, y, z) coordinates and
        # mesh triangles
        mesh = freesurfer.read_geometry(fname)
        surf = TriSurface(vertices=apply_affine_on_mesh(mesh[0], deformation),
                          triangles=mesh[1])

        # Save the mesh in the native space
        outputfile = fname + ".native"
        surf.save(os.path.dirname(outputfile), os.path.basename(outputfile))
        csurffiles.append(outputfile)

        # Clean input surface if specified
        if rm_orig:
            os.remove(fname)

    return csurffiles
Code example #5
File: surfconvs.py Project: neurospin/pyfreesurfer
def surf_convert(
        fsdir,
        t1files,
        surffiles,
        sidpos=-3,
        rm_orig=False,
        fsconfig=DEFAULT_FREESURFER_PATH):
    """ Export FreeSurfer surfaces to the native space.

    Note that all the returned vertices are given in the index coordinate
    system.

    The subject id in the t1 and surf files must appear in the 'sidpos'
    position. For the default value '-3', the T1 path might look like
    'xxx/subject_id/convert/t1.nii.gz'

    Parameters
    ----------
    fsdir: str (mandatory)
        The FreeSurfer working directory with all the subjects.
    t1files: str (mandatory)
        The t1 nifti files.
    surffiles:
        The surfaces to be converted.
    sidpos: int (optional, default -3)
        The subject identifier position in the surface and T1 files.
    rm_orig: bool (optional)
        If True remove the input surfaces.
    fsconfig: str (optional)
        The FreeSurfer configuration batch.

    Returns
    -------
    csurffiles:
        The converted surfaces in the native space indexed coordinates.
    """
    # Check input parameters
    for path in t1files + surffiles:
        if not os.path.isfile(path):
            raise ValueError("'{0}' is not a valid file.".format(path))
    if not os.path.isdir(fsdir):
        raise ValueError("'{0}' is not a valid directory.".format(fsdir))

    # Create a t1 subject map
    t1map = {}
    for fname in t1files:
        subject_id = fname.split(os.path.sep)[sidpos]
        if subject_id in t1map:
            raise ValueError("Can't map two t1 for subject "
                             "'{0}'.".format(subject_id))
        t1map[subject_id] = fname

    # Convert all the surfaces
    csurffiles = []
    for fname in surffiles:

        # Get the t1 reference image
        subject_id = fname.split(os.path.sep)[sidpos]
        t1file = t1map[subject_id]
        t1_image = nibabel.load(t1file)

        # Compute the conformed space to the native anatomical deformation
        asegfile = os.path.join(fsdir, subject_id, "mri", "aseg.mgz")
        physical_to_index = numpy.linalg.inv(t1_image.get_affine())
        translation = tkregister_translation(asegfile, fsconfig)
        deformation = numpy.dot(physical_to_index, translation)

        # Load and warp the mesh
        # The mesh: a 2-uplet with vertex (x, y, z) coordinates and
        # mesh triangles
        mesh = freesurfer.read_geometry(fname)
        surf = TriSurface(vertices=apply_affine_on_mesh(mesh[0], deformation),
                          triangles=mesh[1])

        # Save the mesh in the native space
        outputfile = fname + ".native"
        surf.save(outputfile)
        csurffiles.append(outputfile)

        # Construct the surfaces binarized volume
        binarizedfile = os.path.join(outputfile + ".nii.gz")
        overlay = numpy.zeros(t1_image.shape, dtype=numpy.uint)
        indices = numpy.round(surf.vertices).astype(int).T
        indices[0, numpy.where(indices[0] >= t1_image.shape[0])] = 0
        indices[1, numpy.where(indices[1] >= t1_image.shape[1])] = 0
        indices[2, numpy.where(indices[2] >= t1_image.shape[2])] = 0
        overlay[indices.tolist()] = 1
        overlay_image = nibabel.Nifti1Image(overlay, t1_image.get_affine())
        nibabel.save(overlay_image, binarizedfile)

        # Clean input surface if specified
        if rm_orig:
            os.remove(fname)

    return csurffiles
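The subject-id lookup in `surf_convert` is just path slicing; a tiny sketch of the `sidpos=-3` convention described in the docstring (the path below is made up):

import os

# Hypothetical T1 path laid out as xxx/subject_id/convert/t1.nii.gz.
t1file = os.path.join('xxx', 'subject_001', 'convert', 't1.nii.gz')
subject_id = t1file.split(os.path.sep)[-3]
print(subject_id)  # -> 'subject_001'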
Code example #6
def interface2surf(interface_image, surface_file, cortex_label, ref_mgz, out_file):

    import os
    import nibabel as nb
    import nibabel.freesurfer as nbfs
    import numpy as np
    import scipy.spatial.distance as syd

    # get image properties
    interfaceImage = nb.load(interface_image)
    interface_indexes = interfaceImage.get_data()
    interface_shape = np.shape(interface_indexes)
    reoriented_indexes = np.zeros(interface_shape)
    affineMatrix = interfaceImage.get_affine()

    # get surface and put it in voxel space
    surface = nbfs.read_geometry(surface_file)
    surface_coords = surface[0]
    surface_coords[:, 0] = surface_coords[:, 0] + (interface_shape[0] / 2)
    surface_coords[:, 1] = surface_coords[:, 1] + (interface_shape[1] / 2)
    surface_coords[:, 2] = surface_coords[:, 2] + (interface_shape[2] / 2)
    surf_array_dim = np.shape(surface_coords)
    surf_length = surf_array_dim[0]
    label_indices = np.loadtxt(cortex_label, delimiter=" ", dtype=int, skiprows=2, usecols=[0])
    projected_index = np.zeros(surf_length)
    projected_distance = np.zeros(surf_length) - 1

    # get interface coordinates and put them in normal voxel space
    voxelCoordList = []
    for x, y, z in zip(*np.nonzero(interface_indexes)):
        voxelCoord = [x, y, z]
        if affineMatrix[0, 0] == -1:
            voxelCoord[0] = interface_shape[0] - 1 - voxelCoord[0]
        if affineMatrix[1, 1] == -1:
            voxelCoord[1] = interface_shape[1] - 1 - voxelCoord[1]
        if affineMatrix[2, 2] == -1:
            voxelCoord[2] = interface_shape[2] - 1 - voxelCoord[2]
        voxelCoordList.append(voxelCoord)
        reoriented_indexes[voxelCoord[0], voxelCoord[1], voxelCoord[2]] = interface_indexes[x, y, z]
    interface_voxels = np.array(voxelCoordList)

    for i in label_indices:

        surf_coord = surface_coords[i : i + 1, :]
        surf_floor = np.floor(surf_coord)
        local_index = reoriented_indexes[surf_floor[0, 0], surf_floor[0, 1], surf_floor[0, 2]]

        """
        -if the vertex lays in an index voxel assign directly
        -TO IMPLEMENT IF SLOW: check next in the 26-neighborhood
        -otherwise, check distances to all index voxels and choose smallest
        """

        if local_index != 0:
            projected_index[i] = local_index
            projected_distance[i] = 0.0
        else:
            all_dists = syd.cdist(surf_coord, interface_voxels + 0.5, "euclidean")
            nearest_voxel_ID = np.argmin(all_dists)
            nearest_dist = all_dists[0, nearest_voxel_ID]
            projected_distance[i] = nearest_dist
            projected_voxel_coords = interface_voxels[nearest_voxel_ID, :]
            projected_index[i] = reoriented_indexes[
                projected_voxel_coords[0], projected_voxel_coords[1], projected_voxel_coords[2]
            ]

    """
    convert projected_index into a .mif file
    """
    ref_mgz_file = nb.load(ref_mgz)
    data = ref_mgz_file.get_data()
    data[:, 0, 0] = projected_index
    surface_projected_image = nb.MGHImage(data, ref_mgz_file.get_affine(), ref_mgz_file.get_header())
    nb.save(surface_projected_image, out_file)

    return os.path.abspath(out_file)
Code example #7
import os

import numpy as np
import pylab as pl
import nibabel.freesurfer as fs


# assumes fsaverage5 has been copied into the data/ folder: cp -r $FREESURFER_HOME/subjects/fsaverage5 data/
here = os.path.dirname(os.path.abspath(__file__))
fname = lambda fn: os.path.join(here, '../data/fsaverage5', fn)

# load FS T1 (talairach.xfm is from -i vol.nii to T1.mgz ?)
img = fs.load(fname('mri/T1.mgz'))  # type: fs.MGHImage
vol = img.get_data().transpose((0, 2, 1))
aff = img.affine
inv_aff = np.linalg.inv(aff)

# load left pial surface
verts, faces = fs.read_geometry(fname('surf/lh.pial'))

# choose sagittal slice by X coord
x = -32.0
i = int(inv_aff.dot(np.r_[x, 0.0, 0.0, 1.0])[0])

# mask vertices around chosen coord
vert_mask = np.c_[verts[:, 0] > (x - 2), verts[:, 0] < (x + 2)].all(axis=1)
in_slice_verts = isv_y, isv_z = verts[vert_mask, 1:].T

# plot slice & vertices in mask
fig = pl.figure(figsize=(15, 5))
pl.subplot(131)
pl.imshow(vol[i].T, cmap='gray', aspect='equal', extent=[-128.0, 128, -128.0, 128.0])
pl.plot(isv_y, isv_z, 'y.')
pl.xlabel('+Y Anterior')
Code example #8
def get_geometry(subject, side, surface_type="inflated"):
    mesh_file = get_surface_file(subject, side, surface_type)
    vertices, triangles = read_geometry(mesh_file)
    x, y, z = vertices.T

    return x, y, z, triangles
Code example #9
def find_parcel_centroids(*,
                          lhannot,
                          rhannot,
                          method='surface',
                          version='fsaverage',
                          surf='sphere',
                          drop_labels=None):
    """
    Returns vertex coords corresponding to centroids of parcels in annotations

    Note that using any other `surf` besides the default of 'sphere' may result
    in centroids that are not directly within the parcels themselves due to
    sulcal folding patterns.

    Parameters
    ----------
    {lh,rh}annot : str
        Path to .annot file containing labels of parcels on the {left,right}
        hemisphere. These must be specified as keyword arguments to avoid
        accidental order switching.
    method : {'average', 'surface', 'geodesic'}, optional
        Method for calculation of parcel centroid. See Notes for more
        information. Default: 'surface'
    version : str, optional
        Specifies which version of `fsaverage` provided annotation files
        correspond to. Must be one of {'fsaverage', 'fsaverage3', 'fsaverage4',
        'fsaverage5', 'fsaverage6'}. Default: 'fsaverage'
    surf : str, optional
        Specifies which surface projection of fsaverage to use for finding
        parcel centroids. Default: 'sphere'
    drop_labels : list, optional
        Specifies regions in {lh,rh}annot for which the parcel centroid should
        not be calculated. If not specified, centroids for parcels defined in
        `netneurotools.freesurfer.FSIGNORE` are not calculated. Default: None

    Returns
    -------
    centroids : (N, 3) numpy.ndarray
        xyz coordinates of vertices closest to the centroid of each parcel
        defined in `lhannot` and `rhannot`
    hemiid : (N,) numpy.ndarray
        Array denoting hemisphere designation of coordinates in `centroids`,
        where `hemiid=0` denotes the left and `hemiid=1` the right hemisphere

    Notes
    -----
    The following methods can be used for finding parcel centroids:

    1. ``method='average'``

       Uses the arithmetic mean of the coordinates for the vertices in each
       parcel. Note that in this case the calculated centroids will not
       actually fall on the surface of `surf`.

    2. ``method='surface'``

       Calculates the 'average' coordinates and then finds the closest vertex
       on `surf`, where closest is defined as the vertex with the minimum
       Euclidean distance.

    3. ``method='geodesic'``

       Uses the coordinates of the vertex with the minimum average geodesic
       distance to all other vertices in the parcel. Note that this is slightly
       more time-consuming than the other two methods, especially for
       high-resolution meshes.
    """

    methods = ['average', 'surface', 'geodesic']
    if method not in methods:
        raise ValueError('Provided method for centroid calculation {} is '
                         'invalid. Must be one of {}'.format(method, methods))

    if drop_labels is None:
        drop_labels = FSIGNORE
    drop_labels = _decode_list(drop_labels)

    surfaces = fetch_fsaverage(version)[surf]

    centroids, hemiid = [], []
    for n, (annot, surf) in enumerate(zip([lhannot, rhannot], surfaces)):
        vertices, faces = read_geometry(surf)
        labels, ctab, names = read_annot(annot)
        names = _decode_list(names)

        for lab in np.unique(labels):
            if names[lab] in drop_labels:
                continue
            if method in ['average', 'surface']:
                roi = np.atleast_2d(vertices[labels == lab].mean(axis=0))
                if method == 'surface':  # find closest vertex on the sphere
                    roi = vertices[np.argmin(cdist(vertices, roi), axis=0)[0]]
            elif method == 'geodesic':
                inds, = np.where(labels == lab)
                roi = _geodesic_parcel_centroid(vertices, faces, inds)
            centroids.append(roi)
            hemiid.append(n)

    return np.row_stack(centroids), np.asarray(hemiid)
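Because `lhannot` and `rhannot` are keyword-only, a call to `find_parcel_centroids` might look like the following hedged sketch (the annotation file names are placeholders, not files shipped with the original project):

# Placeholder annotation files for the two hemispheres.
centroids, hemiid = find_parcel_centroids(
    lhannot='lh.myparcellation.annot',
    rhannot='rh.myparcellation.annot',
    method='surface',
    version='fsaverage5',
)
print(centroids.shape)  # (N, 3) xyz coordinates, one row per parcel
print(hemiid)           # 0 for left-hemisphere parcels, 1 for right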
Code example #10
File: surfconvs.py Project: slefranc/pyfreesurfer
def surf_convert(fsdir,
                 t1files,
                 surffiles,
                 sidpos=-3,
                 rm_orig=False,
                 fsconfig=DEFAULT_FREESURFER_PATH):
    """ Export FreeSurfer surfaces to the native space.

    Note that all the returned vertices are given in the index coordinate
    system.
    The subject id in the t1 and surf files must appear in the 'sidpos'
    position. For the default value '-3', the T1 path might look like:
        xxx/subject_id/convert/t1.nii.gz

    Parameters
    ----------
    fsdir: str (mandatory)
        The FreeSurfer working directory with all the subjects.
    t1files: str (mandatory)
        The t1 nifti files.
    surffiles:
        The surfaces to be converted.
    sidpos: int (optional, default -3)
        The subject identifier position in the surface and T1 files.
    rm_orig: bool (optional)
        If True remove the input surfaces.
    fsconfig: str (optional)
        The FreeSurfer configuration batch.

    Returns
    -------
    csurffiles:
        The converted surfaces in the native space indexed coordinates.
    """
    # Check input parameters
    for path in t1files + surffiles:
        if not os.path.isfile(path):
            raise ValueError("'{0}' is not a valid file.".format(path))
    if not os.path.isdir(fsdir):
        raise ValueError("'{0}' is not a valid directory.".format(fsdir))

    # Create a t1 subject map
    t1map = {}
    for fname in t1files:
        subject_id = fname.split(os.path.sep)[sidpos]
        if subject_id in t1map:
            raise ValueError("Can't map two t1 for subject "
                             "'{0}'.".format(subject_id))
        t1map[subject_id] = fname

    # Convert all the surfaces
    csurffiles = []
    for fname in surffiles:

        # Get the t1 reference image
        subject_id = fname.split(os.path.sep)[sidpos]
        t1file = t1map[subject_id]
        t1_image = nibabel.load(t1file)

        # Compute the conformed space to the native anatomical deformation
        asegfile = os.path.join(fsdir, subject_id, "mri", "aseg.mgz")
        physical_to_index = numpy.linalg.inv(t1_image.get_affine())
        translation = tkregister_translation(asegfile, fsconfig)
        deformation = numpy.dot(physical_to_index, translation)

        # Load and warp the mesh
        # The mesh: a 2-uplet with vertex (x, y, z) coordinates and
        # mesh triangles
        mesh = freesurfer.read_geometry(fname)
        surf = TriSurface(vertices=apply_affine_on_mesh(mesh[0], deformation),
                          triangles=mesh[1])

        # Save the mesh in the native space
        outputfile = fname + ".native"
        surf.save(outputfile)
        csurffiles.append(outputfile)

        # Construct the surfaces binarized volume
        binarizedfile = os.path.join(outputfile + ".nii.gz")
        overlay = numpy.zeros(t1_image.shape, dtype=numpy.uint)
        indices = numpy.round(surf.vertices).astype(int).T
        indices[0, numpy.where(indices[0] >= t1_image.shape[0])] = 0
        indices[1, numpy.where(indices[1] >= t1_image.shape[1])] = 0
        indices[2, numpy.where(indices[2] >= t1_image.shape[2])] = 0
        overlay[indices.tolist()] = 1
        overlay_image = nibabel.Nifti1Image(overlay, t1_image.get_affine())
        nibabel.save(overlay_image, binarizedfile)

        # Clean input surface if specified
        if rm_orig:
            os.remove(fname)

    return csurffiles
Code example #11
File: reader.py Project: rcherbonnier/caps-clindmri
def read_cortex_surface_segmentation(fsdir, physical_to_index, fsconfig,
                                     affine=None):
    """ Read the cortex gyri surface segmentatation of freesurfer.

    Give access to the right and left hemisphere segmentations that can be
    projected on the cortical and inflated cortical surfaces.
    The vertices are expressed in the voxel coordinates.

    Parameters
    ----------
    fsdir: str (mandatory)
        the subject freesurfer segmentation directory.
    physical_to_index: array (mandatory)
        the transformation to project a physical point in an array.
    fsconfig: str (mandatory)
        the freesurfer configuration file.
    affine: array (optional, default None)
        an affine transformation in voxel coordinates that will be applied on
        the output vertex of the cortex surface.

    Returns
    -------
    segmentation: dict
        contains the two hemispheres 'lh' and 'rh' as triangular and
        inflated surfaces represented in a TriSurface structure.
    """
    # Construct the path to the surface segmentation results and associated
    # labels
    meshdir = os.path.join(fsdir, "surf")
    labeldir = os.path.join(fsdir, "label")
    segfile = os.path.join(fsdir, "mri")

    # Get deformation between the ras and ras-tkregister spaces
    asegfile = os.path.join(segfile, "aseg.mgz")
    translation = tkregister_translation(asegfile, fsconfig)

    # Construct the deformation to apply on the cortex mesh
    if affine is None:
        affine = numpy.identity(4)
    deformation = numpy.dot(affine, numpy.dot(physical_to_index, translation))

    # Create a dictionary to contain all the surfaces and labels
    segmentation = {}

    # Select the hemisphere
    for hemi in ["lh", "rh"]:

        # Get annotation id at each vertex (if a vertex does not belong
        # to any label and orig_ids=False, its id will be set to -1) and
        # the names of the labels
        annotfile = os.path.join(labeldir, "{0}.aparc.annot".format(hemi))
        labels, ctab, regions = freesurfer.read_annot(
            annotfile, orig_ids=False)
        meta = dict((index, {"region": item[0], "color": item[1][:4].tolist()})
                    for index, item in enumerate(zip(regions, ctab)))

        # Select the surface type
        hemisegmentation = {}
        for surf in ["white", "inflated"]:

            # Load the mesh: a 2-uplet with vertex (x, y, z) coordinates and
            # mesh triangles
            meshfile = os.path.join(meshdir, "{0}.{1}".format(hemi, surf))
            mesh = freesurfer.read_geometry(meshfile)
            hemisegmentation[surf] = {
                "vertices": apply_affine_on_mesh(mesh[0], deformation),
                "triangles": mesh[1]
            }

        # Save the segmentation result
        segmentation[hemi] = TriSurface(
            vertices=hemisegmentation["white"]["vertices"],
            inflated_vertices=hemisegmentation["inflated"]["vertices"],
            triangles=hemisegmentation["white"]["triangles"],
            labels=labels,
            metadata=meta)

    return segmentation
Code example #12
def interface2surf(interface_image, surface_file, cortex_label, ref_mgz, out_file):

    import os
    import nibabel as nb
    import nibabel.freesurfer as nbfs
    import numpy as np
    import scipy.spatial.distance as syd
    
    # get image properties
    interfaceImage = nb.load(interface_image)
    interface_indexes = interfaceImage.get_data()
    interface_shape = np.shape(interface_indexes)
    reoriented_indexes = np.zeros(interface_shape)
    affineMatrix = interfaceImage.get_affine()
    
    # get surface and put it in voxel space
    surface = nbfs.read_geometry(surface_file)
    surface_coords = surface[0]
    surface_coords[:,0] = surface_coords[:,0] + (interface_shape[0]/2)
    surface_coords[:,1] = surface_coords[:,1] + (interface_shape[1]/2)
    surface_coords[:,2] = surface_coords[:,2] + (interface_shape[2]/2)
    surf_array_dim = np.shape(surface_coords)
    surf_length = surf_array_dim[0]
    label_indices = np.loadtxt(cortex_label, delimiter=' ', dtype=int, skiprows=2, usecols=[0])
    projected_index=np.zeros(surf_length)
    projected_distance=np.zeros(surf_length)-1
    
    # get interface coordinates and put them in normal voxel space
    voxelCoordList=[]
    for x, y, z in zip(*np.nonzero(interface_indexes)):
        voxelCoord = [x,y,z]
        if(affineMatrix[0,0]==-1):
            voxelCoord[0]=interface_shape[0]-1-voxelCoord[0]
        if(affineMatrix[1,1]==-1):
            voxelCoord[1]=interface_shape[1]-1-voxelCoord[1]
        if(affineMatrix[2,2]==-1):
            voxelCoord[2]=interface_shape[2]-1-voxelCoord[2]
        voxelCoordList.append(voxelCoord)
        reoriented_indexes[voxelCoord[0],voxelCoord[1],voxelCoord[2]]=interface_indexes[x,y,z]
    interface_voxels = np.array(voxelCoordList)
           

            
    for i in label_indices:
    
        surf_coord = surface_coords[i:i+1,:]
        surf_floor = np.floor(surf_coord)
        local_index = reoriented_indexes[surf_floor[0,0],surf_floor[0,1],surf_floor[0,2]]
        
        """
        -if the vertex lays in an index voxel assign directly
        -TO IMPLEMENT IF SLOW: check next in the 26-neighborhood
        -otherwise, check distances to all index voxels and choose smallest
        """
        
        if( local_index != 0):
            projected_index[i] = local_index
            projected_distance[i] = 0.0
        else: 
            all_dists = syd.cdist(surf_coord, interface_voxels + 0.5, 'euclidean')
            nearest_voxel_ID = np.argmin(all_dists)
            nearest_dist = all_dists[0,nearest_voxel_ID]
            projected_distance[i] = nearest_dist
            projected_voxel_coords=interface_voxels[nearest_voxel_ID,:]
            projected_index[i] = reoriented_indexes[ projected_voxel_coords[0],projected_voxel_coords[1],projected_voxel_coords[2] ]

    """
    convert projected_index into a .mif file
    """
    ref_mgz_file = nb.load(ref_mgz)    
    data=ref_mgz_file.get_data()
    data[:,0,0]=projected_index
    surface_projected_image = nb.MGHImage(data, ref_mgz_file.get_affine(), ref_mgz_file.get_header())
    nb.save(surface_projected_image, out_file)

    return os.path.abspath(out_file)
Code example #13
def map_to_average_brain(coords, left_pial, right_pial, left_sphere,
                         right_sphere):
    """
    Maps a set of Freesurfer surface coordinates in an individual brain to the equivalent coordinates on the average
    brain.

    Method taken from the iElvis project (http://ielvis.pbworks.com), which implemented the same function in MATLAB:
    https://github.com/iELVis/iELVis/blob/master/iELVis_MAIN/iELVis_MATLAB/ELEC_LOC/sub2AvgBrain.m

    :param coords: {np.ndarray} Coordinates in the individual Freesurfer space
    :param left_pial: {str} Path to the subject's left pial surface file
    :param right_pial: {str} Path to the subject's right pial surface file
    :param left_sphere: {str} Path to the subject's left registered sphere surface file
    :param right_sphere: {str} Path to the subject's right registered sphere surface file
    :return: {np.ndarray} The matching coordinates in the average brain
    :return: {np.ndarray} The corresponding atlas labels in the average brain
    """
    hemispheres = [
        'left', 'right'
    ]  # For all surfaces, we append the right hemisphere to the left hemisphere
    fsavg_subj_dir = osp.join(
        paths.rhino_root,
        'data',
        'eeg',
        'freesurfer',
        'subjects',
        'fsaverage',
    )
    files = {
        'left_pial': left_pial,
        'right_pial': right_pial,
        'left_sphere': left_sphere,
        'right_sphere': right_sphere,
        'left_avg_sphere': osp.join(fsavg_subj_dir, 'surf', 'lh.sphere.reg'),
        'right_avg_sphere': osp.join(fsavg_subj_dir, 'surf', 'rh.sphere.reg'),
        'left_avg_pial': osp.join(fsavg_subj_dir, 'surf', 'lh.pial'),
        'right_avg_pial': osp.join(fsavg_subj_dir, 'surf', 'rh.pial'),
        'left_avg_annot': osp.join(fsavg_subj_dir, 'label', 'lh.aparc.annot'),
        'right_avg_annot': osp.join(fsavg_subj_dir, 'label', 'rh.aparc.annot')
    }

    # Find vertex indices on subject's pial surface
    pial_verts = [read_geometry(files['%s_pial' % h])[0] for h in hemispheres]

    distances = [dist.cdist(v, coords) for v in pial_verts]

    hemisphere = np.min(distances[0], 0) < np.min(distances[1], 0)
    pial_indices = [np.argmin(d, 0) for d in distances]

    # Take those vertices in sphere.reg
    sphere_verts = [
        read_geometry(files['%s_sphere' % h])[0] for h in hemispheres
    ]

    electrode_sphere_verts = [
        sv[pi] for (sv, pi) in zip(sphere_verts, pial_indices)
    ]

    # Find indices of nearest vertices in fsaverage.?h.sphere.reg
    avg_sphere_verts = [
        read_geometry(files['%s_avg_sphere' % h])[0] for h in hemispheres
    ]

    avg_sphere_indices = [
        np.argmin(dist.cdist(asv, esv), axis=0)
        for (asv, esv) in zip(avg_sphere_verts, electrode_sphere_verts)
    ]
    # Take those vertices on average pial surface
    avg_pial_verts = [
        read_geometry(files['%s_avg_pial' % h])[0] for h in hemispheres
    ]

    avg_pial_inds, _, avg_pial_labels = zip(
        *[read_annot(files['%s_avg_annot' % h]) for h in hemispheres])
    avg_pial_labels = [np.array(x) for x in avg_pial_labels]

    new_pial_verts = np.where(
        hemisphere[:, None],
        *[apv[asi] for apv, asi in zip(avg_pial_verts, avg_sphere_indices)])
    new_pial_labels = np.where(
        hemisphere, *[
            np.array(apl)[(api[asi])] for apl, api, asi in zip(
                avg_pial_labels, avg_pial_inds, avg_sphere_indices)
        ])
    print(new_pial_verts.shape)
    print(new_pial_labels.shape)
    return new_pial_verts, new_pial_labels
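The core of the mapping above is a chain of nearest-vertex lookups: subject pial vertex, the same index on the subject's registered sphere, the nearest fsaverage sphere vertex, and finally the fsaverage pial coordinate at that index. A stripped-down, single-hemisphere sketch of that idea (not the project's exact code; paths and the sample coordinate are placeholders):

import numpy as np
import scipy.spatial.distance as dist
from nibabel.freesurfer import read_geometry

# Placeholder surfaces for one hemisphere.
pial_verts = read_geometry('/subj/surf/lh.pial')[0]
sphere_verts = read_geometry('/subj/surf/lh.sphere.reg')[0]
avg_sphere_verts = read_geometry('/fsaverage/surf/lh.sphere.reg')[0]
avg_pial_verts = read_geometry('/fsaverage/surf/lh.pial')[0]

coords = np.array([[-30.0, -20.0, 40.0]])        # one electrode coordinate
pial_idx = np.argmin(dist.cdist(pial_verts, coords), axis=0)
elec_sphere = sphere_verts[pial_idx]             # same index on the sphere
avg_idx = np.argmin(dist.cdist(avg_sphere_verts, elec_sphere), axis=0)
mapped = avg_pial_verts[avg_idx]                 # coordinate on fsaverage pial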
Code example #14
def find_parcel_centroids(*,
                          lhannot,
                          rhannot,
                          version='fsaverage',
                          surf='sphere',
                          drop=None):
    """
    Returns vertex coords corresponding to centroids of parcels in annotations

    Note that using any other `surf` besides the default of 'sphere' may result
    in centroids that are not directly within the parcels themselves due to
    sulcal folding patterns.

    Parameters
    ----------
    {lh,rh}annot : str
        Path to .annot file containing labels of parcels on the {left,right}
        hemisphere. These must be specified as keyword arguments to avoid
        accidental order switching.
    version : str, optional
        Specifies which version of `fsaverage` provided annotation files
        correspond to. Must be one of {'fsaverage', 'fsaverage3', 'fsaverage4',
        'fsaverage5', 'fsaverage6'}. Default: 'fsaverage'
    surf : str, optional
        Specifies which surface projection of fsaverage to use for finding
        parcel centroids. Default: 'sphere'
    drop : list, optional
        Specifies regions in {lh,rh}annot for which the parcel centroid should
        not be calculated. If not specified, centroids for 'unknown' and
        'corpuscallosum' are not calculated. Default: None

    Returns
    -------
    centroids : (N, 3) numpy.ndarray
        xyz coordinates of vertices closest to the centroid of each parcel
        defined in `lhannot` and `rhannot`
    hemiid : (N,) numpy.ndarray
        Array denoting hemisphere designation of coordinates in `centroids`,
        where `hemiid=0` denotes the left and `hemiid=1` the right hemisphere
    """

    if drop is None:
        drop = [
            'unknown',
            'corpuscallosum',  # default FreeSurfer
            'Background+FreeSurfer_Defined_Medial_Wall'  # common alternative
        ]
    drop = _decode_list(drop)

    surfaces = fetch_fsaverage(version)[surf]

    centroids, hemiid = [], []
    for n, (annot, surf) in enumerate(zip([lhannot, rhannot], surfaces)):
        vertices, faces = read_geometry(surf)
        labels, ctab, names = read_annot(annot)
        names = _decode_list(names)

        for lab in np.unique(labels):
            if names[lab] in drop:
                continue
            coords = np.atleast_2d(vertices[labels == lab].mean(axis=0))
            roi = vertices[np.argmin(cdist(vertices, coords), axis=0)[0]]
            centroids.append(roi)
            hemiid.append(n)

    return np.row_stack(centroids), np.asarray(hemiid)
Code example #15
def surf_convert(fsdir,
                 output_directory,
                 t1files,
                 surffiles,
                 rm_orig=False,
                 fsconfig="/i2bm/local/freesurfer/SetUpFreeSurfer.sh"):
    """ Export FreeSurfer surfaces to the native space.

    Note that all the vertices are given in the index coordinate system.
    The subject id in the t1 and surf files must appear in the -3 position:
        xxx/subject_id/convert/t1.nii.gz

    <unit>
        <input name="fsdir" type="Directory" description="The
            freesurfer working directory with all the subjects."/>
        <input name="output_directory" type="Directory" description="The
            conversion destination folder."/>
        <input name="t1files" type="List_File" description="The t1 nifti
            files."/>
        <input name="surffiles" type="List_File" description="The surface
            to be converted."/>
        <input name="rm_orig" type="Bool" description="If true remove
            the input surfaces."/>
        <input name="fsconfig" type="File" description="The freesurfer
            configuration batch."/>
        <output name="csurffiles" type="List_File" description="The converted
            surfaces in the native space."/>
    </unit>  
    """
    # Create a t1 subject map
    t1map = {}
    for fname in t1files:
        subject_id = fname.split("/")[-3]
        if subject_id in t1map:
            raise ("Can't map two t1 for subject '{0}'.".format(subject_id))
        t1map[subject_id] = fname

    # Convert all the surfaces
    csurffiles = []
    for fname in surffiles:

        # Get the t1 reference image
        subject_id = fname.split("/")[-3]
        t1file = t1map[subject_id]
        t1_image = nibabel.load(t1file)

        # Compute the conformed space to the native anatomical deformation
        asegfile = os.path.join(fsdir, subject_id, "mri", "aseg.mgz")
        physical_to_index = numpy.linalg.inv(t1_image.get_affine())
        translation = tkregister_translation(asegfile, fsconfig)
        deformation = numpy.dot(physical_to_index, translation)

        # Load and warp the mesh
        # The mesh: a 2-uplet with vertex (x, y, z) coordinates and
        # mesh triangles
        mesh = freesurfer.read_geometry(fname)
        surf = TriSurface(vertices=apply_affine_on_mesh(mesh[0], deformation),
                          triangles=mesh[1])

        # Save the mesh in the native space
        outputfile = fname + ".native"
        surf.save(os.path.dirname(outputfile), os.path.basename(outputfile))
        csurffiles.append(outputfile)

        # Clean input surface if specified
        if rm_orig:
            os.remove(fname)

    return csurffiles