Example #1
def reduce3_to_bci_lh(reduce3labs):
    class h32k:
        pass

    class h:
        pass

    class s:
        pass

    class bs:
        pass

    class bci:
        pass

    ''' reduce3 to h32k'''
    r3 = readdfs('lh.Yeo2011_17Networks_N1000_reduce3.dfs')
    r3.labels = np.squeeze(reduce3labs.T)
    '''h32k to full res FS'''
    g_surf = nib.load('/big_disk/ajoshi/HCP_data/reference/100307/MNINonLinea\
r/Native/100307.L.very_inflated.native.surf.gii')
    h.vertices = g_surf.darrays[0].data
    h.faces = g_surf.darrays[1].data
    h = interpolate_labels(r3, h)
    ''' native FS ref to native FS BCI'''
    g_surf = nib.load('/big_disk/ajoshi/HCP_data/reference/100307/MNINon\
Linear/Native/100307.L.sphere.reg.native.surf.gii')
    s.vertices = g_surf.darrays[0].data
    s.faces = g_surf.darrays[1].data
    s.labels = h.labels
    ''' map to bc sphere'''
    bs.vertices, bs.faces = fsio.read_geometry('/big_disk/ajoshi/data/BCI\
_DNI_Atlas/surf/lh.sphere.reg')
    bs = interpolate_labels(s, bs)
    bci.vertices, bci.faces = fsio.read_geometry(
        '/big_disk/ajoshi/data/BCI_DNI_A\
tlas/surf/lh.white')
    bci.labels = bs.labels
    #   writedfs('BCI_orig_rh.dfs', bci)

    bci.vertices, bci.faces = fsio.read_geometry(
        '/big_disk/ajoshi/data/BCI_DNI_A\
tlas/surf/lh.inflated')
    #    view_patch(bci, bci.labels)

    #    writedfs('BCI_pial_rh.dfs.', bci)

    bci.vertices, bci.faces = fsio.read_geometry('/big_disk/ajoshi/data/BCI_\
DNI_Atlas/surf/lh.white')
    #    writedfs('BCI_white_rh.dfs.', bci)

    bci.vertices[:, 0] += 96 * 0.8
    bci.vertices[:, 1] += 192 * 0.546875
    bci.vertices[:, 2] += 192 * 0.546875
    bci_bst = readdfs('/big_disk/ajoshi/data/BCI-DNI_brain_atlas/BCI-DNI_\
brain.left.inner.cortex.dfs')
    bci_bst = interpolate_labels(bci, bci_bst)
    labs = bci_bst.labels
    return labs
Example #2
def morph2dense(source_sphere,target_sphere,input_morph,path_output):
    """
    This function maps a morphological file from a source surface to a target surface.
    Inputs:
        *source_sphere: source surface.
        *target_sphere: target surface.
        *input_morph: morphological input file.
        *path_output: path where output is saved.
        
    created by Daniel Haenelt
    Date created: 13-07-2019
    Last modified: 13-07-2019
    """
    import os
    from nibabel.freesurfer.io import read_morph_data, write_morph_data, read_geometry
    from scipy.interpolate import griddata    

    # make output folder
    if not os.path.exists(path_output):
        os.mkdir(path_output)

    # transform morphological data to dense surfaces
    pts_sphere_dense, _ = read_geometry(target_sphere)
    pts_sphere, _ = read_geometry(source_sphere)
    
    # get morphological data
    morph = read_morph_data(input_morph)
    
    # do the transformation
    method = "nearest"
    morph_dense = griddata(pts_sphere, morph, pts_sphere_dense, method)
        
    # write dense morphological data
    write_morph_data(os.path.join(path_output,os.path.basename(input_morph)), morph_dense)
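A minimal usage sketch; the subject paths and output directory below are hypothetical and assume a standard FreeSurfer surf/ layout:

morph2dense(source_sphere="/path/to/subjects/fsaverage/surf/lh.sphere",
            target_sphere="/path/to/subjects/fsaverage_dense/surf/lh.sphere",
            input_morph="/path/to/subjects/fsaverage/surf/lh.thickness",
            path_output="/path/to/output")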
Example #3
def euclDist_infl(subject):

    import numpy as np
    import nibabel.freesurfer.io as fs
    from scipy.spatial import distance_matrix
    
    fsDir = '/afs/cbs.mpg.de/projects/mar004_lsd-lemon-preproc/freesurfer'
    surfDir = '/afs/cbs.mpg.de/projects/mar005_lsd-lemon-surf/probands'
    
    for hemi in ['lh', 'rh']:
        
        # fsaverage5 coords on sphere
        fsa5_sphere_coords = fs.read_geometry('%s/fsaverage5/surf/%s.sphere' % (fsDir, hemi))[0]
        cort = fs.read_label('%s/fsaverage5/label/%s.cortex.label' % (fsDir, hemi))
     
        # get corresponding nodes on subject sphere (find coords of high-dim subject surface closest to fsa5 nodes in sphere space)
        subj_sphere_coords = fs.read_geometry('%s/%s/surf/%s.sphere' % (fsDir, subject, hemi))[0]
        subj_indices = []
        for node in cort:
            dist2all = np.squeeze(distance_matrix(np.expand_dims(fsa5_sphere_coords[node], axis=0), subj_sphere_coords))
            subj_indices.append(list(dist2all).index(min(dist2all)))
        
        # pair-wise euclidean distance between included nodes on subject surface (midline)
        subj_surf_coords = fs.read_geometry('%s/%s/surf/%s.inflated' % (fsDir, subject, hemi))[0]
        
        
        euclDist = np.zeros((10242,10242))
        euclDist[np.ix_(cort, cort)] = distance_matrix(subj_surf_coords[subj_indices,:],subj_surf_coords[subj_indices,:])
        np.save('%s/%s/distance_maps/%s_%s_euclDist_inflated_fsa5' % (surfDir, subject, subject, hemi), euclDist)
Example #4
def transfertodense(subjectid,vals,hemi,interptype='nearest',surftype='sphere'):
    '''
    def transfertodense(subjectid,vals,hemi,interptype='nearest',surftype='sphere'):

    <subjectid> is like 'C0041'
    <vals> is a column vector of values defined on the regular sphere surface (one hemi)
    <hemi> is 'lh' or 'rh'
    <interptype> is 'linear' or 'nearest', 'cubic'
    <surftype> is 'sphere' (default),'inflated',etc...

    Interpolate to obtain <vals> defined on the dense sphere surface.

    Note that griddata in matlab can only do 'nearest' for 3d interpolation
    griddata in scipy can do 'nearest','linear','cubic' for 3d interpolation

    '''
    from scipy.interpolate import griddata
    import nibabel.freesurfer.io as fsio

    # calc paths to the regular and dense surfaces
    surf1file = cvnpath('freesurfer')/subjectid/'surf'/f'{hemi}.{surftype}'
    surf2file = cvnpath('freesurfer')/subjectid/'surf'/f'{hemi}.{surftype}DENSE'

    # load surfaces (note that we skip the post-processing of vertices and faces since unnecessary for what we are doing)
    surf1_vertices, _ = fsio.read_geometry(surf1file)
    surf2_vertices, _ = fsio.read_geometry(surf2file)

    # interpolate the values from the regular sphere onto the dense sphere
    return griddata(surf1_vertices, vals.flatten(), surf2_vertices,
                    method=interptype)
Example #5
def transfer_mesh_color(subject_id,
                        atlas='aparc',
                        reconall_folder='/data01/ayagoz/HCP_1200/FS_reconall/',
                        concon_mesh='/home/kurmukov/HCP/Dan_iso5.m'):
    '''
    Transfer mesh labels from the subject sphere mesh to the ConCon sphere mesh
    
    Parameters:
    
    subject_id - int,
     subject id
     
    atlas - str,
     atlas to transfer, possible values: aparc (Desikan-Killiany), aparc.a2009s (Destrieux Atlas).
     Defined by FreeSurfer https://surfer.nmr.mgh.harvard.edu/fswiki/CorticalParcellation
     
    reconall_folder - str,
     path to recon-all FS output
     
    concon_mesh - str,
     path to ConCon sphere mesh
    '''

    lh_vertices, _ = read_geometry(
        f'{reconall_folder}{subject_id}/surf/lh.sphere.reg')
    rh_vertices, _ = read_geometry(
        f'{reconall_folder}{subject_id}/surf/rh.sphere.reg')

    lh_labels = load_surf_data(
        f'{reconall_folder}{subject_id}/label/lh.{atlas}.annot')
    rh_labels = load_surf_data(
        f'{reconall_folder}{subject_id}/label/rh.{atlas}.annot')

    lh_vertices /= 100
    rh_vertices /= 100

    lh_vertices_CC, _ = load_mesh_boris(concon_mesh)
    rh_vertices_CC, _ = load_mesh_boris(concon_mesh)

    knn = KNeighborsClassifier(n_neighbors=5,
                               weights='uniform',
                               metric='minkowski')

    knn.fit(lh_vertices, lh_labels)
    lh_labels_CC = knn.predict(lh_vertices_CC)

    knn.fit(rh_vertices, rh_labels)
    rh_labels_CC = knn.predict(rh_vertices_CC)

    rh_labels_CC[rh_labels_CC != -1] += np.max(lh_labels_CC)

    labels_CC = np.concatenate([lh_labels_CC, rh_labels_CC])

    return labels_CC
Example #6
def surface_intersect(inner_surface_path, outer_surface_path):

    inner_mesh = trimesh.Trimesh(
        *read_geometry(inner_surface_path, read_metadata=False))
    outer_mesh = trimesh.Trimesh(
        *read_geometry(outer_surface_path, read_metadata=False))

    ray_interceptor_inner = trimesh.ray.ray_pyembree.RayMeshIntersector(
        inner_mesh)
    inds = ray_interceptor_inner.intersects_location(
        ray_origins=outer_mesh.vertices,
        ray_directions=outer_mesh.vertex_normals,
        multiple_hits=True)[1]
    return len(inds) != 0
Example #7
def calctransferfunctions(fslhfile, fsrhfile, sslhfile, ssrhfile):
    '''
    cvncalctransferfunctions(fslhfile,fsrhfile,sslhfile,ssrhfile)

    <fslhfile>,<fsrhfile> are locations of the fsaverage spherical surfaces
    <sslhfile>,<ssrhfile> are locations of other surfaces registered on the sphere to fsaverage

    return functions that perform nearest-neighbor interpolation
    to go back and forth between values defined on the surfaces.

    # History
    20180714 now accept pathlib path for all 4 input

    '''

    import numpy as np
    from nibabel.freesurfer.io import read_geometry
    from scipy.interpolate import griddata
    # load spherical surfaces (note that we skip the post-processing of vertices and faces since unnecessary for what we are doing)
    fslh_vertices, _ = read_geometry(str(fslhfile))
    fsrh_vertices, _ = read_geometry(str(fsrhfile))
    sslh_vertices, _ = read_geometry(str(sslhfile))
    ssrh_vertices, _ = read_geometry(str(ssrhfile))

    # define the functions
    tempix = griddata(fslh_vertices,
                      np.arange(fslh_vertices.shape[0]) + 1,
                      sslh_vertices,
                      method='nearest')
    tfunFSSSlh = lambda x: x[tempix].flatten().astype('float')

    tempix = griddata(fsrh_vertices,
                      np.arange(fsrh_vertices.shape[0]) + 1,
                      ssrh_vertices,
                      method='nearest')
    tfunFSSSrh = lambda x: x[tempix].flatten().astype('float')

    tempix = griddata(sslh_vertices,
                      np.arange(sslh_vertices.shape[0]) + 1,
                      fslh_vertices,
                      method='nearest')
    tfunSSFSlh = lambda x: x[tempix].flatten().astype('float')

    tempix = griddata(ssrh_vertices,
                      np.arange(ssrh_vertices.shape[0]) + 1,
                      fsrh_vertices,
                      method='nearest')
    tfunSSFSrh = lambda x: x[tempix].flatten().astype('float')

    return tfunFSSSlh, tfunFSSSrh, tfunSSFSlh, tfunSSFSrh
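A hedged usage sketch; the sphere file locations below are placeholders for real fsaverage and subject registrations:

tfunFSSSlh, tfunFSSSrh, tfunSSFSlh, tfunSSFSrh = calctransferfunctions(
    'fsaverage/surf/lh.sphere.reg', 'fsaverage/surf/rh.sphere.reg',
    'subject01/surf/lh.sphere.reg', 'subject01/surf/rh.sphere.reg')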
Example #8
    def __init__(self, files):
        if isinstance(files, tuple):
            white = read_geometry(files[0])
            pial = read_geometry(files[1])
            vertices = (pial[0] - white[0]) / 2
            faces = pial[1]
            
        else:
            mid = read_geometry(files)
            vertices = mid[0]
            faces = mid[1]

        self.vertices = vertices
        self.faces = faces
        self.mesh = Trimesh(vertices=self.vertices, faces=self.faces)
Example #9
def calculate_area(filename_surf, filename_area=""):
    """
    The function calculates vertex-wise surface area. The code is taken from the octave code 
    surf2area.m from Anderson Winkler found in his github repository (https://github.com/
    andersonwinkler/areal). Consider a triangular face ABC with corner points
    a = [x_A, y_A, z_A]'
    b = [x_B, y_B, z_B]'
    c = [x_C, y_C, z_C]'
    The area for this triangle is given by the normed cross product A = |u x v|/2 with u = a - c and 
    v = b - c. This is a face-wise surface area representation. To convert this to a vertex-wise 
    representation, we assign each vertex one third of the sum of the areas of all faces that meet 
    at that vertex. Cf. Anderson Winkler et al. Measuring and comparing brain cortical surface area 
    and other areal quantities, Neuroimage 61(4), p. 1428-1443 (2012).
    Inputs:
        *filename_surf: input file geometry on which surface area is calculated.
        *filename_area: file name of the surface area file.
    Outputs:
        *dpv: vertex-wise surface area.
        
    created by Daniel Haenelt
    Date created: 01-11-2018             
    Last modified: 17-12-2018
    """
    import numpy as np
    from numpy.linalg import norm
    from nibabel.freesurfer.io import write_morph_data, read_geometry

    # Read the surface file
    vtx, fac = read_geometry(filename_surf)
    nV = len(vtx)
    nF = len(fac)

    # compute area per face (DPF)
    facvtx = np.concatenate([vtx[fac[:, 0]], vtx[fac[:, 1]], vtx[fac[:, 2]]],
                            axis=1)
    facvtx0 = facvtx[:, 0:6] - np.concatenate(
        [facvtx[:, 6:9], facvtx[:, 6:9]], axis=1)  # place 3rd vtx at origin
    cp = np.cross(facvtx0[:, 0:3], facvtx0[:, 3:6], axisa=1,
                  axisb=1)  # cross product
    dpf = norm(cp, axis=1) / 2  # half of the norm
    print("Total area (facewise): " + str(np.sum(dpf)))

    # compute area per vertex (DPV)
    dpv = np.zeros(nV)

    # for speed, divide the dpf by 3
    dpf = dpf / 3

    # redistribute
    for f in range(nF):
        dpv[fac[f, :]] = dpv[fac[f, :]] + dpf[f]

    print("Total area (vertexwise): " + str(np.sum(dpv)))

    # save dpv
    if filename_area:
        write_morph_data(filename_area, dpv)

    # return vertex-wise surface area
    return dpv
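A standalone check (not part of the original script) illustrating the |u x v|/2 formula from the docstring on a single triangle:

import numpy as np
from numpy.linalg import norm

# right triangle with unit legs along x and y; its area should be 0.5
a = np.array([1.0, 0.0, 0.0])
b = np.array([0.0, 1.0, 0.0])
c = np.array([0.0, 0.0, 0.0])
area = norm(np.cross(a - c, b - c)) / 2
print(area)  # 0.5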
Example #10
def hemisphere(request, image="", hemisphere=""):
    fileUrl = f"https://neurovault.org/api/images/{image}"
    try:
        with urllib.request.urlopen(fileUrl) as url:
            fileData = json.loads(url.read().decode())
    except (urllib.error.HTTPError, ValueError):
        fileData = None

    # Map external file to internal file:
    surface_file = fileData[f"surface_{hemisphere}_file"]
    giftiParser = GiftiImageParser(buffer_size=35000000)
    giftiParser.parse(string=urlopen(surface_file).read())
    giftiObject = giftiParser.img
    colors = giftiObject.darrays[0].data

    if hemisphere not in ["left", "right"]:
        exit(1)
    elif hemisphere == "left":
        hemi_short = "lh"
    else:
        hemi_short = "rh"

    fs_base = os.path.join(settings.STATIC_ROOT, "fs/")
    verts, faces = fsio.read_geometry(
        os.path.join(fs_base, "%s.pial" % hemi_short))

    bytestream = bytes(fv_scalar_to_collada(verts, faces, colors).getvalue())
    filename = f"{hemisphere}.dae"
    file = ContentFile(bytestream, filename)
    response = HttpResponse(file, content_type="application/xml")
    response["Content-Disposition"] = "attachment; filename=" + filename
    return response
Example #11
    def read(self, surface_path, use_center_surface):
        vertices, triangles, metadata = read_geometry(surface_path,
                                                      read_metadata=True)
        self.logger.info("From the file %s the extracted metadata is %s",
                         surface_path, metadata)

        if use_center_surface:
            cras = [0, 0, 0]
            self.logger.info(
                "The --center_ras flag was specified, so the ras centering point is %s",
                cras)
        else:
            if CENTER_RAS_FS_SURF in metadata:
                cras = metadata[CENTER_RAS_FS_SURF]
                self.logger.info(
                    "The ras centering point for surface %s is %s",
                    surface_path, cras)
            else:
                cras = [0, 0, 0]
                self.logger.warning(
                    "Could not read the ras centering point from surface %s header. "
                    "The cras will be %s", surface_path, cras)

        return Surface(vertices,
                       triangles,
                       area_mask=None,
                       center_ras=cras,
                       generic_metadata=metadata)
Example #12
def hemisphere(request, image='', hemisphere=''):
    # query neurovault image
    fileUrl = f"https://neurovault.org/api/images/{image}"
    try:
        with urllib.request.urlopen(fileUrl) as url:
            fileData = json.loads(url.read().decode())
    except (urllib.error.HTTPError, ValueError):
        fileData = None

    surface = fileData[f"surface_{hemisphere}_file"]

    dom = parse(urllib.request.urlopen(surface))
    zip_base64 = dom.getElementsByTagName('Data')[0].childNodes[0].data
    zip = base64.b64decode(zip_base64.encode('ascii'))
    unzip = zlib.decompress(zip)
    colors = numpy.frombuffer(unzip, dtype='float32')

    if hemisphere not in ["left","right"]:
        print("Bad hemisphere input")
        exit(1)
    elif hemisphere == "left":
        hemi_short = "lh"
    else:
        hemi_short = "rh"

    fs_base = os.path.join(settings.BASE_DIR, 'staticfiles/fs/')
    verts,faces = fsio.read_geometry(os.path.join(fs_base,"%s.pial" % hemi_short))

    bytestream = bytes(fv_scalar_to_collada(verts,faces,colors).getvalue())
    filename = f"{hemisphere}.dae"
    file  = ContentFile(bytestream, filename)
    response = HttpResponse(file, content_type='application/xml')
    response['Content-Disposition'] = 'attachment; filename=' + filename
    return response
Example #13
def spherically_project_surface(insurf, outsurf):
    """ (string) -> None
    takes path to insurf, spherically projects it, outputs it to outsurf
    """
    surf = fs.read_geometry(insurf, read_metadata=True)
    projected = sphericalProject(surf[0], surf[1])
    fs.write_geometry(outsurf, projected[0], projected[1], volume_info=surf[2]) 
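A one-line hypothetical call; the input and output paths are placeholders:

spherically_project_surface('subject01/surf/lh.white', 'subject01/surf/lh.qsphere')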
Example #14
def get_pial_meshes(age=None,
                    template=None,
                    face_count=20000,
                    subjects_dir=None):

    if template is None:
        if age is not None:
            template = f"ANTS{age}-0Months3T"
        else:
            raise ValueError("The age or the template must be specified.")

    if subjects_dir is None:
        subjects_dir = Path(os.environ["SUBJECTS_DIR"])

    mesh_pattern = "{}/{}/surf/{}.pial"
    vertices = {}
    faces = {}
    for hemi in ["lh", "rh"]:
        vertices_hemi, faces_hemi = read_geometry(
            mesh_pattern.format(subjects_dir, template, hemi))

        open3d_mesh = open3d.geometry.TriangleMesh(
            vertices=open3d.utility.Vector3dVector(vertices_hemi),
            triangles=open3d.utility.Vector3iVector(faces_hemi))

        mesh = open3d_mesh.simplify_quadric_decimation(int(face_count / 2))
        vertices[hemi] = np.asarray(mesh.vertices)
        faces[hemi] = np.asarray(mesh.triangles)

    return vertices, faces
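A hypothetical call; the template name is built from the age argument, and SUBJECTS_DIR is assumed to contain that template:

# per-hemisphere vertex and face arrays for the decimated pial surfaces
vertices, faces = get_pial_meshes(age=6, face_count=20000)
print(vertices["lh"].shape, faces["lh"].shape)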
Example #15
def mrtrix_mesh2vox(surface_path, template_path, temp_dir, output_prefix):
    """
    Create a partial volume map from a surface and a reference template using mrtrix mesh2voxel command.

    :param surface_path: path to the surface file
    :param template_path: path to the template file
    :param temp_dir: path to temporary directory to which temporary files are saved
    :param output_prefix: prefix to output file
    """
    # Adapt affine translation using metadata
    template = nib.load(template_path)
    _, _, meta = read_geometry(surface_path, read_metadata=True)

    template = nib.as_closest_canonical(template)
    affine = template.affine.copy()
    affine[:-1, -1] = template.affine[:-1, -1] - meta['cras']

    new_template = nib.Nifti1Image(template.dataobj, affine)
    new_template_path = temp_dir / 'template.mgz'
    nib.save(new_template, new_template_path)

    # Reconstruct volume from mesh
    subprocess.run([
        'mesh2voxel', surface_path, new_template_path,
        temp_dir / f'{output_prefix}_output.mgz'
    ])

    # Save the reconstructed volume with the right affine
    output = nib.load(temp_dir / f'{output_prefix}_output.mgz')
    new_output = nib.Nifti1Image(output.dataobj, template.affine)
    # nib.save(new_output, output_path)

    return new_output
Example #16
def main():

    # define specific freesurfer polydata surface to read
    hemi = 'lh'  # lh or rh for left or right hemispheres
    surf = 'inflated'  # all surfaces exported by freesurfer, e.g., pial, sphere, smoothwm, curv, inflated...
    subj = 5

    fs_file = '{}.{}'.format(hemi, surf)
    fs_dir = os.environ[
        'OneDrive'] + r'\data\nexstim_coord\freesurfer\ppM1_S{}\surf'.format(
            subj)
    fs_path = os.path.join(fs_dir, fs_file)

    vertices, faces, volume_info = fsio.read_geometry(fs_path,
                                                      read_metadata=True)

    # create the methods to convert the nifti points to vtk object
    polydata = vtk.vtkPolyData()
    points = vtk.vtkPoints()
    polys = vtk.vtkCellArray()
    scalars = vtk.vtkFloatArray()
    colors = vtk.vtkNamedColors()

    # load the point, cell, and data attributes
    for n, xi in enumerate(vertices):
        points.InsertPoint(n, xi)
        scalars.InsertTuple1(n, n)
    for fc in faces:
        polys.InsertNextCell(make_vtk_id_list(fc))

    # assign the pieces to the vtkPolyData
    polydata.SetPoints(points)
    polydata.SetPolys(polys)
    polydata.GetPointData().SetScalars(scalars)

    # visualize
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputData(polydata)
    mapper.SetScalarRange(polydata.GetScalarRange())

    actor = vtk.vtkActor()
    actor.SetMapper(mapper)

    ren = vtk.vtkRenderer()
    ren.AddActor(actor)
    ren.SetBackground(colors.GetColor3d("Cornsilk"))

    ren_win = vtk.vtkRenderWindow()
    ren_win.AddRenderer(ren)
    ren_win.SetSize(800, 800)

    # create the window interactor
    iren = vtk.vtkRenderWindowInteractor()
    iren.SetRenderWindow(ren_win)

    iren.Initialize()
    ren_win.Render()
    iren.Start()

    close_window(iren)
Example #17
def get_b0_orientation(surf_in,
                       vol_in,
                       write_output=False,
                       path_output="",
                       name_output=""):
    """
    This function computes the angle between surface normals and B0-direction per vertex.
    Inputs:
        *surf_in: input of surface mesh.
        *vol_in: input of corresponding nifti volume.
        *write_output: write out to disk (boolean).
        *path_output: path where to save output.
        *name_output: basename of output file.
    Outputs:
        *theta: angle in radians.
        
    created by Daniel Haenelt
    Date created: 31-07-2020 
    Last modified: 31-07-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry, write_morph_data
    from lib.io.get_filename import get_filename
    from lib.surface.vox2ras import vox2ras
    from lib_gbb.normal import get_normal

    # make subfolders
    if write_output and not os.path.exists(path_output):
        os.makedirs(path_output)

    # get hemi from surface filename
    _, hemi, _ = get_filename(surf_in)

    # load surface
    vtx, fac = read_geometry(surf_in)

    # get transformation matrix
    _, r2v = vox2ras(vol_in)  # ras-tkr -> voxel
    v2s = nb.load(vol_in).affine  # voxel -> scanner-ras
    M = v2s.dot(r2v)

    # apply affine transformation
    vtx = apply_affine(M, vtx)

    # get surface normals
    n = get_normal(vtx, fac)

    # get angle between b0 and surface normals in radians
    theta = np.arccos(np.dot(n, [0, 0, 1]))

    # write output
    if write_output:
        write_morph_data(os.path.join(path_output, hemi + "." + name_output),
                         theta)

    return theta
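A hedged usage sketch; the surface and volume filenames below are placeholders:

theta = get_b0_orientation("lh.white", "epi.nii",
                           write_output=True,
                           path_output="/path/to/output",
                           name_output="b0_angle")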
Example #18
    def parse_fs(self, surface_path):
        (vertices, triangles, metadata) = read_geometry(surface_path, read_metadata=True)
        cras = metadata['cras']

        logger = get_logger(__name__)
        logger.info("From the file %s the extracted metadata is %s" % (surface_path, metadata))

        return Surface(vertices, triangles, cras, metadata)
Example #19
def write_surface(image, filename):

    extension = filename.split('.')[-1]

    if extension == 'mha':
        sitk.WriteImage(image, filename)

    elif extension == 'annot':
        print(type(image[1]))
        io.write_annot(filename, image[0], image[1], image[2])

    elif extension == 'label':
        raise ValueError('Reader for extensions \'label\' not yet implemented')

    elif extension in ['inflated', 'pial', 'white']:
        io.write_geometry(filename, image[0], image[1])

    else:
        return io.write_morph_data(filename, image)
Example #20
def euclDist(subject):

    import numpy as np
    import nibabel.freesurfer.io as fs
    from scipy.spatial import distance_matrix

    fsDir = '/afs/cbs.mpg.de/projects/mar004_lsd-lemon-preproc/freesurfer'
    surfDir = '/afs/cbs.mpg.de/projects/mar005_lsd-lemon-surf/probands'

    for hemi in ['lh', 'rh']:

        # fsaverage5 coords on sphere
        fsa5_sphere_coords = fs.read_geometry('%s/fsaverage5/surf/%s.sphere' %
                                              (fsDir, hemi))[0]
        cort = fs.read_label('%s/fsaverage5/label/%s.cortex.label' %
                             (fsDir, hemi))

        # get corresponding nodes on subject sphere (find coords of high-dim subject surface closest to fsa5 nodes in sphere space)
        subj_sphere_coords = fs.read_geometry('%s/%s/surf/%s.sphere' %
                                              (fsDir, subject, hemi))[0]
        subj_indices = []
        for node in cort:
            dist2all = np.squeeze(
                distance_matrix(
                    np.expand_dims(fsa5_sphere_coords[node], axis=0),
                    subj_sphere_coords))
            subj_indices.append(list(dist2all).index(min(dist2all)))

        # pair-wise euclidean distance between included nodes on subject surface (midline)
        subj_surf_coords_pial = fs.read_geometry('%s/%s/surf/%s.pial' %
                                                 (fsDir, subject, hemi))[0]
        subj_surf_coords_wm = fs.read_geometry('%s/%s/surf/%s.smoothwm' %
                                               (fsDir, subject, hemi))[0]
        subj_surf_coords = (subj_surf_coords_pial + subj_surf_coords_wm) / 2.

        euclDist = np.zeros((10242, 10242))
        euclDist[np.ix_(cort, cort)] = distance_matrix(
            subj_surf_coords[subj_indices, :],
            subj_surf_coords[subj_indices, :])
        np.save(
            '%s/%s/distance_maps/%s_%s_euclDist_fsa5' %
            (surfDir, subject, subject, hemi), euclDist)
Example #21
def fetch_template_surface(
    template: str,
    join: bool = True,
    layer: Optional[str] = None,
    data_dir: Optional[Union[str, Path]] = None,
) -> Union[BSPolyData, Tuple[BSPolyData, BSPolyData]]:
    """Loads surface templates.

    Parameters
    ----------
    template : str
        Name of the surface template. Valid values are "fslr32k", "fsaverage",
        "fsaverage3", "fsaverage4", "fsaverage5", "fsaverage6", "civet41k",
        "civet164k".
    join : bool, optional
        If true, returns surfaces as a single object, if false, returns an
        object per hemisphere, by default True.
    layer : str, optional
        Name of the cortical surface of interest. Valid values are "white",
        "smoothwm", "pial", "inflated", "sphere" for fsaverage surfaces;
        "midthickness", "inflated", "vinflated" for "fslr32k"; "mid", "white"
        for CIVET surfaces; and "sphere" for "civet41k". If None,
        defaults to "pial" or "midthickness", by default None.
    data_dir : str, Path, optional
        Directory to save the data, by default
        $HOME_DIR/brainstat_data/surface_data.

    Returns
    -------
    BSPolyData or tuple of BSPolyData
        Output surface(s). If a tuple, then the first element is the left
        hemisphere.
    """

    data_dir = Path(
        data_dir) if data_dir else data_directories["SURFACE_DATA_DIR"]
    surface_files = _fetch_template_surface_files(template, data_dir, layer)
    if template[:9] == "fsaverage":
        surfaces_fs = [read_geometry(file) for file in surface_files]
        surfaces = [
            build_polydata(surface[0], surface[1]) for surface in surfaces_fs
        ]
    elif template == "fslr32k":
        surfaces = [read_surface(file) for file in surface_files]
    else:
        surfaces_obj = [read_civet(file) for file in surface_files]
        surfaces = [
            build_polydata(surface[0], surface[1]) for surface in surfaces_obj
        ]

    if join:
        return combine_surfaces(surfaces[0], surfaces[1])
    else:
        return surfaces[0], surfaces[1]
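A short usage sketch based on the docstring; downloads go to the default brainstat data directory unless data_dir is given:

# left and right fsaverage5 pial surfaces as separate BSPolyData objects
surf_lh, surf_rh = fetch_template_surface("fsaverage5", join=False, layer="pial")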
Example #22
def load_surface(lr):
    all_coords = []
    for surf_type in ['white', 'pial']:
        coords, faces = read_geometry(
            '/data_dir/freesurfer/'
            'subjects/fsaverage/surf/{lr}.{surf_type}'.format(
                lr=lr, surf_type=surf_type))
        all_coords.append(coords)
    coords = np.array(all_coords).astype(float).mean(axis=0)
    surf = Surface(coords, faces)
    return surf
Example #23
def load_freesurfer_geometry(filename, to='mesh', warn=False):
    '''
    load_freesurfer_geometry(filename) yields the data stored at the freesurfer geometry file given
      by filename. The optional argument 'to' may be used to change the kind of data that is
      returned.

    The following are valid settings for the 'to' keyword argument:
      * 'mesh' (the default) yields a mesh object
      * 'tess' yields a tess object (discarding coordinates)
      * 'raw' yields a tuple of numpy arrays, identical to the read_geometry return value.
    '''
    if not warn:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore',
                                    category=UserWarning,
                                    module='nibabel')
            (xs, fs, info) = fsio.read_geometry(filename, read_metadata=True)
    else:
        (xs, fs, info) = fsio.read_geometry(filename, read_metadata=True)
    # see if there's chirality data here...
    filename = os.path.split(filename)[1]
    filename = filename.lower()
    if   filename.startswith('lh'): info['chirality'] = 'lh.'
    elif filename.startswith('rh'): info['chirality'] = 'rh.'
    # parse it into something
    to = to.lower()
    if to in ['mesh', 'auto', 'automatic']:
        return geo.Mesh(fs, xs, meta_data=info)
    elif to in ['tess', 'tesselation']:
        return geo.Tesselation(fs, meta_data=info)
    elif to in ['coords', 'coordinates']:
        return xs
    elif to in ['triangles', 'faces']:
        return fs
    elif to in ['meta', 'meta_data']:
        return info
    elif to =='raw':
        return (xs, fs)
    else:
        raise ValueError('Could not understand \'to\' argument: %s' % to)
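A brief usage sketch of the 'to' keyword; the lh.white filename is a placeholder:

mesh = load_freesurfer_geometry('lh.white')               # default: mesh object
tess = load_freesurfer_geometry('lh.white', to='tess')    # tesselation, no coordinates
xs, fs = load_freesurfer_geometry('lh.white', to='raw')   # plain numpy arrays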
Example #24
def main():
    parser = create_parser()
    args = parser.parse_args()
    print('fname:', args.fname)
    g = fio.read_geometry(args.fname)
    m = Trimesh(g[0], g[1])
    if args.outname is None:
        # (name, ext) = os.path.splitext(args.fname)
        outname = '%s.stl' % args.fname
    else:
        outname = args.outname
    print('outname:', outname)
    export_mesh(m, outname)
Example #25
def main():
    parser = create_parser()
    args = parser.parse_args()
    print('fname:', args.fname)
    g = fio.read_geometry(args.fname)
    m = Trimesh(g[0], g[1])
    if args.outname is None:
        # (name, ext) = os.path.splitext(args.fname)
        outname = '%s.stl' % args.fname
    else:
        outname = args.outname
    print('outname:', outname)
    export_mesh(m, outname)
Example #26
def main():
    parser = create_parser()
    args = parser.parse_args()
    print('fname:', args.fname)
    g = fio.read_geometry(args.fname)
    if args.outname is None:
        outname = '%s_vtk' % args.fname
    else:
        outname = args.outname
    print('outname:', outname)
    rnd = (np.random.random((g[0].shape[0])) * 255).astype(np.uint8)
    pointData = {'Colors': (rnd, rnd, rnd)}
    meshToVTK(outname, g[0], g[1], pointData)
Example #27
def main():
    parser = create_parser()
    args = parser.parse_args()
    print('fname:', args.fname)
    g = fio.read_geometry(args.fname)
    if args.outname is None:
        outname = '%s_vtk' % args.fname
    else:
        outname = args.outname
    print('outname:', outname)
    rnd = (np.random.random((g[0].shape[0])) * 255).astype(np.uint8)
    pointData = {'Colors': (rnd, rnd, rnd)}
    meshToVTK(outname, g[0], g[1], pointData)
Example #28
    def read_freesurfer(cls, filename):
        """Read a triangular format Freesurfer surface mesh.

        Parameters
        ----------
        filename : str
            Filename for the file with the triangular data

        Returns
        -------
        surface : TriSurface

        """
        vertices, faces = read_geometry(filename)
        return cls(vertices=vertices, faces=faces)
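A hypothetical call, assuming this classmethod is defined on the TriSurface class named in the docstring:

surface = TriSurface.read_freesurfer('subject01/surf/lh.pial')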
Example #29
def read(fn):
    '''General read function for surfaces
    
    For now only supports ascii (as used in AFNI's SUMA), Caret
    and freesurfer formats
    '''
    if fn.endswith('.asc'):
        from mvpa2.support.nibabel import surf_fs_asc
        return surf_fs_asc.read(fn)
    elif fn.endswith('.coord'):
        from mvpa2.support.nibabel import surf_caret
        return surf_caret.read(fn)
    else:
        import nibabel.freesurfer.io as fsio
        coords, faces = fsio.read_geometry(fn)
        return Surface(coords, faces)
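A minimal usage sketch; the filename is a placeholder, and any extension other than .asc or .coord is read as FreeSurfer binary geometry:

surf = read('lh.pial')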
Example #30
def fs_to_dae( args ):

  #load in FS mesh
  verts,faces = fsio.read_geometry( args.input )

  #dumb copypasta for mesh face normals
  norms = np.zeros( verts.shape, dtype=verts.dtype )
  tris = verts[faces]
  n = np.cross( tris[::,1 ] - tris[::,0]  , tris[::,2 ] - tris[::,0] )
  norm_sizes = np.sqrt(n[:,0]**2 + n[:,1]**2 + n[:,2]**2)

  for i in range(3):
    n[:,i] = n[:,i] / norm_sizes

  del norm_sizes

  #map back to vertices
  norms[ faces[:,0] ] += n
  norms[ faces[:,1] ] += n
  norms[ faces[:,2] ] += n

  del n

  norm_sizes = np.sqrt(norms[:,0]**2 + norms[:,1]**2 + norms[:,2]**2)

  for i in range(3):
    norms[:,i] = norms[:,i] / norm_sizes

  #color
  if not args.color:
    color = np.ones(norms.shape) * 0.4
  else:
    scalars = fsio.read_morph_data(args.color)
    color = color_func(scalars)

  #make trimesh
  mesh = trimesh.Trimesh(\
    verts,\
    faces,\
    vertex_normals=norms,\
    vertex_colors=color)

  mesh.export(file_obj=args.output, file_type="collada")
  #mesh.export(file_obj=args.output, file_type="obj")
  #mesh.export_gltf(file_obj=args.output)

  return
Example #31
def plot_normal_direction(input_surf, axis=2):
    """ Plot normal direction

    This function plots the direction from the white surface towards the pial 
    surface based on the white surface vertex normals along one axis.    

    Parameters
    ----------
    input_surf : str
        Filename of source mesh (white surface).
    axis : int, optional
        Axis for distance calculation in ras space (0,1,2). The default is 2.

    Returns
    -------
    None.

    Notes
    -------
    created by Daniel Haenelt
    Date created: 13-12-2019         
    Last modified: 05-10-2020

    """
    
    # fixed parameter
    line_threshold = 0.05 # if direction is along one axis, omit line if length is below threshold

    # load geometry
    vtx_source, fac_source = read_geometry(input_surf)

    # get surface normals per vertex
    norm = get_normal(vtx_source, fac_source)
    
    # get distance along one axis
    r_dist = norm[:,axis].copy()
    
    # get directions
    r_dist[r_dist > line_threshold] = 1
    r_dist[r_dist < -line_threshold] = -1
    r_dist[np.abs(r_dist) != 1] = 0

    # write output
    header = nb.freesurfer.mghformat.MGHHeader()
    output = nb.freesurfer.mghformat.MGHImage(r_dist, np.eye(4), header)
    nb.save(output, input_surf+"_plot_normal_dir"+str(axis)+".mgh")
    
Example #32
    def __init__(self, fsaverage_dir, hemisphere, atlas):

        # Load patient labels for vertex based on <atlas> parcels (MACRO)
        (self.vertices, self.colortable, self.labels) = fio.read_annot(
            f"{fsaverage_dir}/label/{hemisphere}.{atlas}.annot")

        # Load geometry of the brain
        (self.coords, self.faces) = fio.read_geometry(f"{fsaverage_dir}/surf/{hemisphere}.sphere.reg")

        # an array of list: at position i-th there is the list of faces that the vertex i touches
        self.vertex_to_faces = np.empty((len(self.vertices),), dtype=object)
        for i in range(len(self.vertex_to_faces)):
            self.vertex_to_faces[i] = []
        # For each face, add the face i to the vertex i-th list of faces
        for i, f in enumerate(self.faces):
            for j in range(3):
                self.vertex_to_faces[f[j]].append(i)
Example #33
def read_surface(filename):

    extension = filename.split('.')[-1]

    if extension == 'mha':
        img = sitk.ReadImage(filename)
        return sitk.GetArrayFromImage(img)
    elif extension == 'annot':
        raise ValueError('Reader for extensions \'annot\' not yet implemented')

    elif extension == 'label':
        raise ValueError('Reader for extensions \'label\' not yet implemented')

    elif extension in ['inflated', 'pial', 'white']:
        coords, faces = io.read_geometry(filename)
        return coords, faces
    else:
        return io.read_morph_data(filename)
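A small usage sketch showing the extension-based dispatch; the filenames are placeholders:

coords, faces = read_surface('lh.pial')   # geometry extensions return (coords, faces)
thickness = read_surface('lh.thickness')  # any other extension is read as morph data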
Example #34
    def read(self, surface_path, use_center_surface):
        vertices, triangles, metadata = read_geometry(
            surface_path, read_metadata=True)
        self.logger.info(
            "From the file %s the extracted metadata is %s", surface_path, metadata)

        if use_center_surface:
            cras = [0, 0, 0]
            self.logger.info(
                "The --center_ras flag was specified, so the ras centering point is %s", cras)
        else:
            if CENTER_RAS_FS_SURF in metadata:
                cras = metadata[CENTER_RAS_FS_SURF]
                self.logger.info(
                    "The ras centering point for surface %s is %s", surface_path, cras)
            else:
                cras = [0, 0, 0]
                self.logger.warning("Could not read the ras centering point from surface %s header. "
                                    "The cras will be %s", surface_path, cras)

        return Surface(vertices, triangles, area_mask=None,
                       center_ras=cras, generic_metadata=metadata)
Example #35
def connectivity_2_tvb_fs(subID, subFolder, SC_matrix, reconallFolder='recon_all'):

    # Create the results folder
    if not os.path.exists(subFolder + 'results/'):
        os.mkdir(subFolder + 'results/')

    # Load the SC matrix
    SC = io.loadmat(subFolder + '/mrtrix_68/tracks_68/' + SC_matrix)
    weights = SC['SC_cap_agg_bwflav2']
    delay = SC['SC_dist_agg_mean']

    # Load the required data computed previously by FreeSurfer
    lh_vert, lh_faces = fs.read_geometry(subFolder + '/' + reconallFolder + '/surf/lh.pial')
    rh_vert, rh_faces = fs.read_geometry(subFolder + '/' + reconallFolder + '/surf/rh.pial')
    cortexMesh = {'vertices': np.vstack((lh_vert, rh_vert)),
                  'faces': np.vstack((lh_faces, rh_faces + np.shape(lh_vert)[0]))}

    # Calculate vertex-normals
    cortexMesh['vertexNormals'] = calcVertNormals(cortexMesh['vertices'], cortexMesh['faces'])

    # Load annotation tables
    lh_labels, lh_ctab, lh_names = fs.read_annot(subFolder + '/' + reconallFolder + '/label/lh.aparc.annot')
    rh_labels, rh_ctab, rh_names = fs.read_annot(subFolder + '/' + reconallFolder + '/label/rh.aparc.annot')
    # Remove the CC i.e. correct labeling
    lh_labels[lh_labels > 3] -= 1
    rh_labels[rh_labels > 3] -= 1
    # Combine into single vectors
    rh_labels += np.max(lh_labels)
    rh_labels[rh_labels == np.min(rh_labels)] = -1
    cortexMesh['labels'] = np.hstack((lh_labels, rh_labels))
    # Store label name-strings
    tmp = lh_names[1:4] + lh_names[5:]
    tmp_lh = ['lh_' + s for s in tmp]
    tmp_rh = ['rh_' + s for s in tmp]
    cortexMesh['labelNames'] = tmp_lh + tmp_rh

    # Do the TVB mesh clean
    cortexMesh['vertices'], cortexMesh['faces'], cortexMesh['vertexNormals'], cortexMesh['labels'] = removeFB(
        cortexMesh['vertices'], cortexMesh['faces'], cortexMesh['vertexNormals'], cortexMesh['labels'])

    # Now finally start storing things....
    # ############

    # Define the filenames
    filenames = ['weights.txt', 'centres.txt', 'tract.txt', 'orientation.txt', 'area.txt', 'cortical.txt', 'hemisphere.txt']

    # 1.) Weights
    np.savetxt(subFolder + 'results/' + filenames[0], weights, delimiter=' ', fmt='%1i')

    # 2.) Position
    # Calc region centers
    # centers = np.zeros((weights.shape[0], 3))
    with open(subFolder + 'results/' + filenames[1], 'w') as f:
        for i in range(weights.shape[0]):
            # First get all vertices corresponding to a certain region
            regionVertices = cortexMesh['vertices'][cortexMesh['labels'] == i + 1]
            # Compute the mean of each region
            tmp = np.mean(regionVertices, axis=0)
            # Now look for the nearest neighbors
            idx = np.sum(np.abs(cortexMesh['vertices'] - tmp), axis=1).argmin()
            # Define the nearest vertex as center
            # centers[i, :] = cortexMesh['vertices'][idx, :]
            center = cortexMesh['vertices'][idx, :]
            # Write file
            f.write('{0} {1} {2} {3}\n'.format(cortexMesh['labelNames'][i], str(center[0]), str(center[1]), str(center[2])))
        f.close()

    # 3.) Tract
    np.savetxt(subFolder + 'results/' + filenames[2], delay, delimiter=' ', fmt='%1i')

    # 4.) Orientation
    with open(subFolder + 'results/' + filenames[3], 'w') as f:
        for i in range(weights.shape[0]):
            # Get all vertex-normals corresponding to the vertices of the current region
            regionVertexNormals = cortexMesh['vertexNormals'][cortexMesh['labels'] == i + 1]
            # Compute mean vector
            orientation = np.mean(regionVertexNormals, axis=0)
            # Normalize it
            orientation /= np.sqrt(orientation[0]**2 + orientation[1]**2 + orientation[2]**2)
            # Write to file
            f.write('{0} {1} {2}\n'.format(str(orientation[0]), str(orientation[1]), str(orientation[2])))
        f.close()

    # 5.) Area
    # I'm not quite sure how to get the exact value for the surface in mm^2,
    # so for now I just count the surface vertices corresponding to each region.
    # EDIT: According to the TVB documentation, this attribute is not mandatory
    # for the input!
    with open(subFolder + 'results/' + filenames[4], 'w') as f:
        for i in range(weights.shape[0]):
            area = np.count_nonzero(cortexMesh['labels'] == i)
            f.write('{0}\n'.format(str(area)))
        f.close()

    # 6.) Cortical
    # Since in the default atlas all areas are cortical
    cortical = np.ones((68, 1))
    np.savetxt(subFolder + 'results/' + filenames[5], cortical, delimiter=' ', fmt='%1i')

    # 7.) Hemisphere
    # Again hard-coded for the Desikan-Killiany mask!
    # TODO: Make this flexible!
    hemisphere = np.vstack((np.zeros((34, 1)), np.ones((34, 1))))
    np.savetxt(subFolder + 'results/' + filenames[6], hemisphere, delimiter=' ', fmt='%1i')

    # Assemble the Zip-File
    zf = zipfile.ZipFile(subFolder + 'results/' + subID + '_Connectivity.zip', mode='w')
    for fname in filenames:
        zf.write(subFolder + 'results/' + fname)
        os.remove(subFolder + 'results/' + fname)
    zf.close()
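A hypothetical invocation; the subject folder layout and matrix filename are placeholders, and subFolder is expected to end with a path separator:

connectivity_2_tvb_fs('subject01', '/path/to/subject01/', 'SC_matrix.mat',
                      reconallFolder='recon_all')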