Code example #1
def get_b0_orientation(surf_in,
                       vol_in,
                       write_output=False,
                       path_output="",
                       name_output=""):
    """
    This function computes the angle between surface normals and B0-direction per vertex.
    Inputs:
        *surf_in: input of surface mesh.
        *vol_in: input of corresponding nifti volume.
        *write_output: write output to disk (boolean).
        *path_output: path where to save output.
        *name_output: basename of output file.
    Outputs:
        *theta: angle in radians.
        
    created by Daniel Haenelt
    Date created: 31-07-2020 
    Last modified: 31-07-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry, write_morph_data
    from lib.io.get_filename import get_filename
    from lib.surface.vox2ras import vox2ras
    from lib_gbb.normal import get_normal

    # make subfolders
    if write_output and not os.path.exists(path_output):
        os.makedirs(path_output)

    # get hemi from surface filename
    _, hemi, _ = get_filename(surf_in)

    # load surface
    vtx, fac = read_geometry(surf_in)

    # get transformation matrix
    _, r2v = vox2ras(vol_in)  # ras-tkr -> voxel
    v2s = nb.load(vol_in).affine  # voxel -> scanner-ras
    M = v2s.dot(r2v)

    # apply affine transformation
    vtx = apply_affine(M, vtx)

    # get surface normals
    n = get_normal(vtx, fac)

    # get angle between b0 and surface normals in radians
    theta = np.arccos(np.dot(n, [0, 0, 1]))

    # write output
    if write_output:
        write_morph_data(os.path.join(path_output, hemi + "." + name_output),
                         theta)

    return theta
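A minimal usage sketch (not part of the original listing; the surface and volume paths are placeholders). The returned theta is the per-vertex angle between the surface normal and the z-axis, which the function takes as the B0 direction:

# hypothetical example call
theta = get_b0_orientation("/data/surf/lh.white",   # FreeSurfer surface mesh
                           "/data/func/epi.nii",    # corresponding nifti volume
                           write_output=True,
                           path_output="/data/output",
                           name_output="b0_angle")  # saved as <hemi>.b0_angle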
Code example #2
def calculate_equidistant_epi(input_white, input_pial, input_vol, path_output, n_layers, pathLAYNII,
                              r=[0.4,0.4,0.4], n_iter=2, debug=False):
    """
    This function computes equidistant layers in volume space from input pial and white surfaces
    in freesurfer format using the laynii function LN_GROW_LAYERS. The input surfaces do not have 
    to cover the whole brain. Number of vertices and indices do not have to correspond between 
    surfaces.
    Inputs:
        *input_white: filename of white surface.
        *input_pial: filename of pial surface.
        *input_vol: filename of reference volume.
        *path_output: path where output is written.
        *n_layers: number of generated layers + 1.
        *pathLAYNII: path to laynii folder.
        *r: array of new voxel sizes for reference volume upsampling.
        *n_iter: number of surface upsampling iterations.
        *debug: write out some intermediate files (boolean).
    
    created by Daniel Haenelt
    Date created: 31-05-2020
    Last modified: 24-07-2020
    """
    import os
    import sys
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry
    from skimage import measure
    from nighres.surface import probability_to_levelset
    from scipy.ndimage.morphology import binary_fill_holes
    from collections import Counter
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras
    from lib.surface.upsample_surf_mesh import upsample_surf_mesh
    
    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)
    
    # get hemi from filename
    hemi = os.path.splitext(os.path.basename(input_white))[0]
    if not hemi == "lh" and not hemi == "rh":
        sys.exit("Could not identify hemi from filename!")
    
    # new filenames in output folder
    res_white = os.path.join(path_output,hemi+".white")
    res_pial = os.path.join(path_output,hemi+".pial")
    res_vol = os.path.join(path_output,"epi_upsampled.nii")
    
    # upsample reference volume and input surface
    upsample_volume(input_vol, res_vol, dxyz=r, rmode="Cu")    
    upsample_surf_mesh(input_white, res_white, n_iter, "linear")
    upsample_surf_mesh(input_pial, res_pial, n_iter, "linear")
    
    # get affine ras2vox-tkr transformation to reference volume
    _, ras2vox_tkr = vox2ras(res_vol)
    
    # load surface
    vtx_white, _ = read_geometry(res_white) 
    vtx_pial, _ = read_geometry(res_pial)
    
    # load volume
    vol = nb.load(res_vol)
    
    # apply ras2vox to coords
    vtx_white = np.round(apply_affine(ras2vox_tkr, vtx_white)).astype(int)
    vtx_pial = np.round(apply_affine(ras2vox_tkr, vtx_pial)).astype(int)
    
    # surfaces to lines in volume
    white_array = np.zeros(vol.header["dim"][1:4])
    white_array[vtx_white[:,0],vtx_white[:,1],vtx_white[:,2]] = 1   
    white = nb.Nifti1Image(white_array, vol.affine, vol.header)
    
    pial_array = np.zeros(vol.header["dim"][1:4])
    pial_array[vtx_pial[:,0],vtx_pial[:,1],vtx_pial[:,2]] = 1
    pial = nb.Nifti1Image(pial_array, vol.affine, vol.header)
    
    """
    make wm
    """
    white_label_array = np.zeros_like(white_array)
    for i in range(np.shape(white_label_array)[2]):
        white_label_array[:,:,i] = binary_fill_holes(white_array[:,:,i])
    white_label_array = white_label_array - white_array
    white_label_array = measure.label(white_label_array, connectivity=1)
    white_label_flatten = np.ndarray.flatten(white_label_array)
    white_label_flatten = white_label_flatten[white_label_flatten > 0]
    label_number = Counter(white_label_flatten).most_common(1)[0][0]
    white_label_array[white_label_array != label_number] = 0
    white_label_array[white_label_array > 0] = 1    
    white_label = nb.Nifti1Image(white_label_array, vol.affine, vol.header)
    
    """
    make csf
    """
    pial_label_array = np.zeros_like(pial_array)
    for i in range(np.shape(pial_label_array)[2]):
        pial_label_array[:,:,i] = binary_fill_holes(pial_array[:,:,i])
    pial_label_array = pial_label_array - pial_array
    pial_label_array = measure.label(pial_label_array, connectivity=1)
    pial_label_flatten = np.ndarray.flatten(pial_label_array)
    pial_label_flatten = pial_label_flatten[pial_label_flatten > 0]
    label_number = Counter(pial_label_flatten).most_common(1)[0][0]
    pial_label_array[pial_label_array != label_number] = 0
    pial_label_array[pial_label_array > 0] = 1    
    pial_label_array = pial_label_array + pial_array # add csf line again
    pial_label = nb.Nifti1Image(pial_label_array, vol.affine, vol.header)
       
    """
    make gm
    """
    ribbon_label_array = pial_label_array - white_label_array
    ribbon_label_array[ribbon_label_array != 1] = 0
    ribbon_label = nb.Nifti1Image(ribbon_label_array, vol.affine, vol.header)
    
    """
    make rim
    """
    rim_array = np.zeros_like(ribbon_label_array)
    rim_array[ribbon_label_array == 1] = 3
    rim_array[pial_array == 1] = 1
    rim_array[white_array == 1] = 2

    output = nb.Nifti1Image(rim_array, vol.affine, vol.header)
    nb.save(output, os.path.join(path_output,"rim.nii"))    
    
    """
    grow layers using laynii
    """
    os.chdir(pathLAYNII)
    vinc = 40
    os.system("./LN_GROW_LAYERS" + \
              " -rim " + os.path.join(path_output,"rim.nii") + \
              " -vinc " + str(vinc) + \
              " -N " + str(n_layers) + \
              " -threeD" + \
              " -output " + os.path.join(path_output,"layers.nii"))

    """
    transform label to levelset
    """    
    binary_array = white_label_array + ribbon_label_array + pial_label_array
    binary_array[binary_array != 0] = 1 
    
    layer_array =  nb.load(os.path.join(path_output,"layers.nii")).get_fdata()
    layer_array += 1
    layer_array[layer_array == 1] = 0
    layer_array[white_label_array == 1] = 1 # fill wm
    
    if debug:
        out_debug = nb.Nifti1Image(layer_array, vol.affine, vol.header)
        nb.save(out_debug, os.path.join(path_output,"layer_plus_white_debug.nii"))
    
    level_array = np.zeros(np.append(vol.header["dim"][1:4],n_layers + 1))
    for i in range(n_layers+1):
        print("Probabilty to levelset for layer: "+str(i+1))
        
        temp_layer_array = binary_array.copy()
        temp_layer_array[layer_array > i+1] = 0
        temp_layer = nb.Nifti1Image(temp_layer_array, vol.affine, vol.header)
    
        # write control output
        if debug:
            nb.save(temp_layer, os.path.join(path_output,"layer_"+str(i)+"_debug.nii"))
    
        # transform binary image to levelset image
        res = probability_to_levelset(temp_layer)
        
        # sort levelset image into 4d array
        level_array[:,:,:,i] = res["result"].get_fdata()

    # levelset image
    vol.header["dim"][0] = 4
    vol.header["dim"][4] = n_layers + 1
    levelset = nb.Nifti1Image(level_array, vol.affine, vol.header)
    
    # write niftis
    nb.save(white, os.path.join(path_output,"wm_line.nii"))
    nb.save(pial, os.path.join(path_output,"csf_line.nii"))
    nb.save(white_label,os.path.join(path_output,"wm_label.nii"))
    nb.save(pial_label,os.path.join(path_output,"csf_label.nii"))
    nb.save(ribbon_label, os.path.join(path_output,"gm_label.nii"))
    nb.save(levelset, os.path.join(path_output,"boundaries.nii"))    
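A minimal usage sketch (paths and the LAYNII installation directory are placeholders, not from the original source):

# hypothetical example call: grow 10 equidistant layers between lh.white and lh.pial
calculate_equidistant_epi("/data/surf/lh.white",
                          "/data/surf/lh.pial",
                          "/data/func/epi.nii",
                          "/data/output",
                          n_layers=10,
                          pathLAYNII="/opt/laynii",
                          r=[0.4, 0.4, 0.4],
                          n_iter=2,
                          debug=False)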
Code example #3
def make_mesh(boundary_in,
              ref_in,
              file_out,
              nlayer,
              flip_faces=False,
              niter_smooth=2,
              niter_upsample=0,
              niter_inflate=15):
    """
    This function generates a surface mesh from a levelset image. The surface mesh is smoothed and a
    curvature file is generated. Vertices are in the vertex ras coordinate system. Optionally, the
    mesh can be upsampled and an inflated version of the mesh can be written out. The hemisphere
    has to be indicated as prefix in the output file. If nlayer is set to -1, a 3D levelset image
    can be used as boundary input file.
    Inputs:
        *boundary_in: filename of 4D levelset image.
        *ref_in: filename of reference volume for getting the coordinate transformation.
        *file_out: filename of output surface.
        *nlayer: layer from the 4D boundary input at which the mesh is generated.
        *flip_faces: reverse normal direction of mesh.
        *niter_smooth: number of smoothing iterations.
        *niter_upsample: number of upsampling iterations (is performed if set > 0).
        *niter_inflate: number of inflating iterations (is performed if set > 0).
    
    created by Daniel Haenelt
    Date created: 18-12-2019
    Last modified: 24-07-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import write_geometry
    from nighres.surface import levelset_to_mesh
    from cortex.polyutils import Surface
    from lib.surface.vox2ras import vox2ras
    from lib.surface.smooth_surface import smooth_surface
    from lib.surface.upsample_surf_mesh import upsample_surf_mesh
    from lib.surface.get_curvature import get_curvature
    from lib.surface import inflate_surf_mesh

    # make output folder
    if not os.path.exists(os.path.dirname(file_out)):
        os.makedirs(os.path.dirname(file_out))

    # get levelset boundary from single layer
    boundary = nb.load(boundary_in)
    boundary.header["dim"][0] = 1
    boundary_array = boundary.get_fdata()

    if nlayer != -1:
        boundary_array = boundary_array[:, :, :, nlayer]

    boundary = nb.Nifti1Image(boundary_array, boundary.affine, boundary.header)

    # make mesh
    surf = levelset_to_mesh(boundary,
                            connectivity="18/6",
                            level=0.0,
                            inclusive=True)

    # get vertices and faces
    vtx = surf["result"]["points"]
    fac = surf["result"]["faces"]

    # get vox2ras transformation
    vox2ras_tkr, _ = vox2ras(ref_in)

    # apply vox2ras to vertices
    vtx = apply_affine(vox2ras_tkr, vtx)

    # flip faces
    if flip_faces:
        fac = np.flip(fac, axis=1)

    # write mesh
    write_geometry(file_out, vtx, fac)

    # smooth surface
    smooth_surface(file_out, file_out, niter_smooth)

    # upsample mesh (optionally)
    if niter_upsample != 0:
        upsample_surf_mesh(file_out, file_out, niter_upsample, "linear")

    # print number of vertices and average edge length
    print("number of vertices: " + str(len(vtx[:, 0])))
    print("average edge length: " + str(Surface(vtx, fac).avg_edge_length))

    # get curvature (looks for hemisphere prefix)
    get_curvature(file_out, os.path.dirname(file_out))

    # inflate surface (optionally)
    if niter_inflate != 0:
        inflate_surf_mesh(file_out, file_out + "_inflated", niter_inflate)
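A minimal usage sketch (paths are placeholders); note that the hemisphere has to be given as prefix of file_out:

# hypothetical example call: mesh from layer 0 of a 4D levelset image
make_mesh("/data/output/boundaries.nii",     # 4D levelset image
          "/data/output/epi_upsampled.nii",  # reference volume for the vox2ras-tkr transform
          "/data/output/lh.layer_0",         # output surface with hemisphere prefix
          nlayer=0,
          flip_faces=False,
          niter_smooth=2,
          niter_upsample=0,
          niter_inflate=15)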
Code example #4
def get_thickness(boundaries_in, ref_in, hemi, path_output, r=[0.4, 0.4, 0.4]):
    """
    This function computes the cortical thickness as euclidean distances between vertex ras
    coordinates from outer levelset boundaries.
    Inputs:
        *boundaries_in: filename of 4D levelset image.
        *ref_in: filename of reference volume for coordinate transformation.
        *hemi: hemisphere.
        *path_output: path where output is written.
        *r: destination voxel size after upsampling (performed if not None).
    
    created by Daniel Haenelt
    Date created: 18-12-2019
    Last modified: 24-07-2020
    """
    import os
    import shutil as sh
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nighres.laminar import profile_sampling
    from lib.cmap.generate_coordinate_mapping import generate_coordinate_mapping
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # upsample volume
    if r is not None:
        upsample_volume(ref_in, os.path.join(path_output, "ref.nii"), r, "Cu")
    else:
        sh.copyfile(ref_in, os.path.join(path_output, "ref.nii"))

    # make coordinate mapping
    cmap = generate_coordinate_mapping(boundaries_in, pad=0)
    cmap.header["dim"][0] = 1

    # get voxel to vertex ras coordinate transformation
    vox2ras_tkr, _ = vox2ras(os.path.join(path_output, "ref.nii"))

    # apply transformation to cmap
    ras_array = apply_affine(vox2ras_tkr, cmap.get_fdata())

    # split coordinates into single dimensions
    x_ras = nb.Nifti1Image(ras_array[:, :, :, 0], cmap.affine, cmap.header)
    y_ras = nb.Nifti1Image(ras_array[:, :, :, 1], cmap.affine, cmap.header)
    z_ras = nb.Nifti1Image(ras_array[:, :, :, 2], cmap.affine, cmap.header)

    # get profile sampling
    x_profile = profile_sampling(boundaries_in, x_ras)
    y_profile = profile_sampling(boundaries_in, y_ras)
    z_profile = profile_sampling(boundaries_in, z_ras)

    # compute euclidean distance between outer levelset boundaries
    x_array = x_profile["result"].get_fdata()
    y_array = y_profile["result"].get_fdata()
    z_array = z_profile["result"].get_fdata()

    x_diff_array = np.square(x_array[:, :, :, -1] - x_array[:, :, :, 0])
    y_diff_array = np.square(y_array[:, :, :, -1] - y_array[:, :, :, 0])
    z_diff_array = np.square(z_array[:, :, :, -1] - z_array[:, :, :, 0])

    r = np.sqrt(x_diff_array + y_diff_array + z_diff_array)

    # set unrealistic values to zero
    r[r > 10] = 0

    # hemi suffix
    if hemi == "lh":
        hemi_suffix = "_left"
    elif hemi == "rh":
        hemi_suffix = "_right"
    else:
        hemi_suffix = ""

    # write nifti
    output = nb.Nifti1Image(r, cmap.affine, cmap.header)
    nb.save(output,
            os.path.join(path_output, "thickness" + hemi_suffix + ".nii"))

    # remove temporary reference volume
    os.remove(os.path.join(path_output, "ref.nii"))
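A minimal usage sketch with placeholder paths:

# hypothetical example call: cortical thickness from the outer levelset boundaries
get_thickness("/data/output/boundaries.nii",  # 4D levelset image
              "/data/func/epi.nii",           # reference volume
              hemi="lh",
              path_output="/data/output",
              r=[0.4, 0.4, 0.4])              # upsample reference volume to 0.4 mm iso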
Code example #5
def calculate_equivolumetric_epi2(input_white,
                                  input_pial,
                                  input_vol,
                                  path_output,
                                  n_layers,
                                  r=[0.4, 0.4, 0.4],
                                  n_iter=2):
    """
    This function computes equivolumetric layers in volume space from input pial and white surfaces
    in freesurfer format. The input surfaces do not have to cover the whole brain. Number of 
    vertices and indices do not have to correspond between surfaces.
    Inputs:
        *input_white: filename of white surface.
        *input_pial: filename of pial surface.
        *input_vol: filename of reference volume.
        *path_output: path where output is written.
        *n_layers: number of generated layers + 1.
        *r: array of new voxel sizes for reference volume upsampling.
        *n_iter: number of surface upsampling iterations.
    
    created by Daniel Haenelt
    Date created: 17-12-2019
    Last modified: 24-07-2020
    """
    import os
    import sys
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry
    from skimage import measure
    from nighres.surface import probability_to_levelset
    from nighres.laminar import volumetric_layering
    from scipy.ndimage.morphology import binary_fill_holes
    from collections import Counter
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras
    from lib.surface.upsample_surf_mesh import upsample_surf_mesh

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # get hemi from filename
    hemi = os.path.splitext(os.path.basename(input_white))[0]
    if not hemi == "lh" and not hemi == "rh":
        sys.exit("Could not identify hemi from filename!")

    # new filenames in output folder
    res_white = os.path.join(path_output, hemi + ".white")
    res_pial = os.path.join(path_output, hemi + ".pial")
    res_vol = os.path.join(path_output, "epi_upsampled.nii")

    # upsample reference volume and input surface
    upsample_volume(input_vol, res_vol, dxyz=r, rmode="Cu")
    upsample_surf_mesh(input_white, res_white, n_iter, "linear")
    upsample_surf_mesh(input_pial, res_pial, n_iter, "linear")

    # get affine ras2vox-tkr transformation to reference volume
    _, ras2vox_tkr = vox2ras(res_vol)

    # load surface
    vtx_white, fac_white = read_geometry(res_white)
    vtx_pial, _ = read_geometry(res_pial)

    # load volume
    vol = nb.load(res_vol)

    # apply ras2vox to coords
    vtx_white = np.round(apply_affine(ras2vox_tkr, vtx_white)).astype(int)
    vtx_pial = np.round(apply_affine(ras2vox_tkr, vtx_pial)).astype(int)

    # surfaces to lines in volume
    white_array = np.zeros(vol.header["dim"][1:4])
    white_array[vtx_white[:, 0], vtx_white[:, 1], vtx_white[:, 2]] = 1
    white = nb.Nifti1Image(white_array, vol.affine, vol.header)

    pial_array = np.zeros(vol.header["dim"][1:4])
    pial_array[vtx_pial[:, 0], vtx_pial[:, 1], vtx_pial[:, 2]] = 1
    pial = nb.Nifti1Image(pial_array, vol.affine, vol.header)
    """
    make wm
    """
    white_label_array = np.zeros_like(white_array)
    for i in range(np.shape(white_label_array)[2]):
        white_label_array[:, :, i] = binary_fill_holes(white_array[:, :, i])
    white_label_array = white_label_array - white_array
    white_label_array = measure.label(white_label_array, connectivity=1)
    white_label_flatten = np.ndarray.flatten(white_label_array)
    white_label_flatten = white_label_flatten[white_label_flatten > 0]
    label_number = Counter(white_label_flatten).most_common(1)[0][0]
    white_label_array[white_label_array != label_number] = 0
    white_label_array[white_label_array > 0] = 1
    white_label = nb.Nifti1Image(white_label_array, vol.affine, vol.header)
    """
    make csf
    """
    pial_label_array = np.zeros_like(pial_array)
    for i in range(np.shape(pial_label_array)[2]):
        pial_label_array[:, :, i] = binary_fill_holes(pial_array[:, :, i])
    pial_label_array = pial_label_array - pial_array
    pial_label_array = measure.label(pial_label_array, connectivity=1)
    pial_label_flatten = np.ndarray.flatten(pial_label_array)
    pial_label_flatten = pial_label_flatten[pial_label_flatten > 0]
    label_number = Counter(pial_label_flatten).most_common(1)[0][0]
    pial_label_array[pial_label_array != label_number] = 0
    pial_label_array[pial_label_array > 0] = 1
    #pial_label_array = pial_label_array + pial_array # add csf line again (worsens layering)
    pial_label = nb.Nifti1Image(pial_label_array, vol.affine, vol.header)
    """
    make gm
    """
    ribbon_label_array = pial_label_array - white_label_array
    ribbon_label_array[ribbon_label_array != 1] = 0
    ribbon_label = nb.Nifti1Image(ribbon_label_array, vol.affine, vol.header)
    """
    layers
    """
    csf_level = probability_to_levelset(pial_label)
    wm_level = probability_to_levelset(white_label)

    volumetric_layering(wm_level["result"],
                        csf_level["result"],
                        n_layers=n_layers,
                        topology_lut_dir=None,
                        save_data=True,
                        overwrite=True,
                        output_dir=path_output,
                        file_name="epi")

    # write niftis
    nb.save(white, os.path.join(path_output, "wm_line.nii"))
    nb.save(pial, os.path.join(path_output, "csf_line.nii"))
    nb.save(white_label, os.path.join(path_output, "wm_label.nii"))
    nb.save(pial_label, os.path.join(path_output, "csf_label.nii"))
    nb.save(ribbon_label, os.path.join(path_output, "gm_label.nii"))
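A minimal usage sketch with placeholder paths:

# hypothetical example call: equivolumetric layering between lh.white and lh.pial
calculate_equivolumetric_epi2("/data/surf/lh.white",
                              "/data/surf/lh.pial",
                              "/data/func/epi.nii",
                              "/data/output",
                              n_layers=10,
                              r=[0.4, 0.4, 0.4],
                              n_iter=2)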
Code example #6
File: deform_surface.py  Project: IamPolina/scripts
def deform_surface(input_surf,
                   input_orig,
                   input_deform,
                   input_target,
                   hemi,
                   path_output,
                   input_mask=None,
                   interp_method="nearest",
                   smooth_iter=0,
                   flip_faces=False,
                   cleanup=True):
    """
    This function deforms a surface mesh in freesurfer convention using a coordinate map containing
    voxel coordinates. The computation takes quite a while because in the case of removed vertices,
    i.e. if a mask is given as input, the remaining faces are reindexed.
    Inputs:
        *input_surf: surface mesh to be transformed.
        *input_orig: freesurfer orig.mgz.
        *input_deform: deformation (coordinate mapping).
        *input_target: target volume.
        *hemi: hemisphere.
        *path_output: path where to save output.
        *input_mask: mask volume.
        *interp_method: interpolation method (nearest or trilinear).
        *smooth_iter: number of smoothing iterations applied to final image (if set > 0).
        *flip_faces: reverse normal direction of mesh.
        *cleanup: remove intermediate files.
        
    created by Daniel Haenelt
    Date created: 06-02-2019          
    Last modified: 20-06-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    import shutil as sh
    from nibabel.freesurfer.io import write_geometry, read_geometry
    from nibabel.affines import apply_affine
    from nipype.interfaces.freesurfer import SampleToSurface
    from nipype.interfaces.freesurfer import SmoothTessellation
    from lib.io.get_filename import get_filename
    from lib.io.mgh2nii import mgh2nii
    from lib.surface.vox2ras import vox2ras

    # set freesurfer path environment
    os.environ["SUBJECTS_DIR"] = path_output

    # freesurfer subject
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    sub = "tmp_" + tmp_string

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # mimic freesurfer folder structure (with some additional folder for intermediate files)
    path_sub = os.path.join(path_output, sub)
    path_mri = os.path.join(path_sub, "mri")
    path_surf = os.path.join(path_sub, "surf")

    os.makedirs(path_sub)
    os.makedirs(path_mri)
    os.makedirs(path_surf)

    # get file extension of orig
    _, name_orig, ext_orig = get_filename(input_orig)

    # name of surface file
    name_surf = os.path.basename(input_surf)

    # copy orig, cmap and input surface to mimicked freesurfer folders
    sh.copyfile(input_surf, os.path.join(path_surf, hemi + ".source"))
    if ext_orig != ".mgz":
        mgh2nii(input_orig, path_mri, "mgz")
        os.rename(os.path.join(path_mri, name_orig + ".mgz"),
                  os.path.join(path_mri, "orig.mgz"))
    else:
        sh.copyfile(input_orig, os.path.join(path_mri, "orig.mgz"))

    # read surface geometry
    vtx, fac = read_geometry(input_surf)

    # get affine vox2ras-tkr transformation to target volume
    vox2ras_tkr, _ = vox2ras(input_target)

    # divide coordinate mapping into its x, y and z components
    cmap_img = nb.load(input_deform)
    cmap_img.header["dim"][0] = 3
    cmap_img.header["dim"][4] = 1

    # apply vox2ras transformation to coordinate mappings
    cmap_array = cmap_img.get_fdata()
    cmap_array = apply_affine(vox2ras_tkr, cmap_array)

    components = ["x", "y", "z"]
    vtx_new = np.zeros([len(vtx), 3])
    for i in range(len(components)):
        temp_array = cmap_array[:, :, :, i]
        temp_img = nb.Nifti1Image(temp_array, cmap_img.affine, cmap_img.header)
        nb.save(temp_img, os.path.join(path_mri,
                                       components[i] + "_deform.nii"))

        # mri_vol2surf
        sampler = SampleToSurface()
        sampler.inputs.subject_id = sub
        sampler.inputs.reg_header = True
        sampler.inputs.hemi = hemi
        sampler.inputs.source_file = os.path.join(
            path_mri, components[i] + "_deform.nii")
        sampler.inputs.surface = "source"
        sampler.inputs.sampling_method = "point"
        sampler.inputs.sampling_range = 0
        sampler.inputs.sampling_units = "mm"
        sampler.inputs.interp_method = interp_method
        sampler.inputs.out_type = "mgh"
        sampler.inputs.out_file = os.path.join(
            path_surf, hemi + "." + components[i] + "_sampled.mgh")
        sampler.run()

        data_img = nb.load(
            os.path.join(path_surf,
                         hemi + "." + components[i] + "_sampled.mgh"))
        vtx_new[:, i] = np.squeeze(data_img.get_fdata())

    if input_mask:

        # mri_vol2surf (background)
        sampler = SampleToSurface()
        sampler.inputs.subject_id = sub
        sampler.inputs.reg_header = True
        sampler.inputs.hemi = hemi
        sampler.inputs.source_file = input_mask
        sampler.inputs.surface = "source"
        sampler.inputs.sampling_method = "point"
        sampler.inputs.sampling_range = 0
        sampler.inputs.sampling_units = "mm"
        sampler.inputs.interp_method = "nearest"
        sampler.inputs.out_type = "mgh"
        sampler.inputs.out_file = os.path.join(path_surf,
                                               hemi + ".background.mgh")
        sampler.run()

        # get new indices
        background_list = nb.load(
            os.path.join(path_surf, hemi + ".background.mgh")).get_fdata()
        background_list = np.squeeze(background_list).astype(int)

        # only keep vertex indices within the slab
        ind_keep = np.arange(0, len(vtx[:, 0]))
        ind_keep[background_list == 0] = -1
        ind_keep = ind_keep[ind_keep != -1]

        # get new vertices
        vtx_new = vtx_new[ind_keep, :]

        # get new faces
        fac_keep = np.zeros(len(fac[:, 0]))
        fac_keep += np.in1d(fac[:, 0], ind_keep)
        fac_keep += np.in1d(fac[:, 1], ind_keep)
        fac_keep += np.in1d(fac[:, 2], ind_keep)
        fac_temp = fac[fac_keep == 3, :]
        fac_new = fac[fac_keep == 3, :]

        # sort new faces
        c_step = 0
        n_step = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
        for i in range(len(ind_keep)):
            temp = np.where(ind_keep[i] == fac_temp)
            fac_new[temp] = i

            # print status
            counter = np.floor(i / len(ind_keep) * 100).astype(int)
            if counter == n_step[c_step]:
                print("sort faces: " + str(counter) + " %")
                c_step += 1

        # remove singularities (vertices without faces)
        fac_counter = 0
        fac_old = fac_new.copy()
        n_singularity = np.zeros(len(vtx_new))
        c_step = 0
        for i in range(len(vtx_new)):
            row, col = np.where(fac_old == i)

            n_singularity[i] = len(row)
            if not n_singularity[i]:
                fac_temp = fac_new.copy()
                fac_temp[fac_temp >= fac_counter] = -1
                fac_temp[fac_temp != -1] = 0
                fac_new += fac_temp
                fac_counter -= 1

            # update face counter
            fac_counter += 1

            # print status
            counter = np.floor(i / len(vtx_new) * 100).astype(int)
            if counter == n_step[c_step]:
                print("clean vertices: " + str(counter) + " %")
                c_step += 1

        # vertices and indices without singularities
        vtx_new = vtx_new[n_singularity != 0]
        ind_keep = ind_keep[n_singularity != 0]

        # save index mapping between original and transformed surface
        np.savetxt(os.path.join(path_output, name_surf + "_ind.txt"),
                   ind_keep,
                   fmt='%d')
    else:
        fac_new = fac

    # flip faces
    if flip_faces:
        fac_new = np.flip(fac_new, axis=1)

    # write new surface
    write_geometry(os.path.join(path_output, name_surf + "_def"), vtx_new,
                   fac_new)

    # smooth surface
    if smooth_iter:
        smooth = SmoothTessellation()
        smooth.inputs.in_file = os.path.join(path_output, name_surf + "_def")
        smooth.inputs.out_file = os.path.join(path_output,
                                              name_surf + "_def_smooth")
        smooth.inputs.smoothing_iterations = smooth_iter
        smooth.inputs.disable_estimates = True
        smooth.run()

    # delete intermediate files
    if cleanup:
        sh.rmtree(path_sub, ignore_errors=True)
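A minimal usage sketch (all paths are placeholders; the coordinate mapping and mask volumes are assumed to exist):

# hypothetical example call: deform lh.white with a voxel coordinate mapping
deform_surface(input_surf="/data/surf/lh.white",
               input_orig="/data/mri/orig.mgz",
               input_deform="/data/deform/target2orig.nii",  # 4D coordinate mapping
               input_target="/data/func/epi.nii",
               hemi="lh",
               path_output="/data/output",
               input_mask="/data/func/mask.nii",
               interp_method="trilinear",
               smooth_iter=2,
               flip_faces=False,
               cleanup=True)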
Code example #7
# imports needed by this script fragment (path_input, pathLIB, subjects_dir and the
# remaining *_surf_* .mat filenames are assumed to be defined earlier in the script)
import os
from os.path import join
from scipy.io import loadmat
from lib.surface.vox2ras import vox2ras

output_surf_vox = "boundaries_out_vox.mat"
output_cmap = "cmap_rBBR.nii"

lh_white = join(path_input, "lh.white")
rh_white = join(path_input, "rh.white")
lh_pial = join(path_input, "lh.pial")
rh_pial = join(path_input, "rh.pial")
ref_vol = join(path_input, "reference_volume.nii")
in_surf_mat_ras = join(path_input, input_surf_ras)
in_surf_mat_vox = join(path_input, input_surf_vox)
out_surf_mat_ras = join(subjects_dir, output_surf_ras)
out_surf_mat_vox = join(subjects_dir, output_surf_vox)
input_cfg = join(path_input, "cfg.mat")

# get affine vox2ras-tkr and ras2vox-tkr transformation to reference volume
vox2ras_tkr, ras2vox_tkr = vox2ras(ref_vol)

# surf2mat
cwd = os.getcwd()
os.chdir(join(pathLIB, "io"))
os.system("matlab" + \
          " -nodisplay -nodesktop -r " + \
          "\"surf2mat(\'{0}\', \'{1}\', \'{2}\', \'{3}\', \'{4}\'); exit;\"". \
          format(lh_white, rh_white, lh_pial, rh_pial, in_surf_mat_ras))
os.chdir(cwd)

# ras2vox
data = loadmat(in_surf_mat_ras)

# apply ras2vox transformation to vertices
for i in range(2):
Code example #8
def calculate_equivolumetric_epi(input_white,
                                 input_pial,
                                 input_vol,
                                 path_output,
                                 n_start,
                                 n_end,
                                 n_layers,
                                 r=[0.4, 0.4, 0.4],
                                 n_iter=2):
    """
    This function computes equivolumetric layers in volume space from input pial and white surfaces
    in freesurfer format. The input surfaces do not have to cover the whole brain. Number of 
    vertices and indices do not have to correspond between surfaces.
    Inputs:
        *input_white: filename of white surface.
        *input_pial: filename of pial surface.
        *input_vol: filename of reference volume.
        *path_output: path where output is written.
        *n_start: number of slices (axis=2) to discard at the beginning of the upsampled volume.
        *n_end: number of slices (axis=2) to discard at the end of the upsampled volume.
        *n_layers: number of generated layers + 1.
        *r: array of new voxel sizes for reference volume upsampling.
        *n_iter: number of surface upsampling iterations.
    
    created by Daniel Haenelt
    Date created: 17-12-2019
    Last modified: 24-07-2020
    """
    import os
    import sys
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry
    from skimage import measure
    from nighres.surface import probability_to_levelset
    from nighres.laminar import volumetric_layering
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras
    from lib.surface.upsample_surf_mesh import upsample_surf_mesh

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # get hemi from filename
    hemi = os.path.splitext(os.path.basename(input_white))[0]
    if not hemi == "lh" or hemi == "rh":
        sys.exit("Could not identify hemi from filename!")

    # new filenames in output folder
    res_white = os.path.join(path_output, hemi + ".white")
    res_pial = os.path.join(path_output, hemi + ".pial")
    res_vol = os.path.join(path_output, "epi_upsampled.nii")

    # upsample reference volume and input surface
    upsample_volume(input_vol, res_vol, dxyz=r, rmode="Cu")
    upsample_surf_mesh(input_white, res_white, n_iter, "linear")
    upsample_surf_mesh(input_pial, res_pial, n_iter, "linear")

    # get affine ras2vox-tkr transformation to reference volume
    _, ras2vox_tkr = vox2ras(res_vol)

    # load surface
    vtx_white, fac_white = read_geometry(res_white)
    vtx_pial, _ = read_geometry(res_pial)

    # load volume
    vol = nb.load(res_vol)

    # apply ras2vox to coords
    vtx_white = np.round(apply_affine(ras2vox_tkr, vtx_white)).astype(int)
    vtx_pial = np.round(apply_affine(ras2vox_tkr, vtx_pial)).astype(int)

    # surfaces to lines in volume
    white_array = np.zeros(vol.header["dim"][1:4])
    white_array[vtx_white[:, 0], vtx_white[:, 1], vtx_white[:, 2]] = 1
    white = nb.Nifti1Image(white_array, vol.affine, vol.header)

    pial_array = np.zeros(vol.header["dim"][1:4])
    pial_array[vtx_pial[:, 0], vtx_pial[:, 1], vtx_pial[:, 2]] = 1
    pial = nb.Nifti1Image(pial_array, vol.affine, vol.header)

    # lines to levelset
    white_level = probability_to_levelset(white)
    white_level_array = white_level["result"].get_fdata()

    pial_level = probability_to_levelset(pial)
    pial_level_array = pial_level["result"].get_fdata()
    """
    make wm
    """
    white_label_array = np.zeros_like(white_level_array)
    white_label_array[white_level_array > pial_level_array] = 1
    white_label_array[white_label_array != 1] = 0
    white_label_array -= 1
    white_label_array = np.abs(white_label_array).astype(int)
    white_label_array = white_label_array - white_array
    white_label_array[white_label_array < 0] = 0
    if n_start > 0:
        white_label_array[:, :, :n_start] = 0
    if n_end > 0:
        white_label_array[:, :, -n_end:] = 0
    white_label_array = measure.label(white_label_array, connectivity=1)
    white_label_array[white_label_array == 1] = 0
    white_label_array[white_label_array > 0] = 1
    white_label = nb.Nifti1Image(white_label_array, vol.affine, vol.header)
    """
    make csf
    """
    pial_label_array = np.zeros_like(pial_level_array)
    pial_label_array[pial_level_array < white_level_array] = 1
    pial_label_array[pial_label_array != 1] = 0
    pial_label_array = np.abs(pial_label_array).astype(int)
    pial_label_array = pial_label_array - pial_array
    pial_label_array[pial_label_array < 0] = 0
    if n_start > 0:
        pial_label_array[:, :, :n_start] = 0
    if n_end > 0:
        pial_label_array[:, :, -n_end:] = 0
    pial_label_array = measure.label(pial_label_array, connectivity=1)
    pial_label_array[pial_label_array != 1] = 0
    pial_label_array -= 1
    pial_label_array = np.abs(pial_label_array)
    pial_label_array[pial_array == 1] = 0
    if n_start > 0:
        pial_label_array[:, :, :n_start] = 0
    if n_end > 0:
        pial_label_array[:, :, -n_end:] = 0
    pial_label = nb.Nifti1Image(pial_label_array, vol.affine, vol.header)
    """
    make gm
    """
    ribbon_label_array = pial_label_array.copy()
    ribbon_label_array -= 1
    ribbon_label_array = np.abs(ribbon_label_array)
    ribbon_label_array = ribbon_label_array + white_label_array
    ribbon_label_array -= 1
    ribbon_label_array = np.abs(ribbon_label_array).astype(int)
    if n_start > 0:
        ribbon_label_array[:, :, :n_start] = 0
    if n_end > 0:
        ribbon_label_array[:, :, -n_end:] = 0
    ribbon_label = nb.Nifti1Image(ribbon_label_array, vol.affine, vol.header)
    """
    layers
    """
    #csf_level = probability_to_levelset(pial_label)
    #wm_level = probability_to_levelset(white_label)

    #volumetric_layering(wm_level["result"],
    #                    csf_level["result"],
    #                    n_layers=n_layers,
    #                    topology_lut_dir=None,
    #                    save_data=True,
    #                    overwrite=True,
    #                    output_dir=path_output,
    #                    file_name="epi")

    # write niftis
    nb.save(white, os.path.join(path_output, "wm_line.nii"))
    nb.save(pial, os.path.join(path_output, "csf_line.nii"))
    nb.save(white_label, os.path.join(path_output, "wm_label.nii"))
    nb.save(pial_label, os.path.join(path_output, "csf_label.nii"))
    nb.save(ribbon_label, os.path.join(path_output, "gm_label.nii"))
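A minimal usage sketch with placeholder paths; n_start and n_end discard slices along axis 2 of the upsampled volume:

# hypothetical example call: tissue label images for 10 layers, cropping 5 slices at each end
calculate_equivolumetric_epi("/data/surf/lh.white",
                             "/data/surf/lh.pial",
                             "/data/func/epi.nii",
                             "/data/output",
                             n_start=5,
                             n_end=5,
                             n_layers=10,
                             r=[0.4, 0.4, 0.4],
                             n_iter=2)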