def mesh_sampling_layer(surf_in,
                        file_in,
                        boundaries_in,
                        path_output,
                        layer,
                        r=[0.4, 0.4, 0.4],
                        interpolation="Cu",
                        average_layer=False,
                        write_profile=True,
                        write_upsampled=True):
    """
    This function samples data from an image volume onto a surface mesh at specific layers defined
    by a levelset image. If average_layer is true, the parameter layer should only contain two
    integers which denote the start and end layer.
    Inputs:
        *surf_in: filename of input surface mesh.
        *file_in: filename of input volume from which data is sampled.
        *boundaries_in: filename of 4D levelset image.
        *path_output: path where output is written.
        *layer: which layers to sample (array of integers).
        *r: destination voxel size after upsampling (performed if not None).
        *interpolation: interpolation method for upsampling of the file from which data is sampled.
        *average_layer: average across cortex.
        *write_profile: write sampled profile.
        *write_upsampled: write upsampled file.
    
    created by Daniel Haenelt
    Date created: 18-12-2019
    Last modified: 24-07-2020
    """
    import os
    import sys
    import shutil as sh
    import numpy as np
    import nibabel as nb
    from os.path import join, exists, basename, splitext
    from nighres.laminar import profile_sampling
    from lib.io.get_filename import get_filename
    from lib.utils.upsample_volume import upsample_volume
    from lib.mapping import map2surface

    # make output folder
    if not exists(path_output):
        os.makedirs(path_output)

    # filenames
    _, name_file, ext_file = get_filename(file_in)
    _, hemi, name_surf = get_filename(surf_in)

    name_surf = name_surf[1:]
    name_profile = splitext(basename(file_in))[0] + "_profile"

    # check hemi
    if hemi not in ["lh", "rh"]:
        sys.exit("Could not identify hemi from filename!")

    # upsample volume
    if r is not None:
        name_file = name_file + "_upsampled"
        upsample_volume(file_in, join(path_output, name_file + ext_file), r,
                        interpolation)
    else:
        if file_in != join(path_output, name_file + ext_file):
            sh.copyfile(file_in, join(path_output, name_file + ext_file))

    # get profile sampling
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    profile = profile_sampling(boundaries_in,
                               join(path_output, name_file + ext_file),
                               save_data=write_profile,
                               overwrite=write_profile,
                               output_dir=path_output,
                               file_name="profile_" + tmp_string)

    # rename profile sampling output
    if write_profile:
        os.rename(
            join(path_output, "profile_" + tmp_string + "_lps-data.nii.gz"),
            join(path_output, name_profile + ".nii.gz"))

    # load profile
    if write_profile:
        data = nb.load(join(path_output, name_profile + ".nii.gz"))
    else:
        data = profile["result"]
    data.header["dim"][0] = 3

    # do mapping
    tmp2 = np.random.randint(0, 10, 5)
    tmp2_string = ''.join(str(i) for i in tmp2)
    if not average_layer:

        for i in range(len(layer)):
            data_array = data.get_fdata()[:, :, :, layer[i]]
            out = nb.Nifti1Image(data_array, data.affine, data.header)
            nb.save(out, join(path_output, "temp_" + tmp2_string + ".nii"))

            # do the mapping
            map2surface(surf_in,
                        join(path_output, "temp_" + tmp2_string + ".nii"),
                        hemi,
                        path_output,
                        input_white=None,
                        input_ind=None,
                        cleanup=True)

            # rename mapping file
            os.rename(
                join(
                    path_output, hemi + ".temp_" + tmp2_string + "_" +
                    name_surf + "_def.mgh"),
                join(
                    path_output, hemi + "." + name_file + "_layer" +
                    str(layer[i]) + ".mgh"))

    else:

        if len(layer) != 2:
            sys.exit("For averaging, layer should only contain two elements!")

        data_array = data.get_fdata()[:, :, :, layer[0]:layer[1]]
        data_array = np.mean(data_array, axis=3)
        out = nb.Nifti1Image(data_array, data.affine, data.header)
        nb.save(out, join(path_output, "temp_" + tmp2_string + ".nii"))

        # do the mapping
        map2surface(surf_in,
                    join(path_output, "temp_" + tmp2_string + ".nii"),
                    hemi,
                    path_output,
                    input_white=None,
                    input_ind=None,
                    cleanup=True)

        # rename mapping file
        os.rename(
            join(path_output,
                 hemi + ".temp_" + tmp2_string + "_" + name_surf + "_def.mgh"),
            join(
                path_output, hemi + "." + name_file + "_avg_layer" +
                str(layer[0]) + "_" + str(layer[1]) + ".mgh"))

    # clean temp
    os.remove(join(path_output, "temp_" + tmp2_string + ".nii"))

    # clean file
    if not write_upsampled:
        os.remove(join(path_output, name_file + ext_file))
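
# Usage sketch for mesh_sampling_layer (hypothetical paths; assumes a FreeSurfer-style
# surface whose filename starts with the hemisphere prefix, e.g. lh.layer_5, and a 4D
# levelset image created beforehand, e.g. with nighres volumetric layering):
#
# mesh_sampling_layer(surf_in="/data/surf/lh.layer_5",
#                     file_in="/data/func/mean_epi.nii",
#                     boundaries_in="/data/layer/boundaries.nii",
#                     path_output="/data/sampling",
#                     layer=[2, 8],
#                     r=[0.4, 0.4, 0.4],
#                     interpolation="Cu",
#                     average_layer=True,
#                     write_profile=True,
#                     write_upsampled=True)
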
def get_thickness(boundaries_in, ref_in, hemi, path_output, r=[0.4, 0.4, 0.4]):
    """
    This function computes the cortical thickness as the Euclidean distance between vertex RAS
    coordinates of the outer levelset boundaries.
    Inputs:
        *boundaries_in: filename of 4D levelset image.
        *ref_in: filename of reference volume for coordinate transformation.
        *hemi: hemisphere.
        *path_output: path where output is written.
        *r: destination voxel size after upsampling (performed if not None).
    
    created by Daniel Haenelt
    Date created: 18-12-2019
    Last modified: 24-07-2020
    """
    import os
    import shutil as sh
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nighres.laminar import profile_sampling
    from lib.cmap.generate_coordinate_mapping import generate_coordinate_mapping
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # upsample volume
    if r is not None:
        upsample_volume(ref_in, os.path.join(path_output, "ref.nii"), r, "Cu")
    else:
        sh.copyfile(ref_in, os.path.join(path_output, "ref.nii"))

    # make coordinate mapping
    cmap = generate_coordinate_mapping(boundaries_in, pad=0)
    cmap.header["dim"][0] = 1

    # get voxel to vertex ras coordinate transformation
    vox2ras_tkr, _ = vox2ras(os.path.join(path_output, "ref.nii"))

    # apply transformation to cmap
    ras_array = apply_affine(vox2ras_tkr, cmap.get_fdata())

    # split coordinates into single dimensions
    x_ras = nb.Nifti1Image(ras_array[:, :, :, 0], cmap.affine, cmap.header)
    y_ras = nb.Nifti1Image(ras_array[:, :, :, 1], cmap.affine, cmap.header)
    z_ras = nb.Nifti1Image(ras_array[:, :, :, 2], cmap.affine, cmap.header)

    # get profile sampling
    x_profile = profile_sampling(boundaries_in, x_ras)
    y_profile = profile_sampling(boundaries_in, y_ras)
    z_profile = profile_sampling(boundaries_in, z_ras)

    # compute euclidean distance between outer levelset boundaries
    x_array = x_profile["result"].get_fdata()
    y_array = y_profile["result"].get_fdata()
    z_array = z_profile["result"].get_fdata()

    x_diff_array = np.square(x_array[:, :, :, -1] - x_array[:, :, :, 0])
    y_diff_array = np.square(y_array[:, :, :, -1] - y_array[:, :, :, 0])
    z_diff_array = np.square(z_array[:, :, :, -1] - z_array[:, :, :, 0])

    r = np.sqrt(x_diff_array + y_diff_array + z_diff_array)

    # set unrealistic values to zero
    r[r > 10] = 0

    # hemi suffix
    if hemi == "lh":
        hemi_suffix = "_left"
    elif hemi == "rh":
        hemi_suffix = "_right"
    else:
        hemi_suffix = ""  # no suffix for unknown hemispheres to keep the output filename valid

    # write nifti
    output = nb.Nifti1Image(r, cmap.affine, cmap.header)
    nb.save(output,
            os.path.join(path_output, "thickness" + hemi_suffix + ".nii"))

    # remove temporary reference volume
    os.remove(os.path.join(path_output, "ref.nii"))
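
# Usage sketch for get_thickness (hypothetical paths; the levelset image is assumed to
# stem from the hemisphere stated by the hemi argument):
#
# get_thickness(boundaries_in="/data/layer/boundaries_left.nii",
#               ref_in="/data/func/mean_epi.nii",
#               hemi="lh",
#               path_output="/data/thickness",
#               r=[0.4, 0.4, 0.4])
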
def calculate_equidistant_epi(input_white, input_pial, input_vol, path_output, n_layers, pathLAYNII,
                              r=[0.4,0.4,0.4], n_iter=2, debug=False):
    """
    This function computes equidistant layers in volume space from input pial and white surfaces
    in freesurfer format using the laynii function LN_GROW_LAYERS. The input surfaces do not have 
    to cover the whole brain. Number of vertices and indices do not have to correspond between 
    surfaces.
    Inputs:
        *input_white: filename of white surface.
        *input_pial: filename of pial surface.
        *input_vol: filename of reference volume.
        *path_output: path where output is written.
        *n_layers: number of generated layers + 1.
        *pathLAYNII: path to laynii folder.
        *r: array of new voxel sizes for reference volume upsampling.
        *n_iter: number of surface upsampling iterations.
        *debug: write out some intermediate files (boolean).
    
    created by Daniel Haenelt
    Date created: 31-05-2020
    Last modified: 24-07-2020
    """
    import os
    import sys
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry
    from skimage import measure
    from nighres.surface import probability_to_levelset
    from scipy.ndimage.morphology import binary_fill_holes
    from collections import Counter
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras
    from lib.surface.upsample_surf_mesh import upsample_surf_mesh
    
    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)
    
    # get hemi from filename
    hemi = os.path.splitext(os.path.basename(input_white))[0]
    if hemi not in ["lh", "rh"]:
        sys.exit("Could not identify hemi from filename!")
    
    # new filenames in output folder
    res_white = os.path.join(path_output,hemi+".white")
    res_pial = os.path.join(path_output,hemi+".pial")
    res_vol = os.path.join(path_output,"epi_upsampled.nii")
    
    # upsample reference volume and input surface
    upsample_volume(input_vol, res_vol, dxyz=r, rmode="Cu")    
    upsample_surf_mesh(input_white, res_white, n_iter, "linear")
    upsample_surf_mesh(input_pial, res_pial, n_iter, "linear")
    
    # get affine ras2vox-tkr transformation to reference volume
    _, ras2vox_tkr = vox2ras(res_vol)
    
    # load surface
    vtx_white, _ = read_geometry(res_white) 
    vtx_pial, _ = read_geometry(res_pial)
    
    # load volume
    vol = nb.load(res_vol)
    
    # apply ras2vox to coords
    vtx_white = np.round(apply_affine(ras2vox_tkr, vtx_white)).astype(int)
    vtx_pial = np.round(apply_affine(ras2vox_tkr, vtx_pial)).astype(int)
    
    # surfaces to lines in volume
    white_array = np.zeros(vol.header["dim"][1:4])
    white_array[vtx_white[:,0],vtx_white[:,1],vtx_white[:,2]] = 1   
    white = nb.Nifti1Image(white_array, vol.affine, vol.header)
    
    pial_array = np.zeros(vol.header["dim"][1:4])
    pial_array[vtx_pial[:,0],vtx_pial[:,1],vtx_pial[:,2]] = 1
    pial = nb.Nifti1Image(pial_array, vol.affine, vol.header)
    
    """
    make wm
    """
    white_label_array = np.zeros_like(white_array)
    for i in range(np.shape(white_label_array)[2]):
        white_label_array[:,:,i] = binary_fill_holes(white_array[:,:,i])
    white_label_array = white_label_array - white_array
    white_label_array = measure.label(white_label_array, connectivity=1)
    white_label_flatten = np.ndarray.flatten(white_label_array)
    white_label_flatten = white_label_flatten[white_label_flatten > 0]
    label_number = Counter(white_label_flatten).most_common(1)[0][0]
    white_label_array[white_label_array != label_number] = 0
    white_label_array[white_label_array > 0] = 1    
    white_label = nb.Nifti1Image(white_label_array, vol.affine, vol.header)
    
    """
    make csf
    """
    pial_label_array = np.zeros_like(pial_array)
    for i in range(np.shape(pial_label_array)[2]):
        pial_label_array[:,:,i] = binary_fill_holes(pial_array[:,:,i])
    pial_label_array = pial_label_array - pial_array
    pial_label_array = measure.label(pial_label_array, connectivity=1)
    pial_label_flatten = np.ndarray.flatten(pial_label_array)
    pial_label_flatten = pial_label_flatten[pial_label_flatten > 0]
    label_number = Counter(pial_label_flatten).most_common(1)[0][0]
    pial_label_array[pial_label_array != label_number] = 0
    pial_label_array[pial_label_array > 0] = 1    
    pial_label_array = pial_label_array + pial_array # add csf line again
    pial_label = nb.Nifti1Image(pial_label_array, vol.affine, vol.header)
       
    """
    make gm
    """
    ribbon_label_array = pial_label_array - white_label_array
    ribbon_label_array[ribbon_label_array != 1] = 0
    ribbon_label = nb.Nifti1Image(ribbon_label_array, vol.affine, vol.header)
    
    """
    make rim
    """
    rim_array = np.zeros_like(ribbon_label_array)
    rim_array[ribbon_label_array == 1] = 3
    rim_array[pial_array == 1] = 1
    rim_array[white_array == 1] = 2

    output = nb.Nifti1Image(rim_array, vol.affine, vol.header)
    nb.save(output, os.path.join(path_output,"rim.nii"))    
    
    """
    grow layers using laynii
    """
    os.chdir(pathLAYNII)
    vinc = 40
    os.system("./LN_GROW_LAYERS" + \
              " -rim " + os.path.join(path_output,"rim.nii") + \
              " -vinc " + str(vinc) + \
              " -N " + str(n_layers) + \
              " -threeD" + \
              " -output " + os.path.join(path_output,"layers.nii"))

    """
    tranform label to levelset
    """    
    binary_array = white_label_array + ribbon_label_array + pial_label_array
    binary_array[binary_array != 0] = 1 
    
    layer_array =  nb.load(os.path.join(path_output,"layers.nii")).get_fdata()
    layer_array += 1
    layer_array[layer_array == 1] = 0
    layer_array[white_label_array == 1] = 1 # fill wm
    
    if debug:
        out_debug = nb.Nifti1Image(layer_array, vol.affine, vol.header)
        nb.save(out_debug, os.path.join(path_output,"layer_plus_white_debug.nii"))
    
    level_array = np.zeros(np.append(vol.header["dim"][1:4],n_layers + 1))
    for i in range(n_layers+1):
        print("Probabilty to levelset for layer: "+str(i+1))
        
        temp_layer_array = binary_array.copy()
        temp_layer_array[layer_array > i+1] = 0
        temp_layer = nb.Nifti1Image(temp_layer_array, vol.affine, vol.header)
    
        # write control output
        if debug:
            nb.save(temp_layer, os.path.join(path_output,"layer_"+str(i)+"_debug.nii"))
    
        # transform binary image to levelset image
        res = probability_to_levelset(temp_layer)
        
        # sort levelset image into 4d array
        level_array[:,:,:,i] = res["result"].get_fdata()

    # levelset image
    vol.header["dim"][0] = 4
    vol.header["dim"][4] = n_layers + 1
    levelset = nb.Nifti1Image(level_array, vol.affine, vol.header)
    
    # write niftis
    nb.save(white, os.path.join(path_output,"wm_line.nii"))
    nb.save(pial, os.path.join(path_output,"csf_line.nii"))
    nb.save(white_label,os.path.join(path_output,"wm_label.nii"))
    nb.save(pial_label,os.path.join(path_output,"csf_label.nii"))
    nb.save(ribbon_label, os.path.join(path_output,"gm_label.nii"))
    nb.save(levelset, os.path.join(path_output,"boundaries.nii"))    
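
# Usage sketch for calculate_equidistant_epi (hypothetical paths; pathLAYNII is assumed
# to point to a local LAYNII installation containing the LN_GROW_LAYERS binary):
#
# calculate_equidistant_epi(input_white="/data/surf/lh.white",
#                           input_pial="/data/surf/lh.pial",
#                           input_vol="/data/func/mean_epi.nii",
#                           path_output="/data/layer_equidistant",
#                           n_layers=10,
#                           pathLAYNII="/opt/laynii",
#                           r=[0.4, 0.4, 0.4],
#                           n_iter=2,
#                           debug=False)
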
def calculate_equivolumetric_epi2(input_white,
                                  input_pial,
                                  input_vol,
                                  path_output,
                                  n_layers,
                                  r=[0.4, 0.4, 0.4],
                                  n_iter=2):
    """
    This function computes equivolumetric layers in volume space from input pial and white surfaces
    in freesurfer format. The input surfaces do not have to cover the whole brain. Number of 
    vertices and indices do not have to correspond between surfaces.
    Inputs:
        *input_white: filename of white surface.
        *input_pial: filename of pial surface.
        *input_vol: filename of reference volume.
        *path_output: path where output is written.
        *n_layers: number of generated layers + 1.
        *r: array of new voxel sizes for reference volume upsampling.
        *n_iter: number of surface upsampling iterations.
    
    created by Daniel Haenelt
    Date created: 17-12-2019
    Last modified: 24-07-2020
    """
    import os
    import sys
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry
    from skimage import measure
    from nighres.surface import probability_to_levelset
    from nighres.laminar import volumetric_layering
    from scipy.ndimage.morphology import binary_fill_holes
    from collections import Counter
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras
    from lib.surface.upsample_surf_mesh import upsample_surf_mesh

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # get hemi from filename
    hemi = os.path.splitext(os.path.basename(input_white))[0]
    if hemi not in ["lh", "rh"]:
        sys.exit("Could not identify hemi from filename!")

    # new filenames in output folder
    res_white = os.path.join(path_output, hemi + ".white")
    res_pial = os.path.join(path_output, hemi + ".pial")
    res_vol = os.path.join(path_output, "epi_upsampled.nii")

    # upsample reference volume and input surface
    upsample_volume(input_vol, res_vol, dxyz=r, rmode="Cu")
    upsample_surf_mesh(input_white, res_white, n_iter, "linear")
    upsample_surf_mesh(input_pial, res_pial, n_iter, "linear")

    # get affine ras2vox-tkr transformation to reference volume
    _, ras2vox_tkr = vox2ras(res_vol)

    # load surface
    vtx_white, _ = read_geometry(res_white)
    vtx_pial, _ = read_geometry(res_pial)

    # load volume
    vol = nb.load(res_vol)

    # apply ras2vox to coords
    vtx_white = np.round(apply_affine(ras2vox_tkr, vtx_white)).astype(int)
    vtx_pial = np.round(apply_affine(ras2vox_tkr, vtx_pial)).astype(int)

    # surfaces to lines in volume
    white_array = np.zeros(vol.header["dim"][1:4])
    white_array[vtx_white[:, 0], vtx_white[:, 1], vtx_white[:, 2]] = 1
    white = nb.Nifti1Image(white_array, vol.affine, vol.header)

    pial_array = np.zeros(vol.header["dim"][1:4])
    pial_array[vtx_pial[:, 0], vtx_pial[:, 1], vtx_pial[:, 2]] = 1
    pial = nb.Nifti1Image(pial_array, vol.affine, vol.header)
    """
    make wm
    """
    white_label_array = np.zeros_like(white_array)
    for i in range(np.shape(white_label_array)[2]):
        white_label_array[:, :, i] = binary_fill_holes(white_array[:, :, i])
    white_label_array = white_label_array - white_array
    white_label_array = measure.label(white_label_array, connectivity=1)
    white_label_flatten = np.ndarray.flatten(white_label_array)
    white_label_flatten = white_label_flatten[white_label_flatten > 0]
    label_number = Counter(white_label_flatten).most_common(1)[0][0]
    white_label_array[white_label_array != label_number] = 0
    white_label_array[white_label_array > 0] = 1
    white_label = nb.Nifti1Image(white_label_array, vol.affine, vol.header)
    """
    make csf
    """
    pial_label_array = np.zeros_like(pial_array)
    for i in range(np.shape(pial_label_array)[2]):
        pial_label_array[:, :, i] = binary_fill_holes(pial_array[:, :, i])
    pial_label_array = pial_label_array - pial_array
    pial_label_array = measure.label(pial_label_array, connectivity=1)
    pial_label_flatten = np.ndarray.flatten(pial_label_array)
    pial_label_flatten = pial_label_flatten[pial_label_flatten > 0]
    label_number = Counter(pial_label_flatten).most_common(1)[0][0]
    pial_label_array[pial_label_array != label_number] = 0
    pial_label_array[pial_label_array > 0] = 1
    #pial_label_array = pial_label_array + pial_array # add csf line again (worsen layering)
    pial_label = nb.Nifti1Image(pial_label_array, vol.affine, vol.header)
    """
    make gm
    """
    ribbon_label_array = pial_label_array - white_label_array
    ribbon_label_array[ribbon_label_array != 1] = 0
    ribbon_label = nb.Nifti1Image(ribbon_label_array, vol.affine, vol.header)
    """
    layers
    """
    csf_level = probability_to_levelset(pial_label)
    wm_level = probability_to_levelset(white_label)

    volumetric_layering(wm_level["result"],
                        csf_level["result"],
                        n_layers=n_layers,
                        topology_lut_dir=None,
                        save_data=True,
                        overwrite=True,
                        output_dir=path_output,
                        file_name="epi")

    # write niftis
    nb.save(white, os.path.join(path_output, "wm_line.nii"))
    nb.save(pial, os.path.join(path_output, "csf_line.nii"))
    nb.save(white_label, os.path.join(path_output, "wm_label.nii"))
    nb.save(pial_label, os.path.join(path_output, "csf_label.nii"))
    nb.save(ribbon_label, os.path.join(path_output, "gm_label.nii"))
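
# Usage sketch for calculate_equivolumetric_epi2 (hypothetical paths; the layering
# result is written by nighres volumetric_layering into path_output with prefix "epi"):
#
# calculate_equivolumetric_epi2(input_white="/data/surf/lh.white",
#                               input_pial="/data/surf/lh.pial",
#                               input_vol="/data/func/mean_epi.nii",
#                               path_output="/data/layer_equivolumetric",
#                               n_layers=10,
#                               r=[0.4, 0.4, 0.4],
#                               n_iter=2)
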
def apply_registration(file_in,
                       cmap_in,
                       file_out,
                       interpolation="linear",
                       r=[0.4, 0.4, 0.4]):
    """
    This function applies a coordinate mapping to a volume. Optionally, the voxel size of the output
    volume can be changed. This is achieved by adjusting the coordinate mapping to the new voxel 
    size before application.
    Inputs:
        *file_in: filename of input volume.
        *cmap_in: filename of coordinate mapping.
        *file_out: filename of output volume.
        *interpolation: interpolation type (linear or nearest).
        *r: destination voxel size after upsampling (performed if not None).
    Outputs:
        *nibabel object instance of transformed input.
    
    created by Daniel Haenelt
    Date created: 30-05-2020
    Last modified: 02-06-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from nighres.registration import apply_coordinate_mappings
    from lib.io.get_filename import get_filename
    from lib.utils.upsample_volume import upsample_volume

    # make output folder
    path_output = os.path.dirname(file_out)
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # filename for temporary cmap copy
    _, _, ext_cmap = get_filename(cmap_in)
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    file_tmp = os.path.join(path_output, "tmp_" + tmp_string + ext_cmap)
    file_tmp2 = os.path.join(path_output, "tmp2_" + tmp_string + ext_cmap)

    # adjust coordinate mapping
    if r:

        # upsample cmap
        upsample_volume(cmap_in, file_tmp, dxyz=r, rmode="Linear")
        upsample_volume(cmap_in, file_tmp2, dxyz=r, rmode="NN")

        # mask upsampled cmap
        cmap = nb.load(file_tmp)
        mask = nb.load(file_tmp2)

        cmap_array = cmap.get_fdata()
        mask_array = mask.get_fdata()

        mask_array = np.sum(mask_array, axis=3)
        mask_array[mask_array != 0] = 1

        cmap_array[:, :, :, 0][mask_array == 0] = 0
        cmap_array[:, :, :, 1][mask_array == 0] = 0
        cmap_array[:, :, :, 2][mask_array == 0] = 0

        cmap = nb.Nifti1Image(cmap_array, cmap.affine, cmap.header)

    else:

        cmap = nb.load(cmap_in)

    # apply coordinate mapping
    res = apply_coordinate_mappings(
        image=file_in,  # input 
        mapping1=cmap,  # cmap
        interpolation=interpolation,  # nearest or linear
        padding="zero",  # closest, zero or max
        save_data=False,  # save output data to file (boolean)
        overwrite=False,  # overwrite existing results (boolean)
        output_dir=None,  # output directory
        file_name=None,  # base name with file extension for output
    )

    # write output
    nb.save(res["result"], file_out)

    # remove temporary files
    if r:
        os.remove(file_tmp)
        os.remove(file_tmp2)

    return res["result"]
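
# Usage sketch for apply_registration (hypothetical paths; the coordinate mapping is
# assumed to define, for each voxel of the target space, a coordinate in the space of
# file_in):
#
# transformed = apply_registration(file_in="/data/anatomy/T1.nii",
#                                  cmap_in="/data/cmap/target2source.nii.gz",
#                                  file_out="/data/registered/T1_in_epi_space.nii",
#                                  interpolation="linear",
#                                  r=[0.4, 0.4, 0.4])
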
def mesh_sampling_layer_other(surf_in,
                              file_in,
                              target2source_in,
                              source2target_in,
                              boundaries_in,
                              path_output,
                              layer,
                              smooth_iter=0,
                              r=[0.4, 0.4, 0.4],
                              interpolation="Cu",
                              average_layer=False,
                              write_profile=False,
                              write_upsampled=True,
                              cleanup=True):
    """
    This function samples data from an image volume to a surface mesh which is located in a 
    different space. Boundaries and surface mesh are first transformed to the space of the image 
    volume using coordinate mappings before data sampling. If average_layer is true, the parameter 
    layer should contain only two integers which denote the start and ending layer. The basename
    of the surface file should have no file extension and the hemisphere should be stated as prefix.
    Inputs:
        *surf_in: filename of input surface mesh.
        *file_in: filename of input volume from which data is sampled.
        *target2source_in: target to source coordinate mapping.
        *source2target_in: source to target coordinate mapping.
        *boundaries_in: filename of 4D levelset image.
        *path_output: path where output is written.
        *layer: which layers to sample (array of integers).
        *smooth_iter: number of smoothing iterations after mesh deformation.
        *r: destination voxel size after upsampling (performed if not None).
        *interpolation: interpolation method for upsampling of file from which data is sampled.
        *average_layer: average across cortex.
        *write_profile: write sampled profile.
        *write_upsampled: write upsampled file.
        *cleanup: remove intermediate files.
    
    created by Daniel Haenelt
    Date created: 13-01-2020
    Last modified: 24-06-2020
    """
    import os
    import shutil as sh
    import numpy as np
    import nibabel as nb
    from nighres.registration import apply_coordinate_mappings
    from lib.io.get_filename import get_filename
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.deform_surface import deform_surface
    from lib.surface.mesh_sampling_layer import mesh_sampling_layer
    """
    set folder structure
    """
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)

    path_temp = os.path.join(path_output, "temp_" + tmp_string)
    path_cmap = os.path.join(path_temp, "cmap")
    path_data = os.path.join(path_temp, "data")
    path_surf = os.path.join(path_temp, "surf")

    if not os.path.exists(path_output):
        os.makedirs(path_output)

    if not os.path.exists(path_temp):
        os.makedirs(path_temp)

    if not os.path.exists(path_cmap):
        os.makedirs(path_cmap)

    if not os.path.exists(path_data):
        os.makedirs(path_data)

    if not os.path.exists(path_surf):
        os.makedirs(path_surf)

    # get filenames
    _, hemi, name_surf = get_filename(surf_in)
    _, name_file, ext_file = get_filename(file_in)
    _, name_t2s, ext_t2s = get_filename(target2source_in)
    _, name_s2t, ext_s2t = get_filename(source2target_in)

    # copy input files
    sh.copyfile(target2source_in, os.path.join(path_cmap, "t2s" + ext_t2s))
    sh.copyfile(source2target_in, os.path.join(path_cmap, "s2t" + ext_s2t))
    sh.copyfile(file_in, os.path.join(path_data, name_file + ext_file))

    # set filenames
    data = os.path.join(path_data, name_file + ext_file)
    data_upsampled = os.path.join(path_data,
                                  name_file + "_upsampled" + ext_file)
    t2s = os.path.join(path_cmap, "t2s" + ext_t2s)
    t2s_upsampled = os.path.join(path_cmap, "t2s_upsampled" + ext_t2s)
    t2s_rescaled = os.path.join(path_cmap, "t2s_upsampled_rescaled" + ext_t2s)
    s2t = os.path.join(path_cmap, "s2t" + ext_s2t)
    s2t_upsampled = os.path.join(path_cmap, "s2t_upsampled" + ext_s2t)
    s2t_rescaled = os.path.join(path_cmap, "s2t_upsampled_rescaled" + ext_s2t)
    """
    upsample data
    """
    if r:
        upsample_volume(data, data_upsampled, dxyz=r, rmode=interpolation)
        upsample_volume(t2s, t2s_upsampled, dxyz=r, rmode="Linear")
        upsample_volume(s2t, s2t_upsampled, dxyz=r, rmode="Linear")
    else:  # no upsampling: copy inputs so the *_upsampled paths exist downstream
        sh.copyfile(data, data_upsampled)
        sh.copyfile(t2s, t2s_upsampled)
        sh.copyfile(s2t, s2t_upsampled)
    """
    rescale cmap
    """
    dim_target = nb.load(s2t).header["dim"][1:4] - 1
    dim_source = nb.load(t2s).header["dim"][1:4] - 1
    dim_target_upsampled = nb.load(s2t_upsampled).header["dim"][1:4] - 1
    dim_source_upsampled = nb.load(t2s_upsampled).header["dim"][1:4] - 1

    cmap_t2s = nb.load(t2s_upsampled)
    cmap_s2t = nb.load(s2t_upsampled)
    cmap_t2s_array = cmap_t2s.get_fdata()
    cmap_s2t_array = cmap_s2t.get_fdata()

    for i in range(3):
        cmap_t2s_array[:, :, :, i] = cmap_t2s_array[:, :, :, i] / dim_target[
            i] * dim_target_upsampled[i]
        cmap_s2t_array[:, :, :, i] = cmap_s2t_array[:, :, :, i] / dim_source[
            i] * dim_source_upsampled[i]

    cmap_t2s_rescaled = nb.Nifti1Image(cmap_t2s_array, cmap_t2s.affine,
                                       cmap_t2s.header)
    cmap_s2t_rescaled = nb.Nifti1Image(cmap_s2t_array, cmap_s2t.affine,
                                       cmap_s2t.header)
    nb.save(cmap_t2s_rescaled, t2s_rescaled)
    nb.save(cmap_s2t_rescaled, s2t_rescaled)
    """
    deform boundaries
    """
    apply_coordinate_mappings(
        image=boundaries_in,  # input 
        mapping1=t2s_rescaled,  # cmap
        interpolation="linear",  # nearest or linear
        padding="zero",  # closest, zero or max
        save_data=True,  # save output data to file (boolean)
        overwrite=True,  # overwrite existing results (boolean)
        output_dir=path_data,  # output directory
        file_name="boundaries"  # base name with file extension for output
    )
    """
    deform mesh
    """
    deform_surface(input_surf=surf_in,
                   input_orig=s2t_upsampled,
                   input_deform=s2t_rescaled,
                   input_target=data_upsampled,
                   hemi=hemi,
                   path_output=path_surf,
                   input_mask=None,
                   interp_method="trilinear",
                   smooth_iter=smooth_iter,
                   flip_faces=False,
                   cleanup=True)
    """
    sample data
    """
    if smooth_iter:
        surf_in = os.path.join(path_surf, hemi + name_surf + "_def_smooth")
    else:
        surf_in = os.path.join(path_surf, hemi + name_surf + "_def")

    mesh_sampling_layer(surf_in=surf_in,
                        file_in=data_upsampled,
                        boundaries_in=os.path.join(
                            path_data, "boundaries_def-img.nii.gz"),
                        path_output=path_output,
                        layer=layer,
                        r=None,
                        average_layer=average_layer,
                        write_profile=write_profile,
                        write_upsampled=write_upsampled)

    # delete intermediate files
    if cleanup:
        sh.rmtree(path_temp, ignore_errors=True)
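
# Usage sketch for mesh_sampling_layer_other (hypothetical paths; the surface mesh and
# the levelset boundaries are assumed to live in a different space than file_in and are
# brought into its space via the supplied coordinate mappings):
#
# mesh_sampling_layer_other(surf_in="/data/surf/lh.layer_5",
#                           file_in="/data/func/mean_epi.nii",
#                           target2source_in="/data/cmap/target2source.nii.gz",
#                           source2target_in="/data/cmap/source2target.nii.gz",
#                           boundaries_in="/data/layer/boundaries.nii",
#                           path_output="/data/sampling_other",
#                           layer=[2, 8],
#                           smooth_iter=2,
#                           average_layer=True)
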
def calculate_equivolumetric_epi(input_white,
                                 input_pial,
                                 input_vol,
                                 path_output,
                                 n_start,
                                 n_end,
                                 n_layers,
                                 r=[0.4, 0.4, 0.4],
                                 n_iter=2):
    """
    This function computes equivolumetric layers in volume space from input pial and white surfaces
    in freesurfer format. The input surfaces do not have to cover the whole brain. Number of 
    vertices and indices do not have to correspond between surfaces.
    Inputs:
        *input_white: filename of white surface.
        *input_pial: filename of pial surface.
        *input_vol: filename of reference volume.
        *path_output: path where output is written.
        *n_start: number of slices (axis=2) to discard at the beginning of the upsampled volume.
        *n_end: number of slices (axis=2) to discard at the end of the upsampled volume.
        *n_layers: number of generated layers + 1.
        *r: array of new voxel sizes for reference volume upsampling.
        *n_iter: number of surface upsampling iterations.
    
    created by Daniel Haenelt
    Date created: 17-12-2019
    Last modified: 24-07-2020
    """
    import os
    import sys
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry
    from skimage import measure
    from nighres.surface import probability_to_levelset
    from nighres.laminar import volumetric_layering
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.vox2ras import vox2ras
    from lib.surface.upsample_surf_mesh import upsample_surf_mesh

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # get hemi from filename
    hemi = os.path.splitext(os.path.basename(input_white))[0]
    if hemi not in ["lh", "rh"]:
        sys.exit("Could not identify hemi from filename!")

    # new filenames in output folder
    res_white = os.path.join(path_output, hemi + ".white")
    res_pial = os.path.join(path_output, hemi + ".pial")
    res_vol = os.path.join(path_output, "epi_upsampled.nii")

    # upsample reference volume and input surface
    upsample_volume(input_vol, res_vol, dxyz=r, rmode="Cu")
    upsample_surf_mesh(input_white, res_white, n_iter, "linear")
    upsample_surf_mesh(input_pial, res_pial, n_iter, "linear")

    # get affine ras2vox-tkr transformation to reference volume
    _, ras2vox_tkr = vox2ras(res_vol)

    # load surface
    vtx_white, _ = read_geometry(res_white)
    vtx_pial, _ = read_geometry(res_pial)

    # load volume
    vol = nb.load(res_vol)

    # apply ras2vox to coords
    vtx_white = np.round(apply_affine(ras2vox_tkr, vtx_white)).astype(int)
    vtx_pial = np.round(apply_affine(ras2vox_tkr, vtx_pial)).astype(int)

    # surfaces to lines in volume
    white_array = np.zeros(vol.header["dim"][1:4])
    white_array[vtx_white[:, 0], vtx_white[:, 1], vtx_white[:, 2]] = 1
    white = nb.Nifti1Image(white_array, vol.affine, vol.header)

    pial_array = np.zeros(vol.header["dim"][1:4])
    pial_array[vtx_pial[:, 0], vtx_pial[:, 1], vtx_pial[:, 2]] = 1
    pial = nb.Nifti1Image(pial_array, vol.affine, vol.header)

    # lines to levelset
    white_level = probability_to_levelset(white)
    white_level_array = white_level["result"].get_fdata()

    pial_level = probability_to_levelset(pial)
    pial_level_array = pial_level["result"].get_fdata()
    """
    make wm
    """
    white_label_array = np.zeros_like(white_level_array)
    white_label_array[white_level_array > pial_level_array] = 1
    white_label_array[white_label_array != 1] = 0
    white_label_array -= 1
    white_label_array = np.abs(white_label_array).astype(int)
    white_label_array = white_label_array - white_array
    white_label_array[white_label_array < 0] = 0
    if n_start > 0:
        white_label_array[:, :, :n_start] = 0
    if n_end > 0:
        white_label_array[:, :, -n_end:] = 0
    white_label_array = measure.label(white_label_array, connectivity=1)
    white_label_array[white_label_array == 1] = 0
    white_label_array[white_label_array > 0] = 1
    white_label = nb.Nifti1Image(white_label_array, vol.affine, vol.header)
    """
    make csf
    """
    pial_label_array = np.zeros_like(pial_level_array)
    pial_label_array[pial_level_array < white_level_array] = 1
    pial_label_array[pial_label_array != 1] = 0
    pial_label_array = np.abs(pial_label_array).astype(int)
    pial_label_array = pial_label_array - pial_array
    pial_label_array[pial_label_array < 0] = 0
    if n_start > 0:
        pial_label_array[:, :, :n_start] = 0
    if n_end > 0:
        pial_label_array[:, :, -n_end:] = 0
    pial_label_array = measure.label(pial_label_array, connectivity=1)
    pial_label_array[pial_label_array != 1] = 0
    pial_label_array -= 1
    pial_label_array = np.abs(pial_label_array)
    pial_label_array[pial_array == 1] = 0
    if n_start > 0:
        pial_label_array[:, :, :n_start] = 0
    if n_end > 0:
        pial_label_array[:, :, -n_end:] = 0
    pial_label = nb.Nifti1Image(pial_label_array, vol.affine, vol.header)
    """
    make gm
    """
    ribbon_label_array = pial_label_array.copy()
    ribbon_label_array -= 1
    ribbon_label_array = np.abs(ribbon_label_array)
    ribbon_label_array = ribbon_label_array + white_label_array
    ribbon_label_array -= 1
    ribbon_label_array = np.abs(ribbon_label_array).astype(int)
    if n_start > 0:
        ribbon_label_array[:, :, :n_start] = 0
    if n_end > 0:
        ribbon_label_array[:, :, -n_end:] = 0
    ribbon_label = nb.Nifti1Image(ribbon_label_array, vol.affine, vol.header)
    """
    layers
    """
    #csf_level = probability_to_levelset(pial_label)
    #wm_level = probability_to_levelset(white_label)

    #volumetric_layering(wm_level["result"],
    #                    csf_level["result"],
    #                    n_layers=n_layers,
    #                    topology_lut_dir=None,
    #                    save_data=True,
    #                    overwrite=True,
    #                    output_dir=path_output,
    #                    file_name="epi")

    # write niftis
    nb.save(white, os.path.join(path_output, "wm_line.nii"))
    nb.save(pial, os.path.join(path_output, "csf_line.nii"))
    nb.save(white_label, os.path.join(path_output, "wm_label.nii"))
    nb.save(pial_label, os.path.join(path_output, "csf_label.nii"))
    nb.save(ribbon_label, os.path.join(path_output, "gm_label.nii"))
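
# Usage sketch for calculate_equivolumetric_epi (hypothetical paths; n_start and n_end
# discard slices along axis=2 of the upsampled volume, e.g. truncated slab borders):
#
# calculate_equivolumetric_epi(input_white="/data/surf/lh.white",
#                              input_pial="/data/surf/lh.pial",
#                              input_vol="/data/func/mean_epi.nii",
#                              path_output="/data/layer_epi",
#                              n_start=5,
#                              n_end=5,
#                              n_layers=10,
#                              r=[0.4, 0.4, 0.4],
#                              n_iter=2)
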
def mesh_sampling(surf_in, vol_in, source2target_in, path_output, r=[0.4,0.4,0.4], 
                  interpolation="Cu", cleanup=True):
    """
    This function samples data onto a surface mesh. Optionally, the volume can be upsampled and a
    coordinate mapping can be applied to transform the surface mesh to the space of the input 
    volume.
    Inputs:
        *surf_in: filename of input surface mesh.
        *vol_in: filename of input volume from which data is sampled.
        *source2target_in: source to target coordinate mapping.
        *path_output: path where output is written.
        *r: destination voxel size after upsampling (performed if not None).
        *interpolation: interpolation method for upsampling of file from which data is sampled.
        *cleanup: remove intermediate files.
       
    created by Daniel Haenelt
    Date created: 24-06-2020        
    Last modified: 24-06-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    import shutil as sh
    from sh import gunzip
    from lib.io.get_filename import get_filename
    from lib.utils.upsample_volume import upsample_volume
    from lib.surface.deform_surface import deform_surface
    from lib.mapping import map2surface

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    path_temp = os.path.join(path_output, "temp_"+tmp_string)
    if not os.path.exists(path_temp):
        os.makedirs(path_temp)

    # get filenames
    _, hemi, name_mesh = get_filename(surf_in)
    name_mesh = name_mesh[1:]
    _, name_vol, _ = get_filename(vol_in)

    # set filenames
    vol_upsampled = os.path.join(path_temp, "vol_upsampled.nii")
    s2t_upsampled = os.path.join(path_temp, "s2t_upsampled.nii")

    # upsample volumes and rescale cmap
    if r:
        upsample_volume(vol_in, vol_upsampled, dxyz=r, rmode=interpolation)
        upsample_volume(source2target_in, s2t_upsampled, dxyz=r, rmode="Linear")
        
        # rescale cmap
        dim = nb.load(vol_in).header["dim"][1:4] - 1
        dim_upsampled = nb.load(vol_upsampled).header["dim"][1:4] - 1
    
        cmap_s2t = nb.load(s2t_upsampled)
        cmap_s2t_array = cmap_s2t.get_fdata()    
        for i in range(3):
            cmap_s2t_array[:,:,:,i] = cmap_s2t_array[:,:,:,i] / dim[i] * dim_upsampled[i]
        
        cmap_s2t_upsampled = nb.Nifti1Image(cmap_s2t_array, cmap_s2t.affine, cmap_s2t.header)
        nb.save(cmap_s2t_upsampled, s2t_upsampled)
    else:
        _, _, ext = get_filename(vol_in)
        sh.copy(vol_in, os.path.join(path_temp, "vol_upsampled"+ext))
        if ext[-3:] == ".gz":
            gunzip(os.path.join(path_temp, "vol_upsampled"+ext))
            
        _, _, ext = get_filename(source2target_in)
        sh.copy(source2target_in, os.path.join(path_temp, "s2t_upsampled"+ext))
        if ext[-3:] == ".gz":
            gunzip(os.path.join(path_temp, "s2t_upsampled"+ext))
        
    # deform mesh
    deform_surface(input_surf=surf_in,
                   input_orig=s2t_upsampled,
                   input_deform=s2t_upsampled,
                   input_target=vol_upsampled,
                   hemi=hemi,
                   path_output=path_temp,
                   input_mask=None,
                   interp_method="trilinear",
                   smooth_iter=0,
                   flip_faces=False,
                   cleanup=True)
    
    # do mapping
    map2surface(input_surf=os.path.join(path_temp, hemi+"."+name_mesh+"_def"),
                input_vol=vol_upsampled,
                hemi=hemi, 
                path_output=path_temp,
                input_white=None, 
                input_ind=None, 
                cleanup=True)
    
    # rename output surface
    os.rename(os.path.join(path_temp, hemi+".vol_upsampled_"+name_mesh+"_def_def.mgh"),
              os.path.join(path_output, hemi+"."+name_vol+"_"+name_mesh+".mgh"))
    
    # delete intermediate files
    if cleanup:
        sh.rmtree(path_temp, ignore_errors=True)
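
# Usage sketch for mesh_sampling (hypothetical paths; the coordinate mapping transforms
# the surface mesh into the space of vol_in before sampling):
#
# mesh_sampling(surf_in="/data/surf/lh.layer_5",
#               vol_in="/data/func/mean_epi.nii",
#               source2target_in="/data/cmap/source2target.nii.gz",
#               path_output="/data/sampling",
#               r=[0.4, 0.4, 0.4],
#               interpolation="Cu",
#               cleanup=True)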