def mgh2nii(filename, path_output, out_type="nii"):
    """
    This function converts a volume file from freesurfer mgh to nifti format.
    Inputs:
        *filename: full path of the input file.
        *path_output: path where output is written.
        *out_type: target type of file.

    created by Daniel Haenelt
    Date created: 06-01-2020
    Last modified: 24-07-2020
    """
    import os
    from nipype.interfaces.freesurfer.preprocess import MRIConvert
    from lib.io.get_filename import get_filename

    # get filename
    path, name, ext = get_filename(filename)

    # convert volume to nifti format
    mc = MRIConvert()
    mc.inputs.in_file = filename
    mc.inputs.out_file = os.path.join(path_output, name + "." + out_type)
    mc.inputs.in_type = ext.replace('.', '')
    mc.inputs.out_type = out_type
    mc.run()
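# Usage sketch (illustrative only): convert a FreeSurfer volume to nifti. The file
# paths are hypothetical; the module path lib.io.mgh2nii matches the import
# convention used elsewhere in this repo.
#
#     from lib.io.mgh2nii import mgh2nii
#     mgh2nii("/path/to/subject/mri/orig.mgz", "/path/to/output", out_type="nii")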
def smooth_surface(file_in, file_out, n_iter):
    """
    This function smooths a surface mesh using freesurfer.
    Inputs:
        *file_in: filename of input surface.
        *file_out: filename of output surface.
        *n_iter: number of smoothing iterations.

    created by Daniel Haenelt
    Date created: 13-07-2019
    Last modified: 25-08-2020
    """
    import os
    import sys
    import subprocess
    from lib.io.get_filename import get_filename

    # make output folder
    path_output, _, _ = get_filename(file_out)
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # smooth surface
    try:
        subprocess.run(['mris_smooth', '-n', str(n_iter), '-nw', file_in, file_out],
                       check=True)
    except subprocess.CalledProcessError:
        sys.exit("Surface smoothing failed!")
def upsample_surf_mesh(file_in, file_out, n_iter, method):
    """
    This function takes a generated FreeSurfer surface and upsamples it using Jon
    Polimeni's function mris_mesh_subdivide.
    Inputs:
        *file_in: filename of input surface.
        *file_out: filename of output surface.
        *n_iter: number of upsampling iterations.
        *method: upsampling method (linear, loop, butterfly).

    created by Daniel Haenelt
    Date created: 01-11-2018
    Last modified: 26-08-2020
    """
    import os
    import sys
    import subprocess
    from lib.io.get_filename import get_filename

    # make output folder
    path_output, _, _ = get_filename(file_out)
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # subdivide surface
    try:
        subprocess.run(['mris_mesh_subdivide',
                        '--surf', file_in,
                        '--out', file_out,
                        '--method', method,
                        '--iter', str(n_iter)],
                       check=True)
    except subprocess.CalledProcessError:
        sys.exit("Surface subdivision failed!")
def inflate_surf_mesh(file_in, file_out, n_iter):
    """
    This function takes a generated FreeSurfer surface and inflates it.
    Inputs:
        *file_in: filename of input surface.
        *file_out: filename of output surface.
        *n_iter: number of inflating iterations.

    created by Daniel Haenelt
    Date created: 17-12-2019
    Last modified: 26-08-2020
    """
    import os
    import sys
    import subprocess
    from lib.io.get_filename import get_filename

    # make output folder
    path_output, _, _ = get_filename(file_out)
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # inflate surface
    try:
        subprocess.run(['mris_inflate', '-n', str(n_iter), '-no-save-sulc',
                        file_in, file_out],
                       check=True)
    except subprocess.CalledProcessError:
        sys.exit("Surface inflation failed!")
def get_b0_orientation(surf_in, vol_in, write_output=False, path_output="",
                       name_output=""):
    """
    This function computes the angle between surface normals and B0-direction per
    vertex.
    Inputs:
        *surf_in: input of surface mesh.
        *vol_in: input of corresponding nifti volume.
        *write_output: write out to disk (boolean).
        *path_output: path where to save output.
        *name_output: basename of output file.
    Outputs:
        *theta: angle in radians.

    created by Daniel Haenelt
    Date created: 31-07-2020
    Last modified: 31-07-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from nibabel.freesurfer.io import read_geometry, write_morph_data
    from lib.io.get_filename import get_filename
    from lib.surface.vox2ras import vox2ras
    from lib_gbb.normal import get_normal

    # make subfolders
    if write_output and not os.path.exists(path_output):
        os.makedirs(path_output)

    # get hemi from surface filename
    _, hemi, _ = get_filename(surf_in)

    # load surface
    vtx, fac = read_geometry(surf_in)

    # get transformation matrix
    _, r2v = vox2ras(vol_in)  # ras-tkr -> voxel
    v2s = nb.load(vol_in).affine  # voxel -> scanner-ras
    M = v2s.dot(r2v)

    # apply affine transformation
    vtx = apply_affine(M, vtx)

    # get surface normals
    n = get_normal(vtx, fac)

    # get angle between b0 and surface normals in radians
    theta = np.arccos(np.dot(n, [0, 0, 1]))

    # write output
    if write_output:
        write_morph_data(os.path.join(path_output, hemi + "." + name_output), theta)

    return theta
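# Usage sketch (illustrative only): compute the per-vertex angle between surface
# normals and the B0 direction and write it as a FreeSurfer morph file. The paths
# are hypothetical and the module path is assumed; the lib_gbb package is required.
#
#     theta = get_b0_orientation("/path/to/lh.white", "/path/to/mean_epi.nii",
#                                write_output=True, path_output="/path/to/output",
#                                name_output="b0_angle")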
def crop_coordinate_mapping(input, pad=0, overwrite_file=True, path_output=None):
    """
    Crops a padded coordinate mapping. The output file can either overwrite the input
    file or a new file is created with a suffix in a defined output directory.
    Inputs:
        *input: input file.
        *pad: image padding size.
        *overwrite_file: output file overwrites input file.
        *path_output: path where output is saved if input file is not overwritten.

    created by Daniel Haenelt
    Date created: 21-11-2018
    Last modified: 22-01-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from lib.io.get_filename import get_filename

    # define output folder
    if path_output is not None:
        if not os.path.exists(path_output):
            os.makedirs(path_output)

    # get input path and file name
    path, file, ext = get_filename(input)

    # load data
    data_img = nb.load(input)
    data_array = data_img.get_fdata()

    # get matrix size
    x_size = np.size(data_array, 0)
    y_size = np.size(data_array, 1)
    z_size = np.size(data_array, 2)

    # crop image matrix
    data_array = data_array[pad:x_size - pad, pad:y_size - pad, pad:z_size - pad, :]

    # write cropped coordinate mapping
    output = nb.Nifti1Image(data_array, data_img.affine, data_img.header)
    output.set_data_dtype(np.float64)

    # write coordinate mapping for each time point
    if overwrite_file is True:
        os.remove(input)
        nb.save(output, input)
    else:
        fileOUT = os.path.join(path_output, file + '_crop' + ext)
        nb.save(output, fileOUT)
def upsample_volume(file_in, file_out, dxyz=[0.4, 0.4, 0.4], rmode="Cu"):
    """
    This function upsamples a nifti volume using the afni function 3dresample. Before
    running the function, set the afni environment by calling AFNI in the terminal.
    Output is an upsampled nifti volume.
    Inputs:
        *file_in: nifti input filename.
        *file_out: nifti output filename.
        *dxyz: array of target resolution in single dimensions.
        *rmode: interpolation methods (Linear, NN, Cu, Bk).

    created by Daniel Haenelt
    Date created: 16-12-2019
    Last modified: 29-05-2020
    """
    import os
    import numpy as np
    from sh import gunzip
    from shutil import copyfile
    from lib.io.get_filename import get_filename

    # get path and file extension of input file
    path_in, _, ext_in = get_filename(file_in)

    # make temporary copy of input file
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    file_tmp = os.path.join(path_in, "tmp_" + tmp_string + ext_in)
    copyfile(file_in, file_tmp)

    if os.path.splitext(file_tmp)[1] == ".gz":
        gunzip(file_tmp)
        file_tmp = os.path.splitext(file_tmp)[0]

    # upsample volume
    os.system("3dresample " + \
              "-dxyz " + str(dxyz[0]) + " " + str(dxyz[1]) + " " + str(dxyz[2]) + " " + \
              "-rmode " + str(rmode) + " " + \
              "-inset " + file_tmp + " " + \
              "-prefix " + file_out)

    # remove temporary copy
    os.remove(file_tmp)
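# Usage sketch (illustrative only): resample a volume to 0.4 mm isotropic voxels
# with cubic interpolation. Paths are hypothetical; AFNI (3dresample) must be set
# up in the shell environment beforehand.
#
#     from lib.utils.upsample_volume import upsample_volume
#     upsample_volume("/path/to/T1.nii", "/path/to/T1_upsampled.nii",
#                     dxyz=[0.4, 0.4, 0.4], rmode="Cu")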
use_lowpass = False
TR = 3  # repetition time in s
cutoff_highpass = 270  # cutoff in s for baseline correction
cutoff_lowpass = 0
order_lowpass = 0
name_sess = "GE_EPI2"
name_output = ""

# path to SPM12 folder
pathSPM = "/data/pt_01880/source/spm12"
pathLIB1 = "/data/hu_haenelt/projects/scripts/lib/preprocessing"
pathLIB2 = "/data/hu_haenelt/projects/scripts/lib/processing"

""" do not edit below """

# get path from first entry
path_file, _, _ = get_filename(img_input[0])

# make output folder
path_output = os.path.join(os.path.dirname(os.path.dirname(path_file)), "results",
                           "raw", "native")
if not os.path.exists(path_output):
    os.makedirs(path_output)

# get image header information
data_img = nb.load(img_input[0])
data_img.header["dim"][0] = 3
data_img.header["dim"][4] = 1
header = data_img.header
affine = data_img.affine

# get image dimension
def skullstrip_refined(file_mask1, file_mask2):
    """
    The purpose of the following function is to enhance the skullstrip mask in native
    space. It uses a second mask which was manually corrected during the freesurfer
    segmentation. This corrected brainmask is converted to native space and multiplied
    with the initial brainmask.
    Inputs:
        *file_mask1: brainmask in original space.
        *file_mask2: manually corrected brainmask in freesurfer space
        (brain.finalsurfs.mgz).
    Outputs:
        *file_out: filename of enhanced brainmask.

    created by Daniel Haenelt
    Date created: 31-01-2020
    Last modified: 31-01-2020
    """
    import os
    import nibabel as nb
    from nipype.interfaces.freesurfer import ApplyVolTransform
    from lib.io.get_filename import get_filename

    # get output path and basename
    path_output, name_output, _ = get_filename(file_mask1)

    # filename of temporary and enhanced brainmask
    file_temp = os.path.join(path_output, "temp.nii")
    file_out = os.path.join(path_output, name_output + "_enhanced.nii")

    # bring skullstrip_mask from conformed space into original space
    transmask = ApplyVolTransform()
    transmask.inputs.source_file = file_mask2
    transmask.inputs.target_file = file_mask1
    transmask.inputs.reg_header = True
    transmask.inputs.interp = "nearest"
    transmask.inputs.transformed_file = file_temp
    transmask.inputs.args = "--no-save-reg"
    transmask.run()

    # load first brainmask in original space
    mask1 = nb.load(file_mask1)
    mask1_array = mask1.get_fdata()

    # load second brainmask transformed into original space
    mask2 = nb.load(file_temp)
    mask2_array = mask2.get_fdata()

    # make second brainmask binary
    mask2_array[mask2_array == 1] = 0
    mask2_array[mask2_array > 0] = 1

    # multiply both masks
    mask_enhanced_array = mask1_array * mask2_array

    # write enhanced brainmask
    mask_enhanced = nb.Nifti1Image(mask_enhanced_array, mask1.affine, mask1.header)
    nb.save(mask_enhanced, file_out)

    # remove temporary file
    os.remove(file_temp)

    return file_out
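# Usage sketch (illustrative only): combine an initial brainmask with the manually
# corrected freesurfer brainmask. Paths are hypothetical and the module path is
# assumed.
#
#     file_out = skullstrip_refined("/path/to/skullstrip_mask.nii",
#                                   "/path/to/mri/brain.finalsurfs.mgz")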
"/data/pt_01880/temp/try2/Run_8/ubold.nii", "/data/pt_01880/temp/try2/Run_9/ubold.nii", "/data/pt_01880/temp/try2/Run_10/ubold.nii", ] # parameters TR_old = 5 # effective TR of bold+vaso TR_new = 3 # TR of upsampled bold corrected time series vaso_shift = 2.8425 # start of vaso block (asymmetric TR) vaso_threshold = 6 """ do not edit below """ for i in range(len(img_vaso)): # get filenames path_bold, name_bold, ext_bold = get_filename(img_bold[i]) path_vaso, name_vaso, ext_vaso = get_filename(img_vaso[i]) # upsample time series regrid_time_series(img_bold[i], path_bold, TR_old, TR_new, t_start=0) regrid_time_series(img_vaso[i], path_vaso, TR_old, TR_new, t_start=vaso_shift) # new filenames file_bold = os.path.join(path_bold, name_bold + "_upsampled" + ext_bold) file_vaso = os.path.join(path_vaso, name_vaso + "_upsampled" + ext_vaso) file_vaso_corrected = os.path.join( path_vaso, name_vaso + "_upsampled_corrected" + ext_vaso)
def expand_coordinate_mapping(cmap_in, path_output=None, name_output=None,
                              write_output=False):
    """
    This function removes black background in a coordinate mapping to omit
    interpolation problems at the edges of a coordinate slab within a larger volume.
    Based on the cmap, a transformation matrix is computed from randomly sampled data
    points within the slab. The transformation matrix is then applied to all
    background voxels. Hence, this method is only really precise for coordinate
    mappings representing an affine transformation. However, this function can also be
    applied to nonlinear coordinate mappings since the preliminary goal is to avoid
    problems at the slab edges. Therefore, the actual data sampling should not be
    affected. The code snippet for computing the transformation matrix is taken from
    https://stackoverflow.com/questions/56220626/how-to-compute-conformal-affine-transformation.
    Inputs:
        *cmap_in: filename of coordinate mapping.
        *path_output: path where output is written.
        *name_output: basename of output volume.
        *write_output: write nifti volume.
    Outputs:
        *nibabel object instance containing corrected cmap.

    created by Daniel Haenelt
    Date created: 18-06-2020
    Last modified: 18-06-2020
    """
    import os
    import random
    import numpy as np
    import nibabel as nb
    from nibabel.affines import apply_affine
    from lib.io.get_filename import get_filename
    from lib.cmap.generate_coordinate_mapping import generate_coordinate_mapping

    # get file extension of cmap
    _, _, ext_cmap = get_filename(cmap_in)

    # load target cmap and generate source cmap
    cmap_target = nb.load(cmap_in)
    cmap_source = generate_coordinate_mapping(cmap_in, pad=0)

    arr_cmap_target = cmap_target.get_fdata()
    arr_cmap_source = cmap_source.get_fdata()

    # get image dimensions
    xdim = cmap_source.header["dim"][1]
    ydim = cmap_source.header["dim"][2]
    zdim = cmap_source.header["dim"][3]

    # random selection of 4 data points
    s_coords = []
    t_coords = []

    pts = np.where(arr_cmap_target[:, :, :, 0] != 0)
    r = random.sample(np.arange(len(pts[0])).tolist(), len(pts[0]))[:4]

    s_coords.append([pts[0][r[0]], pts[1][r[0]], pts[2][r[0]]])
    s_coords.append([pts[0][r[1]], pts[1][r[1]], pts[2][r[1]]])
    s_coords.append([pts[0][r[2]], pts[1][r[2]], pts[2][r[2]]])
    s_coords.append([pts[0][r[3]], pts[1][r[3]], pts[2][r[3]]])

    t_coords.append(arr_cmap_target[s_coords[0][0], s_coords[0][1], s_coords[0][2], :].tolist())
    t_coords.append(arr_cmap_target[s_coords[1][0], s_coords[1][1], s_coords[1][2], :].tolist())
    t_coords.append(arr_cmap_target[s_coords[2][0], s_coords[2][1], s_coords[2][2], :].tolist())
    t_coords.append(arr_cmap_target[s_coords[3][0], s_coords[3][1], s_coords[3][2], :].tolist())

    # get transformation matrix (target -> source)
    l = len(t_coords)
    B = np.vstack([np.transpose(t_coords), np.ones(l)])
    D = 1.0 / np.linalg.det(B)

    entry = lambda r, d: np.linalg.det(np.delete(np.vstack([r, B]), (d + 1), axis=0))
    M = [[(-1)**i * D * entry(R, i) for i in range(l)] for R in np.transpose(s_coords)]

    A, t = np.hsplit(np.array(M), [l - 1])
    t = np.transpose(t)[0]

    # unittests
    print("Test cmap expansion:")
    for p, P in zip(np.array(t_coords), np.array(s_coords)):
        image_p = np.dot(A, p) + t
        result = "[OK]" if np.allclose(image_p, P) else "[ERROR]"
        print(p, " mapped to: ", image_p, " ; expected: ", P, result)

    # get affine transformation matrix by adding translation vector
    M = np.zeros((4, 4))
    M[:3, :3] = A
    M[:3, -1] = t
    M[-1, -1] = 1

    # get final transformation matrix (source -> target)
    M = np.linalg.inv(M)

    # transform source volume
    x = arr_cmap_source[:, :, :, 0].flatten()
    y = arr_cmap_source[:, :, :, 1].flatten()
    z = arr_cmap_source[:, :, :, 2].flatten()

    source_listed = np.array([x, y, z]).T
    source_transformed = apply_affine(M, source_listed)

    x_new = np.reshape(source_transformed[:, 0], (xdim, ydim, zdim))
    y_new = np.reshape(source_transformed[:, 1], (xdim, ydim, zdim))
    z_new = np.reshape(source_transformed[:, 2], (xdim, ydim, zdim))

    # overwrite new cmap with old coordinate mapping (so that only background remains)
    x_new[arr_cmap_target[:, :, :, 0] > 0] = arr_cmap_target[:, :, :, 0][arr_cmap_target[:, :, :, 0] > 0]
    y_new[arr_cmap_target[:, :, :, 1] > 0] = arr_cmap_target[:, :, :, 1][arr_cmap_target[:, :, :, 1] > 0]
    z_new[arr_cmap_target[:, :, :, 2] > 0] = arr_cmap_target[:, :, :, 2][arr_cmap_target[:, :, :, 2] > 0]

    # overwrite input cmap array with final cmap array
    arr_cmap_target[:, :, :, 0] = x_new
    arr_cmap_target[:, :, :, 1] = y_new
    arr_cmap_target[:, :, :, 2] = z_new

    # nibabel instance of final cmap
    output = nb.Nifti1Image(arr_cmap_target, cmap_target.affine, cmap_target.header)

    # write output
    if write_output:
        nb.save(output, os.path.join(path_output, name_output + ext_cmap))

    return output
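# Usage sketch (illustrative only): expand a slab coordinate mapping into its
# background. Paths are hypothetical; the module path lib.cmap.expand_coordinate_mapping
# matches the import convention used elsewhere in this repo.
#
#     from lib.cmap.expand_coordinate_mapping import expand_coordinate_mapping
#     cmap_new = expand_coordinate_mapping("/path/to/cmap.nii.gz",
#                                          path_output="/path/to/output",
#                                          name_output="cmap_expanded",
#                                          write_output=True)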
The script applies the nighres t2s fitting module to a data set.

created by Daniel Haenelt
Date created: 22-05-2020
Last modified: 22-05-2020
"""
import os
import nibabel as nb
from nighres.intensity import flash_t2s_fitting
from lib.io.get_filename import get_filename

# input
file_list = ["/data/pt_01880/Experiment1_ODC/p4/anatomy/flash3/S13_3D_GRE_3ech_iso0p5_slab_8.42.nii",
             "/data/pt_01880/Experiment1_ODC/p4/anatomy/flash3/S13_3D_GRE_3ech_iso0p5_slab_16.03.nii",
             "/data/pt_01880/Experiment1_ODC/p4/anatomy/flash3/S13_3D_GRE_3ech_iso0p5_slab_25.nii"]
te_list = [8.42, 16.03, 25.00]  # in ms
name_output = "3D_GRE_3ech_iso0p5_slab"

""" do not edit below """

# get output path from first input entry
path_output, _, _ = get_filename(file_list[0])

# t2s fitting
res = flash_t2s_fitting(file_list, te_list)

# write output
nb.save(res["t2s"], os.path.join(path_output, name_output + "_t2s.nii"))
nb.save(res["r2s"], os.path.join(path_output, name_output + "_r2s.nii"))
nb.save(res["s0"], os.path.join(path_output, name_output + "_s0.nii"))
nb.save(res["residuals"], os.path.join(path_output, name_output + "_residuals.nii"))
import numpy as np
from lib.io.get_filename import get_filename

# input
file_in = ["/data/pt_01880/Experiment1_ODC/p4/odc/VASO1/Run_3/outlier/outlier_regressor_merge.txt"]

# parameters
TR_old = 5  # effective TR of bold+vaso
TR_new = 3  # TR of upsampled bold corrected time series

""" do not edit below """

for i in range(len(file_in)):

    # set output path and filename
    path_output, _, _ = get_filename(file_in[i])
    name_output = "outlier_regressor_upsampled.txt"

    # load outlier textfile
    outlier = np.loadtxt(file_in[i])

    # merge bold and vaso outliers to one timepoint
    outlier1 = outlier[::2]
    outlier2 = outlier[1::2]
    outlier_merge = outlier1 + outlier2
    outlier_merge[outlier_merge != 0] = 1

    # get time axes
    run_length = len(outlier_merge) * TR_old
    nt_old = int(run_length / TR_old)
def clean_coordinate_mapping(cmap_source, cmap_target, overwrite_file=True,
                             save_mask=False):
    """
    Voxels in the target coordinate mapping are masked out based on found voxel
    displacements in the source coordinate mapping. This is done to remove smeared
    regions caused by interpolations with background values in the case of deforming a
    slab within a larger image array.
    Inputs:
        *cmap_source: filename of source coordinate mapping.
        *cmap_target: filename of target coordinate mapping.
        *overwrite_file: overwrite target coordinate mapping (boolean).
        *save_mask: write out mask (boolean).
    Outputs:
        *results: nibabel instances of cleaned cmap and corresponding mask (dict).

    created by Daniel Haenelt
    Date created: 23-05-2020
    Last modified: 23-05-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from lib.io.get_filename import get_filename

    # get filename
    path_file, _, _ = get_filename(cmap_target)

    # load data
    cmap1_img = nb.load(cmap_source)
    cmap1_array = cmap1_img.get_fdata()

    cmap2_img = nb.load(cmap_target)
    cmap2_array = cmap2_img.get_fdata()

    mask_img = nb.load(cmap_target)
    mask_img.header["dim"][0] = 3
    mask_img.header["dim"][4] = 1
    mask_array = np.zeros_like(mask_img.get_fdata()[:, :, :, 0])

    x_max = cmap2_img.header["dim"][1]
    y_max = cmap2_img.header["dim"][2]
    z_max = cmap2_img.header["dim"][3]

    # get nearest voxel coordinates
    x0 = np.floor(cmap1_array[:, :, :, 0].flatten()).astype(int)
    x1 = np.ceil(cmap1_array[:, :, :, 0].flatten()).astype(int)
    y0 = np.floor(cmap1_array[:, :, :, 1].flatten()).astype(int)
    y1 = np.ceil(cmap1_array[:, :, :, 1].flatten()).astype(int)
    z0 = np.floor(cmap1_array[:, :, :, 2].flatten()).astype(int)
    z1 = np.ceil(cmap1_array[:, :, :, 2].flatten()).astype(int)

    # exclude voxels which do not fit in the target array
    outlier = []
    outlier.extend(np.where(x0 < 0)[0])
    outlier.extend(np.where(x1 < 0)[0])
    outlier.extend(np.where(y0 < 0)[0])
    outlier.extend(np.where(y1 < 0)[0])
    outlier.extend(np.where(z0 < 0)[0])
    outlier.extend(np.where(z1 < 0)[0])
    outlier.extend(np.where(x0 >= x_max)[0])
    outlier.extend(np.where(x1 >= x_max)[0])
    outlier.extend(np.where(y0 >= y_max)[0])
    outlier.extend(np.where(y1 >= y_max)[0])
    outlier.extend(np.where(z0 >= z_max)[0])
    outlier.extend(np.where(z1 >= z_max)[0])
    outlier = list(np.unique(outlier))

    x0 = np.delete(x0, outlier)
    x1 = np.delete(x1, outlier)
    y0 = np.delete(y0, outlier)
    y1 = np.delete(y1, outlier)
    z0 = np.delete(z0, outlier)
    z1 = np.delete(z1, outlier)

    # get final mask
    mask_array[x0, y0, z0] = 1
    mask_array[x1, y1, z1] = 1
    mask_array[x1, y0, z0] = 1
    mask_array[x0, y1, z0] = 1
    mask_array[x0, y0, z1] = 1
    mask_array[x1, y1, z0] = 1
    mask_array[x0, y1, z1] = 1
    mask_array[x1, y0, z1] = 1

    # apply mask to cmap
    cmap2_array[:, :, :, 0] *= mask_array
    cmap2_array[:, :, :, 1] *= mask_array
    cmap2_array[:, :, :, 2] *= mask_array

    # get output
    results = dict()
    results["cmap"] = nb.Nifti1Image(cmap2_array, cmap2_img.affine, cmap2_img.header)
    results["mask"] = nb.Nifti1Image(mask_array, mask_img.affine, mask_img.header)

    # write output
    if overwrite_file:
        nb.save(results["cmap"], cmap_target)

    if save_mask:
        nb.save(results["mask"], os.path.join(path_file, "cmap_mask.nii"))

    return results
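# Usage sketch (illustrative only): mask out smeared voxels in a target cmap based
# on the source cmap of the opposite direction. Paths are hypothetical and the
# module path is assumed.
#
#     results = clean_coordinate_mapping("/path/to/cmap_source.nii.gz",
#                                        "/path/to/cmap_target.nii.gz",
#                                        overwrite_file=True, save_mask=False)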
def apply_registration(file_in, cmap_in, file_out, interpolation="linear",
                       r=[0.4, 0.4, 0.4]):
    """
    This function applies a coordinate mapping to a volume. Optionally, the voxel size
    of the output volume can be changed. This is achieved by adjusting the coordinate
    mapping to the new voxel size before application.
    Inputs:
        *file_in: filename of input volume.
        *cmap_in: filename of coordinate mapping.
        *file_out: filename of output volume.
        *interpolation: interpolation type (linear or nearest).
        *r: destination voxel size after upsampling (performed if not None).
    Outputs:
        *nibabel object instance of transformed input.

    created by Daniel Haenelt
    Date created: 30-05-2020
    Last modified: 02-06-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from nighres.registration import apply_coordinate_mappings
    from lib.io.get_filename import get_filename
    from lib.utils.upsample_volume import upsample_volume

    # make output folder
    path_output = os.path.dirname(file_out)
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # filename for temporary cmap copy
    _, _, ext_cmap = get_filename(cmap_in)
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    file_tmp = os.path.join(path_output, "tmp_" + tmp_string + ext_cmap)
    file_tmp2 = os.path.join(path_output, "tmp2_" + tmp_string + ext_cmap)

    # adjust coordinate mapping
    if r:

        # upsample cmap
        upsample_volume(cmap_in, file_tmp, dxyz=r, rmode="Linear")
        upsample_volume(cmap_in, file_tmp2, dxyz=r, rmode="NN")

        # mask upsampled cmap
        cmap = nb.load(file_tmp)
        mask = nb.load(file_tmp2)

        cmap_array = cmap.get_fdata()
        mask_array = mask.get_fdata()

        mask_array = np.sum(mask_array, axis=3)
        mask_array[mask_array != 0] = 1

        cmap_array[:, :, :, 0][mask_array == 0] = 0
        cmap_array[:, :, :, 1][mask_array == 0] = 0
        cmap_array[:, :, :, 2][mask_array == 0] = 0

        cmap = nb.Nifti1Image(cmap_array, cmap.affine, cmap.header)
    else:
        cmap = nb.load(cmap_in)

    # apply coordinate mapping
    res = apply_coordinate_mappings(
        image=file_in,  # input
        mapping1=cmap,  # cmap
        interpolation=interpolation,  # nearest or linear
        padding="zero",  # closest, zero or max
        save_data=False,  # save output data to file (boolean)
        overwrite=False,  # overwrite existing results (boolean)
        output_dir=None,  # output directory
        file_name=None,  # base name with file extension for output
    )

    # write output
    nb.save(res["result"], file_out)

    # remove temporary files
    if r:
        os.remove(file_tmp)
        os.remove(file_tmp2)

    return res["result"]
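# Usage sketch (illustrative only): deform a volume with a coordinate mapping and
# write the result with 0.4 mm isotropic voxels. Paths are hypothetical and the
# module path is assumed.
#
#     res = apply_registration("/path/to/mean_epi.nii", "/path/to/cmap.nii.gz",
#                              "/path/to/mean_epi_reg.nii",
#                              interpolation="linear", r=[0.4, 0.4, 0.4])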
Before running the script, login to queen via ssh and set the fsl environment by
calling FSL in the terminal.

created by Daniel Haenelt
Date created: 10-01-2020
Last modified: 03-03-2020
"""
from lib.preprocessing.gnl_correction import gnl_correction
from lib.io.get_filename import get_filename

# input
input = [
    "/data/pt_01880/Experiment3_Stripes/p2/anatomy/S5_MP2RAGE_0p7_INV1_2.45.nii",
]

file_bash = "/data/hu_haenelt/projects/gradunwarp/apply_grad.sh"
file_coeff = "/data/hu_haenelt/projects/gradunwarp/7t_coeff.grad"
python3_env = "daniel"
python2_env = "daniel2"
cleanup = True

""" do not edit below """

for i in range(len(input)):

    # get filename
    path_output, _, _ = get_filename(input[i])

    # gnl correction
    gnl_correction(input[i], file_bash, file_coeff, python3_env, python2_env,
                   path_output, cleanup)
path_bbr = os.path.join(path_sub, "bbr")

os.makedirs(path_sub)
os.makedirs(path_mri)
os.makedirs(path_surf)
os.makedirs(path_t1)
os.makedirs(path_bbr)

# change path to output folder
os.chdir(path_output)

# copy surfaces
for i in range(len(input_white)):

    # white surface
    _, hemi, _ = get_filename(input_white[i])
    sh.copyfile(input_white[i], os.path.join(path_surf, hemi + ".white"))

# copy volumes
sh.copy(input_target, os.path.join(path_mri, "orig.nii"))
sh.copy(input_source, os.path.join(path_bbr, "source.nii"))
sh.copy(input_ana, os.path.join(path_t1, "T1.nii"))
sh.copy(input_mask, os.path.join(path_t1, "mask.nii"))

# remove nans
remove_nans(os.path.join(path_mri, "orig.nii"), os.path.join(path_mri, "orig.nii"))
remove_nans(os.path.join(path_bbr, "source.nii"), os.path.join(path_bbr, "source.nii"))
remove_nans(os.path.join(path_t1, "T1.nii"), os.path.join(path_t1, "T1.nii"))
remove_nans(os.path.join(path_t1, "mask.nii"),
def make_sphere(file_in, file_out, n_inflate=100, radius=None):
    """
    This function takes a generated FreeSurfer mesh and transforms it into a sphere
    with defined radius.
    Inputs:
        *file_in: filename of input surface.
        *file_out: filename of output surface.
        *n_inflate: number of inflating iterations (if > 0).
        *radius: radius of final sphere in mm (if not None).

    created by Daniel Haenelt
    Date created: 26-08-2020
    Last modified: 26-08-2020
    """
    import os
    import sys
    import subprocess
    import numpy as np
    from shutil import copyfile
    from nibabel.freesurfer.io import read_geometry, write_geometry
    from lib.io.get_filename import get_filename
    from lib.surface.inflate_surf_mesh import inflate_surf_mesh

    def cart2pol(x, y, z):
        r = np.sqrt(x**2 + y**2 + z**2)
        phi = np.arctan2(y, x)
        theta = np.arccos(z / r)
        return r, phi, theta

    def pol2cart(r, phi, theta):
        x = r * np.sin(theta) * np.cos(phi)
        y = r * np.sin(theta) * np.sin(phi)
        z = r * np.cos(theta)
        return x, y, z

    # make output folder
    path_output, _, _ = get_filename(file_out)
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # temporary file
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    file_tmp = os.path.join(path_output, tmp_string)

    # inflate surface mesh
    if n_inflate:
        inflate_surf_mesh(file_in, file_tmp, n_inflate)
    else:
        copyfile(file_in, file_tmp)

    # compute sphere
    try:
        subprocess.run(['mris_sphere', '-q', file_tmp, file_out], check=True)
    except subprocess.CalledProcessError:
        sys.exit("Sphere computation failed!")

    # change radius
    if radius:
        vtx, fac = read_geometry(file_out)
        r, phi, theta = cart2pol(vtx[:, 0], vtx[:, 1], vtx[:, 2])
        r[:] = radius
        vtx[:, 0], vtx[:, 1], vtx[:, 2] = pol2cart(r, phi, theta)
        write_geometry(file_out, vtx, fac)

    # remove temporary file
    os.remove(file_tmp)
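# Usage sketch (illustrative only): inflate a white surface and project it onto a
# sphere of 10 mm radius. Paths are hypothetical and the module path is assumed;
# FreeSurfer (mris_sphere) must be in the PATH environment.
#
#     make_sphere("/path/to/lh.white", "/path/to/lh.sphere", n_inflate=100, radius=10)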
def mask_epi(file_epi, file_t1, file_mask, niter, sigma, file_reg=""):
    """
    This function masks a mean epi image based on a skullstrip mask of the
    corresponding anatomy. The mask is transformed to native epi space via an initial
    transformation or via scanner coordinates. A rigid registration is applied to
    ensure a match between mask and epi. Finally, holes in the mask are filled, the
    mask is dilated and a Gaussian filter is applied. The masked epi is saved in the
    same folder with the prefix p.
    Inputs:
        *file_epi: input mean epi image.
        *file_t1: input of corresponding skullstripped anatomy.
        *file_mask: input of skullstrip mask of the corresponding anatomy.
        *niter: number of dilation iterations.
        *sigma: gaussian smoothing kernel.
        *file_reg: filename of ana -> epi coordinate mapping.

    created by Daniel Haenelt
    Date created: 13-02-2019
    Last modified: 10-09-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    import shutil as sh
    from scipy.ndimage import binary_fill_holes, gaussian_filter
    from scipy.ndimage.morphology import binary_dilation
    from nighres.registration import embedded_antsreg, apply_coordinate_mappings
    from lib.registration.get_scanner_transform import get_scanner_transform
    from lib.io.get_filename import get_filename
    from lib.cmap.expand_coordinate_mapping import expand_coordinate_mapping

    # get paths and filenames
    path_t1, name_t1, _ = get_filename(file_t1)
    path_epi, name_epi, _ = get_filename(file_epi)

    if file_reg:
        _, _, ext_reg = get_filename(file_reg)
    else:
        ext_reg = '.nii.gz'

    # filenames
    file_cmap_reg = os.path.join(path_t1, "cmap_reg" + ext_reg)
    file_cmap_ants = os.path.join(path_t1, "cmap_ants.nii.gz")
    file_cmap_def = os.path.join(path_t1, "cmap_def.nii.gz")
    file_ana_reg = os.path.join(path_t1, "ana_reg.nii.gz")
    file_ana_def = os.path.join(path_t1, "ana_def.nii.gz")
    file_mask_def = os.path.join(path_t1, "mask_def.nii.gz")
    file_mask_def2 = os.path.join(path_t1, "mask_def2.nii.gz")

    # get initial ana -> epi transformation from existing cmap or header
    if file_reg:
        sh.copyfile(file_reg, file_cmap_reg)
    else:
        get_scanner_transform(file_t1, file_epi, path_t1, True)
        os.rename(os.path.join(path_t1, name_t1 + "_2_" + name_epi + "_scanner.nii.gz"),
                  file_cmap_reg)

    # scanner transform peeled t1 to epi
    ana_reg = apply_coordinate_mappings(file_t1,  # input
                                        file_cmap_reg,  # cmap
                                        interpolation="linear",  # nearest or linear
                                        padding="zero",  # closest, zero or max
                                        save_data=False,
                                        overwrite=False,
                                        output_dir=None,
                                        file_name=None)
    nb.save(ana_reg["result"], file_ana_reg)

    # rigid registration
    embedded_antsreg(
        file_ana_reg,  # source image
        file_epi,  # target image
        run_rigid=True,  # whether or not to run a rigid registration first
        rigid_iterations=1000,  # number of iterations in the rigid step
        run_affine=False,  # whether or not to run an affine registration first
        affine_iterations=0,  # number of iterations in the affine step
        run_syn=False,  # whether or not to run a SyN registration
        coarse_iterations=0,  # number of iterations at the coarse level
        medium_iterations=0,  # number of iterations at the medium level
        fine_iterations=0,  # number of iterations at the fine level
        cost_function="CrossCorrelation",  # CrossCorrelation or MutualInformation
        interpolation="Linear",  # interpolation for registration result (NearestNeighbor or Linear)
        convergence=1e-6,  # threshold for convergence (can make algorithm very slow)
        ignore_affine=True,  # ignore the affine matrix information extracted from the image header
        ignore_header=True,  # ignore the orientation and affine matrix information extracted from the image header
        save_data=True,  # save output data to file
        overwrite=True,  # overwrite existing results
        output_dir=path_t1,  # output directory
        file_name="syn",  # output basename
    )

    # remove unnecessary files
    os.remove(os.path.join(path_t1, "syn_ants-def0.nii.gz"))
    os.remove(os.path.join(path_t1, "syn_ants-invmap.nii.gz"))

    # rename cmap
    os.rename(os.path.join(path_t1, "syn_ants-map.nii.gz"), file_cmap_ants)

    # remove outliers and expand
    cmap = nb.load(file_cmap_ants)
    arr_cmap = cmap.get_fdata()

    pts_cmap0 = arr_cmap[0, 0, 0, 0]
    pts_cmap1 = arr_cmap[0, 0, 0, 1]
    pts_cmap2 = arr_cmap[0, 0, 0, 2]

    arr_cmap[arr_cmap == 0] = 0
    arr_cmap[arr_cmap == pts_cmap0] = 0
    arr_cmap[arr_cmap == pts_cmap1] = 0
    arr_cmap[arr_cmap == pts_cmap2] = 0

    output = nb.Nifti1Image(arr_cmap, cmap.affine, cmap.header)
    nb.save(output, file_cmap_ants)

    expand_coordinate_mapping(cmap_in=file_cmap_ants,
                              path_output=path_t1,
                              name_output="cmap_ants",
                              write_output=True)

    # apply ants cmap to header transformation
    cmap_def = apply_coordinate_mappings(file_cmap_reg,  # input
                                         file_cmap_ants,
                                         interpolation="linear",  # nearest or linear
                                         padding="zero",  # closest, zero or max
                                         save_data=False,
                                         overwrite=False,
                                         output_dir=None,
                                         file_name=None)
    nb.save(cmap_def["result"], file_cmap_def)

    # remove outliers and expand
    arr_cmap = cmap_def["result"].get_fdata()

    pts_cmap0 = arr_cmap[0, 0, 0, 0]
    pts_cmap1 = arr_cmap[0, 0, 0, 1]
    pts_cmap2 = arr_cmap[0, 0, 0, 2]

    arr_cmap[arr_cmap == 0] = 0
    arr_cmap[arr_cmap == pts_cmap0] = 0
    arr_cmap[arr_cmap == pts_cmap1] = 0
    arr_cmap[arr_cmap == pts_cmap2] = 0

    output = nb.Nifti1Image(arr_cmap, cmap_def["result"].affine, cmap_def["result"].header)
    nb.save(output, file_cmap_def)

    expand_coordinate_mapping(cmap_in=file_cmap_def,
                              path_output=path_t1,
                              name_output="cmap_def",
                              write_output=True)

    # apply final cmap to t1 and mask
    ana_def = apply_coordinate_mappings(file_t1,  # input
                                        file_cmap_def,
                                        interpolation="linear",  # nearest or linear
                                        padding="zero",  # closest, zero or max
                                        save_data=False,
                                        overwrite=False,
                                        output_dir=None,
                                        file_name=None)
    nb.save(ana_def["result"], file_ana_def)

    mask_def = apply_coordinate_mappings(file_mask,  # input
                                         file_cmap_def,
                                         interpolation="nearest",  # nearest or linear
                                         padding="zero",  # closest, zero or max
                                         save_data=False,
                                         overwrite=False,
                                         output_dir=None,
                                         file_name=None)
    nb.save(mask_def["result"], file_mask_def)

    # finalise mask
    arr_mask = mask_def["result"].get_fdata()
    arr_mask = binary_fill_holes(arr_mask).astype(int)  # fill holes in mask
    arr_mask = binary_dilation(arr_mask, iterations=niter).astype(np.float64)  # dilate mask
    arr_mask = gaussian_filter(arr_mask, sigma=sigma)  # apply gaussian filter

    # write final epi mask
    out_img = nb.Nifti1Image(arr_mask, mask_def["result"].affine, mask_def["result"].header)
    nb.save(out_img, file_mask_def2)

    # multiply epi and binary mask
    epi_img = nb.load(file_epi)
    arr_epi = epi_img.get_fdata()
    arr_epi *= arr_mask  # multiply epi and mask

    # write masked epi
    out_img = nb.Nifti1Image(arr_epi, epi_img.affine, epi_img.header)
    nb.save(out_img, os.path.join(path_epi, "p" + name_epi + ".nii"))
# input
file_in = [
    "/data/pt_01880/Experiment3_Stripes/p3/colour/GE_EPI1/Run_1/data.nii",
]

file_bash = "/data/hu_haenelt/projects/gradunwarp/apply_grad.sh"
file_coeff = "/data/hu_haenelt/projects/gradunwarp/7t_coeff.grad"
python3_env = "daniel"
python2_env = "daniel2"
cleanup = True

""" do not edit below """

for i in range(len(file_in)):

    # get fileparts of input
    path_file, name_file, ext_file = get_filename(file_in[i])

    # filenames
    file_vol0 = os.path.join(path_file, name_file + "_vol0" + ext_file)
    file_out = os.path.join(path_file, name_file + "_gnlcorr" + ext_file)

    # extract first volume
    fslroi = ExtractROI()
    fslroi.inputs.in_file = file_in[i]
    fslroi.inputs.roi_file = file_vol0
    fslroi.inputs.output_type = "NIFTI"
    fslroi.inputs.t_min = 0
    fslroi.inputs.t_size = 1
    fslroi.run()

    # execute gnl correction
def regrid_time_series(input, path_output, TR_old, TR_new, t_start=0):
    """
    This function interpolates the time series onto a new time grid using cubic
    interpolation. Only for writing the new TR in the header of the output time
    series, AFNI has to be included in the search path.
    Inputs:
        *input: time series filename.
        *path_output: path where output is written.
        *TR_old: TR of time series in s.
        *TR_new: TR of regridded time series in s.
        *t_start: shift time series in s (t_start >= 0 and <= TR_old).

    created by Daniel Haenelt
    Date created: 19-02-2020
    Last modified: 12-03-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from scipy.interpolate import InterpolatedUnivariateSpline as Interp
    from lib.io.get_filename import get_filename

    # get filename
    _, name_input, ext_input = get_filename(input)

    # print to console
    print("time series regridding for: " + name_input)

    # load data
    data = nb.load(input)
    nx = data.header["dim"][1]
    ny = data.header["dim"][2]
    nz = data.header["dim"][3]
    nt = data.header["dim"][4]

    # get time grid
    TT = TR_old * nt  # total acquisition time
    TR_append = np.floor(t_start / TR_old + 1).astype(int) * TR_old  # number of appended TRs in input array

    # input grid
    t_old = np.arange(-TR_append, TT + TR_append, TR_old) + t_start

    # output grid
    t_new = np.arange(0, TT + TR_append, TR_new)
    t_new_append = np.flip(np.arange(0, -TR_append, -TR_new)[1:])
    if not len(t_new_append):
        t_new_append = -TR_new
    t_new = np.append(t_new_append, t_new)

    # load array with appended volumes
    n_append = int(TR_append / TR_old)
    data_array = np.zeros((nx, ny, nz, nt + 2 * n_append))
    data_array[:, :, :, n_append:-n_append] = data.get_fdata()
    for i in range(n_append):
        data_array[:, :, :, i] = data.get_fdata()[:, :, :, 0]
        data_array[:, :, :, -(i + 1)] = data.get_fdata()[:, :, :, -1]

    # temporal interpolation
    data_array_regrid = np.zeros((nx, ny, nz, len(t_new)))
    for x in range(nx):
        for y in range(ny):
            for z in range(nz):
                cubic_interper = Interp(t_old, data_array[x, y, z, :], k=3)
                data_array_regrid[x, y, z, :] = cubic_interper(t_new)

    # delete appended volumes
    vols_keep1 = t_new >= 0
    vols_keep2 = t_new < TT
    vols_keep = vols_keep1 * vols_keep2
    data_array_regrid = data_array_regrid[:, :, :, vols_keep]

    # clean corrected array
    data_array_regrid[np.isnan(data_array_regrid)] = 0
    data_array_regrid[data_array_regrid < 0] = 0

    # update data header
    data.header["dim"][4] = np.shape(data_array_regrid)[3]
    data.header["datatype"] = 16

    # write output
    output = nb.Nifti1Image(data_array_regrid, data.affine, data.header)
    nb.save(output, os.path.join(path_output, name_input + "_upsampled" + ext_input))

    # change TR in header
    os.system("3drefit " + \
              "-TR " + str(TR_new) + " " + \
              os.path.join(path_output, name_input + "_upsampled" + ext_input))
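# Usage sketch (illustrative only): regrid a time series from TR = 5 s to TR = 3 s
# (as done in the vaso/bold script above). Paths are hypothetical and the module
# path is assumed; AFNI (3drefit) must be in the search path.
#
#     regrid_time_series("/path/to/ubold.nii", "/path/to/output", TR_old=5,
#                        TR_new=3, t_start=0)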
def deform_surface(input_surf, input_orig, input_deform, input_target, hemi,
                   path_output, input_mask=None, interp_method="nearest",
                   smooth_iter=0, flip_faces=False, cleanup=True):
    """
    This function deforms a surface mesh in freesurfer convention using a coordinate
    map containing voxel coordinates. The computation takes quite a while because in
    the case of removed vertices, i.e. if a mask is given as input, the remaining
    faces are reindexed.
    Inputs:
        *input_surf: surface mesh to be transformed.
        *input_orig: freesurfer orig.mgz.
        *input_deform: deformation (coordinate mapping).
        *input_target: target volume.
        *hemi: hemisphere.
        *path_output: path where to save output.
        *input_mask: mask volume.
        *interp_method: interpolation method (nearest or trilinear).
        *smooth_iter: number of smoothing iterations applied to final image (if set > 0).
        *flip_faces: reverse normal direction of mesh.
        *cleanup: remove intermediate files.

    created by Daniel Haenelt
    Date created: 06-02-2019
    Last modified: 20-06-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    import shutil as sh
    from nibabel.freesurfer.io import write_geometry, read_geometry
    from nibabel.affines import apply_affine
    from nipype.interfaces.freesurfer import SampleToSurface
    from nipype.interfaces.freesurfer import SmoothTessellation
    from lib.io.get_filename import get_filename
    from lib.io.mgh2nii import mgh2nii
    from lib.surface.vox2ras import vox2ras

    # set freesurfer path environment
    os.environ["SUBJECTS_DIR"] = path_output

    # freesurfer subject
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    sub = "tmp_" + tmp_string

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # mimic freesurfer folder structure (with some additional folder for intermediate files)
    path_sub = os.path.join(path_output, sub)
    path_mri = os.path.join(path_sub, "mri")
    path_surf = os.path.join(path_sub, "surf")

    os.makedirs(path_sub)
    os.makedirs(path_mri)
    os.makedirs(path_surf)

    # get file extension of orig
    _, name_orig, ext_orig = get_filename(input_orig)

    # name of surface file
    name_surf = os.path.basename(input_surf)

    # copy orig, cmap and input surface to mimicked freesurfer folders
    sh.copyfile(input_surf, os.path.join(path_surf, hemi + ".source"))
    if ext_orig != ".mgz":
        mgh2nii(input_orig, path_mri, "mgz")
        os.rename(os.path.join(path_mri, name_orig + ".mgz"),
                  os.path.join(path_mri, "orig.mgz"))
    else:
        sh.copyfile(input_orig, os.path.join(path_mri, "orig.mgz"))

    # read surface geometry
    vtx, fac = read_geometry(input_surf)

    # get affine vox2ras-tkr transformation to target volume
    vox2ras_tkr, _ = vox2ras(input_target)

    # divide coordinate mapping into its x, y and z components
    cmap_img = nb.load(input_deform)
    cmap_img.header["dim"][0] = 3
    cmap_img.header["dim"][4] = 1

    # apply vox2ras transformation to coordinate mappings
    cmap_array = cmap_img.get_fdata()
    cmap_array = apply_affine(vox2ras_tkr, cmap_array)

    components = ["x", "y", "z"]
    vtx_new = np.zeros([len(vtx), 3])
    for i in range(len(components)):
        temp_array = cmap_array[:, :, :, i]
        temp_img = nb.Nifti1Image(temp_array, cmap_img.affine, cmap_img.header)
        nb.save(temp_img, os.path.join(path_mri, components[i] + "_deform.nii"))

        # mri_vol2surf
        sampler = SampleToSurface()
        sampler.inputs.subject_id = sub
        sampler.inputs.reg_header = True
        sampler.inputs.hemi = hemi
        sampler.inputs.source_file = os.path.join(path_mri, components[i] + "_deform.nii")
        sampler.inputs.surface = "source"
        sampler.inputs.sampling_method = "point"
        sampler.inputs.sampling_range = 0
        sampler.inputs.sampling_units = "mm"
        sampler.inputs.interp_method = interp_method
        sampler.inputs.out_type = "mgh"
        sampler.inputs.out_file = os.path.join(path_surf,
                                               hemi + "." + components[i] + "_sampled.mgh")
        sampler.run()

        data_img = nb.load(os.path.join(path_surf,
                                        hemi + "." + components[i] + "_sampled.mgh"))
        vtx_new[:, i] = np.squeeze(data_img.get_fdata())

    if input_mask:

        # mri_vol2surf (background)
        sampler = SampleToSurface()
        sampler.inputs.subject_id = sub
        sampler.inputs.reg_header = True
        sampler.inputs.hemi = hemi
        sampler.inputs.source_file = input_mask
        sampler.inputs.surface = "source"
        sampler.inputs.sampling_method = "point"
        sampler.inputs.sampling_range = 0
        sampler.inputs.sampling_units = "mm"
        sampler.inputs.interp_method = "nearest"
        sampler.inputs.out_type = "mgh"
        sampler.inputs.out_file = os.path.join(path_surf, hemi + ".background.mgh")
        sampler.run()

        # get new indices
        background_list = nb.load(os.path.join(path_surf, hemi + ".background.mgh")).get_fdata()
        background_list = np.squeeze(background_list).astype(int)

        # only keep vertex indices within the slab
        ind_keep = np.arange(0, len(vtx[:, 0]))
        ind_keep[background_list == 0] = -1
        ind_keep = ind_keep[ind_keep != -1]

        # get new vertices
        vtx_new = vtx_new[ind_keep, :]

        # get new faces
        fac_keep = np.zeros(len(fac[:, 0]))
        fac_keep += np.in1d(fac[:, 0], ind_keep)
        fac_keep += np.in1d(fac[:, 1], ind_keep)
        fac_keep += np.in1d(fac[:, 2], ind_keep)
        fac_temp = fac[fac_keep == 3, :]
        fac_new = fac[fac_keep == 3, :]

        # sort new faces
        c_step = 0
        n_step = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
        for i in range(len(ind_keep)):
            temp = np.where(ind_keep[i] == fac_temp)
            fac_new[temp] = i

            # print status
            counter = np.floor(i / len(ind_keep) * 100).astype(int)
            if counter == n_step[c_step]:
                print("sort faces: " + str(counter) + " %")
                c_step += 1

        # remove singularities (vertices without faces)
        fac_counter = 0
        fac_old = fac_new.copy()
        n_singularity = np.zeros(len(vtx_new))
        c_step = 0
        for i in range(len(vtx_new)):
            row, col = np.where(fac_old == i)
            n_singularity[i] = len(row)

            if not n_singularity[i]:
                fac_temp = fac_new.copy()
                fac_temp[fac_temp >= fac_counter] = -1
                fac_temp[fac_temp != -1] = 0
                fac_new += fac_temp
                fac_counter -= 1

            # update face counter
            fac_counter += 1

            # print status
            counter = np.floor(i / len(vtx_new) * 100).astype(int)
            if counter == n_step[c_step]:
                print("clean vertices: " + str(counter) + " %")
                c_step += 1

        # vertices and indices without singularities
        vtx_new = vtx_new[n_singularity != 0]
        ind_keep = ind_keep[n_singularity != 0]

        # save index mapping between original and transformed surface
        np.savetxt(os.path.join(path_output, name_surf + "_ind.txt"), ind_keep, fmt='%d')
    else:
        fac_new = fac

    # flip faces
    if flip_faces:
        fac_new = np.flip(fac_new, axis=1)

    # write new surface
    write_geometry(os.path.join(path_output, name_surf + "_def"), vtx_new, fac_new)

    # smooth surface
    if smooth_iter:
        smooth = SmoothTessellation()
        smooth.inputs.in_file = os.path.join(path_output, name_surf + "_def")
        smooth.inputs.out_file = os.path.join(path_output, name_surf + "_def_smooth")
        smooth.inputs.smoothing_iterations = smooth_iter
        smooth.inputs.disable_estimates = True
        smooth.run()

    # delete intermediate files
    if cleanup:
        sh.rmtree(path_sub, ignore_errors=True)
def deweight_mask(file_in, mask_in, mask_max=0.25, sigma_gaussian=10.0,
                  write_output=None, path_output=None):
    """
    This function computes a binary mask by pooling all voxels above a given threshold
    and replaces all image voxels by its gaussian filtered image voxels within this
    binary mask.
    Inputs:
        *file_in: filename of input image.
        *mask_in: filename of input mask.
        *mask_max: cutoff threshold.
        *sigma_gaussian: sigma for gaussian filter.
        *write_output: write output image (boolean).
        *path_output: path where output is written.
    Outputs:
        *data_array: image matrix with filtered voxels.

    created by Daniel Haenelt
    Date created: 20-04-2020
    Last modified: 20-04-2020
    """
    import os
    import nibabel as nb
    from scipy.ndimage import gaussian_filter
    from lib.io.get_filename import get_filename

    # get basename of phase file
    _, name_file, ext_file = get_filename(file_in)

    # load unwrapped phase data
    data = nb.load(file_in)
    data_array = data.get_fdata()

    # load standard deviation data
    mask = nb.load(mask_in)
    mask_array = mask.get_fdata()

    # threshold standard deviation
    mask_array[mask_array < mask_max] = 0
    mask_array[mask_array != 0] = 1

    # apply gaussian filter to phase data
    data_array_gaussian = gaussian_filter(data_array,
                                          sigma_gaussian,
                                          order=0,
                                          output=None,
                                          mode='reflect',
                                          cval=0.0,
                                          truncate=4.0)

    # replace data
    data_array[mask_array == 1] = data_array_gaussian[mask_array == 1]

    # write output
    if write_output:
        output = nb.Nifti1Image(data_array, data.affine, data.header)
        nb.save(output, os.path.join(path_output, name_file + "_filtered" + ext_file))

    return data_array
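# Usage sketch (illustrative only): replace voxels exceeding a standard deviation
# threshold by their gaussian filtered values. Paths are hypothetical and the module
# path is assumed.
#
#     arr = deweight_mask("/path/to/phase_unwrapped.nii", "/path/to/phase_std.nii",
#                         mask_max=0.25, sigma_gaussian=10.0,
#                         write_output=True, path_output="/path/to/output")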
def gnl_correction(input, file_bash, file_coeff, python3_env, python2_env, path_output,
                   cleanup=True):
    """
    The purpose of the following function is to correct for gradient nonlinearities. A
    corrected file is written using spline interpolation. The function needs FSL to be
    included in the search path.
    Inputs:
        *input: filename of input image.
        *file_bash: filename of bash script which calls the gradient unwarping toolbox.
        *file_coeff: filename of siemens coefficient file.
        *python3_env: name of python3 virtual environment.
        *python2_env: name of python2 virtual environment.
        *path_output: path where output is written.
        *cleanup: delete intermediate files.

    created by Daniel Haenelt
    Date created: 10-01-2020
    Last modified: 10-01-2020
    """
    import os
    import shutil as sh
    import numpy as np
    import nibabel as nb
    from nipype.interfaces.fsl import ConvertWarp, Merge
    from nipype.interfaces.fsl.maths import MeanImage
    from nipype.interfaces.fsl.preprocess import ApplyWarp
    from lib.io.get_filename import get_filename
    from lib.cmap.generate_coordinate_mapping import generate_coordinate_mapping

    # get fileparts
    path, name, ext = get_filename(input)

    # make subfolders
    path_grad = os.path.join(path_output, "grad")
    if not os.path.exists(path_grad):
        os.makedirs(path_grad)

    # parse arguments
    file_output = os.path.join(path_output, name + "_gnlcorr" + ext)
    file_warp = os.path.join(path_grad, "warp.nii.gz")
    file_jacobian = os.path.join(path_grad, "warp_jacobian.nii.gz")

    # run gradient unwarp
    os.system("bash " + file_bash + \
              " " + python3_env + \
              " " + python2_env + \
              " " + path_grad + \
              " " + input + \
              " trilinear.nii.gz" + \
              " " + file_coeff)

    # now create an appropriate warpfield output (relative convention)
    convertwarp = ConvertWarp()
    convertwarp.inputs.reference = os.path.join(path_grad, "trilinear.nii.gz")
    convertwarp.inputs.warp1 = os.path.join(path_grad, "fullWarp_abs.nii.gz")
    convertwarp.inputs.abswarp = True
    convertwarp.inputs.out_relwarp = True
    convertwarp.inputs.out_file = file_warp
    convertwarp.inputs.args = "--jacobian=" + file_jacobian
    convertwarp.run()

    # convertwarp's jacobian output has 8 frames, each combination of one-sided
    # differences, so average them
    calcmean = MeanImage()
    calcmean.inputs.in_file = file_jacobian
    calcmean.inputs.dimension = "T"
    calcmean.inputs.out_file = file_jacobian
    calcmean.run()

    # apply warp to first volume
    applywarp = ApplyWarp()
    applywarp.inputs.in_file = input
    applywarp.inputs.ref_file = input
    applywarp.inputs.relwarp = True
    applywarp.inputs.field_file = file_warp
    applywarp.inputs.output_type = "NIFTI"
    applywarp.inputs.out_file = file_output
    applywarp.inputs.interp = "spline"
    applywarp.run()

    # normalise warped output image to initial intensity range
    data_img = nb.load(input)
    data_array = data_img.get_fdata()
    max_data = np.max(data_array)
    min_data = np.min(data_array)

    data_img = nb.load(file_output)
    data_array = data_img.get_fdata()
    data_array[data_array < min_data] = 0
    data_array[data_array > max_data] = max_data

    output = nb.Nifti1Image(data_array, data_img.affine, data_img.header)
    nb.save(output, file_output)

    # calculate gradient deviations
    os.system("calc_grad_perc_dev" + \
              " --fullwarp=" + file_warp + \
              " -o " + os.path.join(path_grad, "grad_dev"))

    # merge directions
    merger = Merge()
    merger.inputs.in_files = [os.path.join(path_grad, "grad_dev_x.nii.gz"),
                              os.path.join(path_grad, "grad_dev_y.nii.gz"),
                              os.path.join(path_grad, "grad_dev_z.nii.gz")]
    merger.inputs.dimension = 't'
    merger.inputs.merged_file = os.path.join(path_grad, "grad_dev.nii.gz")
    merger.run()

    # convert from % deviation to absolute
    data_img = nb.load(os.path.join(path_grad, "grad_dev.nii.gz"))
    data_array = data_img.get_fdata()
    data_array = data_array / 100

    output = nb.Nifti1Image(data_array, data_img.affine, data_img.header)
    nb.save(output, os.path.join(path_grad, "grad_dev.nii.gz"))

    # warp coordinate mapping
    generate_coordinate_mapping(input,
                                0,
                                path_grad,
                                suffix="gnl",
                                time=False,
                                write_output=True)

    applywarp = ApplyWarp()
    applywarp.inputs.in_file = os.path.join(path_grad, "cmap_gnl.nii")
    applywarp.inputs.ref_file = input
    applywarp.inputs.relwarp = True
    applywarp.inputs.field_file = file_warp
    applywarp.inputs.out_file = os.path.join(path_grad, "cmap_gnl.nii")
    applywarp.inputs.interp = "trilinear"
    applywarp.inputs.output_type = "NIFTI"
    applywarp.run()

    # clean intermediate files
    if cleanup:
        sh.rmtree(path_grad, ignore_errors=True)
def get_alff(input, TR, path_output, hp_freq=0.01, lp_freq=0.08, cleanup=True):
    """
    This function calculates ALFF and fALFF from a preprocessed (motion correction,
    nuisance regression, etc.) resting-state time series. ALFF is computed by bandpass
    filtering the time series and computing the voxel-wise standard deviation of the
    filtered time series. fALFF is computed by dividing ALFF by the voxel-wise
    standard deviation of the unfiltered time series. Additionally, ALFF and fALFF are
    expressed in z-score. This function follows the script found in
    https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/alff/alff.py
    Inputs:
        *input: input time series.
        *TR: repetition time in s.
        *path_output: path where output is written.
        *hp_freq: highpass cutoff frequency in Hz.
        *lp_freq: lowpass cutoff frequency in Hz.
        *cleanup: delete intermediate files.

    created by Daniel Haenelt
    Date created: 27-02-2019
    Last modified: 15-04-2020
    """
    import os
    import nibabel as nb
    from scipy.stats import zscore
    from nipype.interfaces.afni.preprocess import Bandpass
    from nipype.interfaces.afni.utils import TStat, Calc
    from lib.io.get_filename import get_filename

    # make output folder
    if not os.path.exists(path_output):
        os.makedirs(path_output)

    # get path and filename
    _, file, _ = get_filename(input)

    # filtering
    bandpass = Bandpass()
    bandpass.inputs.in_file = input
    bandpass.inputs.highpass = hp_freq
    bandpass.inputs.lowpass = lp_freq
    bandpass.inputs.tr = TR
    bandpass.inputs.outputtype = 'NIFTI'
    bandpass.inputs.out_file = os.path.join(path_output, file + "_filtered.nii")
    bandpass.run()

    # standard deviation over frequency
    stddev_filtered = TStat()
    stddev_filtered.inputs.in_file = os.path.join(path_output, file + "_filtered.nii")
    stddev_filtered.inputs.args = "-stdev"
    stddev_filtered.inputs.outputtype = 'NIFTI'
    stddev_filtered.inputs.out_file = os.path.join(path_output, 'alff.nii')
    stddev_filtered.run()

    # standard deviation of the unfiltered nuisance corrected image
    stddev_unfiltered = TStat()
    stddev_unfiltered.inputs.in_file = input
    stddev_unfiltered.inputs.args = "-stdev"
    stddev_unfiltered.inputs.outputtype = 'NIFTI'
    stddev_unfiltered.inputs.out_file = os.path.join(path_output, 'temp.nii')
    stddev_unfiltered.run()

    # falff calculations
    falff = Calc()
    falff.inputs.in_file_a = os.path.join(path_output, 'alff.nii')
    falff.inputs.in_file_b = os.path.join(path_output, 'temp.nii')
    falff.inputs.args = '-float'
    falff.inputs.expr = '(1.0*a)/(1.0*b)'
    falff.inputs.outputtype = 'NIFTI'
    falff.inputs.out_file = os.path.join(path_output, 'falff.nii')
    falff.run()

    # alff in z-score
    alff_img = nb.load(os.path.join(path_output, "alff.nii"))
    alff_array = alff_img.get_fdata()
    alff_array = zscore(alff_array, axis=None)

    output = nb.Nifti1Image(alff_array, alff_img.affine, alff_img.header)
    nb.save(output, os.path.join(path_output, "alff_z.nii"))

    # falff in z-score
    falff_img = nb.load(os.path.join(path_output, "falff.nii"))
    falff_array = falff_img.get_fdata()
    falff_array = zscore(falff_array, axis=None)

    output = nb.Nifti1Image(falff_array, falff_img.affine, falff_img.header)
    nb.save(output, os.path.join(path_output, "falff_z.nii"))

    # cleanup
    if cleanup:
        os.remove(os.path.join(path_output, "temp.nii"))
        os.remove(os.path.join(path_output, file + "_filtered.nii"))
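# Usage sketch (illustrative only): compute ALFF/fALFF maps from a cleaned
# resting-state run with a 0.01-0.08 Hz passband. Paths are hypothetical and the
# module path is assumed; AFNI has to be installed for the nipype interfaces.
#
#     get_alff("/path/to/rest_cleaned.nii", TR=3, path_output="/path/to/alff",
#              hp_freq=0.01, lp_freq=0.08, cleanup=True)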
def apply_fieldmap(file_fmap_magn, file_fmap_phase, file_epi, file_epi_moco, file_surf,
                   delta_te=1.02, smooth=2.5, udir="y-", bw=16.304, nerode=1, cleanup=True):
    """
    This function computes a deformation field from a fieldmap acquisition and applies the inverse
    transformation to the undistorted surface. The following steps are performed:
        1. get median time series
        2. skullstrip epi
        3. register fieldmap to epi
        4. mask fieldmap
        5. prepare fieldmap
        6. get deformation field
        7. apply inverse deformation to surfaces
        8. remove intermediate files (optional)
    To run the script, FSL and Freesurfer have to be in the PATH environment. The basenames of the
    surface files should be in freesurfer convention with the hemisphere indicated as prefix.
    Inputs:
        *file_fmap_magn: fieldmap magnitude image.
        *file_fmap_phase: fieldmap phase difference image.
        *file_epi: filename of raw time series.
        *file_epi_moco: filename of motion corrected time series.
        *file_surf: list of surface filenames.
        *delta_te: echo time difference of fieldmap in ms.
        *smooth: smoothing kernel for fieldmap unmasking.
        *udir: direction for fieldmap unmasking.
        *bw: BandwidthPerPixelPhaseEncode in Hz/px.
        *nerode: number of skullstrip mask eroding iterations.
        *cleanup: removes temporary files at the end of the script (boolean).

    created by Daniel Haenelt
    Date created: 31-01-2020
    Last modified: 20-06-2020
    """
    import os
    import numpy as np
    import nibabel as nb
    from nipype.interfaces import fsl
    from lib.skullstrip.skullstrip_epi import skullstrip_epi
    from lib.io.get_filename import get_filename
    from lib.cmap.generate_coordinate_mapping import generate_coordinate_mapping
    from lib.surface.deform_surface import deform_surface

    # prepare path and filename
    path_fmap0, name_fmap0, ext_fmap0 = get_filename(file_fmap_magn)
    path_fmap1, name_fmap1, ext_fmap1 = get_filename(file_fmap_phase)
    path_data, name_data, ext_data = get_filename(file_epi)
    path_udata, name_udata, ext_udata = get_filename(file_epi_moco)

    # filename with file extension
    name_fmap0 += ext_fmap0
    name_fmap1 += ext_fmap1
    name_data += ext_data
    name_udata += ext_udata

    # change directory to fieldmap directory
    os.chdir(path_fmap0)

    # get matrix size in phase encoding direction from uncorrected epi
    data = nb.load(file_epi)
    phase_encode = data.header.get_dim_info()[1]
    ImageMatrixPhaseEncode = data.header["dim"][phase_encode+1]

    # calculate median epi
    udata = nb.load(file_epi_moco)
    arr_udata = udata.get_fdata()
    arr_udata_median = np.median(arr_udata, axis=3)
    udata_median = nb.Nifti1Image(arr_udata_median, udata.affine, udata.header)
    udata_median.header["dim"][0] = 3
    udata_median.header["dim"][4] = 1
    nb.save(udata_median, os.path.join(path_udata, "median_"+name_udata))

    # calculate skullstrip mask of that image
    skullstrip_epi(os.path.join(path_udata, "median_"+name_udata),
                   roi_size=10, scale=0.75, nerode=1, ndilate=2, savemask=True, cleanup=True)

    # erode skullstrip mask
    for j in range(nerode):
        erode = fsl.ErodeImage()
        erode.inputs.in_file = os.path.join(path_udata, "mask_median_"+name_udata)
        erode.inputs.output_type = "NIFTI"
        erode.inputs.out_file = os.path.join(path_udata, "mask_median_"+name_udata)
        erode.run()

    # register fmap1 to median epi (fsl.FLIRT)
    flirt = fsl.FLIRT()
    flirt.inputs.cost_func = "mutualinfo"
    flirt.inputs.dof = 6
    flirt.inputs.interp = "trilinear"  # trilinear, nearestneighbour, sinc or spline
    flirt.inputs.in_file = file_fmap_magn
    flirt.inputs.reference = os.path.join(path_udata, "median_"+name_udata)
    flirt.inputs.output_type = "NIFTI"
    flirt.inputs.out_file = os.path.join(path_fmap0, "r"+name_fmap0)
    flirt.inputs.out_matrix_file = os.path.join(path_fmap0, "fmap2epi.txt")
    flirt.run()

    # apply registration to fmap2
    applyxfm = fsl.preprocess.ApplyXFM()
    applyxfm.inputs.in_file = file_fmap_phase
    applyxfm.inputs.reference = os.path.join(path_udata, "median_"+name_udata)
    applyxfm.inputs.in_matrix_file = os.path.join(path_fmap0, "fmap2epi.txt")
    applyxfm.inputs.interp = "trilinear"
    applyxfm.inputs.output_type = "NIFTI"
    applyxfm.inputs.out_file = os.path.join(path_fmap1, "r"+name_fmap1)
    applyxfm.inputs.apply_xfm = True
    applyxfm.run()

    # apply skullstrip mask to fmap1 and fmap2 and save with same header information
    fmap1_img = nb.load(os.path.join(path_fmap0, "r"+name_fmap0))
    arr_fmap1 = fmap1_img.get_fdata()
    fmap2_img = nb.load(os.path.join(path_fmap1, "r"+name_fmap1))
    arr_fmap2 = fmap2_img.get_fdata()
    mask_img = nb.load(os.path.join(path_udata, "mask_median_"+name_udata))
    arr_mask = mask_img.get_fdata()

    arr_fmap1 = arr_fmap1 * arr_mask
    arr_fmap2 = arr_fmap2 * arr_mask
    arr_fmap2 = arr_fmap2 + np.abs(np.min(arr_fmap2))
    arr_fmap2 = arr_fmap2 / np.max(arr_fmap2) * 4095  # rescale phase image to be within 0-4095

    fmap1_img = nb.Nifti1Image(arr_fmap1, fmap1_img.affine, fmap1_img.header)
    nb.save(fmap1_img, os.path.join(path_fmap0, "pr"+name_fmap0))
    fmap2_img = nb.Nifti1Image(arr_fmap2, fmap1_img.affine, fmap1_img.header)
    nb.save(fmap2_img, os.path.join(path_fmap1, "pr"+name_fmap1))

    # prepare fieldmap (saves fieldmap in rad/s)
    prepare = fsl.PrepareFieldmap()
    prepare.inputs.in_magnitude = os.path.join(path_fmap0, "pr"+name_fmap0)
    prepare.inputs.in_phase = os.path.join(path_fmap1, "pr"+name_fmap1)
    prepare.inputs.out_fieldmap = os.path.join(path_fmap0, "fieldmap.nii")
    prepare.inputs.delta_TE = delta_te
    prepare.inputs.scanner = "SIEMENS"
    prepare.inputs.output_type = "NIFTI"
    prepare.run()

    # effective echo spacing in s
    dwell_time = 1 / (bw * ImageMatrixPhaseEncode)

    # unmask fieldmap (fsl.FUGUE)
    fugue = fsl.preprocess.FUGUE()
    fugue.inputs.in_file = os.path.join(path_udata, name_udata)
    fugue.inputs.dwell_time = dwell_time
    fugue.inputs.fmap_in_file = os.path.join(path_fmap0, "fieldmap.nii")
    fugue.inputs.smooth3d = smooth
    fugue.inputs.unwarp_direction = udir
    fugue.inputs.save_shift = True
    fugue.inputs.shift_out_file = os.path.join(path_fmap0, "vdm.nii")
    fugue.inputs.output_type = "NIFTI"
    fugue.run()

    # warp coordinate mapping
    generate_coordinate_mapping(file_epi, 0, path_fmap0, suffix="fmap", time=False,
                                write_output=True)

    # apply inverse fieldmap to coordinate mapping
    fugue = fsl.preprocess.FUGUE()
    fugue.inputs.in_file = os.path.join(path_fmap0, "cmap_fmap.nii")
    fugue.inputs.shift_in_file = os.path.join(path_fmap0, "vdm.nii")
    fugue.inputs.forward_warping = False
    fugue.inputs.unwarp_direction = udir
    fugue.inputs.output_type = "NIFTI"
    fugue.run()

    # apply cmap to surface
    for i in range(len(file_surf)):
        path_surf, hemi, name_surf = get_filename(file_surf[i])
        deform_surface(input_surf=file_surf[i],
                       input_orig=os.path.join(path_udata, "median_"+name_udata),
                       input_deform=os.path.join(path_fmap0, "cmap_fmap_unwarped.nii"),
                       input_target=os.path.join(path_udata, "median_"+name_udata),
                       hemi=hemi,
                       path_output=path_surf,
                       input_mask=None,
                       interp_method="trilinear",
                       smooth_iter=0,
                       flip_faces=False,
                       cleanup=True)

    # delete created files
    if cleanup:
        os.remove(os.path.join(path_fmap0, "cmap_fmap.nii"))
        os.remove(os.path.join(path_fmap0, "cmap_fmap_unwarped.nii"))
        os.remove(os.path.join(path_fmap0, "fieldmap.nii"))
        os.remove(os.path.join(path_fmap0, "fmap2epi.txt"))
        os.remove(os.path.join(path_fmap1, os.path.splitext(name_fmap1)[0]+"_flirt.mat"))
        os.remove(os.path.join(path_fmap0, "r"+name_fmap0))
        os.remove(os.path.join(path_fmap0, "pr"+name_fmap0))
        os.remove(os.path.join(path_fmap1, "r"+name_fmap1))
        os.remove(os.path.join(path_fmap1, "pr"+name_fmap1))
        os.remove(os.path.join(path_fmap0, os.path.splitext(name_udata)[0]+"_unwarped.nii"))
        os.remove(os.path.join(path_fmap0, "vdm.nii"))
        os.remove(os.path.join(path_udata, "mask_median_"+name_udata))
        os.remove(os.path.join(path_udata, "median_"+name_udata))
        os.remove(os.path.join(path_udata, "pmedian_"+name_udata))
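# Minimal usage sketch for apply_fieldmap. All paths below are hypothetical placeholders;
# delta_te and bw must match the actual fieldmap and EPI protocol. The surface filenames carry
# the hemisphere as freesurfer-style prefix, as required by the function. Internally, the
# effective echo spacing is 1 / (bw * matrix size in phase encoding direction), e.g. for a
# hypothetical matrix size of 148 and the default bw of 16.304 Hz/px this is roughly 4.1e-4 s.
if __name__ == "__main__":
    apply_fieldmap("/data/example/fmap/magn.nii",    # hypothetical fieldmap magnitude image
                   "/data/example/fmap/phase.nii",   # hypothetical phase difference image
                   "/data/example/epi/data.nii",     # hypothetical raw time series
                   "/data/example/epi/udata.nii",    # hypothetical motion corrected time series
                   ["/data/example/surf/lh.layer5",  # hypothetical surfaces (lh/rh prefix)
                    "/data/example/surf/rh.layer5"],
                   delta_te=1.02,
                   smooth=2.5,
                   udir="y-",
                   bw=16.304,
                   nerode=1,
                   cleanup=True)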
    # apply deformation
    if deformation_in1 is not None:
        apply_coordinate_mappings(file_in[i],
                                  deformation_in1,
                                  deformation_in2,
                                  interpolation=interpolation,
                                  padding='closest',
                                  save_data=True,
                                  overwrite=True,
                                  output_dir=path_def,
                                  file_name=None,
                                  )

    # get filename of deformed file
    if deformation_in1 is not None:
        _, name_file, _ = get_filename(file_in[i])
        filename_def = os.path.join(path_def, name_file+"_def-img.nii")
    else:
        filename_def = file_in[i]

    # map to ana
    for j in range(len(surf_in)):

        # hemisphere
        hemi = os.path.splitext(os.path.basename(surf_in[j]))[0]

        # sample on surface
        map2surface(surf_in[j],
                    filename_def,
                    hemi,
                    path_surf,
from scipy.stats import pearsonr, shapiro
from lib.io.get_filename import get_filename

input = [
    "/data/pt_01880/Experiment1_ODC/p4/retinotopy3/pol_anticlock/uadata.nii",
    "/data/pt_01880/Experiment1_ODC/p4/retinotopy3/pol_clock/uadata.nii",
    "/data/pt_01880/Experiment1_ODC/p4/retinotopy3/ecc_expanding/uadata.nii",
    "/data/pt_01880/Experiment1_ODC/p4/retinotopy3/ecc_contracting/uadata.nii",
    ]
input_ref = "/data/pt_01880/Experiment1_ODC/p4/retinotopy3/diagnosis/mean_uadata.nii"
input_mask_ref = ""
r_threshold = 0.95

""" do not edit below """

# get filename from first input entry
_, name_file, _ = get_filename(input[0])

# create output folder
if len(input) < 2:
    path_output = os.path.join(os.path.dirname(input[0]), "correlation")
else:
    path_output = os.path.join(os.path.dirname(os.path.dirname(input[0])), "correlation")

if not os.path.exists(path_output):
    os.makedirs(path_output)

# load reference volume
data_0 = nb.load(input_ref).get_fdata()
if len(input_mask_ref) > 0:
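# Hedged sketch of the comparison the imports above suggest: each run is presumably compared
# against the reference volume with a Pearson correlation over (optionally masked) voxels and
# checked against r_threshold. The helper below is only an illustration and is not part of the
# original script; arr_run, arr_ref and arr_mask are assumed to be 3D numpy arrays of equal shape.
def run_vs_reference_correlation(arr_run, arr_ref, arr_mask=None):
    # flatten both volumes, restrict to mask voxels if a mask is provided
    a = arr_run.ravel()
    b = arr_ref.ravel()
    if arr_mask is not None:
        keep = arr_mask.ravel() > 0
        a, b = a[keep], b[keep]
    r, p = pearsonr(a, b)
    return r, p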
def mesh_sampling_layer(surf_in, file_in, boundaries_in, path_output, layer, r=[0.4, 0.4, 0.4],
                        interpolation="Cu", average_layer=False, write_profile=True,
                        write_upsampled=True):
    """
    This function samples data from an image volume to a surface mesh from specific layers defined
    by a levelset image. If average_layer is true, the parameter layer should contain only two
    integers which denote the start and ending layer.
    Inputs:
        *surf_in: filename of input surface mesh.
        *file_in: filename of input volume from which data is sampled.
        *boundaries_in: filename of 4D levelset image.
        *path_output: path where output is written.
        *layer: which layers to sample (array of integers).
        *r: destination voxel size after upsampling (performed if not None).
        *interpolation: interpolation method for upsampling of file from which data is sampled.
        *average_layer: average across cortex.
        *write_profile: write sampled profile.
        *write_upsampled: write upsampled file.

    created by Daniel Haenelt
    Date created: 18-12-2019
    Last modified: 24-07-2020
    """
    import os
    import sys
    import shutil as sh
    import numpy as np
    import nibabel as nb
    from os.path import join, exists, basename, splitext
    from nighres.laminar import profile_sampling
    from lib.io.get_filename import get_filename
    from lib.utils.upsample_volume import upsample_volume
    from lib.mapping import map2surface

    # make output folder
    if not exists(path_output):
        os.makedirs(path_output)

    # filenames
    _, name_file, ext_file = get_filename(file_in)
    _, hemi, name_surf = get_filename(surf_in)
    name_surf = name_surf[1:]
    name_profile = splitext(basename(file_in))[0] + "_profile"

    # check hemi
    if hemi not in ["lh", "rh"]:
        sys.exit("Could not identify hemi from filename!")

    # upsample volume
    if r is not None:
        name_file = name_file + "_upsampled"
        upsample_volume(file_in, join(path_output, name_file + ext_file), r, interpolation)
    else:
        if file_in != join(path_output, name_file + ext_file):
            sh.copyfile(file_in, join(path_output, name_file + ext_file))

    # get profile sampling
    tmp = np.random.randint(0, 10, 5)
    tmp_string = ''.join(str(i) for i in tmp)
    profile = profile_sampling(boundaries_in,
                               join(path_output, name_file + ext_file),
                               save_data=write_profile,
                               overwrite=write_profile,
                               output_dir=path_output,
                               file_name="profile_" + tmp_string)

    # rename profile sampling output
    if write_profile:
        os.rename(join(path_output, "profile_" + tmp_string + "_lps-data.nii.gz"),
                  join(path_output, name_profile + ".nii.gz"))

    # load profile
    if write_profile:
        data = nb.load(join(path_output, name_profile + ".nii.gz"))
    else:
        data = profile["result"]
    data.header["dim"][0] = 3

    # do mapping
    tmp2 = np.random.randint(0, 10, 5)
    tmp2_string = ''.join(str(i) for i in tmp2)
    if not average_layer:
        for i in range(len(layer)):
            data_array = data.get_fdata()[:, :, :, layer[i]]
            out = nb.Nifti1Image(data_array, data.affine, data.header)
            nb.save(out, join(path_output, "temp_" + tmp2_string + ".nii"))

            # do the mapping
            map2surface(surf_in,
                        join(path_output, "temp_" + tmp2_string + ".nii"),
                        hemi,
                        path_output,
                        input_white=None,
                        input_ind=None,
                        cleanup=True)

            # rename mapping file
            os.rename(join(path_output,
                           hemi + ".temp_" + tmp2_string + "_" + name_surf + "_def.mgh"),
                      join(path_output,
                           hemi + "." + name_file + "_layer" + str(layer[i]) + ".mgh"))
    else:
        if len(layer) != 2:
            sys.exit("For averaging, layer should only contain two elements!")

        data_array = data.get_fdata()[:, :, :, layer[0]:layer[1]]
        data_array = np.mean(data_array, axis=3)
        out = nb.Nifti1Image(data_array, data.affine, data.header)
        nb.save(out, join(path_output, "temp_" + tmp2_string + ".nii"))

        # do the mapping
        map2surface(surf_in,
                    join(path_output, "temp_" + tmp2_string + ".nii"),
                    hemi,
                    path_output,
                    input_white=None,
                    input_ind=None,
                    cleanup=True)

        # rename mapping file
        os.rename(join(path_output,
                       hemi + ".temp_" + tmp2_string + "_" + name_surf + "_def.mgh"),
                  join(path_output,
                       hemi + "." + name_file + "_avg_layer" + str(layer[0]) + "_" + str(layer[1]) + ".mgh"))

    # clean temp
    os.remove(join(path_output, "temp_" + tmp2_string + ".nii"))

    # clean file
    if not write_upsampled:
        os.remove(join(path_output, name_file + ext_file))
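# Minimal usage sketch for mesh_sampling_layer, assuming hypothetical paths: sample layers 0-9
# of a levelset-defined cortex onto a left-hemisphere surface. The boundaries file is the 4D
# levelset image expected by nighres' profile_sampling, and r upsamples the data volume to
# 0.4 mm isotropic before sampling.
if __name__ == "__main__":
    mesh_sampling_layer("/data/example/surf/lh.layer0_def",   # hypothetical input surface
                        "/data/example/epi/mean_udata.nii",   # hypothetical data volume
                        "/data/example/seg/boundaries.nii",   # hypothetical levelset image
                        "/data/example/sampling",
                        layer=list(range(10)),
                        r=[0.4, 0.4, 0.4],
                        interpolation="Cu",
                        average_layer=False,
                        write_profile=True,
                        write_upsampled=True)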
def slice_timing_correction(input, TR_old, TR_new, order, prefix="a"):
    """
    This function performs slice timing correction of a nifti time series. For interleaved slice
    ordering, interleaved ascending is assumed. The correction is done by temporal interpolation
    of single voxel time series using cubic interpolation. To omit extrapolation errors at the
    edges, the first and last volumes of the time series are appended at the beginning and at the
    end, respectively. These time points are removed again after the interpolation step. The
    interpolated time series is sampled onto a regular grid with a defined new TR. Therefore, the
    reference slice is always the first slice acquired at t = 0. Only for writing the new TR into
    the header of the output time series, AFNI has to be included in the search path.
    Inputs:
        *input: filename of nifti time series.
        *TR_old: TR of time series in s.
        *TR_new: TR of slice timing corrected time series in s.
        *order: slice ordering (ascending, descending, interleaved).
        *prefix: prefix of output time series basename.

    created by Daniel Haenelt
    Date created: 11-03-2019
    Last modified: 18-03-2019
    """
    import os
    import sys
    import numpy as np
    import nibabel as nb
    from scipy.interpolate import InterpolatedUnivariateSpline as Interp
    from lib.io.get_filename import get_filename

    # get filename
    path_file, name_file, ext_file = get_filename(input)

    # load data
    data = nb.load(input)
    nx = data.header["dim"][1]
    ny = data.header["dim"][2]
    nz = data.header["dim"][3]
    nt = data.header["dim"][4]

    # load array with appended volumes
    data_array = np.zeros((nx, ny, nz, nt+2))
    data_array[:, :, :, 0] = data.get_fdata()[:, :, :, 0]
    data_array[:, :, :, -1] = data.get_fdata()[:, :, :, -1]
    data_array[:, :, :, 1:-1] = data.get_fdata()

    # get slice order
    if order == "ascending":
        slice_order = np.arange(0, nz)
    elif order == "descending":
        slice_order = np.arange(nz-1, -1, -1)
    elif order == "interleaved" and np.mod(nz, 2):  # odd slice number
        slice_order = np.arange(0, nz, 2)
        slice_order = np.append(slice_order, np.arange(1, nz, 2))
    elif order == "interleaved" and not np.mod(nz, 2):  # even slice number
        slice_order = np.arange(1, nz, 2)
        slice_order = np.append(slice_order, np.arange(0, nz, 2))
    else:
        sys.exit("Choose a valid slice ordering!")

    # some parameters
    TA = TR_old / nz  # acquisition time needed for one slice
    TT = TR_old * nt  # total acquisition time
    TR_append = np.floor(TR_old/TR_new).astype(int) * TR_new  # time in s appended to both ends of the output grid
    t_new = np.arange(-TR_append, TT+TR_append, TR_new)  # grid points of output array

    # temporal interpolation
    data_array_corrected = np.zeros((nx, ny, nz, len(t_new)))
    for z in range(nz):
        print("Slice timing correction for slice: "+str(z+1)+"/"+str(nz))
        for x in range(nx):
            for y in range(ny):
                t = np.arange(z*TA-TR_old, z*TA+(nt+1)*TR_old, TR_old)
                cubic_interper = Interp(t, data_array[x, y, slice_order[z], :], k=3)
                data_array_corrected[x, y, slice_order[z], :] = cubic_interper(t_new)

    # delete appended volumes
    vols_keep1 = t_new >= 0
    vols_keep2 = t_new < TT
    vols_keep = vols_keep1 * vols_keep2
    data_array_corrected = data_array_corrected[:, :, :, vols_keep]

    # clean corrected array
    data_min = np.min(data.get_fdata())
    data_max = np.max(data.get_fdata())
    data_array_corrected[np.isnan(data_array_corrected)] = 0
    data_array_corrected[data_array_corrected < data_min] = data_min
    data_array_corrected[data_array_corrected > data_max] = data_max

    # update data header
    data.header["dim"][4] = np.shape(data_array_corrected)[3]
    data.header["datatype"] = 16

    # write output
    output = nb.Nifti1Image(data_array_corrected, data.affine, data.header)
    nb.save(output, os.path.join(path_file, prefix+name_file+ext_file))

    # change TR in header
    os.system("3drefit " + \
              "-TR " + str(TR_new) + " " + \
              os.path.join(path_file, prefix+name_file+ext_file))
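# Minimal usage sketch for slice_timing_correction, assuming a hypothetical filename: correct an
# interleaved acquisition with a 3 s volume TR and resample the corrected series onto a 2 s grid.
# The output is written next to the input with the prefix "a"; AFNI's 3drefit must be in the PATH
# so that the new TR can be written into the header.
if __name__ == "__main__":
    slice_timing_correction("/data/example/epi/data.nii",  # hypothetical time series
                            TR_old=3.0,
                            TR_new=2.0,
                            order="interleaved",
                            prefix="a")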