def getonefile(files, name):
    from nipype.interfaces.fsl import Merge

    merger = Merge()
    merger.inputs.in_files = files
    merger.inputs.dimension = "t"
    merger.inputs.tr = 2.00
    merger.inputs.output_type = "NIFTI_GZ"
    merger.inputs.merged_file = (
        "/media/phoenix/SeagateDrive/Dataset/Outputs/Belief_Updating/"
        "Higher_level_inputs/%s.nii.gz" % name)
    merger.run()
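# A minimal usage sketch for getonefile, assuming a list of per-run NIfTI
# files; the paths and output name below are hypothetical.
runs = ["/path/to/run1.nii.gz", "/path/to/run2.nii.gz"]
getonefile(runs, "sub01_beliefs")  # writes .../Higher_level_inputs/sub01_beliefs.nii.gz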
def weird_convert_dti_dcm(in_dcm):
    import os
    import numpy as np
    import re
    from nipype.interfaces.dcm2nii import Dcm2nii
    from nipype.interfaces.fsl import Merge

    # Parse subject ID, visit year, and scan ID out of the DICOM path.
    subjid = re.search('R[0-9X]+', in_dcm).group()
    year = re.search('_201[1234]', in_dcm).group()[1:]
    visit_dict = {'2012': 1, '2013': 2, '2014': 3, '2011': 4}
    visit = visit_dict[year]
    scanid = re.search('S[0-9]+', in_dcm).group()

    # If a .bvals file already exists for this scan, it must not contain a b0.
    ton_dir = '/data1/cooked/TONf'
    test_fn = os.path.join(
        ton_dir, subjid, 'visit_{}'.format(visit), 'DTI',
        '_'.join([subjid, 'visit', str(visit), 'DTI', scanid])) + '.bvals'
    if os.path.exists(test_fn):
        assert np.all(np.loadtxt(test_fn) != 0)

    # Convert the DICOM series, then merge the converted volumes along time.
    converter = Dcm2nii()
    converter.inputs.source_names = in_dcm
    converter.inputs.gzip_output = True
    converter.inputs.output_dir = os.getcwd()
    converter.run()

    merger = Merge()
    merger.inputs.in_files = converter.output_files
    merger.inputs.dimension = 't'
    merged_result = merger.run()

    fn_base = os.path.basename(in_dcm).split('.')[0]
    merged_file = os.path.join(os.getcwd(), fn_base + '.nii.gz')
    os.rename(merged_result.outputs.merged_file, merged_file)

    in_bval = converter.bvals[0]
    in_bvec = converter.bvecs[0]
    b0_idx = 0
    assert np.all(np.loadtxt(in_bval) != 0)

    # Load (and transpose!!) the bvecs, then prepend a zero row for the b0.
    bvec_arr = np.loadtxt(in_bvec).T
    out_bvec = np.zeros((bvec_arr.shape[0] + 1, bvec_arr.shape[1]))
    out_bvec[:] = np.nan
    out_bvec[b0_idx, :] = 0
    out_bvec[np.where(np.isnan(out_bvec))] = bvec_arr.flatten()

    # Same for the bvals: insert a 0 entry at the b0 index.
    bval_arr = np.loadtxt(in_bval)
    out_bval = np.zeros((bval_arr.shape[0] + 1,))
    out_bval[:] = np.nan
    out_bval[b0_idx] = 0
    out_bval[np.isnan(out_bval)] = bval_arr

    out_bvec_fn = os.path.join(os.getcwd(), fn_base + '.bvecs')
    np.savetxt(out_bvec_fn, out_bvec, fmt='%.8f')
    out_bval_fn = os.path.join(os.getcwd(), fn_base + '.bvals')
    np.savetxt(out_bval_fn, out_bval, fmt='%.6f')
    return merged_file, out_bvec_fn, out_bval_fn
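# Hypothetical usage sketch for weird_convert_dti_dcm; the DICOM path below
# is made up, but follows the R<id> / _<year> / S<scan> pattern the regexes
# above expect.
nii, bvecs, bvals = weird_convert_dti_dcm(
    '/data1/raw/RX123/RX123_2012_DTI_S4567.dcm')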
def __init__(self, in_files, dimension='t', **options):
    # dimension must be one of 't', 'x', 'y', 'z', or 'a'
    from nipype.interfaces.fsl import Merge

    fu = Merge()
    fu.inputs.in_files = in_files
    fu.inputs.dimension = dimension
    for ef in options:
        setattr(fu.inputs, ef, options[ef])
    self.res = fu.run()
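# Usage sketch, assuming this __init__ belongs to a thin wrapper class around
# fsl.Merge (called MergeWrapper here; the class name is hypothetical).
wrapper = MergeWrapper(['a.nii.gz', 'b.nii.gz'], dimension='t',
                       output_type='NIFTI_GZ')
merged = wrapper.res.outputs.merged_file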
def merge_files(output_dir, subject, file_list):
    import os
    from nipype.interfaces.fsl import Merge

    merger = Merge()
    merger.inputs.in_files = [os.path.join(output_dir, subject, f)
                              for f in file_list]
    merger.inputs.dimension = 't'
    merger.inputs.output_type = 'NIFTI_GZ'
    merger.inputs.merged_file = os.path.join(output_dir, subject,
                                             'DWI_concat.nii.gz')
    result = merger.run()
    print('Merging complete')
    return None
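# Hypothetical call: concatenate two DWI runs for one subject; the directory
# and file names are assumptions.
merge_files('/study/outputs', 'sub-01',
            ['dwi_run1.nii.gz', 'dwi_run2.nii.gz'])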
def fslmath_Merge(filelist, output_prefix):
    from nipype.interfaces.fsl import Merge
    import os

    mergeM = Merge()
    mergeM.inputs.in_files = filelist
    mergeM.inputs.dimension = 't'
    mergeM.inputs.output_type = 'NIFTI_GZ'
    print("Merge [" + os.path.basename(filelist[0]) + ".." +
          os.path.basename(filelist[-1]) + "]:" + mergeM.cmdline)
    res = mergeM.run()
    # d2s is assumed to be imported in the surrounding module.
    outfile = d2s.move_to_results(res.outputs.merged_file, output_prefix)
    return outfile
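# Hypothetical usage; note that mergeM.cmdline is available before run(), so
# the exact fslmerge command line gets logged first.
out = fslmath_Merge(['vol0000.nii.gz', 'vol0001.nii.gz'], 'rest')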
def coregistration_4D(source_file, ref, out_file=None, spm_path=None):
    '''
    Coregistration with SPM + FSL for 4D files. Why? Neither SPM nor FSL
    can do this by default.
    :param source_file: path to input 4D file
    :param ref: reference file to co-register the source_file to
    :param out_file: output file
    :param spm_path: path to SPM
    :return: path to coregistered file
    '''
    import os
    import shutil
    import nipype.interfaces.spm as spm
    import nipype.interfaces.matlab as mlab
    from nipype.interfaces.fsl import Split, Merge

    if spm_path is not None:
        mlab.MatlabCommand.set_default_paths(spm_path)
    if spm.SPMCommand().version is None:
        raise Exception('SPM path not set correctly:', spm_path,
                        spm.SPMCommand().version)

    main_dir, source_file_name = os.path.split(source_file)
    if out_file is None:
        out_file = os.path.join(main_dir, 'r' + source_file_name)

    # Split the 4D file into 3D volumes.
    split_folder = os.path.join(main_dir, '4D_split')
    if not os.path.exists(split_folder):
        os.mkdir(split_folder)
    split = Split(in_file=source_file, dimension='t')
    split.inputs.out_base_name = os.path.join(split_folder, '4D_vol_')
    split.inputs.output_type = 'NIFTI'
    split = split.run()
    split_files = split.outputs.out_files

    # Coregister the first volume to the reference and apply the same
    # transform to the remaining volumes.
    index_file = split_files.pop(0)
    coreg = spm.Coregister()
    coreg.inputs.target = ref
    coreg.inputs.source = index_file
    coreg.inputs.apply_to_files = split_files
    coreg = coreg.run()

    # Merge back into a 4D file, prepending the coregistered source volume
    # so the merged series keeps all timepoints.
    merger = Merge()
    merger.inputs.in_files = ([coreg.outputs.coregistered_source] +
                              coreg.outputs.coregistered_files)
    merger.inputs.dimension = 't'
    merger.inputs.output_type = 'NIFTI_GZ'
    merger.inputs.merged_file = out_file
    merger = merger.run()

    shutil.rmtree(split_folder)
    return merger.outputs.merged_file
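# Hypothetical call: register a 4D EPI series to a T1 reference; the data
# paths and the SPM location are assumptions about the local setup.
coregistration_4D('/data/sub-01/func.nii', '/data/sub-01/T1.nii',
                  spm_path='/opt/spm12')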
if run1Sample:
    for fb in [0, 1]:
        x = []
        for subj in subject_list:
            fbLoc = subjectinfo(subj, fb)
            fname = '/home/jmuraskin/Projects/CCD/CPAC-out/pipeline_CCD_v1/%s_data_/dr_tempreg_maps_files_to_standard_smooth/_scan_feedback_%d/_csf_threshold_0.96/_gm_threshold_0.7/_wm_threshold_0.96/_compcor_ncomponents_5_selector_pc10.linear1.wm0.global0.motion1.quadratic1.gm0.compcor1.csf1/_spatial_map_PNAS_Smith09_rsn10/_fwhm_6/_dr_tempreg_maps_files_smooth_03/temp_reg_map_0003_antswarp_maths.nii.gz' % (subj, fbLoc)
            # fname = '/home/jmuraskin/Projects/CCD/CPAC-out/pipeline_CCD_v1/%s_data_/dr_tempreg_maps_files_to_standard_smooth/_scan_feedback_%d/%s%d.nii.gz' % (fbLoc, subj, t, i)
            x.append(fname)
        subjs = len(x)

        merger = Merge()
        merger.inputs.in_files = x
        merger.inputs.dimension = 't'
        merger.inputs.output_type = 'NIFTI_GZ'
        merger.inputs.merged_file = './DMN_merged_%s.nii.gz' % fbNames[fb]
        merger.run()

        # get meanFD values for each subject and add as covariate
        meanFD = zscore(motionTest[motionTest.FB == fbNames[fb]][motionTest.Subject_ID.isin(subject_list)]['meanFD'])
        model = MultipleRegressDesign()
        model.inputs.contrasts = [['group mean', 'T', ['reg1'], [1]],
                                  ['group neg mean', 'T', ['reg1'], [-1]]]
        model.inputs.regressors = dict(
            reg1=list(motionTest[motionTest.FB == fbNames[fb]][motionTest.Subject_ID.isin(subject_list)]['scanorder'] - 1.5),
            FD=list(meanFD))
        model.run()

        if runWithRandomise:
            os.mkdir(fbNames[fb])
            randomiseCommand = './randomise_forpython.sh -i %s -o ./%s/fb -D -d design.mat -t design.con -e design.grp -m %s -T -n %d' % (
                'DMN_merged_%s.nii.gz' % fbNames[fb], fbNames[fb],
                '/usr/share/fsl/5.0/data/standard/MNI152_T1_3mm_brain_mask.nii.gz',
                nperms)
            os.system(randomiseCommand)
            shutil.move(fbNames[fb], pairedFolder)
            shutil.move('DMN_merged_%s.nii.gz' % fbNames[fb], pairedFolder)
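# Note: running nipype's MultipleRegressDesign writes design.mat, design.con
# and design.grp into the working directory, which is what the randomise call
# above consumes. A quick sanity check along those lines might be:
for f in ['design.mat', 'design.con', 'design.grp']:
    assert os.path.exists(f), f + ' missing before randomise'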
import os
import subprocess

from nipype.interfaces.fsl import Merge

# subject_dir, nvols, outlier_vols and bptf are defined earlier in the
# source script.

# Read the TR (in seconds) from the subject's Rest_TR.txt.
tr = int(subprocess.check_output(
    ['cat', subject_dir + '/rest/Rest_TR.txt']
).decode().strip().split('.')[0])
print(nvols, tr)
print(len(outlier_vols))
print('Length outliers: ' + str(len(outlier_vols)))

# Split the filtered 4D file into single volumes (fslsplit's default output
# prefix is 'vol', giving vol0000.nii.gz, vol0001.nii.gz, ...).
proc = subprocess.Popen(['fslsplit', str(bptf)])
proc.wait()

# Keep only the volumes that are not flagged as outliers.
s = []
for i in range(nvols):
    if i not in outlier_vols:
        s += ['vol' + str(i).zfill(4) + '.nii.gz']

merger = Merge()
merger.inputs.in_files = s
merger.inputs.dimension = 't'
merger.inputs.tr = tr
merger.run()
wait10()  # helper defined elsewhere in the source script

for j in s:
    os.remove(j)

# Without an explicit merged_file, Merge names its output after the first
# input with a '_merged' suffix; rename it to something meaningful.
temp_ofile = os.path.join(subject_dir,
                          s[0].replace('.nii.gz', '_merged.nii.gz'))
if os.path.exists(temp_ofile):
    os.rename(temp_ofile, 'RestVW_bptf_good_vols.nii.gz')
def gnl_correction(input, file_bash, file_coeff, python3_env, python2_env,
                   path_output, cleanup=True):
    """
    The purpose of the following function is to correct for gradient
    nonlinearities. A corrected file is written using spline interpolation.
    The function needs FSL to be included in the search path.
    Inputs:
        *input: filename of input image.
        *file_bash: filename of bash script which calls the gradient
         unwarping toolbox.
        *file_coeff: filename of Siemens coefficient file.
        *python3_env: name of python3 virtual environment.
        *python2_env: name of python2 virtual environment.
        *path_output: path where output is written.
        *cleanup: delete intermediate files.

    created by Daniel Haenelt
    Date created: 10-01-2020
    Last modified: 10-01-2020
    """
    import os
    import shutil as sh
    import numpy as np
    import nibabel as nb
    from nipype.interfaces.fsl import ConvertWarp, Merge
    from nipype.interfaces.fsl.maths import MeanImage
    from nipype.interfaces.fsl.preprocess import ApplyWarp
    from lib.io.get_filename import get_filename
    from lib.cmap.generate_coordinate_mapping import generate_coordinate_mapping

    # get fileparts
    path, name, ext = get_filename(input)

    # make subfolders
    path_grad = os.path.join(path_output, "grad")
    if not os.path.exists(path_grad):
        os.makedirs(path_grad)

    # parse arguments
    file_output = os.path.join(path_output, name + "_gnlcorr" + ext)
    file_warp = os.path.join(path_grad, "warp.nii.gz")
    file_jacobian = os.path.join(path_grad, "warp_jacobian.nii.gz")

    # run gradient unwarp
    os.system("bash " + file_bash + \
              " " + python3_env + \
              " " + python2_env + \
              " " + path_grad + \
              " " + input + \
              " trilinear.nii.gz" + \
              " " + file_coeff)

    # now create an appropriate warpfield output (relative convention)
    convertwarp = ConvertWarp()
    convertwarp.inputs.reference = os.path.join(path_grad, "trilinear.nii.gz")
    convertwarp.inputs.warp1 = os.path.join(path_grad, "fullWarp_abs.nii.gz")
    convertwarp.inputs.abswarp = True
    convertwarp.inputs.out_relwarp = True
    convertwarp.inputs.out_file = file_warp
    convertwarp.inputs.args = "--jacobian=" + file_jacobian
    convertwarp.run()

    # convertwarp's jacobian output has 8 frames, each combination of
    # one-sided differences, so average them
    calcmean = MeanImage()
    calcmean.inputs.in_file = file_jacobian
    calcmean.inputs.dimension = "T"
    calcmean.inputs.out_file = file_jacobian
    calcmean.run()

    # apply warp to first volume
    applywarp = ApplyWarp()
    applywarp.inputs.in_file = input
    applywarp.inputs.ref_file = input
    applywarp.inputs.relwarp = True
    applywarp.inputs.field_file = file_warp
    applywarp.inputs.output_type = "NIFTI"
    applywarp.inputs.out_file = file_output
    applywarp.inputs.interp = "spline"
    applywarp.run()

    # normalise warped output image to initial intensity range
    data_img = nb.load(input)
    data_array = data_img.get_fdata()
    max_data = np.max(data_array)
    min_data = np.min(data_array)

    data_img = nb.load(file_output)
    data_array = data_img.get_fdata()
    data_array[data_array < min_data] = 0
    data_array[data_array > max_data] = max_data

    output = nb.Nifti1Image(data_array, data_img.affine, data_img.header)
    nb.save(output, file_output)

    # calculate gradient deviations
    os.system("calc_grad_perc_dev" + \
              " --fullwarp=" + file_warp + \
              " -o " + os.path.join(path_grad, "grad_dev"))

    # merge directions
    merger = Merge()
    merger.inputs.in_files = [
        os.path.join(path_grad, "grad_dev_x.nii.gz"),
        os.path.join(path_grad, "grad_dev_y.nii.gz"),
        os.path.join(path_grad, "grad_dev_z.nii.gz")
    ]
    merger.inputs.dimension = 't'
    merger.inputs.merged_file = os.path.join(path_grad, "grad_dev.nii.gz")
    merger.run()
    # convert from % deviation to absolute
    data_img = nb.load(os.path.join(path_grad, "grad_dev.nii.gz"))
    data_array = data_img.get_fdata()
    data_array = data_array / 100

    output = nb.Nifti1Image(data_array, data_img.affine, data_img.header)
    nb.save(output, os.path.join(path_grad, "grad_dev.nii.gz"))

    # warp coordinate mapping
    generate_coordinate_mapping(input, 0, path_grad, suffix="gnl",
                                time=False, write_output=True)

    applywarp = ApplyWarp()
    applywarp.inputs.in_file = os.path.join(path_grad, "cmap_gnl.nii")
    applywarp.inputs.ref_file = input
    applywarp.inputs.relwarp = True
    applywarp.inputs.field_file = file_warp
    applywarp.inputs.out_file = os.path.join(path_grad, "cmap_gnl.nii")
    applywarp.inputs.interp = "trilinear"
    applywarp.inputs.output_type = "NIFTI"
    applywarp.run()

    # clean intermediate files
    if cleanup:
        sh.rmtree(path_grad, ignore_errors=True)
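# Hypothetical call to gnl_correction; all paths and environment names below
# are assumptions about the local setup.
gnl_correction(input='/data/sub-01/mp2rage.nii',
               file_bash='/scripts/gnl_correction.sh',
               file_coeff='/coeff/coeff_SC72CD.grad',
               python3_env='py3', python2_env='py2',
               path_output='/data/sub-01/gnl')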