# Assumed module-level imports for this snippet (the original context is not shown):
from os import path

import numpy as np
from nilearn.input_data import NiftiMasker
from nipype.interfaces import fsl

def get_signal(substitutions_a, substitutions_b,
               functional_file_template="~/ni_data/ofM.dr/preprocessing/{preprocessing_dir}/sub-{subject}/ses-{session}/func/sub-{subject}_ses-{session}_trial-{scan}.nii.gz",
               mask="~/ni_data/templates/DSURQEc_200micron_bin.nii.gz",
               ):
    mask = path.abspath(path.expanduser(mask))

    out_t_names = []
    out_cope_names = []
    out_varcb_names = []
    for substitution in substitutions_a + substitutions_b:
        ts_name = path.abspath(path.expanduser("{subject}_{session}.mat".format(**substitution)))
        out_t_name = path.abspath(path.expanduser("{subject}_{session}_tstat.nii.gz".format(**substitution)))
        out_cope_name = path.abspath(path.expanduser("{subject}_{session}_cope.nii.gz".format(**substitution)))
        out_varcb_name = path.abspath(path.expanduser("{subject}_{session}_varcb.nii.gz".format(**substitution)))
        out_t_names.append(out_t_name)
        out_cope_names.append(out_cope_name)
        out_varcb_names.append(out_varcb_name)
        functional_file = path.abspath(path.expanduser(functional_file_template.format(**substitution)))
        if not path.isfile(ts_name):
            # Extract the mean time series within the mask and write it out
            # as an FSL "vest" design file.
            masker = NiftiMasker(mask_img=mask)
            ts = masker.fit_transform(functional_file).T
            ts = np.mean(ts, axis=0)
            header = "/NumWaves 1\n/NumPoints 1490\n/PPheights 1.308540e+01 4.579890e+00\n\n/Matrix"
            np.savetxt(ts_name, ts, delimiter="\n", header=header, comments="")
        glm = fsl.GLM(in_file=functional_file, design=ts_name, output_type='NIFTI_GZ')
        glm.inputs.contrasts = path.abspath(path.expanduser("run0.con"))
        glm.inputs.out_t_name = out_t_name
        glm.inputs.out_cope = out_cope_name
        glm.inputs.out_varcb_name = out_varcb_name
        print(glm.cmdline)
        glm_run = glm.run()

    copemerge = fsl.Merge(dimension='t')
    varcopemerge = fsl.Merge(dimension='t')
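# A minimal usage sketch (hypothetical paths and labels, not from the original
# source): each substitution dict must supply the keys referenced by the
# filename templates above, and a `run0.con` contrast file must exist in the
# working directory.
substitutions_a = [{'subject': '4001', 'session': 'ofM', 'scan': 'EPI_CBV',
                    'preprocessing_dir': 'composite'}]
substitutions_b = [{'subject': '4002', 'session': 'ofM', 'scan': 'EPI_CBV',
                    'preprocessing_dir': 'composite'}]
get_signal(substitutions_a, substitutions_b)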
def create_2lvl(name="group"):
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu

    wk = pe.Workflow(name=name)

    inputspec = pe.Node(niu.IdentityInterface(fields=['copes', 'varcopes',
                                                      'template', "contrasts",
                                                      "regressors"]),
                        name='inputspec')

    model = pe.Node(fsl.MultipleRegressDesign(), name='l2model')

    # wk.connect(inputspec, ('copes', get_len), model, 'num_copes')
    wk.connect(inputspec, 'contrasts', model, "contrasts")
    wk.connect(inputspec, 'regressors', model, "regressors")

    mergecopes = pe.Node(fsl.Merge(dimension='t'), name='merge_copes')
    mergevarcopes = pe.Node(fsl.Merge(dimension='t'), name='merge_varcopes')

    flame = pe.Node(fsl.FLAMEO(run_mode='ols'), name='flameo')
    wk.connect(inputspec, 'copes', mergecopes, 'in_files')
    wk.connect(inputspec, 'varcopes', mergevarcopes, 'in_files')
    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    bet = pe.Node(fsl.BET(mask=True, frac=0.3), name="template_brainmask")
    wk.connect(inputspec, 'template', bet, 'in_file')
    wk.connect(bet, 'mask_file', flame, 'mask_file')

    outputspec = pe.Node(niu.IdentityInterface(fields=['zstat', 'tstat', 'cope',
                                                       'varcope', 'mrefvars',
                                                       'pes', 'res4d', 'mask',
                                                       'tdof', 'weights',
                                                       'pstat']),
                         name='outputspec')

    wk.connect(flame, 'copes', outputspec, 'cope')
    wk.connect(flame, 'var_copes', outputspec, 'varcope')
    wk.connect(flame, 'mrefvars', outputspec, 'mrefvars')
    wk.connect(flame, 'pes', outputspec, 'pes')
    wk.connect(flame, 'res4d', outputspec, 'res4d')
    wk.connect(flame, 'weights', outputspec, 'weights')
    wk.connect(flame, 'zstats', outputspec, 'zstat')
    wk.connect(flame, 'tstats', outputspec, 'tstat')
    wk.connect(flame, 'tdof', outputspec, 'tdof')
    wk.connect(bet, 'mask_file', outputspec, 'mask')

    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         name='z2pval',
                         iterfield=['in_file'])

    wk.connect(flame, 'zstats', ztopval, 'in_file')
    wk.connect(ztopval, 'out_file', outputspec, 'pstat')

    return wk
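# A minimal usage sketch (hypothetical file lists; a one-sample group-mean
# design is assumed). Contrasts and regressors follow the
# fsl.MultipleRegressDesign conventions.
wf = create_2lvl(name="group")
wf.inputs.inputspec.copes = ['sub-01_cope1.nii.gz', 'sub-02_cope1.nii.gz']
wf.inputs.inputspec.varcopes = ['sub-01_varcope1.nii.gz', 'sub-02_varcope1.nii.gz']
wf.inputs.inputspec.template = 'template.nii.gz'
wf.inputs.inputspec.contrasts = [['group mean', 'T', ['reg1'], [1]]]
wf.inputs.inputspec.regressors = {'reg1': [1, 1]}
# wf.run()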
def modify_func_fm_run(FILEROOT):
    # Input is like sub-sub_run-01_epi.nii.gz (already in fmap dir)
    NII = FILEROOT + '.nii.gz'
    JSONFILE = FILEROOT + '.json'
    JSON_DAT = read_json(JSONFILE)
    TASKNAME = JSON_DAT['SeriesDescription'].split('_')[1]

    # Splits into AP/PA sets
    # sub-<label>[_ses-<label>][_acq-<label>][_ce-<label>]_dir-<label>[_run-<index>]_epi.json
    LROOT = FILEROOT.split('_')
    APNAME = LROOT[0] + '_dir-AP_' + '_'.join(LROOT[1:])
    PANAME = LROOT[0] + '_dir-PA_' + '_'.join(LROOT[1:])

    # Volumes 1/3 are AP, 2/4 PA
    SPLITTER = fsl.Split(in_file=NII, dimension='t', out_base_name='tmp')
    SPLITTER.run()
    MERGE_AP = fsl.Merge(in_files=['tmp0000.nii.gz', 'tmp0002.nii.gz'],
                         dimension='t', merged_file=APNAME + '.nii.gz')
    MERGE_AP.run()
    MERGE_PA = fsl.Merge(in_files=['tmp0001.nii.gz', 'tmp0003.nii.gz'],
                         dimension='t', merged_file=PANAME + '.nii.gz')
    MERGE_PA.run()
    os.remove(NII)
    os.remove(JSONFILE)
    for FILE in os.listdir('.'):
        if FILE.startswith('tmp0'):
            os.remove(FILE)

    # TODO: should we flip 1st volume and reorient?

    # Find which func data to use it for (right now assumes series description
    # matches, may need to be more open).
    # Kludge: if stub of IntendedFor is there, populate with all runs
    os.chdir('..')
    INTENDEDFOR = []
    if 'IntendedFor' in JSON_DAT:
        STUB_TASKS = JSON_DAT['IntendedFor']
        for STUB in STUB_TASKS:
            INTEND_TASK = glob.glob('func/*task-{}_*bold.nii.gz'.format(STUB))
            INTENDEDFOR += INTEND_TASK
    else:
        INTENDEDFOR = glob.glob('func/*task-{}_*bold.nii.gz'.format(TASKNAME))
    JSON_DAT['IntendedFor'] = sorted(INTENDEDFOR)
    os.chdir('fmap')
    write_json(APNAME + '.json', JSON_DAT)
    write_json(PANAME + '.json', JSON_DAT)
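# The read_json/write_json helpers are not defined in this snippet; a minimal
# sketch of what they presumably do (an assumption, not the original code):
import json

def read_json(jsonfile):
    # Load a BIDS sidecar into a dict.
    with open(jsonfile) as f:
        return json.load(f)

def write_json(jsonfile, data):
    # Write a dict back out as an indented BIDS sidecar.
    with open(jsonfile, 'w') as f:
        json.dump(data, f, indent=4)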
def create_2lvl_rand(name="group_randomize"): import nipype.interfaces.fsl as fsl import nipype.pipeline.engine as pe import nipype.interfaces.utility as niu import nipype.interfaces.io as nio wk = pe.Workflow(name=name) inputspec = pe.Node( niu.IdentityInterface(fields=['copes', 'varcopes', 'template']), name='inputspec') model = pe.Node(fsl.L2Model(), name='l2model') wk.connect(inputspec, ('copes', get_len), model, 'num_copes') mergecopes = pe.Node(fsl.Merge(dimension='t'), name='merge_copes') mergevarcopes = pe.Node(fsl.Merge(dimension='t'), name='merge_varcopes') rand = pe.Node(fsl.Randomise(base_name='OneSampleT', raw_stats_imgs=True, tfce=True), name='randomize') wk.connect(inputspec, 'copes', mergecopes, 'in_files') wk.connect(inputspec, 'varcopes', mergevarcopes, 'in_files') wk.connect(model, 'design_mat', rand, 'design_mat') wk.connect(model, 'design_con', rand, 'tcon') wk.connect(mergecopes, 'merged_file', rand, 'in_file') #wk.connect(model,'design_grp',rand,'cov_split_file') bet = pe.Node(fsl.BET(mask=True, frac=0.3), name="template_brainmask") wk.connect(inputspec, 'template', bet, 'in_file') wk.connect(bet, 'mask_file', rand, 'mask') outputspec = pe.Node(niu.IdentityInterface(fields=[ 'f_corrected_p_files', 'f_p_files', 'fstat_files', 't_corrected_p_files', 't_p_files', 'tstat_file', 'mask' ]), name='outputspec') wk.connect(rand, 'f_corrected_p_files', outputspec, 'f_corrected_p_files') wk.connect(rand, 'f_p_files', outputspec, 'f_p_files') wk.connect(rand, 'fstat_files', outputspec, 'fstat_files') wk.connect(rand, 't_corrected_p_files', outputspec, 't_corrected_p_files') wk.connect(rand, 't_p_files', outputspec, 't_p_files') wk.connect(rand, 'tstat_files', outputspec, 'tstat_file') wk.connect(bet, 'mask_file', outputspec, 'mask') return wk
def segstats_workflow(c, name='segstats'):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe

    # pass the requested name through instead of hard-coding 'segstats'
    workflow = segstats(name=name)
    plot = workflow.get_node('roiplotter')
    workflow.remove_nodes([plot])

    inputspec = workflow.get_node('inputspec')

    # merge files grabbed
    merge = pe.Node(fsl.Merge(), name='merge_files')
    datagrabber = c.datagrabber.create_dataflow()
    workflow.connect(datagrabber, 'datagrabber.in_files', merge, 'in_files')
    workflow.connect(merge, 'merged_file', inputspec, 'tsnr_file')
    workflow.connect(datagrabber, 'datagrabber.reg_file', inputspec, 'reg_file')
    workflow.inputs.inputspec.sd = c.surf_dir
    workflow.connect(datagrabber, 'subject_id_iterable', inputspec, 'subject')

    sinker = pe.Node(nio.DataSink(), name='sinker')
    sinker.inputs.base_directory = c.sink_dir
    workflow.connect(datagrabber, 'subject_id_iterable', sinker, 'container')

    def get_subs(subject_id):
        subs = [('_subject_id_%s' % subject_id, '')]
        return subs

    workflow.connect(datagrabber, ('subject_id_iterable', get_subs),
                     sinker, 'substitutions')
    outputspec = workflow.get_node('outputspec')
    workflow.connect(outputspec, 'roi_file', sinker, 'segstat.@roi')
    return workflow
def epi_sbref_registration(name='EPI_SBrefRegistration'):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['epi_brain', 'sbref_brain']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['epi_registered', 'out_mat']),
        name='outputnode')

    mean = pe.Node(fsl.MeanImage(dimension='T'), name='EPImean')
    inu = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='EPImeanBias')
    epi_sbref = pe.Node(fsl.FLIRT(dof=6, out_matrix_file='init.mat'),
                        name='EPI2SBRefRegistration')

    epi_split = pe.Node(fsl.Split(dimension='t'), name='EPIsplit')
    epi_xfm = pe.MapNode(fsl.ApplyXfm(), name='EPIapplyxfm',
                         iterfield=['in_file'])
    epi_merge = pe.Node(fsl.Merge(dimension='t'), name='EPImergeback')

    workflow.connect([
        (inputnode, epi_split, [('epi_brain', 'in_file')]),
        (inputnode, epi_sbref, [('sbref_brain', 'reference')]),
        (inputnode, epi_xfm, [('sbref_brain', 'reference')]),
        (inputnode, mean, [('epi_brain', 'in_file')]),
        (mean, inu, [('out_file', 'input_image')]),
        (inu, epi_sbref, [('output_image', 'in_file')]),
        (epi_split, epi_xfm, [('out_files', 'in_file')]),
        (epi_sbref, epi_xfm, [('out_matrix_file', 'in_matrix_file')]),
        (epi_xfm, epi_merge, [('out_file', 'in_files')]),
        (epi_sbref, outputnode, [('out_matrix_file', 'out_mat')]),
        (epi_merge, outputnode, [('merged_file', 'epi_registered')])
    ])
    return workflow
def create_eddy_correct_pipeline(name='eddy_correct'):
    """
    .. deprecated:: 0.9.3
      Use :func:`nipype.workflows.dmri.preprocess.epi.ecc_pipeline` instead.

    Creates a pipeline that replaces eddy_correct script in FSL. It takes a
    series of diffusion weighted images and linearly co-registers them to one
    reference image. No rotation of the B-matrix is performed, so this pipeline
    should be executed after the motion correction pipeline.

    Example
    -------

    >>> nipype_eddycorrect = create_eddy_correct_pipeline('nipype_eddycorrect')
    >>> nipype_eddycorrect.inputs.inputnode.in_file = 'diffusion.nii'
    >>> nipype_eddycorrect.inputs.inputnode.ref_num = 0
    >>> nipype_eddycorrect.run() # doctest: +SKIP

    Inputs::

        inputnode.in_file
        inputnode.ref_num

    Outputs::

        outputnode.eddy_corrected
    """
    warnings.warn(
        ('This workflow is deprecated from v.1.0.0, use '
         'nipype.workflows.dmri.preprocess.epi.ecc_pipeline instead'),
        DeprecationWarning)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'ref_num']),
                        name='inputnode')

    pipeline = pe.Workflow(name=name)

    split = pe.Node(fsl.Split(dimension='t'), name='split')
    pick_ref = pe.Node(niu.Select(), name='pick_ref')
    coregistration = pe.MapNode(fsl.FLIRT(no_search=True, padding_size=1,
                                          interp='trilinear'),
                                name='coregistration',
                                iterfield=['in_file'])
    merge = pe.Node(fsl.Merge(dimension='t'), name='merge')
    outputnode = pe.Node(niu.IdentityInterface(fields=['eddy_corrected']),
                         name='outputnode')

    pipeline.connect([(inputnode, split, [('in_file', 'in_file')]),
                      (split, pick_ref, [('out_files', 'inlist')]),
                      (inputnode, pick_ref, [('ref_num', 'index')]),
                      (split, coregistration, [('out_files', 'in_file')]),
                      (pick_ref, coregistration, [('out', 'reference')]),
                      (coregistration, merge, [('out_file', 'in_files')]),
                      (merge, outputnode, [('merged_file', 'eddy_corrected')])])
    return pipeline
def _run_interface(self, runtime):
    in_files = self.inputs.in_files
    if not isinstance(in_files, list):
        in_files = [self.inputs.in_files]

    # Generate output average name early
    self._results['out_avg'] = fname_presuffix(self.inputs.in_files[0],
                                               suffix='_avg',
                                               newpath=runtime.cwd)

    if self.inputs.to_ras:
        in_files = [reorient(inf, newpath=runtime.cwd) for inf in in_files]

    if len(in_files) == 1:
        filenii = nb.load(in_files[0])
        filedata = filenii.get_data()

        # magnitude files can have an extra dimension empty
        if filedata.ndim == 5:
            sqdata = np.squeeze(filedata)
            if sqdata.ndim == 5:
                raise RuntimeError('Input image (%s) is 5D' % in_files[0])
            else:
                in_files = [fname_presuffix(in_files[0],
                                            suffix='_squeezed',
                                            newpath=runtime.cwd)]
                nb.Nifti1Image(sqdata, filenii.affine,
                               filenii.header).to_filename(in_files[0])

        if np.squeeze(nb.load(in_files[0]).get_data()).ndim < 4:
            self._results['out_file'] = in_files[0]
            self._results['out_avg'] = in_files[0]
            # TODO: generate identity out_mats and zero-filled out_movpar
            return runtime
        in_files = in_files[0]
    else:
        magmrg = fsl.Merge(dimension='t', in_files=self.inputs.in_files)
        in_files = magmrg.run().outputs.merged_file
    mcflirt = fsl.MCFLIRT(cost='normcorr', save_mats=True, save_plots=True,
                          ref_vol=0, in_file=in_files)
    mcres = mcflirt.run()
    self._results['out_mats'] = mcres.outputs.mat_file
    self._results['out_movpar'] = mcres.outputs.par_file
    self._results['out_file'] = mcres.outputs.out_file

    hmcnii = nb.load(mcres.outputs.out_file)
    hmcdat = hmcnii.get_data().mean(axis=3)
    if self.inputs.zero_based_avg:
        hmcdat -= hmcdat.min()

    nb.Nifti1Image(hmcdat, hmcnii.affine,
                   hmcnii.header).to_filename(self._results['out_avg'])

    return runtime
def create_non_uniformity_correct_4D_file(auto_clip=False,
                                          clip_low=7,
                                          clip_high=200,
                                          n_procs=12):
    """non_uniformity_correct_4D_file corrects functional files for
    non-uniformity on a timepoint-by-timepoint basis. Internally it implements
    a workflow to split the in_file, correct each volume separately and then
    merge them back together. This is an ugly workaround as we have to find
    the output of the workflow's datasink somewhere, but it should work.

    Parameters
    ----------
    in_file : str
        Absolute path to nifti-file.
    auto_clip : bool (default: False)
        whether to let 3dUniformize decide on clipping boundaries
    clip_low : float (default: 7)
        lower clipping bound for 3dUniformize
    clip_high : float (default: 200)
        higher clipping bound for 3dUniformize
    n_procs : int (default: 12)
        the number of processes to run the internal workflow with

    Returns
    -------
    out_file : non-uniformity corrected file
        List of absolute paths to nifti-files.
    """
    # nodes
    input_node = pe.Node(IdentityInterface(
        fields=['in_file', 'auto_clip', 'clip_low', 'clip_high',
                'output_directory', 'sub_id']), name='inputspec')

    split = pe.Node(Function(input_names='in_file',
                             output_names=['out_files'],
                             function=split_4D_to_3D),
                    name='split')

    uniformer = pe.MapNode(
        Uniformize(clip_high=clip_high, clip_low=clip_low,
                   auto_clip=auto_clip, outputtype='NIFTI_GZ'),
        name='uniformer',
        iterfield=['in_file'])
    merge = pe.MapNode(fsl.Merge(dimension='t'),
                       name='merge',
                       iterfield=['in_files'])

    datasink = pe.Node(nio.DataSink(infields=['topup'], container=''),
                       name='sinker')
    datasink.inputs.parameterization = False

    # workflow
    nuc_wf = pe.Workflow(name='nuc')
    nuc_wf.connect(input_node, 'sub_id', datasink, 'container')
    nuc_wf.connect(input_node, 'output_directory', datasink, 'base_directory')
    nuc_wf.connect(input_node, 'in_file', split, 'in_file')
    nuc_wf.connect(split, 'out_files', uniformer, 'in_file')
    nuc_wf.connect(uniformer, 'out_file', merge, 'in_files')
    nuc_wf.connect(merge, 'merged_file', datasink, 'uni')

    # nuc_wf.run('MultiProc', plugin_args={'n_procs': n_procs})
    # out_file = glob.glob(os.path.join(td, 'uni', fn_base + '_0000*.nii.gz'))[0]
    return nuc_wf
def fsl_RegrSliceWise(input_file, txtregr_Path, regr_Path):
    dataName = os.path.basename(input_file).split('.')[0]

    # check that regression data exist
    regrTextFiles = findRegData(txtregr_Path)
    if len(regrTextFiles) == 0:
        print('No regression with physio data!')
        output_file = os.path.join(regr_Path,
                                   os.path.basename(input_file).split('.')[0]) + '_RGR.nii.gz'
        shutil.copyfile(input_file, output_file)
        return output_file

    # scale Nifti data by factor 10
    fslPath = scaleBy10(input_file, inv=False)

    # split input_file into slices
    mySplit = fsl.Split(in_file=fslPath, dimension='z', out_base_name=dataName)
    print(mySplit.cmdline)
    mySplit.run()
    os.remove(fslPath)

    # separate ref and src volume in slices
    sliceFiles = findSlicesData(os.getcwd(), dataName)

    if not len(regrTextFiles) == len(sliceFiles):
        sys.exit('Error: Not enough txt.Files in %s' % txtregr_Path)

    print('Start separate slice Regression ... ')

    # start the regression slice by slice
    print('For all slices ...')
    for i in range(len(sliceFiles)):
        slc = sliceFiles[i]
        regr = regrTextFiles[i]
        # only take the columns [1,2,7,9,11,12,13] of the regression .txt files
        output_file = os.path.join(regr_Path, os.path.basename(slc))
        myRegr = fsl.FilterRegressor(in_file=slc,
                                     design_file=regr,
                                     out_file=output_file,
                                     filter_columns=[1, 2, 7, 9, 11, 12, 13])
        print(myRegr.cmdline)
        myRegr.run()
        os.remove(slc)

    # merge slices to a single volume
    mcf_sliceFiles = findSlicesData(regr_Path, dataName)
    output_file = os.path.join(regr_Path,
                               os.path.basename(input_file).split('.')[0]) + '_RGR.nii.gz'
    myMerge = fsl.Merge(in_files=mcf_sliceFiles,
                        dimension='z',
                        merged_file=output_file)
    print(myMerge.cmdline)
    myMerge.run()

    for slc in mcf_sliceFiles:
        os.remove(slc)

    # unscale result data by factor 10^(-1)
    output_file = scaleBy10(output_file, inv=True)

    return output_file
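# scaleBy10, findRegData and findSlicesData are project helpers not shown
# here; a rough sketch of plausible implementations (assumptions, not the
# original code). Scaling voxel sizes by 10 is a common small-animal MRI
# trick so FSL's human-sized heuristics apply.
import glob
import os

import nibabel as nib

def scaleBy10(input_path, inv=False):
    # Scale the voxel dimensions by 10 (or back by 1/10 when inv=True),
    # write a sibling file, and return its path.
    img = nib.load(input_path)
    factor = 0.1 if inv else 10.0
    affine = img.affine.copy()
    affine[:3, :3] *= factor
    out_path = input_path.replace('.nii.gz', '_sc.nii.gz')
    nib.save(nib.Nifti1Image(img.get_fdata(), affine), out_path)
    return out_path

def findSlicesData(folder, base_name):
    # Collect the per-slice files produced by fsl.Split, in slice order.
    return sorted(glob.glob(os.path.join(folder, base_name + '*.nii.gz')))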
def copes1_2_anat_func(fixed, cope1_10Hz_r1, cope1_10Hz_r2, cope1_10Hz_r3,
                       func_2_anat_trans_10Hz_r1, func_2_anat_trans_10Hz_r2,
                       func_2_anat_trans_10Hz_r3, mask_brain):
    import os
    import re
    import nipype.interfaces.ants as ants
    import nipype.interfaces.fsl as fsl

    cwd = os.getcwd()
    copes1 = [cope1_10Hz_r1, cope1_10Hz_r2, cope1_10Hz_r3]
    trans = [
        func_2_anat_trans_10Hz_r1, func_2_anat_trans_10Hz_r2,
        func_2_anat_trans_10Hz_r3
    ]
    copes1_2_anat = []
    FEtdof_t1_2_anat = []
    for i in range(len(copes1)):
        moving = copes1[i]
        transform = trans[i]

        # resample each run's cope into anatomical space
        ants_apply = ants.ApplyTransforms()
        ants_apply.inputs.dimension = 3
        ants_apply.inputs.input_image = moving
        ants_apply.inputs.reference_image = fixed
        ants_apply.inputs.transforms = transform
        ants_apply.inputs.output_image = 'cope1_2_anat_10Hz_r{0}.nii.gz'.format(i + 1)
        ants_apply.run()

        copes1_2_anat.append(
            os.path.abspath('cope1_2_anat_10Hz_r{0}.nii.gz'.format(i + 1)))

        # constant-valued tdof image (147, presumably the run's error dof),
        # masked to the brain
        dof = fsl.ImageMaths()
        dof.inputs.in_file = 'cope1_2_anat_10Hz_r{0}.nii.gz'.format(i + 1)
        dof.inputs.op_string = '-mul 0 -add 147 -mas'
        dof.inputs.in_file2 = mask_brain
        dof.inputs.out_file = 'FEtdof_t1_2_anat_10Hz_r{0}.nii.gz'.format(i + 1)
        dof.run()

        FEtdof_t1_2_anat.append(
            os.path.abspath('FEtdof_t1_2_anat_10Hz_r{0}.nii.gz'.format(i + 1)))

    # merge the per-run copes and tdof volumes along time
    merge = fsl.Merge()
    merge.inputs.dimension = 't'
    merge.inputs.in_files = copes1_2_anat
    merge.inputs.merged_file = 'copes1_2_anat_10Hz.nii.gz'
    merge.run()
    merge.inputs.in_files = FEtdof_t1_2_anat
    merge.inputs.merged_file = 'dofs_t1_2_anat_10Hz.nii.gz'
    merge.run()

    copes1_2_anat = os.path.abspath('copes1_2_anat_10Hz.nii.gz')
    dofs_t1_2_anat = os.path.abspath('dofs_t1_2_anat_10Hz.nii.gz')
    return copes1_2_anat, dofs_t1_2_anat
def apply_all_corrections(name='UnwarpArtifacts'):
    """
    Combines two lists of linear transforms with the deformation field
    map obtained typically after the SDC process.
    Additionally, computes the corresponding bspline coefficients and
    the map of determinants of the jacobian.
    """
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_sdc', 'in_hmc',
                                                      'in_ecc', 'in_dwi']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_warp',
                                                       'out_coeff',
                                                       'out_jacobian']),
                         name='outputnode')

    warps = pe.MapNode(fsl.ConvertWarp(relwarp=True),
                       iterfield=['premat', 'postmat'],
                       name='ConvertWarp')

    selref = pe.Node(niu.Select(index=[0]), name='Reference')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    unwarp = pe.MapNode(fsl.ApplyWarp(), iterfield=['in_file', 'field_file'],
                        name='UnwarpDWIs')

    coeffs = pe.MapNode(fsl.WarpUtils(out_format='spline'),
                        iterfield=['in_file'], name='CoeffComp')
    jacobian = pe.MapNode(fsl.WarpUtils(write_jacobian=True),
                          iterfield=['in_file'], name='JacobianComp')
    jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'),
                         iterfield=['in_file', 'operand_files'],
                         name='ModulateDWIs')

    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, warps, [('in_sdc', 'warp1'),
                            ('in_hmc', 'premat'),
                            ('in_ecc', 'postmat'),
                            ('in_dwi', 'reference')]),
        (inputnode, split, [('in_dwi', 'in_file')]),
        (split, selref, [('out_files', 'inlist')]),
        (warps, unwarp, [('out_file', 'field_file')]),
        (split, unwarp, [('out_files', 'in_file')]),
        (selref, unwarp, [('out', 'ref_file')]),
        (selref, coeffs, [('out', 'reference')]),
        (warps, coeffs, [('out_file', 'in_file')]),
        (selref, jacobian, [('out', 'reference')]),
        (coeffs, jacobian, [('out_file', 'in_file')]),
        (unwarp, jacmult, [('out_file', 'in_file')]),
        (jacobian, jacmult, [('out_jacobian', 'operand_files')]),
        (jacmult, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_files')]),
        (warps, outputnode, [('out_file', 'out_warp')]),
        (coeffs, outputnode, [('out_file', 'out_coeff')]),
        (jacobian, outputnode, [('out_jacobian', 'out_jacobian')]),
        (merge, outputnode, [('merged_file', 'out_file')])
    ])
    return wf
def create_2lvl_rand(name="group_randomize", mask=None, iters=5000): import nipype.interfaces.fsl as fsl import nipype.pipeline.engine as pe import nipype.interfaces.utility as niu wk = pe.Workflow(name=name) inputspec = pe.Node(niu.IdentityInterface(fields=[ 'copes', 'varcopes', 'template', "contrasts", "group", "regressors" ]), name='inputspec') model = pe.Node(fsl.MultipleRegressDesign(), name='l2model') wk.connect(inputspec, 'contrasts', model, "contrasts") wk.connect(inputspec, 'regressors', model, "regressors") wk.connect(inputspec, 'group', model, 'groups') mergecopes = pe.Node(fsl.Merge(dimension='t'), name='merge_copes') rand = pe.Node(fsl.Randomise(base_name='TwoSampleT', raw_stats_imgs=True, tfce=True, num_perm=iters), name='randomize') wk.connect(inputspec, 'copes', mergecopes, 'in_files') wk.connect(model, 'design_mat', rand, 'design_mat') wk.connect(model, 'design_con', rand, 'tcon') wk.connect(mergecopes, 'merged_file', rand, 'in_file') wk.connect(model, 'design_grp', rand, 'x_block_labels') if mask == None: bet = pe.Node(fsl.BET(mask=True, frac=0.3), name="template_brainmask") wk.connect(inputspec, 'template', bet, 'in_file') wk.connect(bet, 'mask_file', rand, 'mask') else: wk.connect(inputspec, 'template', rand, 'mask') outputspec = pe.Node(niu.IdentityInterface(fields=[ 'f_corrected_p_files', 'f_p_files', 'fstat_files', 't_corrected_p_files', 't_p_files', 'tstat_file', 'mask' ]), name='outputspec') wk.connect(rand, 'f_corrected_p_files', outputspec, 'f_corrected_p_files') wk.connect(rand, 'f_p_files', outputspec, 'f_p_files') wk.connect(rand, 'fstat_files', outputspec, 'fstat_files') wk.connect(rand, 't_corrected_p_files', outputspec, 't_corrected_p_files') wk.connect(rand, 't_p_files', outputspec, 't_p_files') wk.connect(rand, 'tstat_files', outputspec, 'tstat_file') if mask == None: wk.connect(bet, 'mask_file', outputspec, 'mask') else: wk.connect(inputspec, 'template', outputspec, 'mask') return wk
def create_nonbrain_meansignal(name='nonbrain_meansignal'):
    nonbrain_meansignal = Workflow(name=name)

    inputspec = Node(utility.IdentityInterface(fields=['func_file']),
                     name='inputspec')

    # Split raw 4D functional image into 3D niftis
    split_image = Node(fsl.Split(dimension='t', output_type='NIFTI'),
                       name='split_image')

    # Create a brain mask for each of the 3D images
    brain_mask = MapNode(fsl.BET(frac=0.3,
                                 mask=True,
                                 no_output=True,
                                 robust=True),
                         iterfield=['in_file'],
                         name='brain_mask')

    # Merge the 3D masks into a 4D nifti (producing a separate mask per volume)
    merge_mask = Node(fsl.Merge(dimension='t'), name='merge_mask')

    # Reverse the 4D brain mask, to produce a 4D non brain mask
    reverse_mask = Node(fsl.ImageMaths(op_string='-sub 1 -mul -1'),
                        name='reverse_mask')

    # Apply the mask on the raw functional data
    apply_mask = Node(fsl.ImageMaths(), name='apply_mask')

    # Highpass filter the non brain image
    highpass = create_highpass_filter(name='highpass')

    # Extract the mean signal from the non brain image
    mean_signal = Node(fsl.ImageMeants(), name='mean_signal')

    outputspec = Node(utility.IdentityInterface(fields=['nonbrain_regressor']),
                      name='outputspec')

    nonbrain_meansignal.connect(inputspec, 'func_file', split_image, 'in_file')
    nonbrain_meansignal.connect(split_image, 'out_files', brain_mask, 'in_file')
    nonbrain_meansignal.connect(brain_mask, 'mask_file', merge_mask, 'in_files')
    nonbrain_meansignal.connect(merge_mask, 'merged_file', reverse_mask, 'in_file')
    nonbrain_meansignal.connect(reverse_mask, 'out_file', apply_mask, 'mask_file')
    nonbrain_meansignal.connect(inputspec, 'func_file', apply_mask, 'in_file')
    nonbrain_meansignal.connect(apply_mask, 'out_file', highpass, 'inputspec.in_file')
    nonbrain_meansignal.connect(highpass, 'outputspec.filtered_file', mean_signal, 'in_file')
    nonbrain_meansignal.connect(mean_signal, 'out_file', outputspec, 'nonbrain_regressor')
    return nonbrain_meansignal
def dwi_flirt(name='DWICoregistration', excl_nodiff=False, flirt_param={}):
    """
    Generates a workflow for linear registration of dwi volumes
    """
    inputnode = pe.Node(niu.IdentityInterface(fields=['reference', 'in_file',
                                                      'ref_mask', 'in_xfms',
                                                      'in_bval']),
                        name='inputnode')

    initmat = pe.Node(niu.Function(input_names=['in_bval', 'in_xfms',
                                                'excl_nodiff'],
                                   output_names=['init_xfms'],
                                   function=_checkinitxfm),
                      name='InitXforms')
    initmat.inputs.excl_nodiff = excl_nodiff
    dilate = pe.Node(fsl.maths.MathsCommand(nan2zeros=True,
                                            args='-kernel sphere 5 -dilM'),
                     name='MskDilate')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    pick_ref = pe.Node(niu.Select(), name='Pick_b0')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias')
    enhb0 = pe.Node(niu.Function(input_names=['in_file', 'in_mask',
                                              'clip_limit'],
                                 output_names=['out_file'],
                                 function=enhance),
                    name='B0Equalize')
    enhb0.inputs.clip_limit = 0.015
    enhdw = pe.MapNode(niu.Function(input_names=['in_file', 'in_mask'],
                                    output_names=['out_file'],
                                    function=enhance),
                       name='DWEqualize',
                       iterfield=['in_file'])
    flirt = pe.MapNode(fsl.FLIRT(**flirt_param),
                       name='CoRegistration',
                       iterfield=['in_file', 'in_matrix_file'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0),
                       iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_xfms']),
                         name='outputnode')
    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, split, [('in_file', 'in_file')]),
        (inputnode, dilate, [('ref_mask', 'in_file')]),
        (inputnode, enhb0, [('ref_mask', 'in_mask')]),
        (inputnode, initmat, [('in_xfms', 'in_xfms'),
                              ('in_bval', 'in_bval')]),
        (inputnode, n4, [('reference', 'input_image'),
                         ('ref_mask', 'mask_image')]),
        (dilate, flirt, [('out_file', 'ref_weight'),
                         ('out_file', 'in_weight')]),
        (n4, enhb0, [('output_image', 'in_file')]),
        (split, enhdw, [('out_files', 'in_file')]),
        (dilate, enhdw, [('out_file', 'in_mask')]),
        (enhb0, flirt, [('out_file', 'reference')]),
        (enhdw, flirt, [('out_file', 'in_file')]),
        (initmat, flirt, [('init_xfms', 'in_matrix_file')]),
        (flirt, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_files')]),
        (merge, outputnode, [('merged_file', 'out_file')]),
        (flirt, outputnode, [('out_matrix_file', 'out_xfms')])
    ])
    return wf
def create_realign_flow(name='realign'):
    """Realign a time series to the middle volume using spline interpolation

    Uses MCFLIRT to realign the time series and ApplyWarp to apply the rigid
    body transformations using spline interpolation (unknown order).

    Example
    -------
    >>> wf = create_realign_flow()
    >>> wf.inputs.inputspec.func = 'f3.nii'
    >>> wf.run() # doctest: +SKIP
    """
    realignflow = pe.Workflow(name=name)
    inputnode = pe.Node(interface=util.IdentityInterface(fields=['func']),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['realigned_file', 'rms_files', 'par_file']),
        name='outputspec')
    start_dropper = pe.Node(util.Function(
        input_names=['in_vol_fn', 'n_frames'],
        output_names=['out_fn'],
        function=remove_first_n_frames),
        name='start_dropper')
    start_dropper.inputs.n_frames = 5

    realigner = pe.Node(fsl.MCFLIRT(save_mats=True, stats_imgs=True,
                                    save_rms=True, save_plots=True),
                        name='realigner')
    splitter = pe.Node(fsl.Split(dimension='t'), name='splitter')
    warper = pe.MapNode(fsl.ApplyWarp(interp='spline'),
                        iterfield=['in_file', 'premat'],
                        name='warper')
    joiner = pe.Node(fsl.Merge(dimension='t'), name='joiner')

    realignflow.connect(inputnode, 'func', start_dropper, 'in_vol_fn')
    realignflow.connect(start_dropper, 'out_fn', realigner, 'in_file')
    realignflow.connect(start_dropper, ('out_fn', select_volume, 'middle'),
                        realigner, 'ref_vol')
    realignflow.connect(realigner, 'out_file', splitter, 'in_file')
    realignflow.connect(realigner, 'mat_file', warper, 'premat')
    realignflow.connect(realigner, 'variance_img', warper, 'ref_file')
    realignflow.connect(splitter, 'out_files', warper, 'in_file')
    realignflow.connect(warper, 'out_file', joiner, 'in_files')
    realignflow.connect(joiner, 'merged_file', outputnode, 'realigned_file')
    realignflow.connect(realigner, 'rms_files', outputnode, 'rms_files')
    realignflow.connect(realigner, 'par_file', outputnode, 'par_file')
    return realignflow
def create_eddy_correct_pipeline(name="eddy_correct"): """Creates a pipeline that replaces eddy_correct script in FSL. It takes a series of diffusion weighted images and linearly corregisters them to one reference image. Example ------- >>> nipype_eddycorrect = create_eddy_correct_pipeline("nipype_eddycorrect") >>> nipype_eddycorrect.inputs.inputnode.in_file = 'diffusion.nii' >>> nipype_eddycorrect.inputs.inputnode.ref_num = 0 >>> nipype_eddycorrect.run() # doctest: +SKIP Inputs:: inputnode.in_file inputnode.ref_num Outputs:: outputnode.eddy_corrected """ inputnode = pe.Node( interface=util.IdentityInterface(fields=["in_file", "ref_num"]), name="inputnode") pipeline = pe.Workflow(name=name) split = pe.Node(fsl.Split(dimension='t'), name="split") pipeline.connect([(inputnode, split, [("in_file", "in_file")])]) pick_ref = pe.Node(util.Select(), name="pick_ref") pipeline.connect([(split, pick_ref, [("out_files", "inlist")]), (inputnode, pick_ref, [("ref_num", "index")])]) coregistration = pe.MapNode(fsl.FLIRT(no_search=True, padding_size=1), name="coregistration", iterfield=["in_file"]) pipeline.connect([(split, coregistration, [("out_files", "in_file")]), (pick_ref, coregistration, [("out", "reference")])]) merge = pe.Node(fsl.Merge(dimension="t"), name="merge") pipeline.connect([(coregistration, merge, [("out_file", "in_files")])]) outputnode = pe.Node( interface=util.IdentityInterface(fields=["eddy_corrected"]), name="outputnode") pipeline.connect([(merge, outputnode, [("merged_file", "eddy_corrected")]) ]) return pipeline
def generate_common_mask(inputs):
    mask_list = []
    for subj in inputs:
        mask = os.path.join(OUTPUT, 'stage1', 'mask_idc_' + subj + '.nii.gz')
        mask_list.append(mask)

    allFile = os.path.join(OUTPUT, 'stage1', 'maskALL.nii.gz')
    # NB: `subj` here is whatever the loop above left it as, i.e. the last
    # subject, so the command log is filed under that subject's id.
    cmd_out = os.path.join(OUTPUT, 'stage1',
                           'stage1_maskall_idc_' + subj + '.out')
    fslmerge = fsl.Merge(dimension='t',
                         terminal_output='stream',
                         in_files=mask_list,
                         merged_file=allFile,
                         output_type='NIFTI_GZ')
    write_cmd_out(fslmerge.cmdline, cmd_out)
    fslmerge.run()

    # the common mask keeps only voxels present in every subject's mask
    oFile = os.path.join(OUTPUT, 'stage1', 'mask.nii.gz')
    fslmaths = fsl.ImageMaths(in_file=allFile, op_string='-Tmin',
                              out_file=oFile)
    fslmaths.run()
def warp_CBF_map(self, CBF_dir, CBF_list):
    """Apply warp to the CBF map and smooth."""
    try:
        # create the pool of threads
        for i in range(self.procs_):
            t = threading.Thread(target=self.CBF_modulation_, args=[CBF_dir])
            t.daemon = True
            t.start()
        # Stack the items
        for item in CBF_list:
            self.queue_CBF_.put(item)
        # block until all tasks are done
        self.queue_CBF_.join()

        # 4D image with modulated CBF
        CBF_modulated_template_4D = os.path.join(
            self.ana_dir_, "CBF_modulated_template_4D.nii.gz")

        merger = fsl.Merge()
        merger.inputs.in_files = self.CBF_modulated_template_
        merger.inputs.dimension = 't'
        merger.inputs.output_type = 'NIFTI_GZ'
        merger.inputs.merged_file = CBF_modulated_template_4D
        merger.run()

        # Smooth the 4D image
        for sigma in [2, 3, 4]:
            CBF_mod_smooth_4D = os.path.join(
                self.ana_dir_,
                "CBF_modulated_template_4D_%s_sigma.nii.gz" % sigma)

            maths = fsl.ImageMaths()
            maths.inputs.in_file = CBF_modulated_template_4D
            maths.inputs.op_string = "-fmean -kernel gauss %s" % sigma
            maths.inputs.out_file = CBF_mod_smooth_4D
            maths.run()
    # IOError must be caught before the generic Exception handler,
    # otherwise its branch is unreachable
    except IOError as e:
        print("I/O error({0}): {1}".format(e.errno, e.strerror))
        quit(-1)
    except Exception as inst:
        print(inst)
        _log.error(inst)
        quit(-1)
    except:
        print("Unexpected error:", sys.exc_info()[0])
        quit(-1)
def merge_phases(in_file: Path, phasediff: Path, merged: Path):
    """
    Combine two images into one 4D file

    Arguments:
        in_file {Path} -- [path to 4D, AP oriented file]
        phasediff {Path} -- [path to the opposite-phase, PA oriented file]
        merged {Path} -- [path for the merged output file]
    """
    # take the first volume of the AP series as its b0 image
    AP_b0 = index_img(str(in_file), 0)
    AP_file = merged.parent / f"AP_b0{FSLOUTTYPE}"
    nib.save(AP_b0, str(AP_file))
    merger = fsl.Merge()
    merger.inputs.in_files = [AP_file, phasediff]
    merger.inputs.dimension = "t"
    merger.inputs.merged_file = merged
    return merger
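# A minimal usage sketch (hypothetical paths). Note the function returns the
# configured fsl.Merge interface without running it, so call .run() yourself:
from pathlib import Path

merger = merge_phases(Path('sub-01_dir-AP_dwi.nii.gz'),
                      Path('sub-01_dir-PA_epi.nii.gz'),
                      Path('merged_b0s.nii.gz'))
print(merger.cmdline)
merger.run()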
def fsl_SeparateSliceMoCo(input_file, par_folder):
    # scale Nifti data by factor 10
    dataName = os.path.basename(input_file).split('.')[0]
    fslPath = scaleBy10(input_file, inv=False)
    mySplit = fsl.Split(in_file=fslPath, dimension='z', out_base_name=dataName)
    print(mySplit.cmdline)
    mySplit.run()
    os.remove(fslPath)

    # separate ref and src volume in slices
    sliceFiles = findSlicesData(os.getcwd(), dataName)
    # refFiles = findSlicesData(os.getcwd(), 'ref')
    print('For all slices ... ')

    # start to correct motions slice by slice
    for i in range(len(sliceFiles)):
        slc = sliceFiles[i]
        # ref = refFiles[i]  # take epi as ref
        output_file = os.path.join(par_folder, os.path.basename(slc))
        myMCFLIRT = fsl.preprocess.MCFLIRT(in_file=slc, out_file=output_file,
                                           save_plots=True,
                                           terminal_output='none')
        print(myMCFLIRT.cmdline)
        myMCFLIRT.run()
        os.remove(slc)
        # os.remove(ref)

    # merge slices to a single volume
    mcf_sliceFiles = findSlicesData(par_folder, dataName)
    output_file = os.path.join(
        os.path.dirname(input_file),
        os.path.basename(input_file).split('.')[0]) + '_mcf.nii.gz'
    myMerge = fsl.Merge(in_files=mcf_sliceFiles,
                        dimension='z',
                        merged_file=output_file)
    print(myMerge.cmdline)
    myMerge.run()

    for slc in mcf_sliceFiles:
        os.remove(slc)

    # unscale result data by factor 10^(-1)
    output_file = scaleBy10(output_file, inv=True)

    return output_file
def generate_mask(self, **kwargs):
    self.mask_list = []
    parallel = False
    ncores = 2
    for i in kwargs.keys():
        if i == 'parallel':
            if kwargs[i]:
                parallel = True
        elif i == 'ncores':
            try:
                ncores = int(kwargs[i])
            except (TypeError, ValueError):
                print('number of cores not specified properly..', ncores)

    def dual_reg_mask(subj):
        iFile = os.path.join(os.path.dirname(self.indir), subj,
                             'idc_' + subj + self.featdir_sfix,
                             self.ff_data_name)
        oFile = os.path.join(self.outdir, 'stage1',
                             'mask_idc_' + subj + '.nii.gz')
        self.mask_list.append(oFile)
        fslmaths = fsl.ImageMaths(in_file=iFile, op_string='-Tstd -bin',
                                  out_file=oFile, output_type='NIFTI_GZ',
                                  out_data_type='char')
        fslmaths.run()

    if not parallel:
        for subj in self.subjects:
            dual_reg_mask(subj)
    # else:
    #     pool = Pool(processes=ncores)
    #     pool.map(dual_reg_mask, self.subjects)

    # once the masks are all made, then make the average:
    allFile = os.path.join(self.outdir, 'stage1', 'maskALL.nii.gz')
    fslmerge = fsl.Merge(dimension='t',
                         terminal_output='stream',
                         in_files=self.mask_list,
                         merged_file=allFile,
                         output_type='NIFTI_GZ')
    fslmerge.run()

    oFile = os.path.join(self.outdir, 'stage1', 'mask.nii.gz')
    fslmaths = fsl.ImageMaths(in_file=allFile, op_string='-Tmin',
                              out_file=oFile)
    fslmaths.run()
def merge_and_mean(name='mm'):
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_files']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['merged', 'mean']),
                         name='outputnode')

    merge = pe.MapNode(fsl.Merge(dimension='z'), name='Merge',
                       iterfield=['in_files'])
    mean = pe.MapNode(fsl.ImageMaths(op_string='-Tmean'), name='Mean',
                      iterfield=['in_file'])

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, merge, [(('in_files', transpose), 'in_files')]),
                (merge, mean, [('merged_file', 'in_file')]),
                (merge, outputnode, [('merged_file', 'merged')]),
                (mean, outputnode, [('out_file', 'mean')])])
    return wf
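# The `transpose` helper used above is not shown; presumably it regroups a
# list of per-subject lists into per-index lists before merging (a sketch,
# not the original code):
def transpose(in_lists):
    # [[a0, a1], [b0, b1]] -> [[a0, b0], [a1, b1]]
    return [list(grouped) for grouped in zip(*in_lists)]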
def varcopes1_2_anat_func(fixed, varcope1_10Hz_r1, varcope1_10Hz_r2,
                          varcope1_10Hz_r3, func_2_anat_trans_10Hz_r1,
                          func_2_anat_trans_10Hz_r2, func_2_anat_trans_10Hz_r3):
    import os
    import re
    import nipype.interfaces.ants as ants
    import nipype.interfaces.fsl as fsl

    cwd = os.getcwd()
    varcopes1 = [varcope1_10Hz_r1, varcope1_10Hz_r2, varcope1_10Hz_r3]
    trans = [
        func_2_anat_trans_10Hz_r1, func_2_anat_trans_10Hz_r2,
        func_2_anat_trans_10Hz_r3
    ]
    varcopes1_2_anat = []
    for i in range(len(varcopes1)):
        moving = varcopes1[i]
        transform = trans[i]

        # resample each run's varcope into anatomical space
        ants_apply = ants.ApplyTransforms()
        ants_apply.inputs.dimension = 3
        ants_apply.inputs.input_image = moving
        ants_apply.inputs.reference_image = fixed
        ants_apply.inputs.transforms = transform
        ants_apply.inputs.output_image = 'varcope1_2_anat_10Hz_r{0}.nii.gz'.format(i + 1)
        ants_apply.run()

        varcopes1_2_anat.append(
            os.path.abspath('varcope1_2_anat_10Hz_r{0}.nii.gz'.format(i + 1)))

    # merge the per-run varcopes along time
    merge = fsl.Merge()
    merge.inputs.dimension = 't'
    merge.inputs.in_files = varcopes1_2_anat
    merge.inputs.merged_file = 'varcopes1_2_anat_10Hz.nii.gz'
    merge.run()

    varcopes1_2_anat = os.path.abspath('varcopes1_2_anat_10Hz.nii.gz')
    return varcopes1_2_anat
def _run_interface(self, runtime):
    if len(self.inputs.in_files) == 1:
        self._results['out_file'] = self.inputs.in_files[0]
        self._results['out_avg'] = self.inputs.in_files[0]
        # TODO: generate identity out_mats and zero-filled out_movpar
        return runtime

    magmrg = fsl.Merge(dimension='t', in_files=self.inputs.in_files)
    mcflirt = fsl.MCFLIRT(cost='normcorr', save_mats=True, save_plots=True,
                          ref_vol=0,
                          in_file=magmrg.run().outputs.merged_file)
    mcres = mcflirt.run()
    self._results['out_mats'] = mcres.outputs.mat_file
    self._results['out_movpar'] = mcres.outputs.par_file
    self._results['out_file'] = mcres.outputs.out_file

    mean = fsl.MeanImage(dimension='T', in_file=mcres.outputs.out_file)
    self._results['out_avg'] = mean.run().outputs.out_file

    return runtime
def _multiple_pe_hmc(in_files, in_movpar, in_ref=None):
    """
    This function assumes we are dealing with a multiple-PE (phase encoding)
    input if it finds several files in in_files.

    If we have several images with various PE directions, it will compute the
    HMC parameters between them using an embedded workflow.

    It just forwards the two inputs otherwise.
    """
    import os
    from nipype.interfaces import fsl
    from nipype.interfaces import ants

    if len(in_files) == 1:
        out_file = in_files[0]
        out_movpar = in_movpar
    else:
        if in_ref is None:
            in_ref = 0

        # Head motion correction
        fslmerge = fsl.Merge(dimension='t', in_files=in_files)
        hmc = fsl.MCFLIRT(ref_vol=in_ref, save_mats=True, save_plots=True)
        hmc.inputs.in_file = fslmerge.run().outputs.merged_file
        hmc_res = hmc.run()
        out_file = hmc_res.outputs.out_file
        out_movpar = hmc_res.outputs.par_file

    mean = fsl.MeanImage(dimension='T', in_file=out_file)
    inu = ants.N4BiasFieldCorrection(
        dimension=3, input_image=mean.run().outputs.out_file)
    inu_res = inu.run()
    out_ref = inu_res.outputs.output_image
    bet = fsl.BET(frac=0.6, mask=True, in_file=out_ref)
    out_mask = bet.run().outputs.mask_file

    return (out_file, out_ref, out_mask, out_movpar)
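# A minimal usage sketch (hypothetical fieldmap EPI files): two opposed-PE
# series are merged and motion-corrected, and a bias-corrected, brain-masked
# reference is derived from their temporal mean.
out_file, out_ref, out_mask, out_movpar = _multiple_pe_hmc(
    ['sub-01_dir-AP_epi.nii.gz', 'sub-01_dir-PA_epi.nii.gz'],
    in_movpar=None)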
def average_template_(self, Template_4D, List, Template):
    """Merge the list into a 4D image; average the 4D image into a 3D image;
    flip the image and average the flipped and unflipped images."""
    try:
        # merge tissues in a 4D file
        merger = fsl.Merge()
        merger.inputs.in_files = List
        merger.inputs.dimension = 't'
        merger.inputs.output_type = 'NIFTI_GZ'
        merger.inputs.merged_file = Template_4D
        merger.run()

        # average over frames
        maths = fsl.ImageMaths(in_file=Template_4D,
                               op_string='-Tmean',
                               out_file=Template)
        maths.run()

        # Flip the frames
        swap = fsl.SwapDimensions()
        swap.inputs.in_file = Template
        swap.inputs.new_dims = ("-x", "y", "z")
        swap.inputs.out_file = "%s_flipped.nii.gz" % Template[:-7]
        swap.run()

        # average the flipped and unflipped images (the original added
        # Template[:-7], i.e. the unflipped image, to itself; per the
        # docstring the flipped file is the intended operand)
        maths = fsl.ImageMaths(in_file=Template,
                               op_string='-add %s_flipped.nii.gz -div 2' % Template[:-7],
                               out_file=Template)
        maths.run()
    # IOError must precede the generic Exception handler to be reachable
    except IOError as e:
        print("I/O error({0}): {1}".format(e.errno, e.strerror))
        quit(-1)
    except Exception as inst:
        print(inst)
        _log.error(inst)
        quit(-1)
    except:
        print("Unexpected error:", sys.exc_info()[0])
        quit(-1)
def merge_and_mean(muscle_heatmaps, heatmap_concatenated, heatmap_group_average):
    # muscle_heatmaps is a list with two values per entry: first is the tag,
    # second is the location of a warped heatmap for the muscle for that subject
    heatmap_list = list()
    for list_entry in muscle_heatmaps:
        heatmap_list.append(list_entry[1])

    merge_heatmaps = fsl.Merge()
    merge_heatmaps.inputs.in_files = heatmap_list
    merge_heatmaps.inputs.dimension = 't'
    merge_heatmaps.inputs.merged_file = heatmap_concatenated
    merge_heatmaps.inputs.output_type = 'NIFTI_GZ'
    # parent_logger.critical(merge_heatmaps.cmdline)
    merge_results = merge_heatmaps.run()

    mean_heatmap = fsl.MeanImage()
    mean_heatmap.inputs.in_file = heatmap_concatenated
    mean_heatmap.inputs.dimension = 'T'
    mean_heatmap.inputs.out_file = heatmap_group_average
    mean_heatmap.inputs.output_type = 'NIFTI_GZ'
    # parent_logger.critical(mean_heatmap.cmdline)
    mean_result = mean_heatmap.run()
def fsl_merge(in_files=traits.Undefined, dimension='t'):
    """Merge the NifTI files in `in_files` in the given `dimension`.
    This uses `fslmerge`.

    Parameters
    ----------
    in_files: list of str
        Paths to the files to merge.

    dimension: str
        Character indicating the merging dimension.
        Choices: 't', 'x', 'y', 'z'

    Returns
    -------
    merger: fsl.Merge
    """
    merger = fsl.Merge()
    merger.inputs.dimension = dimension
    merger.inputs.output_type = "NIFTI_GZ"
    merger.inputs.in_files = in_files

    return merger
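# A minimal usage sketch (hypothetical file names). The interface is returned
# without being run, so set any remaining inputs and call .run() explicitly:
merger = fsl_merge(in_files=['vol1.nii.gz', 'vol2.nii.gz'], dimension='t')
merger.inputs.merged_file = 'merged.nii.gz'
result = merger.run()
print(result.outputs.merged_file)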
def dualreg4d(self, dr_dir, dr_pfix, ics, oDir):
    for ic in ics:
        print('writing out 4d file for component: ', ic)
        fourDlist = []
        for s in range(0, self.subj_list.shape[0]):
            subj = str(int(self.subj_list[s][0]))
            pe_file = os.path.join(
                dr_dir, 'stage2',
                dr_pfix + subj + '_ic' + str(ic) + '.nii.gz')
            if not os.path.exists(pe_file):
                print('CANNOT FIND filtered func data for :', subj)
                print('Looked here: ', pe_file)
                print('Cannot continue...must exit...')
                sys.exit(0)
            fourDlist.append(pe_file)
        oFile = os.path.join(
            oDir, 'dr_stage2_merged_pe_ic' + str(ic) + '.nii.gz')
        fslmerge = fsl.Merge(dimension='t',
                             terminal_output='stream',
                             in_files=fourDlist,
                             merged_file=oFile,
                             output_type='NIFTI_GZ')
        fslmerge.run()