def test_despike():
    """Nose-style generated test: check trait metadata on afni.Despike inputs.

    Yields one (assert_equal, actual, expected) triple per trait/metadata pair.
    """
    expected_traits = {
        'args': {'argstr': '%s'},
        'environ': {'usedefault': True},
        'ignore_exception': {'usedefault': True},
        'in_file': {'argstr': '%s', 'mandatory': True},
        'out_file': {'argstr': '-prefix %s'},
        'outputtype': {},
    }
    interface = afni.Despike()
    traits = interface.inputs.traits()
    for trait_name, meta in expected_traits.items():
        for meta_name, expected in meta.items():
            yield assert_equal, getattr(traits[trait_name], meta_name), expected
def despike(self, fileobj=None, out_file=None, args=None, suffix=None):
    """Run AFNI 3dDespike on *fileobj*, writing the result to *out_file*.

    Parameters
    ----------
    fileobj : str or models.BIDSImageFile, optional
        Input image; resolved through ``self.FuncHandler``.
    out_file : str, optional
        Output path; resolved through ``self.FuncHandler``.
    args : str, optional
        Extra command-line flags forwarded to 3dDespike for this call only.
    suffix : str, optional
        Suffix forwarded to ``self.FuncHandler`` when deriving file names.
    """
    # Resolve input/output file names (handles BIDS objects and temp suffixes).
    fileobj, out_file = self.FuncHandler(fileobj, out_file, suffix=suffix)

    # Terminal flags wanted on every invocation across the whole script
    # (e.g. "-overwrite") can be hard-coded into args_in here; per-call flags
    # should instead be passed through the `args` argument.
    args_in = ""
    if args is not None:
        args_in += args

    # https://nipype.readthedocs.io/en/latest/interfaces/generated/interfaces.afni/preprocess.html#despike
    afni.Despike(in_file=fileobj, out_file=out_file, args=args_in).run()

    # Remove intermediate files flagged as temporary.
    # Fixed: use isinstance() rather than `type(...) ==` for the type check.
    if isinstance(fileobj, models.BIDSImageFile):
        fileobj = os.path.join(self._output_dir, fileobj.filename)
    if "_desc-temp" in fileobj:
        os.remove(fileobj)
def init_bold_main_wf(opts, inho_cor_only=False, name='bold_main_wf'):
    """
    This workflow controls the functional preprocessing stages of the pipeline
    when both functional and anatomical images are provided.

    **Parameters**

        opts
            parser options for preprocess
        inho_cor_only
            whether to run the bias correction steps, or further processing steps.

    **Inputs**

        bold
            Input BOLD series NIfTI file
        coreg_anat
            Anatomical reference for BOLD alignment
        coreg_mask
            Brain mask for anatomical reference
        WM_mask / CSF_mask / vascular_mask / labels
            masks and anatomical labels inherited from the common space registration
        unbiased_to_atlas_affine / unbiased_to_atlas_warp
            affine and non-linear transforms from the dataset template space to
            the commonspace space
        native_to_unbiased_affine / native_to_unbiased_warp
            affine and non-linear transforms from the subject anatomical to the
            dataset template space
        commonspace_ref
            commonspace anatomical template

    **Outputs**

        input_bold
            The provided input BOLD file
        bold_ref
            Initial EPI median volume subsequently used as 3D reference EPI volume
        motcorr_params
            motion parameters file provided from antsMotionCorr
        init_denoise
            Corrected 3D ref EPI after initial correction step
        denoise_mask
            resampled mask used for final denoising
        corrected_EPI
            3D reference EPI volume after bias field correction
        output_warped_bold
            Bias field corrected 3D EPI volume warped to the anatomical space
        bold_to_anat_affine / bold_to_anat_warp / bold_to_anat_inverse_warp
            transforms between the EPI space and the anatomical space
        resampled_bold
            Original BOLD timeseries resampled through motion realignment and
            susceptibility distortion correction based on registration to the
            anatomical image
        resampled_ref_bold
            3D median EPI volume from the resampled native BOLD timeseries
        confounds_csv
            .csv file with measured confound timecourses, including global
            signal, WM signal, CSF signal, 6 rigid body motion parameters +
            their first temporal derivate + the 12 parameters squared (24
            motion parameters), and aCompCorr timecourses
        FD_voxelwise
            Voxelwise framewise displacement (FD) measures that can be
            integrated to future confound regression. These measures are
            computed from antsMotionCorrStats.
        pos_voxelwise
            Voxel distancing across time based on rigid body movement
            parameters, which can be integrated for a voxelwise motion
            regression. These measures are computed from antsMotionCorrStats.
        FD_csv
            .csv file with global framewise displacement (FD) measures
        EPI_brain_mask / EPI_WM_mask / EPI_CSF_mask / EPI_labels
            masks and anatomical labels for the resampled bold
        commonspace_bold
            Motion and SDC-corrected EPI timeseries resampled into common space
            by applying transforms from the anatomical common space registration
        commonspace_mask / commonspace_WM_mask / commonspace_CSF_mask /
        commonspace_vascular_mask / commonspace_labels
            masks and anatomical labels for the commonspace bold
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'inho_cor_anat', 'inho_cor_mask', 'coreg_anat', 'coreg_mask',
        'native_to_commonspace_transform_list', 'native_to_commonspace_inverse_list',
        'commonspace_to_native_transform_list', 'commonspace_to_native_inverse_list',
        'commonspace_ref'
    ]), name="inputnode")
    # FIX: 'native_vascular_mask' was missing from this field list although the
    # native-space connect block below wires ('outputnode.vascular_mask',
    # 'native_vascular_mask'); connecting to an undeclared IdentityInterface
    # field makes nipype raise at graph-construction time.
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'input_bold', 'bold_ref', 'motcorr_params', 'init_denoise',
        'denoise_mask', 'corrected_EPI',
        'output_warped_bold', 'bold_to_anat_affine', 'bold_to_anat_warp',
        'bold_to_anat_inverse_warp',
        'native_bold', 'native_bold_ref', 'native_brain_mask',
        'native_WM_mask', 'native_CSF_mask', 'native_vascular_mask',
        'native_labels',
        'confounds_csv', 'FD_voxelwise', 'pos_voxelwise', 'FD_csv',
        'commonspace_bold', 'commonspace_mask', 'commonspace_WM_mask',
        'commonspace_CSF_mask', 'commonspace_vascular_mask',
        'commonspace_labels'
    ]), name='outputnode')

    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']),
                         name="boldbuffer")

    # this node will serve as a relay of outputs from the inho_cor main_wf to
    # the inputs for the rest of the main_wf for bold_only
    transitionnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_file', 'bold_ref', 'init_denoise', 'denoise_mask', 'corrected_EPI'
    ]), name="transitionnode")

    if inho_cor_only or (not opts.bold_only):
        bold_reference_wf = init_bold_reference_wf(opts=opts)
        inho_cor_wf = init_inho_correction_wf(opts=opts, image_type='EPI',
                                              name="bold_inho_cor_wf")

        if opts.apply_despiking:
            # optional AFNI 3dDespike run before anything else touches the EPI
            despike = pe.Node(afni.Despike(outputtype='NIFTI_GZ'),
                              name='despike')
            workflow.connect([
                (inputnode, despike, [('bold', 'in_file')]),
                (despike, boldbuffer, [('out_file', 'bold_file')]),
            ])
        else:
            workflow.connect([
                (inputnode, boldbuffer, [('bold', 'bold_file')]),
            ])

        if opts.detect_dummy:
            # use the dummy-trimmed file produced by the reference workflow
            workflow.connect([
                (bold_reference_wf, transitionnode, [
                    ('outputnode.bold_file', 'bold_file'),
                ]),
            ])
        else:
            workflow.connect([
                (boldbuffer, transitionnode, [
                    ('bold_file', 'bold_file'),
                ]),
            ])

        workflow.connect([
            (inputnode, inho_cor_wf, [
                ('inho_cor_anat', 'inputnode.anat_ref'),
                ('inho_cor_mask', 'inputnode.anat_mask'),
                ('bold', 'inputnode.name_source'),
            ]),
            (boldbuffer, bold_reference_wf, [
                ('bold_file', 'inputnode.bold_file'),
            ]),
            (bold_reference_wf, inho_cor_wf, [
                ('outputnode.ref_image', 'inputnode.target_img'),
            ]),
            (bold_reference_wf, transitionnode, [
                ('outputnode.ref_image', 'bold_ref'),
            ]),
            (inho_cor_wf, transitionnode, [
                ('outputnode.init_denoise', 'init_denoise'),
                ('outputnode.denoise_mask', 'denoise_mask'),
                ('outputnode.corrected', 'corrected_EPI'),
            ]),
        ])

    if inho_cor_only:
        return workflow

    bold_stc_wf = init_bold_stc_wf(opts=opts)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(opts=opts)

    bold_commonspace_trans_wf = init_bold_preproc_trans_wf(
        opts=opts, resampling_dim=opts.commonspace_resampling,
        name='bold_commonspace_trans_wf')
    bold_commonspace_trans_wf.inputs.inputnode.mask_transforms_list = []
    bold_commonspace_trans_wf.inputs.inputnode.mask_inverses = []

    bold_confs_wf = init_bold_confs_wf(opts=opts, name="bold_confs_wf")

    if not opts.bold_only:
        def commonspace_transforms(to_commonspace_transform_list,
                                   to_commonspace_inverse_list,
                                   bold_to_anat_warp, bold_to_anat_affine):
            # transforms_list,inverses
            return to_commonspace_transform_list + [
                bold_to_anat_warp, bold_to_anat_affine
            ], to_commonspace_inverse_list + [0, 0]
        bold_to_commonspace_transforms = pe.Node(
            Function(input_names=[
                'to_commonspace_transform_list',
                'to_commonspace_inverse_list',
                'bold_to_anat_warp', 'bold_to_anat_affine'
            ],
                output_names=[
                'to_commonspace_transform_list',
                'to_commonspace_inverse_list'
            ],
                function=commonspace_transforms),
            name='bold_to_commonspace_transforms')

        cross_modal_reg_wf = init_cross_modal_reg_wf(opts=opts)

        def SyN_coreg_transforms_prep(bold_to_anat_warp, bold_to_anat_affine):
            # transforms_list,inverses
            return [bold_to_anat_warp, bold_to_anat_affine], [0, 0]
        transforms_prep = pe.Node(Function(
            input_names=['bold_to_anat_warp', 'bold_to_anat_affine'],
            output_names=['transforms_list', 'inverses'],
            function=SyN_coreg_transforms_prep),
            name='transforms_prep')

        bold_native_trans_wf = init_bold_preproc_trans_wf(
            opts=opts, resampling_dim=opts.nativespace_resampling,
            name='bold_native_trans_wf')

        workflow.connect([
            (inputnode, cross_modal_reg_wf,
             [('coreg_anat', 'inputnode.anat_ref'),
              ('coreg_mask', 'inputnode.anat_mask')]),
            (inputnode, bold_native_trans_wf, [
                ('commonspace_to_native_transform_list',
                 'inputnode.mask_transforms_list'),
                ('commonspace_to_native_inverse_list',
                 'inputnode.mask_inverses'),
                ('bold', 'inputnode.name_source'),
            ]),
            (transitionnode, cross_modal_reg_wf, [
                ('corrected_EPI', 'inputnode.ref_bold_brain'),
                ('denoise_mask', 'inputnode.moving_mask'),
            ]),
            (cross_modal_reg_wf, outputnode, [
                ('outputnode.bold_to_anat_affine', 'bold_to_anat_affine'),
                ('outputnode.bold_to_anat_warp', 'bold_to_anat_warp'),
                ('outputnode.bold_to_anat_inverse_warp',
                 'bold_to_anat_inverse_warp'),
                ('outputnode.output_warped_bold', 'output_warped_bold'),
            ]),
            (cross_modal_reg_wf, transforms_prep, [
                ('outputnode.bold_to_anat_affine', 'bold_to_anat_affine'),
                ('outputnode.bold_to_anat_warp', 'bold_to_anat_warp'),
            ]),
            (transforms_prep, bold_native_trans_wf, [
                ('transforms_list', 'inputnode.transforms_list'),
                ('inverses', 'inputnode.inverses'),
            ]),
            (cross_modal_reg_wf, bold_native_trans_wf,
             [('outputnode.output_warped_bold', 'inputnode.ref_file')]),
            (cross_modal_reg_wf, bold_to_commonspace_transforms, [
                ('outputnode.bold_to_anat_affine', 'bold_to_anat_affine'),
                ('outputnode.bold_to_anat_warp', 'bold_to_anat_warp'),
            ]),
            (bold_hmc_wf, bold_native_trans_wf,
             [('outputnode.motcorr_params', 'inputnode.motcorr_params')]),
            (bold_native_trans_wf, bold_confs_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_ref', 'inputnode.ref_bold'),
                ('outputnode.brain_mask', 'inputnode.brain_mask'),
                ('outputnode.WM_mask', 'inputnode.WM_mask'),
                ('outputnode.CSF_mask', 'inputnode.CSF_mask'),
                ('outputnode.vascular_mask', 'inputnode.vascular_mask'),
            ]),
            (bold_native_trans_wf, outputnode, [
                ('outputnode.bold', 'native_bold'),
                ('outputnode.bold_ref', 'native_bold_ref'),
                ('outputnode.brain_mask', 'native_brain_mask'),
                ('outputnode.WM_mask', 'native_WM_mask'),
                ('outputnode.CSF_mask', 'native_CSF_mask'),
                ('outputnode.vascular_mask', 'native_vascular_mask'),
                ('outputnode.labels', 'native_labels'),
            ]),
        ])
    else:
        # bold_only: no anatomical coregistration; transforms pass straight
        # through an identity node instead of being composed with bold-to-anat
        bold_to_commonspace_transforms = pe.Node(
            niu.IdentityInterface(fields=[
                'to_commonspace_transform_list',
                'to_commonspace_inverse_list'
            ]), name="bold_to_commonspace_transforms")

        workflow.connect([
            (bold_commonspace_trans_wf, bold_confs_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_ref', 'inputnode.ref_bold'),
                ('outputnode.brain_mask', 'inputnode.brain_mask'),
                ('outputnode.WM_mask', 'inputnode.WM_mask'),
                ('outputnode.CSF_mask', 'inputnode.CSF_mask'),
                ('outputnode.vascular_mask', 'inputnode.vascular_mask'),
            ]),
        ])

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        (inputnode, bold_to_commonspace_transforms, [
            ('native_to_commonspace_transform_list',
             'to_commonspace_transform_list'),
            ('native_to_commonspace_inverse_list',
             'to_commonspace_inverse_list'),
        ]),
        (transitionnode, bold_stc_wf, [
            ('bold_file', 'inputnode.bold_file'),
        ]),
        (transitionnode, bold_hmc_wf, [
            ('bold_ref', 'inputnode.ref_image'),
        ]),
        (bold_hmc_wf, outputnode,
         [('outputnode.motcorr_params', 'motcorr_params')]),
        (transitionnode, outputnode, [
            ('bold_ref', 'bold_ref'),
            ('init_denoise', 'init_denoise'),
            ('denoise_mask', 'denoise_mask'),
            ('corrected_EPI', 'corrected_EPI'),
        ]),
        (bold_hmc_wf, bold_confs_wf, [
            ('outputnode.motcorr_params', 'inputnode.movpar_file'),
        ]),
        (bold_confs_wf, outputnode, [
            ('outputnode.confounds_csv', 'confounds_csv'),
            ('outputnode.FD_csv', 'FD_csv'),
            ('outputnode.FD_voxelwise', 'FD_voxelwise'),
            ('outputnode.pos_voxelwise', 'pos_voxelwise'),
        ]),
        (bold_to_commonspace_transforms, bold_commonspace_trans_wf, [
            ('to_commonspace_transform_list', 'inputnode.transforms_list'),
            ('to_commonspace_inverse_list', 'inputnode.inverses'),
        ]),
        (bold_hmc_wf, bold_commonspace_trans_wf,
         [('outputnode.motcorr_params', 'inputnode.motcorr_params')]),
        (inputnode, bold_commonspace_trans_wf, [
            ('bold', 'inputnode.name_source'),
            ('commonspace_ref', 'inputnode.ref_file'),
        ]),
        (bold_commonspace_trans_wf, outputnode, [
            ('outputnode.bold', 'commonspace_bold'),
            ('outputnode.brain_mask', 'commonspace_mask'),
            ('outputnode.WM_mask', 'commonspace_WM_mask'),
            ('outputnode.CSF_mask', 'commonspace_CSF_mask'),
            ('outputnode.vascular_mask', 'commonspace_vascular_mask'),
            ('outputnode.labels', 'commonspace_labels'),
        ]),
    ])

    if opts.apply_slice_mc:
        # slice-specific MC: HMC consumes the STC output and emits the
        # slice-corrected timeseries used for resampling
        workflow.connect([
            (bold_stc_wf, bold_hmc_wf,
             [('outputnode.stc_file', 'inputnode.bold_file')]),
            (bold_hmc_wf, bold_commonspace_trans_wf,
             [('outputnode.slice_corrected_bold', 'inputnode.bold_file')]),
        ])
        if not opts.bold_only:
            workflow.connect([
                (bold_hmc_wf, bold_native_trans_wf,
                 [('outputnode.slice_corrected_bold', 'inputnode.bold_file')]),
            ])
    else:
        workflow.connect([
            (transitionnode, bold_hmc_wf,
             [('bold_file', 'inputnode.bold_file')]),
            (bold_stc_wf, bold_commonspace_trans_wf,
             [('outputnode.stc_file', 'inputnode.bold_file')]),
        ])
        if not opts.bold_only:
            workflow.connect([
                (bold_stc_wf, bold_native_trans_wf,
                 [('outputnode.stc_file', 'inputnode.bold_file')]),
            ])

    return workflow
def hmc_afni(name='fMRI_HMC_afni', st_correct=False, despike=False,
             deoblique=False):
    """A head motion correction (HMC) workflow for functional scans"""
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_fd']), name='outputnode')

    drop_trs = pe.Node(afni.Calc(expr='a', outputtype='NIFTI_GZ'),
                       name='drop_trs')
    reorient = pe.Node(afni.Resample(
        orientation='RPI', outputtype='NIFTI_GZ'), name='reorient')
    get_mean_RPI = pe.Node(afni.TStat(
        options='-mean', outputtype='NIFTI_GZ'), name='get_mean_RPI')

    # calculate hmc parameters
    hmc = pe.Node(
        afni.Volreg(args='-Fourier -twopass', zpad=4, outputtype='NIFTI_GZ'),
        name='motion_correct')

    get_mean_motion = get_mean_RPI.clone('get_mean_motion')
    hmc_A = hmc.clone('motion_correct_A')
    hmc_A.inputs.md1d_file = 'max_displacement.1D'

    # Compute the frame-wise displacement
    calc_fd = pe.Node(niu.Function(
        function=fd_jenkinson, input_names=['in_file', 'rmax'],
        output_names=['out_fd']), name='calc_fd')

    workflow.connect([
        (inputnode, drop_trs, [('in_file', 'in_file_a'),
                               ('start_idx', 'start_idx'),
                               ('stop_idx', 'stop_idx')]),
        (inputnode, calc_fd, [('fd_radius', 'rmax')]),
        (reorient, get_mean_RPI, [('out_file', 'in_file')]),
        (reorient, hmc, [('out_file', 'in_file')]),
        (get_mean_RPI, hmc, [('out_file', 'basefile')]),
        (hmc, get_mean_motion, [('out_file', 'in_file')]),
        (reorient, hmc_A, [('out_file', 'in_file')]),
        (get_mean_motion, hmc_A, [('out_file', 'basefile')]),
        (hmc_A, outputnode, [('out_file', 'out_file')]),
        (hmc_A, calc_fd, [('oned_matrix_save', 'in_file')]),
        (calc_fd, outputnode, [('out_fd', 'out_fd')]),
    ])

    # Slice timing correction, despiking, and deoblique
    st_corr = pe.Node(afni.TShift(outputtype='NIFTI_GZ'), name='TimeShifts')
    deoblique_node = pe.Node(afni.Refit(deoblique=True), name='deoblique')
    despike_node = pe.Node(afni.Despike(outputtype='NIFTI_GZ'),
                           name='despike')

    # Assemble the optional preprocessing pipeline in its fixed order
    # (STC -> despike -> deoblique) between drop_trs and reorient, then wire
    # consecutive stages. This reproduces every enable/disable combination of
    # the three flags with a single connection loop.
    stages = [drop_trs]
    if st_correct:
        stages.append(st_corr)
    if despike:
        stages.append(despike_node)
    if deoblique:
        stages.append(deoblique_node)
    stages.append(reorient)

    for upstream, downstream in zip(stages[:-1], stages[1:]):
        workflow.connect(upstream, 'out_file', downstream, 'in_file')

    return workflow
def run(options):
    """Build the ME-ICA preprocessing workflow from *options*.

    Parameters
    ----------
    options : dict
        Mapping of configuration values; its keys become the fields of the
        ``inputspec`` identity node.

    Notes
    -----
    The original draft contained several hard errors that are fixed here:
    the workflow was created as ``merica_wf`` but used as ``meica_wf``;
    several ``pe.Node(...)`` calls were missing the comma before ``name=``;
    ``skull-stripped`` was a subtraction expression rather than a variable;
    ``runs``, ``despike`` and ``upstream`` were undefined; the obliquity node
    reused the name ``'get_cm'``; and one connect was duplicated (nipype
    raises on duplicate edges).
    """
    # FIXME: these directories are placeholders
    out_dir = os.path.join('option', '1')
    err_dir = os.path.join('option', '2')
    data_dir = os.path.join('option', '3')
    work_dir = os.path.join('something', 'else')

    # Workflow (single consistent name; was merica_wf/meica_wf mixed)
    meica_wf = pe.Workflow('meica_wf')
    meica_wf.base_dir = work_dir

    inputspec = pe.Node(util.IdentityInterface(fields=options.keys()),
                        name='inputspec')

    # Node: run_iterable.  `runs` was undefined in the original draft;
    # TODO(review): confirm the runs list really lives in `options`.
    runs = options.get('runs', [])
    run_iterable = pe.Node(util.IdentityInterface(fields=['run'],
                                                  mandatory_inputs=True),
                           name='run_iterable')
    run_iterable.iterables = ('run', runs)

    info = dict(mri_files=[['run']])

    # Create a datasource node to get the mri files
    datasource = pe.Node(nio.DataGrabber(infields=['run'],
                                         outfields=list(info.keys())),
                         name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.base_directory = abspath(data_dir)
    datasource.inputs.field_template = dict(mri_files='%s/func/*.nii.gz')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True
    meica_wf.connect(run_iterable, 'run', datasource, 'run')

    # Create a Function node to rename output files
    getsubs = pe.Node(util.Function(input_names=['run', 'mri_files'],
                                    output_names=['subs'],
                                    function=get_subs),
                      name='getsubs')
    getsubs.inputs.ignore_exception = False
    meica_wf.connect(run_iterable, 'run', getsubs, 'run')
    meica_wf.connect(datasource, 'mri_files', getsubs, 'mri_files')

    get_cm = pe.Node(util.Function(input_names=['fname'],
                                   output_names=['x', 'y', 'z'],
                                   function=find_CM),
                     name='get_cm')
    # was name='get_cm' — duplicate node names collide in one workflow
    get_obliquity = pe.Node(util.Function(input_names=['fname'],
                                          output_names=['angmerit'],
                                          function=check_obliquity),
                            name='get_obliquity')

    # NOTE(review): the original tested `get_obliquity.is_oblique`, an
    # attribute a Node does not have before running; obliquity is only known
    # after execution. Sourcing the flags from `options` instead —
    # TODO confirm against the caller.
    is_oblique = options.get('is_oblique', False)
    skull_stripped = options.get('skull_stripped', False)
    apply_despike = options.get('despike', False)

    if is_oblique:
        deoblique = pe.Node(afni.Warp(deoblique=True), name='deoblique')
        # TODO(review): `upstream` was undefined; assuming the anatomical
        # comes from inputspec — confirm the actual source node.
        meica_wf.connect(inputspec, 't1', deoblique, 'in_file')
        warpspeed = pe.Node(afni.Warp(args='-card2oblique -newgrid 1.0'),
                            name='warpspeed')

    if not skull_stripped:
        unifeyes = pe.Node(afni.Unifize(), name='unifeyes')
        if is_oblique:
            meica_wf.connect(deoblique, 'out_file', unifeyes, 'in_file')
        else:
            meica_wf.connect(inputspec, 't1', unifeyes, 'in_file')
        skullstrip = pe.Node(
            afni.SkullStrip(args='-shrink_fac_bot_lim 0.3 -orig_vol'),
            name='skullstrip')
        # TODO(review): unifeyes -> skullstrip was never connected upstream.
        autobots = pe.Node(afni.Autobox(), name='autobots')
        meica_wf.connect(skullstrip, 'out_file', autobots, 'in_file')

    # Moving on to functional preprocessing, be back later!
    if apply_despike:
        # renamed local to avoid shadowing the `despike` flag
        despike_node = pe.Node(afni.Despike(), name='despike')
        if not skull_stripped:
            meica_wf.connect(autobots, 'out_file', despike_node, 'in_file')
        else:
            meica_wf.connect(inputspec, 't1', despike_node, 'in_file')

    # duplicate connect removed — nipype errors on connecting the same
    # edge twice
    meica_wf.connect(run_iterable, 'run', get_cm, 'fname')
def fsl_run_level_wf(
    model,
    step,
    bids_dir,
    output_dir,
    work_dir,
    subject_id,
    database_path,
    smoothing_fwhm=None,
    smoothing_level=None,
    smoothing_type=None,
    use_rapidart=False,
    detrend_poly=None,
    align_volumes=None,
    smooth_autocorrelations=False,
    despike=False,
    name="fsl_run_level_wf",
):
    """Generate run level workflow for a given model.

    Builds an FSL first-level GLM workflow: BIDS file selection, optional
    despiking/realignment, optional RapidArt outlier detection, optional SUSAN
    smoothing, FEAT model generation, FILMGLS estimation, and collation/sinking
    of the resulting statistical maps.

    Parameters (as used by the visible code)
    ----------
    model : dict
        BIDS stats-model dict; "Name" names the scratch dir, optional
        "Input"/"Include" entities filter file selection.
    step : dict
        The model step to run; step["Level"] labels the node names.
    bids_dir, output_dir, work_dir : path-like
        Dataset root, derivative output root, and scratch directory.
    subject_id : str
        Subject whose runs are processed.
    database_path : str
        Pre-indexed pybids database path.
    smoothing_fwhm, smoothing_level, smoothing_type
        SUSAN smoothing parameters; smoothing runs when level is "l1"/"run";
        smoothing_type "inp" switches SUSAN to 2D (in-plane) smoothing.
    use_rapidart : bool
        Insert RapidArt artifact detection and fold outliers into the design.
    detrend_poly : int or None
        Polynomial detrending order; also triggers design-matrix correction.
    align_volumes : bool-ish
        Realign runs to a reference volume with MCFLIRT.
    smooth_autocorrelations : bool
        Enable FILMGLS prewhitening.
    despike : bool
        Run AFNI 3dDespike before realignment.
    name : str
        Workflow name.
    """
    bids_dir = Path(bids_dir)
    work_dir = Path(work_dir)
    workflow = pe.Workflow(name=name)
    level = step["Level"]
    # SUSAN smoothing dimensionality: 3D by default, 2D for in-plane ("inp")
    dimensionality = 3  # Nipype FSL.SUSAN Default
    if smoothing_type == "inp":
        dimensionality = 2
    workflow.__desc__ = ""
    (work_dir / model["Name"]).mkdir(exist_ok=True)
    # Entity filters restricting BIDS file selection to this subject
    include_entities = {}
    if "Input" in model:
        if "Include" in model["Input"]:
            include_entities = model["Input"]["Include"]
    include_entities.update({"subject": subject_id})
    getter = pe.Node(
        BIDSGet(
            database_path=database_path,
            fixed_entities=include_entities,
            align_volumes=align_volumes,
        ),
        name="func_select",
    )
    # Per-run model info (regressors, contrasts, metadata), one map per run
    get_info = pe.MapNode(
        GetRunModelInfo(model=step, detrend_poly=detrend_poly),
        iterfield=[
            "metadata_file", "regressor_file", "events_file", "entities"
        ],
        name=f"get_{level}_info",
    )
    # Optional AFNI 3dDespike applied per run before realignment
    despiker = pe.MapNode(
        afni.Despike(outputtype="NIFTI_GZ"),
        iterfield=["in_file"],
        name="despiker",
    )
    # Optional MCFLIRT realignment of each run to its reference volume
    realign_runs = pe.MapNode(
        fsl.MCFLIRT(output_type="NIFTI_GZ", interpolation="sinc"),
        iterfield=["in_file", "ref_file"],
        name="func_realign",
    )
    # Fan-in point: whichever preprocessing path ran, volumes land here
    wrangle_volumes = pe.MapNode(
        IdentityInterface(fields=["functional_file"]),
        iterfield=["functional_file"],
        name="wrangle_volumes",
    )
    specify_model = pe.MapNode(
        modelgen.SpecifyModel(high_pass_filter_cutoff=-1.0,
                              input_units="secs"),
        iterfield=["functional_runs", "subject_info", "time_repetition"],
        name=f"model_{level}_specify",
    )
    # Identity relay bundling everything the design/estimation stages need
    fit_model = pe.MapNode(
        IdentityInterface(
            fields=[
                "session_info", "interscan_interval", "contrasts",
                "functional_data"
            ],
            mandatory_inputs=True,
        ),
        iterfield=[
            "functional_data", "session_info", "interscan_interval",
            "contrasts"
        ],
        name=f"model_{level}_fit",
    )
    first_level_design = pe.MapNode(
        fsl.Level1Design(
            bases={"dgamma": {
                "derivs": False
            }},
            model_serial_correlations=False,
        ),
        iterfield=["session_info", "interscan_interval", "contrasts"],
        name=f"model_{level}_design",
    )
    generate_model = pe.MapNode(
        fsl.FEATModel(output_type="NIFTI_GZ"),
        iterfield=["fsf_file", "ev_files"],
        name=f"model_{level}_generate",
    )
    # FILMGLS estimation; prewhitening off by default, toggled below
    estimate_model = pe.MapNode(
        fsl.FILMGLS(
            threshold=0.0,  # smooth_autocorr=True
            output_type="NIFTI_GZ",
            results_dir="results",
            smooth_autocorr=False,
            autocorr_noestimate=True,
        ),
        iterfield=["design_file", "in_file", "tcon_file"],
        name=f"model_{level}_estimate",
    )
    if smooth_autocorrelations:
        # Turn on serial-correlation modelling and FILM prewhitening
        first_level_design.inputs.model_serial_correlations = True
        estimate_model.inputs.smooth_autocorr = True
        estimate_model.inputs.autocorr_noestimate = False
    # Convert z-stat maps to p-value maps (-ztop)
    calculate_p = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ", op_string="-ztop",
                       suffix="_pval"),
        iterfield=["in_file"],
        name=f"model_{level}_caculate_p",
    )
    # BIDS-derivatives path pattern for the statistical map outputs
    image_pattern = ("[sub-{subject}/][ses-{session}/]"
                     "[sub-{subject}_][ses-{session}_]"
                     "task-{task}_[acq-{acquisition}_]"
                     "[rec-{reconstruction}_][run-{run}_]"
                     "[echo-{echo}_][space-{space}_]contrast-{contrast}_"
                     "stat-{stat<effect|variance|z|p|t|F>}_statmap.nii.gz")
    # RapidArt motion/intensity outlier detection (used when use_rapidart)
    run_rapidart = pe.MapNode(
        ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            zintensity_threshold=3,
            norm_threshold=1,
            bound_by_brainmask=True,
            mask_type="file",
            parameter_source="FSL",
        ),
        iterfield=["realignment_parameters", "realigned_files", "mask_file"],
        name="rapidart_run",
    )
    # Folds RapidArt outliers back into the run info / contrast entities
    reshape_rapidart = pe.MapNode(
        Function(
            input_names=[
                "run_info", "functional_file", "outlier_file",
                "contrast_entities"
            ],
            output_names=["run_info", "contrast_entities"],
            function=utils.reshape_ra,
        ),
        iterfield=[
            "run_info", "functional_file", "outlier_file",
            "contrast_entities"
        ],
        name="reshape_rapidart",
    )
    # Mean and median images feed SUSAN's brightness threshold and usans
    mean_img = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ", op_string="-Tmean",
                       suffix="_mean"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_avgimg",
    )
    median_img = pe.MapNode(
        fsl.ImageStats(output_type="NIFTI_GZ", op_string="-k %s -p 50"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_medimg",
    )
    merge = pe.Node(Merge(2, axis="hstack"), name="smooth_merge")
    run_susan = pe.MapNode(
        fsl.SUSAN(output_type="NIFTI_GZ"),
        iterfield=["in_file", "brightness_threshold", "usans"],
        name="smooth_susan",
    )
    mask_functional = pe.MapNode(ApplyMask(),
                                 iterfield=["in_file", "mask_file"],
                                 name="mask_functional")
    # Exists solely to correct undesirable behavior of FSL
    # that results in loss of constant columns
    correct_matrices = pe.MapNode(
        Function(
            input_names=["design_matrix"],
            output_names=["design_matrix"],
            function=utils.correct_matrix,
        ),
        iterfield=["design_matrix"],
        run_without_submitting=True,
        name=f"correct_{level}_matrices",
    )
    # Gathers all per-run statistical maps + metadata into flat lists
    collate = pe.Node(
        MergeAll(
            fields=[
                "effect_maps",
                "variance_maps",
                "zscore_maps",
                "pvalue_maps",
                "tstat_maps",
                "contrast_metadata",
            ],
            check_lengths=True,
        ),
        name=f"collate_{level}",
    )
    # Tags each map list with its "stat" metadata entity
    collate_outputs = pe.Node(
        CollateWithMetadata(
            fields=[
                "effect_maps", "variance_maps", "zscore_maps", "pvalue_maps",
                "tstat_maps"
            ],
            field_to_metadata_map={
                "effect_maps": {
                    "stat": "effect"
                },
                "variance_maps": {
                    "stat": "variance"
                },
                "zscore_maps": {
                    "stat": "z"
                },
                "pvalue_maps": {
                    "stat": "p"
                },
                "tstat_maps": {
                    "stat": "t"
                },
            },
        ),
        name=f"collate_{level}_outputs",
    )
    plot_matrices = pe.MapNode(
        PlotMatrices(output_dir=output_dir, database_path=database_path),
        iterfield=["mat_file", "con_file", "entities", "run_info"],
        run_without_submitting=True,
        name=f"plot_{level}_matrices",
    )
    ds_contrast_maps = pe.MapNode(
        BIDSDataSink(base_directory=output_dir, path_patterns=image_pattern),
        iterfield=["entities", "in_file"],
        run_without_submitting=True,
        name=f"ds_{level}_contrast_maps",
    )
    wrangle_outputs = pe.Node(
        IdentityInterface(fields=["contrast_metadata", "contrast_maps"]),
        name=f"wrangle_{level}_outputs",
    )

    # Setup connections among nodes
    workflow.connect([(
        getter,
        get_info,
        [
            ("metadata_files", "metadata_file"),
            ("events_files", "events_file"),
            ("regressor_files", "regressor_file"),
            ("entities", "entities"),
        ],
    )])

    # Preprocessing routing: despike and/or realign, else pass through
    if align_volumes and despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, realign_runs, [("out_file", "in_file")]),
            (getter, realign_runs, [("reference_files", "ref_file")]),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif align_volumes and not despike:
        workflow.connect([
            (
                getter,
                realign_runs,
                [("functional_files", "in_file"),
                 ("reference_files", "ref_file")],
            ),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, wrangle_volumes, [("out_file", "functional_file")]),
        ])
    else:
        workflow.connect([(getter, wrangle_volumes,
                           [("functional_files", "functional_file")])])

    # Outlier handling: RapidArt rewrites run info; otherwise use it directly
    if use_rapidart:
        workflow.connect([
            (get_info, run_rapidart, [("motion_parameters",
                                       "realignment_parameters")]),
            (getter, run_rapidart, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                run_rapidart,
                [("functional_file", "realigned_files")],
            ),
            (
                run_rapidart,
                reshape_rapidart,
                [("outlier_files", "outlier_file")],
            ),
            (
                get_info,
                reshape_rapidart,
                [("run_info", "run_info"),
                 ("contrast_entities", "contrast_entities")],
            ),
            (wrangle_volumes, reshape_rapidart, [("functional_file",
                                                  "functional_file")]),
            (
                reshape_rapidart,
                specify_model,
                [("run_info", "subject_info")],
            ),
            (reshape_rapidart, plot_matrices, [("run_info", "run_info")]),
            (reshape_rapidart, collate, [("contrast_entities",
                                          "contrast_metadata")]),
        ])
    else:
        workflow.connect([
            (get_info, specify_model, [("run_info", "subject_info")]),
            (get_info, plot_matrices, [("run_info", "run_info")]),
            (
                get_info,
                collate,
                [("contrast_entities", "contrast_metadata")],
            ),
        ])

    # SUSAN smoothing when requested at run level; otherwise mask only
    if smoothing_level == "l1" or smoothing_level == "run":
        run_susan.inputs.fwhm = smoothing_fwhm
        run_susan.inputs.dimension = dimensionality
        estimate_model.inputs.mask_size = smoothing_fwhm
        workflow.connect([
            (wrangle_volumes, mean_img, [("functional_file", "in_file")]),
            (
                wrangle_volumes,
                median_img,
                [("functional_file", "in_file")],
            ),
            (getter, mean_img, [("mask_files", "mask_file")]),
            (getter, median_img, [("mask_files", "mask_file")]),
            (mean_img, merge, [("out_file", "in1")]),
            (median_img, merge, [("out_stat", "in2")]),
            (wrangle_volumes, run_susan, [("functional_file", "in_file")]),
            (
                median_img,
                run_susan,
                [(
                    ("out_stat", utils.get_btthresh),
                    "brightness_threshold",
                )],
            ),
            (merge, run_susan, [(("out", utils.get_usans), "usans")]),
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (run_susan, mask_functional, [("smoothed_file", "in_file")]),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])
    else:
        workflow.connect([
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                mask_functional,
                [("functional_file", "in_file")],
            ),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])

    # Core model specification -> design generation chain
    workflow.connect([
        (
            get_info,
            specify_model,
            [("repetition_time", "time_repetition")],
        ),
        (specify_model, fit_model, [("session_info", "session_info")]),
        (
            get_info,
            fit_model,
            [("repetition_time", "interscan_interval"),
             ("run_contrasts", "contrasts")],
        ),
        (
            fit_model,
            first_level_design,
            [
                ("interscan_interval", "interscan_interval"),
                ("session_info", "session_info"),
                ("contrasts", "contrasts"),
            ],
        ),
        (first_level_design, generate_model, [("fsf_files", "fsf_file")]),
        (first_level_design, generate_model, [("ev_files", "ev_files")]),
    ])

    # Route the design matrix through the constant-column fix when detrending
    if detrend_poly:
        workflow.connect([
            (
                generate_model,
                correct_matrices,
                [("design_file", "design_matrix")],
            ),
            (
                correct_matrices,
                plot_matrices,
                [("design_matrix", "mat_file")],
            ),
            (
                correct_matrices,
                estimate_model,
                [("design_matrix", "design_file")],
            ),
        ])
    else:
        workflow.connect([
            (generate_model, plot_matrices, [("design_file", "mat_file")]),
            (
                generate_model,
                estimate_model,
                [("design_file", "design_file")],
            ),
        ])

    # Estimation, p-value conversion, collation, and datasinking
    workflow.connect([
        (getter, plot_matrices, [("entities", "entities")]),
        (generate_model, plot_matrices, [("con_file", "con_file")]),
        (fit_model, estimate_model, [("functional_data", "in_file")]),
        (generate_model, estimate_model, [("con_file", "tcon_file")]),
        (
            estimate_model,
            calculate_p,
            [(("zstats", utils.flatten), "in_file")],
        ),
        (
            estimate_model,
            collate,
            [
                ("copes", "effect_maps"),
                ("varcopes", "variance_maps"),
                ("zstats", "zscore_maps"),
                ("tstats", "tstat_maps"),
            ],
        ),
        (calculate_p, collate, [("out_file", "pvalue_maps")]),
        (
            collate,
            collate_outputs,
            [
                ("effect_maps", "effect_maps"),
                ("variance_maps", "variance_maps"),
                ("zscore_maps", "zscore_maps"),
                ("pvalue_maps", "pvalue_maps"),
                ("tstat_maps", "tstat_maps"),
                ("contrast_metadata", "metadata"),
            ],
        ),
        (
            collate_outputs,
            ds_contrast_maps,
            [("out", "in_file"), ("metadata", "entities")],
        ),
        (
            collate_outputs,
            wrangle_outputs,
            [("metadata", "contrast_metadata"), ("out", "contrast_maps")],
        ),
    ])

    return workflow
# Grab the raw data files matching `template` from the raw data directory.
selectfiles = Node(
    SelectFiles(template, base_directory=rawdir, sort_files=True),
    name='selectfiles')
# Outputs: anat, epi, flair, mask, wm_noise, csf_noise, mni_template

# ---- EPI preprocessing ----

# Convert EPI DICOMs to NIfTI (with embedded metadata).
epi_stack = Node(
    dcmstack.DcmStack(embed_meta=True, out_format='epi', out_ext='.nii'),
    name='epistack')
# Outputs: out_file

# Despiking using AFNI (position based on Jo et al. (2013)).
despike = Node(afni.Despike(outputtype='NIFTI'), name='despike')
# Outputs: out_file

# Slice timing correction (gets timing from header).
st_corr = Node(spm.SliceTiming(ref_slice=1), name='slicetiming_correction')
# Outputs: timecorrected_files

# Realignment using SPM <--- Maybe just estimate and apply all transforms at the end?
realign = Node(spm.Realign(register_to_mean=False, quality=1.0),
               name='realign')
# Outputs: realignment_parameters, resliced EPI images (motion corrected)

tsnr = Node(misc.TSNR(), name='tsnr')
def init_bold_main_wf(opts, bias_cor_only=False, aCompCor_method='50%', name='bold_main_wf'):
    """
    This workflow controls the functional preprocessing stages of the pipeline when both
    functional and anatomical images are provided.

    **Parameters**

        opts
            parser options object; the attributes read here include (among others)
            apply_despiking, detect_dummy, bold_only, data_type, scale_min_memory,
            min_proc, bias_cor_method, no_STC, TR, tpattern, apply_slice_mc,
            local_threads, coreg_script, nativespace_resampling,
            commonspace_resampling, brain_mask, WM_mask, CSF_mask, vascular_mask
            and labels.
        bias_cor_only
            when True (with opts.bold_only), stop after despiking/reference
            estimation/bias field correction and return the partial workflow.
        aCompCor_method
            thresholding method forwarded to init_bold_confs_wf for aCompCor.

        Relevant opts attributes:

        apply_despiking
            whether to apply despiking using AFNI's 3dDespike
            https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dDespike.html.
        tr
            repetition time for the EPI
        tpattern
            specification for the within TR slice acquisition method. The input is fed to AFNI's 3dTshift
        no_STC
            whether to apply slice timing correction (STC) or not
        detect_dummy
            whether to detect and remove dummy volumes at the beginning of the EPI Sequences
        slice_mc
            whether to apply slice-specific motion correction through 2D registration of each slice,
            which can improve the correction of within-TR motion
        bias_reg_script
            path to registration script that will be applied for bias field correction. The script must
            follow the template structure of registration scripts in shell_scripts/.
            Default is set to 'Rigid' registration.
        coreg_script
            path to registration script for EPI to anat coregistraion. The script must
            follow the template structure of registration scripts in shell_scripts/.
            Default is set to 'SyN' registration.
        nativespace_resampling
            Specified dimensions for the resampling of the corrected EPI in native space.
        commonspace_resampling
            Specified dimensions for the resampling of the corrected EPI in common space.

    **Inputs**

        bold
            Input BOLD series NIfTI file
        anat_preproc
            Preprocessed anatomical image after bias field correction and denoising
        anat_mask
            Brain mask inherited from the common space registration
        WM_mask
            Eroded WM mask inherited from the common space registration
        CSF_mask
            Eroded CSF mask inherited from the common space registration
        vascular_mask
            vascular mask inherited from the common space registration
        labels
            Anatomical labels inherited from the common space registration
        template_to_common_affine / template_to_common_warp
            transforms from the dataset template space to the commonspace space
        anat_to_template_affine / anat_to_template_warp
            transforms from the subject anatomical to the dataset template space
        template_anat
            commonspace anatomical template

    **Outputs**

        input_bold
            The provided input BOLD file
        bold_ref
            Initial EPI median volume subsequently used as 3D reference EPI volume
        motcorr_params
            motion parameters file provided from antsMotionCorr
        corrected_EPI
            3D reference EPI volume after bias field correction
        affine_bold2anat / warp_bold2anat / inverse_warp_bold2anat
            transforms between the EPI space and the anatomical space
            (NOTE(review): previous docstring called these itk_bold_to_anat /
            itk_anat_to_bold, which do not exist on the outputnode)
        output_warped_bold
            Bias field corrected 3D EPI volume warped to the anatomical space
        resampled_bold
            Original BOLD timeseries resampled through motion realignment and
            susceptibility distortion correction based on registration to the
            anatomical image
        resampled_ref_bold
            3D median EPI volume from the resampled native BOLD timeseries
        confounds_csv
            .csv file with measured confound timecourses, including global signal,
            WM signal, CSF signal, 6 rigid body motion parameters + their first
            temporal derivate + the 12 parameters squared (24 motion parameters),
            and aCompCorr timecourses
        FD_voxelwise
            Voxelwise framewise displacement (FD) measures that can be integrated
            to future confound regression. These measures are computed from
            antsMotionCorrStats.
        pos_voxelwise
            Voxel distancing across time based on rigid body movement parameters,
            which can be integrated for a voxelwise motion regression.
            These measures are computed from antsMotionCorrStats.
        FD_csv
            .csv file with global framewise displacement (FD) measures
        EPI_brain_mask / EPI_WM_mask / EPI_CSF_mask / EPI_labels
            masks and labels resampled onto the native-space bold
        commonspace_bold
            Motion and SDC-corrected EPI timeseries resampled into common space
            by applying transforms from the anatomical common space registration
        commonspace_mask / commonspace_WM_mask / commonspace_CSF_mask /
        commonspace_vascular_mask / commonspace_labels
            masks and labels resampled onto the commonspace bold
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['subject_id', 'bold', 'anat_preproc', 'anat_mask', 'WM_mask', 'CSF_mask', 'vascular_mask', 'labels', 'template_to_common_affine', 'template_to_common_warp', 'anat_to_template_affine', 'anat_to_template_warp', 'template_anat']), name="inputnode")
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['input_bold', 'bold_ref', 'motcorr_params', 'corrected_EPI', 'output_warped_bold', 'affine_bold2anat', 'warp_bold2anat', 'inverse_warp_bold2anat', 'resampled_bold', 'resampled_ref_bold', 'EPI_brain_mask', 'EPI_WM_mask', 'EPI_CSF_mask', 'EPI_labels', 'confounds_csv', 'FD_voxelwise', 'pos_voxelwise', 'FD_csv', 'commonspace_bold', 'commonspace_mask', 'commonspace_WM_mask', 'commonspace_CSF_mask', 'commonspace_vascular_mask', 'commonspace_labels']), name='outputnode')
    # relay for the (optionally despiked) raw bold before reference estimation
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']), name="boldbuffer")
    # this node will serve as a relay of outputs from the bias_cor main_wf to the inputs for the rest of the main_wf for bold_only
    transitionnode = pe.Node(niu.IdentityInterface(fields=['bold_file', 'bold_ref', 'corrected_EPI']), name="transitionnode")
    # The bias-correction stage runs here unless this call is the second pass of
    # a bold_only pipeline (where it was already run with bias_cor_only=True).
    if bias_cor_only or (not opts.bold_only):
        bold_reference_wf = init_bold_reference_wf(
            detect_dummy=opts.detect_dummy, rabies_data_type=opts.data_type,
            rabies_mem_scale=opts.scale_min_memory, min_proc=opts.min_proc)
        bias_cor_wf = bias_correction_wf(
            bias_cor_method=opts.bias_cor_method, rabies_data_type=opts.data_type,
            rabies_mem_scale=opts.scale_min_memory)
        # Optional 3dDespike before anything else; boldbuffer hides the choice
        # from downstream consumers.
        if opts.apply_despiking:
            despike = pe.Node(
                afni.Despike(outputtype='NIFTI_GZ'), name='despike')
            workflow.connect([
                (inputnode, despike, [('bold', 'in_file')]),
                (despike, boldbuffer, [('out_file', 'bold_file')]),
                ])
        else:
            workflow.connect([
                (inputnode, boldbuffer, [('bold', 'bold_file')]),
                ])
        # With detect_dummy the reference workflow also strips dummy volumes,
        # so its output (not the buffered input) becomes the working bold file.
        if opts.detect_dummy:
            workflow.connect([
                (bold_reference_wf, transitionnode, [
                    ('outputnode.bold_file', 'bold_file'),
                    ]),
                ])
        else:
            workflow.connect([
                (boldbuffer, transitionnode, [
                    ('bold_file', 'bold_file'),
                    ]),
                ])
        workflow.connect([
            (inputnode, bias_cor_wf, [
                ('anat_preproc', 'inputnode.anat'),
                ('anat_mask', 'inputnode.anat_mask'),
                ('bold', 'inputnode.name_source'),
                ]),
            (boldbuffer, bold_reference_wf, [
                ('bold_file', 'inputnode.bold_file'),
                ]),
            (bold_reference_wf, bias_cor_wf, [
                ('outputnode.ref_image', 'inputnode.ref_EPI'),
                ]),
            (bold_reference_wf, transitionnode, [
                ('outputnode.ref_image', 'bold_ref'),
                ]),
            (bias_cor_wf, transitionnode, [
                ('outputnode.corrected_EPI', 'corrected_EPI'),
                ]),
            ])
    # bold_only first pass: stop after bias correction; the rest of the
    # pipeline is built on a later call with bias_cor_only=False.
    if opts.bold_only and bias_cor_only:
        return workflow
    bold_stc_wf = init_bold_stc_wf(
        no_STC=opts.no_STC, tr=opts.TR, tpattern=opts.tpattern,
        rabies_data_type=opts.data_type, rabies_mem_scale=opts.scale_min_memory,
        min_proc=opts.min_proc)
    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(slice_mc=opts.apply_slice_mc, rabies_data_type=opts.data_type,
                                   rabies_mem_scale=opts.scale_min_memory, min_proc=opts.min_proc, local_threads=opts.local_threads)
    # Build the transform list for commonspace resampling. With an anatomical
    # scan available, the EPI->anat transforms are prepended; in bold_only mode
    # only the anat->template->common transforms apply.
    if not opts.bold_only:
        def commonspace_transforms(template_to_common_warp, template_to_common_affine, anat_to_template_warp, anat_to_template_affine, warp_bold2anat, affine_bold2anat):
            # transforms_list,inverses
            return [template_to_common_warp, template_to_common_affine, anat_to_template_warp, anat_to_template_affine, warp_bold2anat, affine_bold2anat], [0, 0, 0, 0, 0, 0]
        commonspace_transforms_prep = pe.Node(Function(input_names=['template_to_common_warp', 'template_to_common_affine', 'anat_to_template_warp', 'anat_to_template_affine', 'warp_bold2anat', 'affine_bold2anat'],
                                                       output_names=[
                                                           'transforms_list', 'inverses'],
                                                       function=commonspace_transforms),
                                              name='commonspace_transforms_prep')
    else:
        def commonspace_transforms(template_to_common_warp, template_to_common_affine, anat_to_template_warp, anat_to_template_affine):
            # transforms_list,inverses
            return [template_to_common_warp, template_to_common_affine, anat_to_template_warp, anat_to_template_affine], [0, 0, 0, 0]
        commonspace_transforms_prep = pe.Node(Function(input_names=['template_to_common_warp', 'template_to_common_affine', 'anat_to_template_warp', 'anat_to_template_affine', ],
                                                       output_names=[
                                                           'transforms_list', 'inverses'],
                                                       function=commonspace_transforms),
                                              name='commonspace_transforms_prep')
    bold_commonspace_trans_wf = init_bold_commonspace_trans_wf(resampling_dim=opts.commonspace_resampling,
                                                               brain_mask=str(opts.brain_mask),
                                                               WM_mask=str(opts.WM_mask),
                                                               CSF_mask=str(opts.CSF_mask),
                                                               vascular_mask=str(opts.vascular_mask),
                                                               atlas_labels=str(opts.labels),
                                                               slice_mc=opts.apply_slice_mc,
                                                               rabies_data_type=opts.data_type,
                                                               rabies_mem_scale=opts.scale_min_memory,
                                                               min_proc=opts.min_proc)
    bold_confs_wf = init_bold_confs_wf(
        aCompCor_method=aCompCor_method, name="bold_confs_wf", rabies_data_type=opts.data_type,
        rabies_mem_scale=opts.scale_min_memory, min_proc=opts.min_proc)
    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        (inputnode, commonspace_transforms_prep, [
            ("template_to_common_affine", "template_to_common_affine"),
            ("template_to_common_warp", "template_to_common_warp"),
            ("anat_to_template_affine", "anat_to_template_affine"),
            ("anat_to_template_warp", "anat_to_template_warp"),
            ]),
        (inputnode, bold_confs_wf, [('anat_mask', 'inputnode.t1_mask'),
                                    ('WM_mask', 'inputnode.WM_mask'),
                                    ('CSF_mask', 'inputnode.CSF_mask'),
                                    ('vascular_mask', 'inputnode.vascular_mask'),
                                    ('labels', 'inputnode.t1_labels'),
                                    ('bold', 'inputnode.name_source'),
                                    ]),
        (transitionnode, bold_stc_wf, [
            ('bold_file', 'inputnode.bold_file'),
            ]),
        (transitionnode, bold_hmc_wf, [
            ('bold_ref', 'inputnode.ref_image'),
            ]),
        (bold_hmc_wf, outputnode, [
            ('outputnode.motcorr_params', 'motcorr_params')]),
        (transitionnode, outputnode, [
            ('bold_ref', 'bold_ref'),
            ('corrected_EPI', 'corrected_EPI'),
            ]),
        (bold_hmc_wf, bold_confs_wf, [
            ('outputnode.motcorr_params', 'inputnode.movpar_file'),
            ]),
        (bold_confs_wf, outputnode, [
            ('outputnode.brain_mask', 'EPI_brain_mask'),
            ('outputnode.WM_mask', 'EPI_WM_mask'),
            ('outputnode.CSF_mask', 'EPI_CSF_mask'),
            ('outputnode.EPI_labels', 'EPI_labels'),
            ('outputnode.confounds_csv', 'confounds_csv'),
            ('outputnode.FD_csv', 'FD_csv'),
            ('outputnode.FD_voxelwise', 'FD_voxelwise'),
            ('outputnode.pos_voxelwise', 'pos_voxelwise'),
            ]),
        (commonspace_transforms_prep, bold_commonspace_trans_wf, [
            ('transforms_list', 'inputnode.transforms_list'),
            ('inverses', 'inputnode.inverses'),
            ]),
        (bold_hmc_wf, bold_commonspace_trans_wf, [
            ('outputnode.motcorr_params', 'inputnode.motcorr_params')]),
        (inputnode, bold_commonspace_trans_wf, [
            ('bold', 'inputnode.name_source'),
            ('template_anat', 'inputnode.ref_file'),
            ]),
        (bold_commonspace_trans_wf, outputnode, [
            ('outputnode.bold', 'commonspace_bold'),
            ('outputnode.brain_mask', 'commonspace_mask'),
            ('outputnode.WM_mask', 'commonspace_WM_mask'),
            ('outputnode.CSF_mask', 'commonspace_CSF_mask'),
            ('outputnode.vascular_mask', 'commonspace_vascular_mask'),
            ('outputnode.labels', 'commonspace_labels'),
            ]),
        ])
    # With an anatomical image: register EPI->anat and resample the bold in
    # native space; confounds are then measured on the native-space bold.
    if not opts.bold_only:
        bold_reg_wf = init_bold_reg_wf(coreg_script=opts.coreg_script, rabies_data_type=opts.data_type,
                                       rabies_mem_scale=opts.scale_min_memory, min_proc=opts.min_proc)

        def SyN_coreg_transforms_prep(warp_bold2anat, affine_bold2anat):
            # transforms_list,inverses
            return [warp_bold2anat, affine_bold2anat], [0, 0]
        transforms_prep = pe.Node(Function(input_names=['warp_bold2anat', 'affine_bold2anat'],
                                           output_names=[
                                               'transforms_list', 'inverses'],
                                           function=SyN_coreg_transforms_prep),
                                  name='transforms_prep')

        # Apply transforms in 1 shot
        bold_bold_trans_wf = init_bold_preproc_trans_wf(
            resampling_dim=opts.nativespace_resampling,
            slice_mc=opts.apply_slice_mc, rabies_data_type=opts.data_type,
            rabies_mem_scale=opts.scale_min_memory, min_proc=opts.min_proc)
        workflow.connect([
            (inputnode, bold_reg_wf, [
                ('anat_preproc', 'inputnode.anat_preproc'),
                ('anat_mask', 'inputnode.anat_mask')]),
            (inputnode, bold_bold_trans_wf, [
                ('bold', 'inputnode.name_source')]),
            (transitionnode, bold_reg_wf, [
                ('corrected_EPI', 'inputnode.ref_bold_brain')]),
            (bold_reg_wf, outputnode, [
                ('outputnode.affine_bold2anat', 'affine_bold2anat'),
                ('outputnode.warp_bold2anat', 'warp_bold2anat'),
                ('outputnode.inverse_warp_bold2anat', 'inverse_warp_bold2anat'),
                ('outputnode.output_warped_bold', 'output_warped_bold'),
                ]),
            (bold_reg_wf, transforms_prep, [
                ('outputnode.affine_bold2anat', 'affine_bold2anat'),
                ('outputnode.warp_bold2anat', 'warp_bold2anat'),
                ]),
            (transforms_prep, bold_bold_trans_wf, [
                ('transforms_list', 'inputnode.transforms_list'),
                ('inverses', 'inputnode.inverses'),
                ]),
            (bold_reg_wf, bold_bold_trans_wf, [
                ('outputnode.output_warped_bold', 'inputnode.ref_file')]),
            (bold_reg_wf, commonspace_transforms_prep, [
                ('outputnode.affine_bold2anat', 'affine_bold2anat'),
                ('outputnode.warp_bold2anat', 'warp_bold2anat'),
                ]),
            (bold_hmc_wf, bold_bold_trans_wf, [
                ('outputnode.motcorr_params', 'inputnode.motcorr_params')]),
            (bold_bold_trans_wf, outputnode, [
                ('outputnode.bold_ref', 'resampled_ref_bold'),
                ('outputnode.bold', 'resampled_bold'),
                ]),
            (bold_bold_trans_wf, bold_confs_wf, [('outputnode.bold', 'inputnode.bold'),
                                                 ('outputnode.bold_ref', 'inputnode.ref_bold'),
                                                 ]),
            ])
    else:
        # bold_only: confounds are measured on the commonspace bold instead.
        workflow.connect([
            (bold_commonspace_trans_wf, bold_confs_wf, [('outputnode.bold', 'inputnode.bold'),
                                                        ('outputnode.bold_ref', 'inputnode.ref_bold'),
                                                        ]),
            ])
    # With slice_mc, HMC consumes the STC output and produces a slice-corrected
    # timeseries that feeds the resampling workflows; otherwise HMC only
    # estimates parameters and the STC output is resampled directly.
    if opts.apply_slice_mc:
        workflow.connect([
            (bold_stc_wf, bold_hmc_wf, [
                ('outputnode.stc_file', 'inputnode.bold_file')]),
            (bold_hmc_wf, bold_commonspace_trans_wf, [
                ('outputnode.slice_corrected_bold', 'inputnode.bold_file')]),
            ])
        if not opts.bold_only:
            workflow.connect([
                (bold_hmc_wf, bold_bold_trans_wf, [
                    ('outputnode.slice_corrected_bold', 'inputnode.bold_file')]),
                ])
    else:
        workflow.connect([
            (transitionnode, bold_hmc_wf, [
                ('bold_file', 'inputnode.bold_file')]),
            (bold_stc_wf, bold_commonspace_trans_wf, [
                ('outputnode.stc_file', 'inputnode.bold_file')]),
            ])
        if not opts.bold_only:
            workflow.connect([
                (bold_stc_wf, bold_bold_trans_wf, [
                    ('outputnode.stc_file', 'inputnode.bold_file')]),
                ])
    return workflow
def hmc_afni(settings, name='fMRI_HMC_afni', st_correct=False, despike=False, deoblique=False, start_idx=None, stop_idx=None):
    """
    A :abbr:`HMC (head motion correction)` workflow for functional scans.

    The optional preprocessing steps — slice-timing correction, despiking and
    deobliquing — are inserted in that order between the volume-dropping node
    and the motion-correction / reference-estimation nodes.

    .. workflow::

        from mriqc.workflows.functional import hmc_afni
        wf = hmc_afni({'biggest_file_size_gb': 1})

    :param dict settings: workflow settings; only ``biggest_file_size_gb``
        (default 1) is read, to scale the memory estimate of the HMC node.
    :param str name: name for the generated workflow.
    :param bool st_correct: run AFNI ``3dTshift`` slice-timing correction.
    :param bool despike: run AFNI ``3dDespike``.
    :param bool deoblique: run AFNI ``3drefit -deoblique``.
    :param start_idx: index of the first volume to keep (``None`` = no drop).
    :param stop_idx: index of the last volume to keep (``None`` = no drop).
    :return: a nipype ``Workflow`` exposing ``outputnode.out_file`` (motion
        corrected series) and ``outputnode.out_fd`` (framewise displacement).
    """
    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_fd']), name='outputnode')

    # Optionally drop volumes at either end of the series. When no dropping is
    # requested, drop_trs is a pass-through so the downstream wiring is uniform.
    if (start_idx is not None) or (stop_idx is not None):
        drop_trs = pe.Node(afni.Calc(expr='a', outputtype='NIFTI_GZ'),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'in_file_a'),
                                   ('start_idx', 'start_idx'),
                                   ('stop_idx', 'stop_idx')]),
        ])
    else:
        drop_trs = pe.Node(niu.IdentityInterface(fields=['out_file']),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'out_file')]),
        ])

    gen_ref = pe.Node(nwr.EstimateReferenceImage(mc_method="AFNI"),
                      name="gen_ref")

    # calculate hmc parameters
    hmc = pe.Node(
        afni.Volreg(args='-Fourier -twopass', zpad=4, outputtype='NIFTI_GZ'),
        name='motion_correct', mem_gb=biggest_file_gb * 2.5)

    # Compute the frame-wise displacement
    fdnode = pe.Node(nac.FramewiseDisplacement(normalize=False,
                                               parameter_source="AFNI"),
                     name='ComputeFD')

    workflow.connect([
        (inputnode, fdnode, [('fd_radius', 'radius')]),
        (gen_ref, hmc, [('ref_image', 'basefile')]),
        (hmc, outputnode, [('out_file', 'out_file')]),
        (hmc, fdnode, [('oned_file', 'in_file')]),
        (fdnode, outputnode, [('out_file', 'out_fd')]),
    ])

    # Build the optional preprocessing chain from the enabled flags instead of
    # enumerating all 2**3 flag combinations in an if/elif ladder (the previous
    # code duplicated every connection once per combination). Order matters:
    # slice-timing correction, then despiking, then deobliquing.
    chain = [drop_trs]
    if st_correct:
        chain.append(pe.Node(afni.TShift(outputtype='NIFTI_GZ'),
                             name='TimeShifts'))
    if despike:
        chain.append(pe.Node(afni.Despike(outputtype='NIFTI_GZ'),
                             name='despike'))
    if deoblique:
        chain.append(pe.Node(afni.Refit(deoblique=True), name='deoblique'))

    # Wire consecutive steps; the last step feeds both the reference-image
    # estimator and the motion-correction node.
    for src, dst in zip(chain[:-1], chain[1:]):
        workflow.connect([(src, dst, [('out_file', 'in_file')])])
    workflow.connect([
        (chain[-1], gen_ref, [('out_file', 'in_file')]),
        (chain[-1], hmc, [('out_file', 'in_file')]),
    ])

    return workflow
def create_workflow(func_runs, subject_id, subjects_dir, fwhm, slice_times, highpass_frequency, lowpass_frequency, TR, sink_directory, use_fsl_bp, num_components, whichvol, name='wmaze'):
    """Build the wmaze fMRI preprocessing workflow for one subject.

    Pipeline: grab BOLD runs -> rename -> convert to float -> AFNI despike ->
    motion correction (NiPy space+time realignment when slice_times is given,
    otherwise FSL MCFLIRT) -> TSNR -> FreeSurfer-based registration, brain /
    WM / CSF masking -> RapidART outlier detection -> motion and CompCor
    noise regressors -> SUSAN smoothing -> band-pass filtering -> datasink.

    Parameters
    ----------
    func_runs : list of int
        Run numbers substituted into the DataGrabber template.
    subject_id : str
        Subject identifier (also the FreeSurfer subject id).
    subjects_dir : str
        FreeSurfer SUBJECTS_DIR.
    fwhm : float
        Smoothing kernel FWHM (mm) for SUSAN.
    slice_times : list or None
        Slice acquisition times; when None, slice timing correction is skipped
        and MCFLIRT is used instead of NiPy's SpaceTimeRealigner.
    highpass_frequency, lowpass_frequency : float
        Band-pass cutoffs; negative values disable the corresponding side
        (FSL-independent branch only).
    TR : float
        Repetition time in seconds.
    sink_directory : str
        Base directory for the DataSink.
    use_fsl_bp : str
        The literal string 'True' selects FSL-based band-pass filtering
        (string comparison is intentional: the flag arrives from the CLI).
    num_components : int
        Number of CompCor noise components to extract.
    whichvol : str
        Which volume to use as the registration reference (fed to pickvol).
    name : str
        Workflow name.

    Returns
    -------
    nipype Workflow
    """
    wf = pe.Workflow(name=name)

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run'],
                                         outfields=['func']),
                         name='datasource')
    datasource.inputs.subject_id = subject_id
    datasource.inputs.run = func_runs
    datasource.inputs.template = '/home/data/madlab/data/mri/wmaze/%s/bold/bold_%03d/bold.nii.gz'
    datasource.inputs.sort_filelist = True

    # Rename files in case they are named identically
    name_unique = pe.MapNode(util.Rename(format_string='wmaze_%(run)02d'),
                             iterfield=['in_file', 'run'],
                             name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = func_runs
    wf.connect(datasource, 'func', name_unique, 'in_file')

    # Define the outputs for the preprocessing workflow
    output_fields = ['reference', 'motion_parameters', 'motion_parameters_plusDerivs',
                     'motionandoutlier_noise_file', 'noise_components', 'realigned_files',
                     'motion_plots', 'mask_file', 'smoothed_files', 'bandpassed_files',
                     'reg_file', 'reg_cost', 'reg_fsl_file', 'artnorm_files',
                     'artoutlier_files', 'artdisplacement_files', 'tsnr_file']
    outputnode = pe.Node(util.IdentityInterface(fields=output_fields),
                         name='outputspec')

    # Convert functional images to float representation
    img2float = pe.MapNode(fsl.ImageMaths(out_data_type='float', op_string='', suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')
    wf.connect(name_unique, 'out_file', img2float, 'in_file')

    # Run AFNI's despike. This is always run, however, whether this is fed to
    # realign depends on the input configuration
    despiker = pe.MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                          iterfield=['in_file'],
                          name='despike')
    num_threads = 4
    despiker.inputs.environ = {'OMP_NUM_THREADS': '%d' % num_threads}
    # BUGFIX: the two plugin_args assignments previously overwrote each other,
    # losing the '-n' thread request; both LSF options go in one bsub_args.
    despiker.plugin_args = {'bsub_args': '-n %d -R "span[hosts=1]"' % num_threads}
    wf.connect(img2float, 'out_file', despiker, 'in_file')

    # Extract the first volume of the first run as the reference
    # (dropped a stray MapNode-only 'iterfield' kwarg that pe.Node ignores)
    extractref = pe.Node(fsl.ExtractROI(t_size=1),
                         name="extractref")
    wf.connect(despiker, ('out_file', pickfirst), extractref, 'in_file')
    wf.connect(despiker, ('out_file', pickvol, 0, whichvol), extractref, 't_min')
    wf.connect(extractref, 'roi_file', outputnode, 'reference')

    if slice_times is not None:
        # Simultaneous motion and slice timing correction with Nipy algorithm
        motion_correct = pe.Node(nipy.SpaceTimeRealigner(),
                                 name='motion_correct')
        motion_correct.inputs.tr = TR
        motion_correct.inputs.slice_times = slice_times
        motion_correct.inputs.slice_info = 2
        # BUGFIX: merged the previously-overwritten plugin_args assignments.
        motion_correct.plugin_args = {'bsub_args': '-n %s -R "span[hosts=1]"' % os.environ['MKL_NUM_THREADS']}
        wf.connect(despiker, 'out_file', motion_correct, 'in_file')
        wf.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
        wf.connect(motion_correct, 'out_file', outputnode, 'realigned_files')
    else:
        # Motion correct functional runs to the reference (1st volume of 1st run)
        motion_correct = pe.MapNode(fsl.MCFLIRT(save_mats=True,
                                                save_plots=True,
                                                interpolation='sinc'),
                                    name='motion_correct',
                                    iterfield=['in_file'])
        wf.connect(despiker, 'out_file', motion_correct, 'in_file')
        wf.connect(extractref, 'roi_file', motion_correct, 'ref_file')
        wf.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
        wf.connect(motion_correct, 'out_file', outputnode, 'realigned_files')

    # Compute TSNR on realigned data regressing polynomials upto order 2
    tsnr = pe.MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(motion_correct, 'out_file', tsnr, 'in_file')
    wf.connect(tsnr, 'tsnr_file', outputnode, 'tsnr_file')

    # Plot the estimated motion parameters
    plot_motion = pe.MapNode(fsl.PlotMotionParams(in_source='fsl'),
                             name='plot_motion',
                             iterfield=['in_file'])
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    wf.connect(motion_correct, 'par_file', plot_motion, 'in_file')
    wf.connect(plot_motion, 'out_file', outputnode, 'motion_plots')

    # Register a source file to fs space and create a brain mask in source space
    fssource = pe.Node(nio.FreeSurferSource(), name='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = subjects_dir

    # Extract aparc+aseg brain mask and binarize
    fs_threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'),
                           name='fs_threshold')
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), fs_threshold, 'in_file')

    # Calculate the transformation matrix from EPI space to FreeSurfer space
    # using the BBRegister command
    fs_register = pe.MapNode(fs.BBRegister(init='fsl'),
                             iterfield=['source_file'],
                             name='fs_register')
    fs_register.inputs.contrast_type = 't2'
    fs_register.inputs.out_fsl_file = True
    fs_register.inputs.subject_id = subject_id
    fs_register.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', fs_register, 'source_file')
    wf.connect(fs_register, 'out_reg_file', outputnode, 'reg_file')
    wf.connect(fs_register, 'min_cost_file', outputnode, 'reg_cost')
    wf.connect(fs_register, 'out_fsl_file', outputnode, 'reg_fsl_file')

    # Extract wm+csf, brain masks by eroding freesurfer labels
    wmcsf = pe.MapNode(fs.Binarize(),
                       iterfield=['match', 'binary_file', 'erode'],
                       name='wmcsfmask')
    # FreeSurfer label ids: [2, 41] = WM; the second list = ventricles/CSF
    wmcsf.inputs.match = [[2, 41], [4, 5, 14, 15, 24, 31, 43, 44, 63]]
    wmcsf.inputs.binary_file = ['wm.nii.gz', 'csf.nii.gz']
    wmcsf.inputs.erode = [2, 2]
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file')

    # Now transform the wm and csf masks to 1st volume of 1st run
    wmcsftransform = pe.MapNode(fs.ApplyVolTransform(inverse=True,
                                                     interp='nearest'),
                                iterfield=['target_file'],
                                name='wmcsftransform')
    wmcsftransform.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', wmcsftransform, 'source_file')
    wf.connect(fs_register, ('out_reg_file', pickfirst), wmcsftransform, 'reg_file')
    wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file')

    # Transform the binarized aparc+aseg file to the 1st volume of 1st run space
    fs_voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True),
                                 iterfield=['source_file', 'reg_file'],
                                 name='fs_transform')
    fs_voltransform.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', fs_voltransform, 'source_file')
    wf.connect(fs_register, 'out_reg_file', fs_voltransform, 'reg_file')
    wf.connect(fs_threshold, 'binary_file', fs_voltransform, 'target_file')

    # Dilate the binarized mask by 1 voxel that is now in the EPI space
    fs_threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii'),
                               iterfield=['in_file'],
                               name='fs_threshold2')
    fs_threshold2.inputs.dilate = 1
    wf.connect(fs_voltransform, 'transformed_file', fs_threshold2, 'in_file')
    wf.connect(fs_threshold2, 'binary_file', outputnode, 'mask_file')

    # Use RapidART to detect motion/intensity outliers
    art = pe.MapNode(ra.ArtifactDetect(use_differences=[True, False],
                                       use_norm=True,
                                       zintensity_threshold=3,
                                       norm_threshold=1,
                                       bound_by_brainmask=True,
                                       mask_type="file"),
                     iterfield=["realignment_parameters", "realigned_files"],
                     name="art")
    if slice_times is not None:
        art.inputs.parameter_source = "NiPy"
    else:
        art.inputs.parameter_source = "FSL"
    wf.connect(motion_correct, 'par_file', art, 'realignment_parameters')
    wf.connect(motion_correct, 'out_file', art, 'realigned_files')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), art, 'mask_file')
    wf.connect(art, 'norm_files', outputnode, 'artnorm_files')
    wf.connect(art, 'outlier_files', outputnode, 'artoutlier_files')
    wf.connect(art, 'displacement_files', outputnode, 'artdisplacement_files')

    # Compute motion regressors (save file with 1st and 2nd derivatives)
    motreg = pe.Node(util.Function(input_names=['motion_params', 'order', 'derivatives'],
                                   output_names=['out_files'],
                                   function=motion_regressors,
                                   imports=imports),
                     name='getmotionregress')
    wf.connect(motion_correct, 'par_file', motreg, 'motion_params')
    wf.connect(motreg, 'out_files', outputnode, 'motion_parameters_plusDerivs')

    # Create a filter text file to remove motion (+ derivatives), art confounds,
    # and 1st, 2nd, and 3rd order legendre polynomials.
    createfilter1 = pe.Node(util.Function(input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
                                          output_names=['out_files'],
                                          function=build_filter1,
                                          imports=imports),
                            name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 3
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')
    wf.connect(createfilter1, 'out_files', outputnode, 'motionandoutlier_noise_file')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = pe.MapNode(util.Function(input_names=['realigned_file', 'mask_file', 'num_components', 'extra_regressors'],
                                             output_names=['out_files'],
                                             function=extract_noise_components,
                                             imports=imports),
                               iterfield=['realigned_file', 'extra_regressors'],
                               name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(motion_correct, 'out_file', createfilter2, 'realigned_file')
    wf.connect(wmcsftransform, 'transformed_file', createfilter2, 'mask_file')
    wf.connect(createfilter2, 'out_files', outputnode, 'noise_components')

    # Mask the functional runs with the extracted mask
    maskfunc = pe.MapNode(fsl.ImageMaths(suffix='_bet', op_string='-mas'),
                          iterfield=['in_file'],
                          name='maskfunc')
    wf.connect(motion_correct, 'out_file', maskfunc, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), maskfunc, 'in_file2')

    # Smooth each run using SUSAN with the brightness threshold set to 75%
    # of the median value for each run and a mask constituting the mean functional
    smooth_median = pe.MapNode(fsl.ImageStats(op_string='-k %s -p 50'),
                               iterfield=['in_file'],
                               name='smooth_median')
    wf.connect(maskfunc, 'out_file', smooth_median, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), smooth_median, 'mask_file')

    smooth_meanfunc = pe.MapNode(fsl.ImageMaths(op_string='-Tmean', suffix='_mean'),
                                 iterfield=['in_file'],
                                 name='smooth_meanfunc')
    wf.connect(maskfunc, 'out_file', smooth_meanfunc, 'in_file')

    smooth_merge = pe.Node(util.Merge(2, axis='hstack'),
                           name='smooth_merge')
    wf.connect(smooth_meanfunc, 'out_file', smooth_merge, 'in1')
    wf.connect(smooth_median, 'out_stat', smooth_merge, 'in2')

    smooth = pe.MapNode(fsl.SUSAN(),
                        iterfield=['in_file', 'brightness_threshold', 'usans'],
                        name='smooth')
    smooth.inputs.fwhm = fwhm
    wf.connect(maskfunc, 'out_file', smooth, 'in_file')
    wf.connect(smooth_median, ('out_stat', getbtthresh), smooth, 'brightness_threshold')
    wf.connect(smooth_merge, ('out', getusans), smooth, 'usans')

    # Mask the smoothed data with the dilated mask
    maskfunc2 = pe.MapNode(fsl.ImageMaths(suffix='_mask', op_string='-mas'),
                           iterfield=['in_file'],
                           name='maskfunc2')
    wf.connect(smooth, 'smoothed_file', maskfunc2, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), maskfunc2, 'in_file2')
    wf.connect(maskfunc2, 'out_file', outputnode, 'smoothed_files')

    # Band-pass filter the timeseries
    if use_fsl_bp == 'True':
        determine_bp_sigmas = pe.Node(util.Function(input_names=['tr', 'highpass_freq', 'lowpass_freq'],
                                                    output_names=['out_sigmas'],
                                                    function=calc_fslbp_sigmas),
                                      name='determine_bp_sigmas')
        determine_bp_sigmas.inputs.tr = float(TR)
        determine_bp_sigmas.inputs.highpass_freq = float(highpass_frequency)
        determine_bp_sigmas.inputs.lowpass_freq = float(lowpass_frequency)

        bandpass = pe.MapNode(fsl.ImageMaths(suffix='_tempfilt'),
                              iterfield=["in_file"],
                              name="bandpass")
        wf.connect(determine_bp_sigmas, ('out_sigmas', highpass_operand), bandpass, 'op_string')
        wf.connect(maskfunc2, 'out_file', bandpass, 'in_file')
        wf.connect(bandpass, 'out_file', outputnode, 'bandpassed_files')
    else:
        bandpass = pe.Node(util.Function(input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'],
                                         output_names=['out_files'],
                                         function=bandpass_filter,
                                         imports=imports),
                           name='bandpass')
        bandpass.inputs.fs = 1. / TR
        # negative cutoffs disable the corresponding side of the filter
        if highpass_frequency < 0:
            bandpass.inputs.highpass_freq = -1
        else:
            bandpass.inputs.highpass_freq = highpass_frequency
        if lowpass_frequency < 0:
            bandpass.inputs.lowpass_freq = -1
        else:
            bandpass.inputs.lowpass_freq = lowpass_frequency
        wf.connect(maskfunc2, 'out_file', bandpass, 'files')
        wf.connect(bandpass, 'out_files', outputnode, 'bandpassed_files')

    # Save the relevant data into an output directory
    datasink = pe.Node(nio.DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    wf.connect(outputnode, 'reference', datasink, 'ref')
    wf.connect(outputnode, 'motion_parameters', datasink, 'motion')
    wf.connect(outputnode, 'realigned_files', datasink, 'func.realigned')
    wf.connect(outputnode, 'motion_plots', datasink, 'motion.@plots')
    wf.connect(outputnode, 'mask_file', datasink, 'ref.@mask')
    wf.connect(outputnode, 'smoothed_files', datasink, 'func.smoothed_fullspectrum')
    wf.connect(outputnode, 'bandpassed_files', datasink, 'func.smoothed_bandpassed')
    wf.connect(outputnode, 'reg_file', datasink, 'bbreg.@reg')
    wf.connect(outputnode, 'reg_cost', datasink, 'bbreg.@cost')
    wf.connect(outputnode, 'reg_fsl_file', datasink, 'bbreg.@regfsl')
    wf.connect(outputnode, 'artnorm_files', datasink, 'art.@norm_files')
    wf.connect(outputnode, 'artoutlier_files', datasink, 'art.@outlier_files')
    wf.connect(outputnode, 'artdisplacement_files', datasink, 'art.@displacement_files')
    wf.connect(outputnode, 'motion_parameters_plusDerivs', datasink, 'noise.@motionplusDerivs')
    wf.connect(outputnode, 'motionandoutlier_noise_file', datasink, 'noise.@motionplusoutliers')
    wf.connect(outputnode, 'noise_components', datasink, 'compcor')
    wf.connect(outputnode, 'tsnr_file', datasink, 'tsnr')
    return wf
def FUNCPIPE():
    """Interactive resting-state fMRI preprocessing pipeline.

    Prompts the user for a DICOM directory and preprocessing parameters,
    then builds and runs a nipype workflow that: converts DICOM to NIfTI,
    reorients to MNI152 axes, slice-time corrects, converts to float,
    motion corrects, despikes (AFNI 3dDespike), brain-extracts (BET),
    smooths within the brain mask, high-pass filters (re-adding the mean),
    and shows QC plots. Outputs are written under the DICOM directory.

    Takes no arguments and returns nothing; all inputs come from
    interactive prompts and all results are side effects on disk.
    """
    # --- 1) Import modules (kept function-local as in the original script;
    # exact duplicates and the unused `tolist` helper were removed).
    import os  # system functions
    os.system('clear')
    import nipype.interfaces.dcm2nii as dcm2nii
    import nipype.interfaces.io as nio  # Data i/o
    import nipype.interfaces.fsl as fsl  # fsl
    import nipype.interfaces.utility as util  # utility
    import nipype.pipeline.engine as pe  # pypeline engine
    import nipype.interfaces.fsl.utils as fslu
    import nipype.interfaces.fsl.preprocess as fslp
    from nipype.interfaces import afni as afni
    from nipype.interfaces.utility import Function
    import matplotlib
    from nilearn import plotting
    from nilearn import image
    import nipype.interfaces.freesurfer as fs  # freesurfer

    # fslmaths operand for a high-pass temporal filter; the argument is the
    # filter sigma expressed in volumes, lowpass disabled (-1).
    highpass_operand = lambda x: '-bptf %.10f -1' % x

    # --- 2) Remember the starting directory so we can return to it at the end.
    INITDIR = os.getcwd()

    # --- 3) Prompt user for inputs.
    # NOTE(review): raw_input is Python 2 only, and the float(input(...))
    # calls rely on Python 2 input() evaluating the typed number — this
    # script targets Python 2; confirm before porting to Python 3.
    DICOMDIR = raw_input('Please drag in the directory of\nDICOM files you wish to pre-process\n(ensure there is no blank space at the end)\n')
    os.system('clear')
    print('---\n')
    DICOMDIR = DICOMDIR.strip('\'"')  # drop quotes added by drag-and-drop
    frac = float(input('Please enter the fractional anisotropy threshold [0 - 1] \n'))
    os.system('clear')
    print('---\n')
    grad = float(input('Please enter the threshold gradient [-1 - 1] \n'))
    os.system('clear')
    print('---\n')
    FWHM = float(input('Please enter the FWHM of the smoother (mm) \n'))
    os.system('clear')
    print('---\n')
    HIGHPASS = float(input('Please enter the High Pass filter cutoff (s)\n'))
    os.system('clear')
    print('---\n')
    TR = float(input('Please enter the TR (s)\n'))
    os.system('clear')
    print('---\n')

    # --- 4) Define workflow and input node.
    workflow = pe.Workflow(name='FUNCPIPE')
    inputnode = pe.Node(interface=util.IdentityInterface(fields=['fwhm', 'highpass', 'TR']), name='inputspec')
    inputnode.inputs.fwhm = FWHM
    inputnode.inputs.TR = TR
    # Convert the cutoff in seconds to a sigma in volumes for fslmaths -bptf.
    inputnode.inputs.highpass = float(HIGHPASS / (inputnode.inputs.TR * 2.5))

    # --- 5) Move to directory
    os.chdir(DICOMDIR)

    # --- 6) Set up converter node for conversion to nifti
    converter = pe.Node(interface=dcm2nii.Dcm2nii(), name='CONVERTED')
    converter.inputs.source_dir = DICOMDIR
    converter.inputs.gzip_output = True

    # --- 7) Set up realigner node to match orientation of MNI 152
    realigner = pe.Node(interface=fslu.Reorient2Std(), name='REORIENTED')
    realigner.inputs.output_type = 'NIFTI_GZ'
    workflow.connect(converter, 'converted_files', realigner, 'in_file')

    # --- 8) Set up a slice timing node
    slicetimer = pe.Node(interface=fslp.SliceTimer(), name='SLICETIMED')
    slicetimer.inputs.interleaved = True
    workflow.connect(inputnode, 'TR', slicetimer, 'time_repetition')
    workflow.connect(realigner, 'out_file', slicetimer, 'in_file')

    # --- 9) Convert to float.
    img2float = pe.Node(interface=fsl.ImageMaths(out_data_type='float', op_string='', suffix='_dtype'), name='IMG2FLOATED')
    workflow.connect(slicetimer, 'slice_time_corrected_file', img2float, 'in_file')

    # --- 10) Motion correct.
    motion_correct = pe.Node(interface=fsl.MCFLIRT(save_mats=True, save_plots=True, interpolation='spline'), name='MCORRECTED')
    workflow.connect(img2float, 'out_file', motion_correct, 'in_file')

    # --- 11) Despike
    despiker = pe.Node(interface=afni.Despike(), name='DESPIKED')
    despiker.inputs.outputtype = 'NIFTI_GZ'
    workflow.connect(motion_correct, 'out_file', despiker, 'in_file')

    # --- 12) Plot motion (one run per plot type via iterables).
    plot_motion = pe.Node(interface=fsl.PlotMotionParams(in_source='fsl'), name='MOTIONPLOTTED')
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    workflow.connect(motion_correct, 'par_file', plot_motion, 'in_file')

    # --- 13) Extract
    extracter = pe.Node(interface=fsl.BET(), name='EXTRACTED')
    extracter.inputs.frac = float(frac)
    extracter.inputs.vertical_gradient = float(grad)
    extracter.inputs.mask = True
    extracter.inputs.functional = True
    workflow.connect(despiker, 'out_file', extracter, 'in_file')

    # --- 14) Smooth within the brain mask only
    smoother = pe.MapNode(interface=afni.BlurInMask(), name='SMOOTHED', iterfield=['fwhm'])
    smoother.inputs.outputtype = 'NIFTI_GZ'
    workflow.connect(inputnode, 'fwhm', smoother, 'fwhm')
    workflow.connect(extracter, 'out_file', smoother, 'in_file')
    workflow.connect(extracter, 'mask_file', smoother, 'mask')

    # --- 15) Highpass filter
    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'), name='HIGHPASSED', iterfield=['in_file'])
    workflow.connect(inputnode, ('highpass', highpass_operand), highpass, 'op_string')
    workflow.connect(smoother, 'out_file', highpass, 'in_file')

    # --- 16) Mean functional volume
    # Need to add back the mean removed by FSL's -bptf filtering.
    meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), name='meanfunc', iterfield=['in_file'])
    workflow.connect(smoother, 'out_file', meanfunc, 'in_file')

    # --- 17) Add mean back to highpassed data (FINAL OUTPUT)
    addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'), name='PREPROCESSED', iterfield=['in_file', 'operand_file'])
    workflow.connect(highpass, 'out_file', addmean, 'in_file')
    workflow.connect(meanfunc, 'out_file', addmean, 'operand_file')

    # BUG FIX: 'mean_functional_volume' is connected in step 20 below, so it
    # must be declared here — IdentityInterface only exposes the listed
    # fields and nipype rejects connections to undeclared inputs.
    outputnode = pe.Node(interface=util.IdentityInterface(fields=['highpassed_files', 'mean_functional_volume']), name='outputnode')
    workflow.connect(addmean, 'out_file', outputnode, 'highpassed_files')

    # Utility function for plotting extraction: overlays the extracted brain
    # (contours) and mask (edges) on the despiked image. Runs inside a
    # Function node, hence the local imports.
    def bplot(in_file, in_file2, in_file3):
        from nilearn import image
        from nilearn import plotting
        import matplotlib
        niftifiledim = len(image.load_img(in_file).shape)
        firstim = image.index_img(in_file, 0)
        firstim2 = image.index_img(in_file2, 0)
        display = plotting.plot_anat(firstim2)
        display.add_contours(firstim, filled=True, alpha=0.5, levels=[0.2], colors='b')
        display.add_edges(in_file3)
        matplotlib.pyplot.show()
        return niftifiledim

    # --- 18) Show extraction
    showextract = pe.Node(Function(input_names=['in_file', 'in_file2', 'in_file3'], output_names=['niftifiledim'], function=bplot), name='SHOWEXTRACT')
    workflow.connect(despiker, 'out_file', showextract, 'in_file2')
    workflow.connect(extracter, 'out_file', showextract, 'in_file')
    workflow.connect(extracter, 'mask_file', showextract, 'in_file3')

    # Utility function for plotting smoothing (axial mosaic of first volume).
    def splot(in_file):
        from nilearn import image
        from nilearn import plotting
        import matplotlib
        niftifiledim = len(image.load_img(in_file).shape)
        firstim = image.index_img(in_file, 0)
        display = plotting.plot_anat(firstim, display_mode='z', cut_coords=10)
        matplotlib.pyplot.show()
        return niftifiledim

    # --- 19) Show smoothing
    showsmooth = pe.MapNode(Function(input_names=['in_file'], output_names=['niftifiledim'], function=splot), iterfield=['in_file'], name='SHOWSMOOTH')
    workflow.connect(smoother, 'out_file', showsmooth, 'in_file')

    # --- 20) Mean functional volume (for plotting stats)
    meanfunc2 = pe.Node(interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), name='MEANFUNCTIONAL')
    workflow.connect(extracter, 'out_file', meanfunc2, 'in_file')
    workflow.connect(meanfunc2, 'out_file', outputnode, 'mean_functional_volume')

    # --- 21) Plot workflow
    workflow.base_dir = DICOMDIR
    workflow.write_graph(graph2use='exec')

    # --- 22) Run workflow
    result = workflow.run()

    # --- 23/24) Return to initial working directory
    print("Workflow completed. Returning to intital directory\n")
    os.chdir(INITDIR)
def hmc_afni(name='fMRI_HMC_afni', st_correct=False, despike=False,
             deoblique=False, start_idx=None, stop_idx=None):
    """
    A :abbr:`HMC (head motion correction)` workflow for functional scans

    .. workflow::

      from mriqc.workflows.functional import hmc_afni
      wf = hmc_afni()

    :param str name: workflow name
    :param bool st_correct: prepend AFNI 3dTshift slice-timing correction
    :param bool despike: prepend AFNI 3dDespike
    :param bool deoblique: prepend AFNI 3drefit -deoblique
    :param start_idx: first volume to keep (None = keep from start)
    :param stop_idx: last volume to keep (None = keep to end)
    :return: a nipype Workflow exposing ``inputnode``/``outputnode``
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_fd']),
                         name='outputnode')

    # Optionally drop volumes before any other step; otherwise pass the
    # input straight through an identity node so downstream wiring is uniform.
    if (start_idx is not None) or (stop_idx is not None):
        drop_trs = pe.Node(afni.Calc(expr='a', outputtype='NIFTI_GZ'),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'in_file_a'),
                                   ('start_idx', 'start_idx'),
                                   ('stop_idx', 'stop_idx')]),
        ])
    else:
        drop_trs = pe.Node(niu.IdentityInterface(fields=['out_file']),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'out_file')]),
        ])

    get_mean_RPI = pe.Node(afni.TStat(options='-mean', outputtype='NIFTI_GZ'),
                           name='get_mean_RPI')

    # calculate hmc parameters: two-pass 3dvolreg, first against the mean of
    # the raw series, then (hmc_A) against the mean of the corrected series.
    hmc = pe.Node(afni.Volreg(args='-Fourier -twopass', zpad=4,
                              outputtype='NIFTI_GZ'),
                  name='motion_correct')

    get_mean_motion = get_mean_RPI.clone('get_mean_motion')
    hmc_A = hmc.clone('motion_correct_A')
    hmc_A.inputs.md1d_file = 'max_displacement.1D'

    # Compute the frame-wise displacement from the AFNI motion parameters
    fdnode = pe.Node(nac.FramewiseDisplacement(normalize=False,
                                               parameter_source="AFNI"),
                     name='ComputeFD')

    workflow.connect([
        (inputnode, fdnode, [('fd_radius', 'radius')]),
        (get_mean_RPI, hmc, [('out_file', 'basefile')]),
        (hmc, get_mean_motion, [('out_file', 'in_file')]),
        (get_mean_motion, hmc_A, [('out_file', 'basefile')]),
        (hmc_A, outputnode, [('out_file', 'out_file')]),
        (hmc_A, fdnode, [('oned_file', 'in_file')]),
        (fdnode, outputnode, [('out_file', 'out_fd')]),
    ])

    # Slice timing correction, despiking, and deoblique
    st_corr = pe.Node(afni.TShift(outputtype='NIFTI_GZ'), name='TimeShifts')
    deoblique_node = pe.Node(afni.Refit(deoblique=True), name='deoblique')
    despike_node = pe.Node(afni.Despike(outputtype='NIFTI_GZ'), name='despike')

    # Chain whichever optional steps are enabled, in the fixed order
    # TShift -> Despike -> Refit(deoblique). This replaces the original
    # eight-branch if/elif ladder, which wired exactly this chain by hand
    # for every flag combination — same connections, no behavior change.
    last_node, last_port = drop_trs, 'out_file'
    for enabled, node in ((st_correct, st_corr),
                          (despike, despike_node),
                          (deoblique, deoblique_node)):
        if enabled:
            workflow.connect([(last_node, node, [(last_port, 'in_file')])])
            last_node, last_port = node, 'out_file'

    # Feed the end of the (possibly empty) preprocessing chain into HMC.
    workflow.connect([
        (last_node, get_mean_RPI, [(last_port, 'in_file')]),
        (last_node, hmc, [(last_port, 'in_file')]),
        (last_node, hmc_A, [(last_port, 'in_file')]),
    ])

    return workflow
def FuncProc_despike_afni(stdrefvol="mid", SinkTag="func_preproc", wf_name="func_preproc_dspk_afni", fwhm=0, carpet_plot=""):
    """
    Performs processing of functional (resting-state) images.

    Images should be already reoriented, e.g. with fsl fslreorient2std
    (see scripts/ex_pipeline.py).

    Pipeline (as wired below): brain extraction -> motion correction ->
    AFNI despike -> CompCor + Friston-24 nuisance regression -> optional
    smoothing (only if fwhm > 0) -> temporal band-pass filter -> scrubbing.
    If `carpet_plot` is non-empty, a QC carpet plot is generated after
    each major stage.

    Workflow inputs:
        :param func: The functional image file (inputspec.func).
        :param cc_noise_roi: Noise ROI (WM+CSF) used by CompCor
            (inputspec.cc_noise_roi).
        :param stdrefvol: Reference volume selector for motion correction
            (passed to mc.mc_workflow_fsl).
        :param SinkTag: The output directory in which the returned images
            (see workflow outputs) could be found.
        :param wf_name: Name of the returned nipype workflow.
        :param fwhm: Smoothing kernel FWHM in mm; 0 disables smoothing.
        :param carpet_plot: Tag prefix for QC carpet plots; "" disables QC.

    Workflow outputs:
        :return: the assembled nipype workflow, exposing
            outputspec.func_preprocessed,
            outputspec.func_preprocessed_scrubbed and outputspec.FD.

    Tamas Spisak
    [email protected]
    2018
    """
    # Resolve (and create if needed) the sink directory for outputs.
    # NOTE(review): `globals._SinkDir_` is a project-level module attribute
    # defined elsewhere — confirm it is set before this is called.
    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    wf_mc = nipype.Workflow(wf_name)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func', 'cc_noise_roi']),
        name='inputspec')

    # build the actual pipeline
    #myonevol = onevol.onevol_workflow(SinkDir=SinkDir)
    mybet = bet.bet_workflow(SinkTag="func_preproc", fmri=True, wf_name="brain_extraction_func")
    mymc = mc.mc_workflow_fsl(reference_vol=stdrefvol)

    if carpet_plot:
        # create "atlas": sum the noise ROI and the brain mask so the QC
        # carpet plot can be stratified by tissue compartment.
        add_masks = pe.MapNode(fsl.ImageMaths(op_string=' -add'),
                               iterfield=['in_file', 'in_file2'],
                               name="addimgs")
        wf_mc.connect(inputspec, 'cc_noise_roi', add_masks, 'in_file')
        wf_mc.connect(mybet, 'outputspec.brain_mask', add_masks, 'in_file2')

        # QC carpet plot after motion correction.
        fmri_qc_mc = qc.fMRI2QC(carpet_plot, tag="mc", indiv_atlas=True)
        wf_mc.connect(add_masks, 'out_file', fmri_qc_mc, 'inputspec.atlas')
        wf_mc.connect(mymc, 'outputspec.FD_file', fmri_qc_mc, 'inputspec.confounds')
        wf_mc.connect(mymc, 'outputspec.func_out_file', fmri_qc_mc, 'inputspec.func')

    mydespike = pe.MapNode(
        afni.Despike(
            outputtype="NIFTI_GZ"),  # I do it after motion correction...
        iterfield=['in_file'],
        name="DeSpike")

    if carpet_plot:
        # QC carpet plot after despiking.
        fmri_qc_mc_dspk = qc.fMRI2QC(carpet_plot, tag="mc_dspk", indiv_atlas=True)
        wf_mc.connect(add_masks, 'out_file', fmri_qc_mc_dspk, 'inputspec.atlas')
        wf_mc.connect(mymc, 'outputspec.FD_file', fmri_qc_mc_dspk, 'inputspec.confounds')
        wf_mc.connect(mydespike, 'out_file', fmri_qc_mc_dspk, 'inputspec.func')

    mycmpcor = cmpcor.compcor_workflow()  # to WM+CSF signal
    myconc = conc.concat_workflow(numconcat=2)
    mynuisscor = nuisscorr.nuissremov_workflow(
    )  # regress out 5 compcor variables and the Friston24

    if carpet_plot:
        # QC carpet plot after nuisance regression.
        fmri_qc_mc_dspk_nuis = qc.fMRI2QC(carpet_plot, tag="mc_dspk_nuis", indiv_atlas=True)
        wf_mc.connect(add_masks, 'out_file', fmri_qc_mc_dspk_nuis, 'inputspec.atlas')
        wf_mc.connect(mymc, 'outputspec.FD_file', fmri_qc_mc_dspk_nuis, 'inputspec.confounds')
        wf_mc.connect(mynuisscor, 'outputspec.out_file', fmri_qc_mc_dspk_nuis, 'inputspec.func')

    # optional smoother:
    if fwhm > 0:
        smoother = pe.MapNode(interface=Smooth(fwhm=fwhm),
                              iterfield=['in_file'],
                              name="smoother")
        if carpet_plot:
            # QC carpet plot after smoothing.
            fmri_qc_mc_dspk_smooth_nuis_bpf = qc.fMRI2QC(
                carpet_plot, tag="mc_dspk_nuis_smooth", indiv_atlas=True)
            wf_mc.connect(add_masks, 'out_file',
                          fmri_qc_mc_dspk_smooth_nuis_bpf, 'inputspec.atlas')
            wf_mc.connect(mymc, 'outputspec.FD_file',
                          fmri_qc_mc_dspk_smooth_nuis_bpf, 'inputspec.confounds')
            wf_mc.connect(smoother, 'smoothed_file',
                          fmri_qc_mc_dspk_smooth_nuis_bpf, 'inputspec.func')

    #mymedangcor = medangcor.mac_workflow() #skip it this time
    # Temporal band-pass filter (0.008-0.08 Hz).
    mytmpfilt = tmpfilt.tmpfilt_workflow(
        highpass_Hz=0.008, lowpass_Hz=0.08)  #will be done by the masker?

    if carpet_plot:
        # QC carpet plot after band-pass filtering.
        fmri_qc_mc_dspk_nuis_bpf = qc.fMRI2QC(carpet_plot, tag="mc_dspk_nuis_bpf", indiv_atlas=True)
        wf_mc.connect(add_masks, 'out_file', fmri_qc_mc_dspk_nuis_bpf, 'inputspec.atlas')
        wf_mc.connect(mymc, 'outputspec.FD_file', fmri_qc_mc_dspk_nuis_bpf, 'inputspec.confounds')
        wf_mc.connect(mytmpfilt, 'outputspec.func_tmplfilt', fmri_qc_mc_dspk_nuis_bpf, 'inputspec.func')

    myscrub = scrub.datacens_workflow_threshold(ex_before=0, ex_after=0)
    # "liberal scrubbing" since despiking was already performed

    if carpet_plot:
        # QC carpet plot after scrubbing.
        fmri_qc_mc_dspk_nuis_bpf_scrub = qc.fMRI2QC(
            carpet_plot, tag="mc_dspk_nuis_bpf_scrub", indiv_atlas=True)
        wf_mc.connect(add_masks, 'out_file', fmri_qc_mc_dspk_nuis_bpf_scrub, 'inputspec.atlas')
        wf_mc.connect(myscrub, 'outputspec.FD_scrubbed', fmri_qc_mc_dspk_nuis_bpf_scrub, 'inputspec.confounds')
        wf_mc.connect(myscrub, 'outputspec.scrubbed_image', fmri_qc_mc_dspk_nuis_bpf_scrub, 'inputspec.func')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'func_preprocessed',
            'func_preprocessed_scrubbed',
            # non-image data
            'FD'
        ]),
        name='outputspec')

    # Wire the core pipeline: bet -> mc -> despike -> compcor/concat ->
    # nuisance regression.
    wf_mc.connect([
        (inputspec, mybet, [('func', 'inputspec.in_file')]),
        (mybet, mymc, [('outputspec.brain', 'inputspec.func')]),
        (mymc, mydespike, [('outputspec.func_out_file', 'in_file')]),
        (mydespike, mycmpcor, [('out_file', 'inputspec.func_aligned')]),
        (inputspec, mycmpcor, [('cc_noise_roi', 'inputspec.mask_file')]),
        (mycmpcor, myconc, [('outputspec.components_file', 'inputspec.par1')]),
        (mymc, myconc, [('outputspec.first24_file', 'inputspec.par2')]),
        (myconc, mynuisscor, [('outputspec.concat_file', 'inputspec.design_file')]),
        (mydespike, mynuisscor, [('out_file', 'inputspec.in_file')])
    ])

    # Route the nuisance-regressed data through the (optional) smoother
    # into the temporal filter and scrubbing; the band-pass-filtered image
    # is the main 'func_preprocessed' output in both cases.
    if fwhm > 0:
        wf_mc.connect([
            (mynuisscor, smoother, [('outputspec.out_file', 'in_file')]),
            (smoother, mytmpfilt, [('smoothed_file', 'inputspec.func')]),
            (mytmpfilt, myscrub, [('outputspec.func_tmplfilt', 'inputspec.func')]),
            (mymc, myscrub, [('outputspec.FD_file', 'inputspec.FD')]),
            (mytmpfilt, outputspec, [('outputspec.func_tmplfilt', 'func_preprocessed')])
        ])
    else:
        wf_mc.connect([(mynuisscor, mytmpfilt, [('outputspec.out_file', 'inputspec.func')]),
                       (mytmpfilt, myscrub, [('outputspec.func_tmplfilt', 'inputspec.func')]),
                       (mymc, myscrub, [('outputspec.FD_file', 'inputspec.FD')
                                        ]),
                       (mytmpfilt, outputspec, [('outputspec.func_tmplfilt', 'func_preprocessed')])])

    wf_mc.connect([
        # non-image data:
        (mymc, outputspec, [('outputspec.FD_file', 'FD')]),
        (myscrub, outputspec, [('outputspec.scrubbed_image', 'func_preprocessed_scrubbed')]),
    ])

    return wf_mc
# --- Node definitions for a functional preprocessing script mixing
# dcmstack, SPM, AFNI and ANTs interfaces. Only node construction and
# static inputs appear here; the connections are presumably made further
# down the file — confirm against the rest of the script.

# Stack DICOM series 2 into a single NIfTI ('epi2.nii') with metadata embedded.
epi_s2_stack = Node(dcmstack.DcmStack(), name='epi_s2_stack')
epi_s2_stack.inputs.embed_meta = True
epi_s2_stack.inputs.out_format = 'epi2'
epi_s2_stack.inputs.out_ext = '.nii'

# SPM slice-timing correction (timing parameters set elsewhere).
st_corr = Node(spm.SliceTiming(), name='slicetiming_correction')

# SPM realignment, registering all volumes to the mean image.
realign = Node(spm.Realign(), name='realign')
realign.inputs.register_to_mean = True

# Temporal SNR maps (mean / stddev / tSNR volumes), one run per input file.
tsnr = MapNode(confounds.TSNR(), iterfield='in_file', name='tsnr')
tsnr.inputs.mean_file = 'mean.nii'
tsnr.inputs.stddev_file = 'stddev.nii'
tsnr.inputs.tsnr_file = 'tsnr.nii'

# AFNI 3dDespike per run; uncompressed NIfTI so SPM nodes can read it.
despike = MapNode(afni.Despike(), iterfield='in_file', name='despike')
despike.inputs.outputtype = 'NIFTI'

# SPM segmentation, keeping only the native-space tissue maps.
seg = Node(spm.Segment(), name='seg')
seg.inputs.csf_output_type = [False, False, True]  #Output native CSF seg
seg.inputs.gm_output_type = [False, False, True]  #Output native gm seg
seg.inputs.wm_output_type = [False, False, True]  #Output native wm seg

# SPM coregistration, iterating over target images (one per EPI run).
coreg2epi = MapNode(spm.Coregister(), iterfield='target', name='coreg2epi')

#Warps to MNI space using a 3mm template image
antsnorm = MapNode(ants.Registration(), iterfield='moving_image', name='antsnorm')
antsnorm.inputs.collapse_output_transforms = True
antsnorm.inputs.initial_moving_transform_com = True
"""Despike an fMRI time series with AFNI 3dDespike.

Despikes the fMRI timeseries for subjects with > 1mm motion peak, then
subtracts the despiked image from the original so the removed spikes can
be inspected as a difference image.

Usage:
    python <script> <preproc_bold>.nii.gz

Writes <preproc_bold>_despiked.nii.gz and <preproc_bold>_despiked_diff.nii.gz
next to the input.
"""
import sys

from nipype.interfaces import afni
from nipype.interfaces.fsl.maths import BinaryMaths

NII_GZ = '.nii.gz'

# Validate the command line up front: the original sliced 7 characters off
# the name unconditionally, silently producing garbage output names for
# missing or non-.nii.gz arguments.
if len(sys.argv) < 2:
    sys.exit('usage: %s <bold%s>' % (sys.argv[0], NII_GZ))
inputname = sys.argv[1]
if not inputname.endswith(NII_GZ):
    sys.exit('expected a %s input, got: %s' % (NII_GZ, inputname))
outputname = inputname[:-len(NII_GZ)] + '_despiked' + NII_GZ

# afni 3dDespike; '-cut 1.0 4.0' sets the spike-detection curve thresholds.
despike = afni.Despike()
despike.inputs.in_file = inputname
despike.inputs.out_file = outputname
despike.inputs.args = '-cut 1.0 4.0'
print(despike.cmdline)
despike.run()

# subtract despiked image from the original to check the diff
subtract = BinaryMaths()
subtract.inputs.in_file = inputname
subtract.inputs.operand_file = outputname
subtract.inputs.operation = 'sub'
subtract.inputs.out_file = outputname[:-len(NII_GZ)] + '_diff' + NII_GZ
print(subtract.cmdline)
subtract.run()