def init_func_preproc_wf(bold_file):
    """
    This workflow controls the functional preprocessing stages of *fMRIPrep*.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.tests import mock_config
            from fmriprep import config
            from fmriprep.workflows.bold.base import init_func_preproc_wf
            with mock_config():
                bold_file = config.execution.bids_dir / 'sub-01' / 'func' \
                    / 'sub-01_task-mixedgamblestask_run-01_bold.nii.gz'
                wf = init_func_preproc_wf(str(bold_file))

    Parameters
    ----------
    bold_file
        BOLD series NIfTI file (a list of per-echo files for multi-echo data)

    Inputs
    ------
    bold_file
        BOLD series NIfTI file
    t1w_preproc
        Bias-corrected structural template image
    t1w_mask
        Mask of the skull-stripped template image
    t1w_dseg
        Segmentation of preprocessed structural image, including
        gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
    t1w_aseg
        Segmentation of structural image, done with FreeSurfer.
    t1w_aparc
        Parcellation of structural image, done with FreeSurfer.
    t1w_tpms
        List of tissue probability maps in T1w space
    template
        List of templates to target
    anat2std_xfm
        List of transform files, collated with templates
    std2anat_xfm
        List of inverse transform files, collated with templates
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w

    Outputs
    -------
    bold_t1
        BOLD series, resampled to T1w space
    bold_mask_t1
        BOLD series mask in T1w space
    bold_std
        BOLD series, resampled to template space
    bold_mask_std
        BOLD series mask in template space
    confounds
        TSV of confounds
    surfaces
        BOLD series, resampled to FreeSurfer surfaces
    aroma_noise_ics
        Noise components identified by ICA-AROMA
    melodic_mix
        FSL MELODIC mixing matrix
    bold_cifti
        BOLD CIFTI image
    cifti_variant
        combination of target spaces for `bold_cifti`

    See Also
    --------

    * :py:func:`~niworkflows.func.util.init_bold_reference_wf`
    * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
    * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
    * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_t1_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_std_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
    * :py:func:`~sdcflows.workflows.fmap.init_fmap_wf`
    * :py:func:`~sdcflows.workflows.pepolar.init_pepolar_unwarp_wf`
    * :py:func:`~sdcflows.workflows.phdiff.init_phdiff_wf`
    * :py:func:`~sdcflows.workflows.syn.init_syn_sdc_wf`
    * :py:func:`~sdcflows.workflows.unwarp.init_sdc_unwarp_wf`

    """
    # NOTE(review): `pe`, `niu`, `os`, `config`, and the various `init_*_wf` /
    # `_create_mem_gb` / `_get_wf_name` / `_get_series_len` / `FunctionalSummary` /
    # `FSLSplit` / `DerivativesDataSink` names are presumably imported at module
    # level (not visible in this chunk) — confirm against the file header.
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.func.util import init_bold_reference_wf
    from niworkflows.interfaces.nibabel import ApplyMask
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import DictMerge
    from sdcflows.workflows.base import init_sdc_estimate_wf, fieldmap_wrangler

    ref_file = bold_file
    # Conservative fallbacks (1 GB each) used when the file cannot be probed below.
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
    bold_tlen = 10  # fallback number of timepoints if the series length is unknown
    multiecho = isinstance(bold_file, list)

    # Have some options handy
    layout = config.execution.layout
    omp_nthreads = config.nipype.omp_nthreads
    freesurfer = config.workflow.run_reconall
    spaces = config.workflow.spaces

    if multiecho:
        # Use the shortest-TE echo as the reference file for metadata/size probing.
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]

    if os.path.isfile(ref_file):
        # Replace the fallback estimates with values derived from the actual image.
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    config.loggers.workflow.debug(
        'Creating bold processing workflow for "%s" (%.2f GB / %d TRs). '
        'Memory resampled/largemem=%.2f/%.2f GB.',
        ref_file, mem_gb['filesize'], bold_tlen, mem_gb['resampled'],
        mem_gb['largemem'])

    sbref_file = None
    # Find associated sbref, if possible
    entities = layout.parse_file_entities(ref_file)
    entities['suffix'] = 'sbref'
    entities['extension'] = ['nii', 'nii.gz']  # Overwrite extensions
    files = layout.get(return_type='file', **entities)
    refbase = os.path.basename(ref_file)
    if 'sbref' in config.workflow.ignore:
        config.loggers.workflow.info("Single-band reference files ignored.")
    elif files and multiecho:
        config.loggers.workflow.warning(
            "Single-band reference found, but not supported in "
            "multi-echo workflows at this time. Ignoring.")
    elif files:
        # Use the first sbref found; warn if the query matched more than one.
        sbref_file = files[0]
        sbbase = os.path.basename(sbref_file)
        if len(files) > 1:
            config.loggers.workflow.warning(
                "Multiple single-band reference files found for {}; using "
                "{}".format(refbase, sbbase))
        else:
            config.loggers.workflow.info("Using single-band reference file %s.",
                                         sbbase)
    else:
        config.loggers.workflow.info("No single-band-reference found for %s.",
                                     refbase)

    metadata = layout.get_metadata(ref_file)

    # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
    fmaps = None
    if 'fieldmaps' not in config.workflow.ignore:
        fmaps = fieldmap_wrangler(layout, ref_file,
                                  use_syn=config.workflow.use_syn,
                                  force_syn=config.workflow.force_syn)
    elif config.workflow.use_syn or config.workflow.force_syn:
        # If fieldmaps are not enabled, activate SyN-SDC in unforced (False) mode
        fmaps = {'syn': False}

    # Tri-state value: True (run STC), False (skip), or the string 'TooShort'
    # (series too short to estimate slice timing).
    # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
    run_stc = (bool(metadata.get("SliceTiming")) and
               'slicetiming' not in config.workflow.ignore and
               (_get_series_len(ref_file) > 4 or "TooShort"))

    # Check if MEEPI for T2* coregistration target
    if config.workflow.t2s_coreg and not multiecho:
        config.loggers.workflow.warning(
            "No multiecho BOLD images found for T2* coregistration. "
            "Using standard EPI-T1 coregistration.")
        # NOTE(review): this mutates the shared config module in place.
        config.workflow.t2s_coreg = False

    # By default, force-bbr for t2s_coreg unless user specifies otherwise
    if config.workflow.t2s_coreg and config.workflow.use_bbr is None:
        config.workflow.use_bbr = True

    # Build workflow
    workflow = Workflow(name=wf_name)
    workflow.__postdesc__ = """\
All resamplings can be performed with *a single interpolation
step* by composing all the pertinent transformations (i.e. head-motion
transform matrices, susceptibility distortion correction when available,
and co-registrations to anatomical and output spaces).
Gridded (volumetric) resamplings were performed using `antsApplyTransforms` (ANTs),
configured with Lanczos interpolation to minimize the smoothing
effects of other kernels [@lanczos].
Non-gridded (surface) resamplings were performed using `mri_vol2surf`
(FreeSurfer).
"""

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_file', 'subjects_dir', 'subject_id',
                't1w_preproc', 't1w_mask', 't1w_dseg', 't1w_tpms',
                't1w_aseg', 't1w_aparc',
                'anat2std_xfm', 'std2anat_xfm', 'template',
                't1w2fsnative_xfm', 'fsnative2t1w_xfm']),
        name='inputnode')
    inputnode.inputs.bold_file = bold_file

    if sbref_file is not None:
        from niworkflows.interfaces.images import ValidateImage
        val_sbref = pe.Node(ValidateImage(in_file=sbref_file), name='val_sbref')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_t1', 'bold_t1_ref', 'bold_mask_t1',
                'bold_aseg_t1', 'bold_aparc_t1',
                'bold_std', 'bold_std_ref', 'bold_mask_std',
                'bold_aseg_std', 'bold_aparc_std',
                'bold_native', 'bold_cifti', 'cifti_variant',
                'cifti_metadata', 'cifti_density',
                'surfaces', 'confounds', 'aroma_noise_ics', 'melodic_mix',
                'nonaggr_denoised_file', 'confounds_metadata']),
        name='outputnode')

    # Generate a brain-masked conversion of the t1w
    t1w_brain = pe.Node(ApplyMask(), name='t1w_brain')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']), name='boldbuffer')

    summary = pe.Node(
        FunctionalSummary(
            slice_timing=run_stc,
            # freesurfer is a bool, so this indexes 'FSL' (False) or 'FreeSurfer' (True)
            registration=('FSL', 'FreeSurfer')[freesurfer],
            registration_dof=config.workflow.bold2t1w_dof,
            registration_init=config.workflow.bold2t1w_init,
            pe_direction=metadata.get("PhaseEncodingDirection"),
            tr=metadata.get("RepetitionTime")),
        name='summary', mem_gb=config.DEFAULT_MEMORY_MIN_GB,
        run_without_submitting=True)
    summary.inputs.dummy_scans = config.workflow.dummy_scans

    func_derivatives_wf = init_func_derivatives_wf(
        bids_root=layout.root,
        cifti_output=config.workflow.cifti_output,
        freesurfer=freesurfer,
        metadata=metadata,
        output_dir=str(config.execution.output_dir),
        spaces=spaces,
        use_aroma=config.workflow.use_aroma,
    )

    workflow.connect([
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_t1_ref', 'inputnode.bold_t1_ref'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_native', 'inputnode.bold_native'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surf_files'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_metadata', 'inputnode.cifti_metadata'),
            ('cifti_density', 'inputnode.cifti_density'),
            ('confounds_metadata', 'inputnode.confounds_metadata'),
        ]),
    ])

    # Generate a tentative boldref
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)
    bold_reference_wf.inputs.inputnode.dummy_scans = config.workflow.dummy_scans
    if sbref_file is not None:
        workflow.connect([
            (val_sbref, bold_reference_wf, [('out_file', 'inputnode.sbref_file')]),
        ])

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'), name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # calculate BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=config.workflow.use_bbr,
                                   bold2t1w_dof=config.workflow.bold2t1w_dof,
                                   bold2t1w_init=config.workflow.bold2t1w_init,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False)

    # apply BOLD registration to T1w
    bold_t1_trans_wf = init_bold_t1_trans_wf(name='bold_t1_trans_wf',
                                             freesurfer=freesurfer,
                                             use_fieldwarp=bool(fmaps),
                                             multiecho=multiecho,
                                             mem_gb=mem_gb['resampled'],
                                             omp_nthreads=omp_nthreads,
                                             use_compression=False)

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        regressors_all_comps=config.workflow.regressors_all_comps,
        regressors_fd_th=config.workflow.regressors_fd_th,
        regressors_dvars_th=config.workflow.regressors_dvars_th,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not config.execution.low_mem,
        use_fieldwarp=bool(fmaps),
        name='bold_bold_trans_wf')
    bold_bold_trans_wf.inputs.inputnode.name_source = ref_file

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [
                ('outputnode.skip_vols', 'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
        if not multiecho:
            workflow.connect([
                (bold_reference_wf, bold_stc_wf, [
                    ('outputnode.bold_file', 'inputnode.bold_file')])])
        else:  # for meepi, iterate through stc_wf for all workflows
            meepi_echos = boldbuffer.clone(name='meepi_echos')
            meepi_echos.iterables = ('bold_file', bold_file)
            workflow.connect([
                (meepi_echos, bold_stc_wf, [('bold_file', 'inputnode.bold_file')])])
    elif not multiecho:  # STC is too short or False
        # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([
            (bold_reference_wf, boldbuffer, [('outputnode.bold_file', 'bold_file')])])
    else:
        # for meepi, iterate over all meepi echos to boldbuffer
        boldbuffer.iterables = ('bold_file', bold_file)

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_estimate_wf(fmaps, metadata,
                                       omp_nthreads=omp_nthreads,
                                       debug=config.execution.debug)

    # MULTI-ECHO EPI DATA #############################################
    if multiecho:
        from niworkflows.func.util import init_skullstrip_bold_wf
        skullstrip_bold_wf = init_skullstrip_bold_wf(name='skullstrip_bold_wf')

        inputnode.inputs.bold_file = ref_file  # Replace reference w first echo

        # JoinNode collapses the echo iterables back into a single list; its
        # joinsource must match whichever node actually carries the iterables
        # (meepi_echos when STC ran, boldbuffer otherwise).
        join_echos = pe.JoinNode(
            niu.IdentityInterface(fields=['bold_files']),
            joinsource=('meepi_echos' if run_stc is True else 'boldbuffer'),
            joinfield=['bold_files'],
            name='join_echos')

        # create optimal combination, adaptive T2* map
        bold_t2s_wf = init_bold_t2s_wf(echo_times=tes,
                                       mem_gb=mem_gb['resampled'],
                                       omp_nthreads=omp_nthreads,
                                       t2s_coreg=config.workflow.t2s_coreg,
                                       name='bold_t2smap_wf')

        workflow.connect([
            (skullstrip_bold_wf, join_echos, [
                ('outputnode.skull_stripped_file', 'bold_files')]),
            (join_echos, bold_t2s_wf, [
                ('bold_files', 'inputnode.bold_file')]),
        ])

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        (inputnode, t1w_brain, [('t1w_preproc', 'in_file'),
                                ('t1w_mask', 'in_mask')]),
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf, [
            ('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
            ('outputnode.bold_file', 'inputnode.bold_file')]),
        (bold_reference_wf, summary, [
            ('outputnode.algo_dummy_scans', 'algo_dummy_scans')]),
        # EPI-T1 registration workflow
        (inputnode, bold_reg_wf, [
            ('t1w_dseg', 'inputnode.t1w_dseg'),
            # Undefined if --fs-no-reconall, but this is safe
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id'),
            ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]),
        (t1w_brain, bold_reg_wf, [('out_file', 'inputnode.t1w_brain')]),
        (inputnode, bold_t1_trans_wf, [
            ('bold_file', 'inputnode.name_source'),
            ('t1w_mask', 'inputnode.t1w_mask'),
            ('t1w_aseg', 'inputnode.t1w_aseg'),
            ('t1w_aparc', 'inputnode.t1w_aparc')]),
        (t1w_brain, bold_t1_trans_wf, [('out_file', 'inputnode.t1w_brain')]),
        # unused if multiecho, but this is safe
        (bold_hmc_wf, bold_t1_trans_wf, [('outputnode.xforms', 'inputnode.hmc_xforms')]),
        (bold_reg_wf, bold_t1_trans_wf, [
            ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
        (bold_t1_trans_wf, outputnode, [('outputnode.bold_t1', 'bold_t1'),
                                        ('outputnode.bold_t1_ref', 'bold_t1_ref'),
                                        ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
                                        ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (t1w_brain, bold_sdc_wf, [('out_file', 'inputnode.t1w_brain')]),
        (bold_reference_wf, bold_sdc_wf, [
            ('outputnode.ref_image', 'inputnode.epi_file'),
            ('outputnode.ref_image_brain', 'inputnode.epi_brain'),
            ('outputnode.bold_mask', 'inputnode.epi_mask')]),
        (bold_sdc_wf, bold_t1_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp'),
            ('outputnode.epi_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction')]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [('t1w_tpms', 'inputnode.t1w_tpms'),
                                        ('t1w_mask', 'inputnode.t1w_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [
            ('outputnode.movpar_file', 'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [
            ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
        (bold_reference_wf, bold_confounds_wf, [
            ('outputnode.skip_vols', 'inputnode.skip_vols')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_metadata', 'confounds_metadata'),
        ]),
        # Connect bold_bold_trans_wf
        (bold_split, bold_bold_trans_wf, [('out_files', 'inputnode.bold_file')]),
        (bold_hmc_wf, bold_bold_trans_wf, [
            ('outputnode.xforms', 'inputnode.hmc_xforms')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])

    if not config.workflow.t2s_coreg:
        workflow.connect([
            (bold_sdc_wf, bold_reg_wf, [
                ('outputnode.epi_brain', 'inputnode.ref_bold_brain')]),
            (bold_sdc_wf, bold_t1_trans_wf, [
                ('outputnode.epi_brain', 'inputnode.ref_bold_brain'),
                ('outputnode.epi_mask', 'inputnode.ref_bold_mask')]),
        ])
    else:
        workflow.connect([
            # For t2s_coreg, replace EPI-to-T1w registration inputs
            (bold_t2s_wf, bold_reg_wf, [
                ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain')]),
            (bold_t2s_wf, bold_t1_trans_wf, [
                ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
                ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
        ])

    # for standard EPI data, pass along correct file
    if not multiecho:
        workflow.connect([
            (inputnode, func_derivatives_wf, [
                ('bold_file', 'inputnode.source_file')]),
            (bold_bold_trans_wf, bold_confounds_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_split, bold_t1_trans_wf, [
                ('out_files', 'inputnode.bold_split')]),
        ])
    else:  # for meepi, create and use optimal combination
        workflow.connect([
            # update name source for optimal combination
            (inputnode, func_derivatives_wf, [
                (('bold_file', combine_meepi_source), 'inputnode.source_file')]),
            (bold_bold_trans_wf, skullstrip_bold_wf, [
                ('outputnode.bold', 'inputnode.in_file')]),
            (bold_t2s_wf, bold_confounds_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_t2s_wf, bold_t1_trans_wf, [
                ('outputnode.bold', 'inputnode.bold_split')]),
        ])

    if fmaps:
        from sdcflows.workflows.outputs import init_sdc_unwarp_report_wf
        # Report on BOLD correction
        fmap_unwarp_report_wf = init_sdc_unwarp_report_wf()
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [
                ('t1w_dseg', 'inputnode.in_seg')]),
            (bold_reference_wf, fmap_unwarp_report_wf, [
                ('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [
                ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [
                ('outputnode.epi_corrected', 'inputnode.in_post')]),
        ])

        # Overwrite ``out_path_base`` of unwarping DataSinks
        for node in fmap_unwarp_report_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                fmap_unwarp_report_wf.get_node(
                    node).interface.out_path_base = 'fmriprep'

        for node in bold_sdc_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                bold_sdc_wf.get_node(node).interface.out_path_base = 'fmriprep'

        if 'syn' in fmaps:
            # SyN-SDC needs the inverse normalization to MNI152NLin2009cAsym.
            sdc_select_std = pe.Node(
                KeySelect(fields=['std2anat_xfm']),
                name='sdc_select_std', run_without_submitting=True)
            sdc_select_std.inputs.key = 'MNI152NLin2009cAsym'
            workflow.connect([
                (inputnode, sdc_select_std, [('std2anat_xfm', 'std2anat_xfm'),
                                             ('template', 'keys')]),
                (sdc_select_std, bold_sdc_wf, [
                    ('std2anat_xfm', 'inputnode.std2anat_xfm')]),
            ])

        if fmaps.get('syn') is True:  # SyN forced
            syn_unwarp_report_wf = init_sdc_unwarp_report_wf(
                name='syn_unwarp_report_wf', forcedsyn=True)
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [
                    ('t1w_dseg', 'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf, [
                    ('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf, [
                    ('outputnode.syn_ref', 'inputnode.in_post')]),
            ])

            # Overwrite ``out_path_base`` of unwarping DataSinks
            for node in syn_unwarp_report_wf.list_node_names():
                if node.split('.')[-1].startswith('ds_'):
                    syn_unwarp_report_wf.get_node(
                        node).interface.out_path_base = 'fmriprep'

    # Map final BOLD mask into T1w space (if required)
    nonstd_spaces = set(spaces.get_nonstandard())
    if nonstd_spaces.intersection(('T1w', 'anat')):
        from niworkflows.interfaces.fixes import (
            FixHeaderApplyTransforms as ApplyTransforms)

        boldmask_to_t1w = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                  float=True),
                                  name='boldmask_to_t1w', mem_gb=0.1)
        workflow.connect([
            (bold_reg_wf, boldmask_to_t1w, [
                ('outputnode.itk_bold_to_t1', 'transforms')]),
            (bold_t1_trans_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask_t1', 'reference_image')]),
            # mask comes from the optimally-combined branch for multi-echo data
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask', 'input_image')]),
            (boldmask_to_t1w, outputnode, [
                ('output_image', 'bold_mask_t1')]),
        ])

    if nonstd_spaces.intersection(('func', 'run', 'bold', 'boldref', 'sbref')):
        workflow.connect([
            (bold_bold_trans_wf, outputnode, [
                ('outputnode.bold', 'bold_native')]),
            (bold_bold_trans_wf, func_derivatives_wf, [
                ('outputnode.bold_ref', 'inputnode.bold_native_ref'),
                ('outputnode.bold_mask', 'inputnode.bold_mask_native')]),
        ])

    if spaces.get_spaces(nonstandard=False, dim=(3, )):
        # Apply transforms in 1 shot
        # Only use uncompressed output if AROMA is to be run
        bold_std_trans_wf = init_bold_std_trans_wf(
            freesurfer=freesurfer,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            spaces=spaces,
            name='bold_std_trans_wf',
            use_compression=not config.execution.low_mem,
            use_fieldwarp=bool(fmaps),
        )
        workflow.connect([
            (inputnode, bold_std_trans_wf, [
                ('template', 'inputnode.templates'),
                ('anat2std_xfm', 'inputnode.anat2std_xfm'),
                ('bold_file', 'inputnode.name_source'),
                ('t1w_aseg', 'inputnode.bold_aseg'),
                ('t1w_aparc', 'inputnode.bold_aparc')]),
            (bold_hmc_wf, bold_std_trans_wf, [
                ('outputnode.xforms', 'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_std_trans_wf, [
                ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf, bold_std_trans_wf, [
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_std_trans_wf, [
                ('outputnode.out_warp', 'inputnode.fieldwarp')]),
            (bold_std_trans_wf, outputnode, [('outputnode.bold_std', 'bold_std'),
                                             ('outputnode.bold_std_ref', 'bold_std_ref'),
                                             ('outputnode.bold_mask_std', 'bold_mask_std')]),
        ])

        if freesurfer:
            workflow.connect([
                (bold_std_trans_wf, func_derivatives_wf, [
                    ('outputnode.bold_aseg_std', 'inputnode.bold_aseg_std'),
                    ('outputnode.bold_aparc_std', 'inputnode.bold_aparc_std'),
                ]),
                (bold_std_trans_wf, outputnode, [
                    ('outputnode.bold_aseg_std', 'bold_aseg_std'),
                    ('outputnode.bold_aparc_std', 'bold_aparc_std')]),
            ])

        if not multiecho:
            workflow.connect([
                (bold_split, bold_std_trans_wf, [
                    ('out_files', 'inputnode.bold_split')])])
        else:
            # Multi-echo: split the optimally-combined series instead of the raw one.
            split_opt_comb = bold_split.clone(name='split_opt_comb')
            workflow.connect([
                (bold_t2s_wf, split_opt_comb, [
                    ('outputnode.bold', 'in_file')]),
                (split_opt_comb, bold_std_trans_wf, [
                    ('out_files', 'inputnode.bold_split')])])

        # func_derivatives_wf internally parametrizes over snapshotted spaces.
        workflow.connect([
            (bold_std_trans_wf, func_derivatives_wf, [
                ('outputnode.template', 'inputnode.template'),
                ('outputnode.spatial_reference', 'inputnode.spatial_reference'),
                ('outputnode.bold_std_ref', 'inputnode.bold_std_ref'),
                ('outputnode.bold_std', 'inputnode.bold_std'),
                ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
            ]),
        ])

        if config.workflow.use_aroma:  # ICA-AROMA workflow
            from .confounds import init_ica_aroma_wf
            ica_aroma_wf = init_ica_aroma_wf(
                mem_gb=mem_gb['resampled'],
                metadata=metadata,
                omp_nthreads=omp_nthreads,
                use_fieldwarp=bool(fmaps),
                err_on_aroma_warn=config.workflow.aroma_err_on_warn,
                aroma_melodic_dim=config.workflow.aroma_melodic_dim,
                name='ica_aroma_wf')

            # Appends AROMA regressors onto the confounds TSV.
            join = pe.Node(niu.Function(output_names=["out_file"],
                                        function=_to_join),
                           name='aroma_confounds')

            mrg_conf_metadata = pe.Node(niu.Merge(2), name='merge_confound_metadata',
                                        run_without_submitting=True)
            mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2',
                                         run_without_submitting=True)

            # Re-route confounds through the AROMA join before they reach outputnode.
            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_metadata', 'confounds_metadata'),
                ]),
            ])
            workflow.connect([
                (inputnode, ica_aroma_wf, [
                    ('bold_file', 'inputnode.name_source')]),
                (bold_hmc_wf, ica_aroma_wf, [
                    ('outputnode.movpar_file', 'inputnode.movpar_file')]),
                (bold_reference_wf, ica_aroma_wf, [
                    ('outputnode.skip_vols', 'inputnode.skip_vols')]),
                (bold_confounds_wf, join, [
                    ('outputnode.confounds_file', 'in_file')]),
                (bold_confounds_wf, mrg_conf_metadata, [
                    ('outputnode.confounds_metadata', 'in1')]),
                (ica_aroma_wf, join, [
                    ('outputnode.aroma_confounds', 'join_file')]),
                (ica_aroma_wf, mrg_conf_metadata, [
                    ('outputnode.aroma_metadata', 'in2')]),
                (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
                (ica_aroma_wf, outputnode, [
                    ('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                    ('outputnode.melodic_mix', 'melodic_mix'),
                    ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')]),
                (join, outputnode, [('out_file', 'confounds')]),
                (mrg_conf_metadata2, outputnode, [
                    ('out_dict', 'confounds_metadata')]),
                (bold_std_trans_wf, ica_aroma_wf, [
                    ('outputnode.bold_std', 'inputnode.bold_std'),
                    ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
                    ('outputnode.spatial_reference', 'inputnode.spatial_reference')]),
            ])

    # SURFACES ##################################################################################
    # Freesurfer
    freesurfer_spaces = spaces.get_fs_spaces()
    if freesurfer and freesurfer_spaces:
        config.loggers.workflow.debug('Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(
            mem_gb=mem_gb['resampled'],
            surface_spaces=freesurfer_spaces,
            medial_surface_nan=config.workflow.medial_surface_nan,
            name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf, [
                ('t1w_preproc', 'inputnode.t1w_preproc'),
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id'),
                ('t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm')]),
            (bold_t1_trans_wf, bold_surf_wf, [
                ('outputnode.bold_t1', 'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
            (bold_surf_wf, func_derivatives_wf, [
                ('outputnode.target', 'inputnode.surf_refs')]),
        ])

        # CIFTI output
        if config.workflow.cifti_output:
            from .resampling import init_bold_grayords_wf
            bold_grayords_wf = init_bold_grayords_wf(
                grayord_density=config.workflow.cifti_output,
                mem_gb=mem_gb['resampled'],
                repetition_time=metadata['RepetitionTime'])

            workflow.connect([
                (inputnode, bold_grayords_wf, [
                    ('subjects_dir', 'inputnode.subjects_dir')]),
                (bold_std_trans_wf, bold_grayords_wf, [
                    ('outputnode.bold_std', 'inputnode.bold_std'),
                    ('outputnode.spatial_reference', 'inputnode.spatial_reference')]),
                (bold_surf_wf, bold_grayords_wf, [
                    ('outputnode.surfaces', 'inputnode.surf_files'),
                    ('outputnode.target', 'inputnode.surf_refs'),
                ]),
                (bold_grayords_wf, outputnode, [
                    ('outputnode.cifti_bold', 'bold_cifti'),
                    ('outputnode.cifti_variant', 'cifti_variant'),
                    ('outputnode.cifti_metadata', 'cifti_metadata'),
                    ('outputnode.cifti_density', 'cifti_density')]),
            ])

    if spaces.get_spaces(nonstandard=False, dim=(3, )):
        carpetplot_wf = init_carpetplot_wf(
            mem_gb=mem_gb['resampled'],
            metadata=metadata,
            cifti_output=config.workflow.cifti_output,
            name='carpetplot_wf')

        if config.workflow.cifti_output:
            workflow.connect(
                bold_grayords_wf, 'outputnode.cifti_bold',
                carpetplot_wf, 'inputnode.cifti_bold')
        else:
            # Xform to 'MNI152NLin2009cAsym' is always computed.
            carpetplot_select_std = pe.Node(
                KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'),
                name='carpetplot_select_std', run_without_submitting=True)

            workflow.connect([
                (inputnode, carpetplot_select_std, [
                    ('std2anat_xfm', 'std2anat_xfm'),
                    ('template', 'keys')]),
                (carpetplot_select_std, carpetplot_wf, [
                    ('std2anat_xfm', 'inputnode.std2anat_xfm')]),
                (bold_bold_trans_wf if not multiecho else bold_t2s_wf, carpetplot_wf, [
                    ('outputnode.bold', 'inputnode.bold'),
                    ('outputnode.bold_mask', 'inputnode.bold_mask')]),
                (bold_reg_wf, carpetplot_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
            ])

        workflow.connect([
            (bold_confounds_wf, carpetplot_wf, [
                ('outputnode.confounds_file', 'inputnode.confounds_file')])
        ])

    # REPORTING ############################################################
    reportlets_dir = str(config.execution.work_dir / 'reportlets')
    # ds_report_summary intentionally omits base_directory here; the loop at the
    # end of this function fills it in for every 'ds_report*' node.
    ds_report_summary = pe.Node(
        DerivativesDataSink(desc='summary', keep_dtype=True),
        name='ds_report_summary', run_without_submitting=True,
        mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            desc='validation', keep_dtype=True),
        name='ds_report_validation', run_without_submitting=True,
        mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation, [
            ('outputnode.validation_report', 'in_file')]),
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
def init_func_preproc_wf(
    aroma_melodic_dim,
    bold2t1w_dof,
    bold_file,
    cifti_output,
    debug,
    dummy_scans,
    err_on_aroma_warn,
    fmap_bspline,
    fmap_demean,
    force_syn,
    freesurfer,
    ignore,
    low_mem,
    medial_surface_nan,
    omp_nthreads,
    output_dir,
    output_spaces,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    reportlets_dir,
    t2s_coreg,
    use_aroma,
    use_bbr,
    use_syn,
    layout=None,
    num_bold=1,
):
    """
    This workflow controls the functional preprocessing stages of FMRIPREP.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold import init_func_preproc_wf
        from collections import namedtuple, OrderedDict
        BIDSLayout = namedtuple('BIDSLayout', ['root'])
        wf = init_func_preproc_wf(
            aroma_melodic_dim=-200,
            bold2t1w_dof=9,
            bold_file='/completely/made/up/path/sub-01_task-nback_bold.nii.gz',
            cifti_output=False,
            debug=False,
            dummy_scans=None,
            err_on_aroma_warn=False,
            fmap_bspline=True,
            fmap_demean=True,
            force_syn=True,
            freesurfer=True,
            ignore=[],
            low_mem=False,
            medial_surface_nan=False,
            omp_nthreads=1,
            output_dir='.',
            output_spaces=OrderedDict([
                ('MNI152Lin', {}), ('fsaverage', {'density': '10k'}),
                ('T1w', {}), ('fsnative', {})]),
            regressors_all_comps=False,
            regressors_dvars_th=1.5,
            regressors_fd_th=0.5,
            reportlets_dir='.',
            t2s_coreg=False,
            use_aroma=False,
            use_bbr=True,
            use_syn=True,
            layout=BIDSLayout('.'),
            num_bold=1,
        )

    **Parameters**

        aroma_melodic_dim : int
            Maximum number of components identified by MELODIC within ICA-AROMA
            (default is -200, ie. no limitation).
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        bold_file : str
            BOLD series NIfTI file
        cifti_output : bool
            Generate bold CIFTI file in output spaces
        debug : bool
            Enable debugging outputs
        dummy_scans : int or None
            Number of volumes to consider as non steady state
        err_on_aroma_warn : bool
            Do not crash on ICA-AROMA errors
        fmap_bspline : bool
            **Experimental**: Fit B-Spline field using least-squares
        fmap_demean : bool
            Demean voxel-shift map during unwarp
        force_syn : bool
            **Temporary**: Always run SyN-based SDC
        freesurfer : bool
            Enable FreeSurfer functional registration (bbregister) and
            resampling BOLD series to FreeSurfer surface meshes.
        ignore : list
            Preprocessing steps to skip (may include "slicetiming", "fieldmaps")
        low_mem : bool
            Write uncompressed .nii files in some cases to reduce memory usage
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files
        omp_nthreads : int
            Maximum number of threads an individual process may use
        output_dir : str
            Directory in which to save derivatives
        output_spaces : OrderedDict
            Ordered dictionary where keys are TemplateFlow ID strings (e.g. ``MNI152Lin``,
            ``MNI152NLin6Asym``, ``MNI152NLin2009cAsym``, or ``fsLR``) strings designating
            nonstandard references (e.g. ``T1w`` or ``anat``, ``sbref``, ``run``, etc.),
            or paths pointing to custom templates organized in a TemplateFlow-like structure.
            Values of the dictionary aggregate modifiers (e.g. the value for the key ``MNI152Lin``
            could be ``{'resolution': 2}`` if one wants the resampling to be done on the 2mm
            resolution version of the selected template).
        regressors_all_comps
            Return all CompCor component time series instead of the top fraction
        regressors_dvars_th
            Criterion for flagging DVARS outliers
        regressors_fd_th
            Criterion for flagging framewise displacement outliers
        reportlets_dir : str
            Absolute path of a directory in which reportlets will be temporarily stored
        t2s_coreg : bool
            For multiecho EPI, use the calculated T2*-map for T2*-driven coregistration
        use_aroma : bool
            Perform ICA-AROMA on MNI-resampled functional series
        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
            When using ``t2s_coreg``, BBR will be enabled by default unless
            explicitly specified otherwise.
        use_syn : bool
            **Experimental**: Enable ANTs SyN-based susceptibility distortion correction (SDC).
            If fieldmaps are present and enabled, this is not run, by default.
        layout : BIDSLayout
            BIDSLayout structure to enable metadata retrieval
        num_bold : int
            Total number of BOLD files that have been set for preprocessing
            (default is 1)

    **Inputs**

        bold_file
            BOLD series NIfTI file
        t1_preproc
            Bias-corrected structural template image
        t1_brain
            Skull-stripped ``t1_preproc``
        t1_mask
            Mask of the skull-stripped template image
        t1_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        t1_tpms
            List of tissue probability maps in T1w space
        anat2std_xfm
            ANTs-compatible affine-and-warp transform file
        std2anat_xfm
            ANTs-compatible affine-and-warp transform file (inverse)
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w

    **Outputs**

        bold_t1
            BOLD series, resampled to T1w space
        bold_mask_t1
            BOLD series mask in T1w space
        bold_std
            BOLD series, resampled to template space
        bold_mask_std
            BOLD series mask in template space
        confounds
            TSV of confounds
        surfaces
            BOLD series, resampled to FreeSurfer surfaces
        aroma_noise_ics
            Noise components identified by ICA-AROMA
        melodic_mix
            FSL MELODIC mixing matrix
        bold_cifti
            BOLD CIFTI image
        cifti_variant
            combination of target spaces for `bold_cifti`

    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.util.init_bold_reference_wf`
        * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
        * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
        * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
        * :py:func:`~fmriprep.workflows.bold.registration.init_bold_t1_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_std_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.pepolar.init_pepolar_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_fmap_estimator_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_sdc_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_nonlinear_sdc_wf`

    """
    from .resampling import NONSTANDARD_REFERENCES
    from ..fieldmap.base import init_sdc_wf  # Avoid circular dependency (#1066)

    # Filter out standard spaces to a separate dict
    std_spaces = OrderedDict([
        (key, modifiers) for key, modifiers in output_spaces.items()
        if key not in NONSTANDARD_REFERENCES])
    volume_std_spaces = OrderedDict([
        (key, modifiers) for key, modifiers in std_spaces.items()
        if not key.startswith('fs')])

    ref_file = bold_file
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
    bold_tlen = 10
    multiecho = isinstance(bold_file, list)

    if multiecho:
        # Use the echo with the shortest TE as the reference file
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]

    if os.path.isfile(ref_file):
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    LOGGER.log(25, ('Creating bold processing workflow for "%s" (%.2f GB / %d TRs). '
                    'Memory resampled/largemem=%.2f/%.2f GB.'),
               ref_file, mem_gb['filesize'], bold_tlen,
               mem_gb['resampled'], mem_gb['largemem'])

    sbref_file = None
    # For doc building purposes
    if not hasattr(layout, 'parse_file_entities'):
        LOGGER.log(25, 'No valid layout: building empty workflow.')
        metadata = {
            'RepetitionTime': 2.0,
            'SliceTiming': [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
            'PhaseEncodingDirection': 'j',
        }
        fmaps = [{
            'suffix': 'phasediff',
            'phasediff': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_phasediff.nii.gz',
            'magnitude1': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude1.nii.gz',
            'magnitude2': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude2.nii.gz',
        }]
        run_stc = True
        multiecho = False
    else:
        # Find associated sbref, if possible
        entities = layout.parse_file_entities(ref_file)
        entities['suffix'] = 'sbref'
        entities['extension'] = ['nii', 'nii.gz']  # Overwrite extensions
        files = layout.get(return_type='file', **entities)
        refbase = os.path.basename(ref_file)
        if 'sbref' in ignore:
            LOGGER.info("Single-band reference files ignored.")
        elif files and multiecho:
            LOGGER.warning("Single-band reference found, but not supported in "
                           "multi-echo workflows at this time. Ignoring.")
        elif files:
            sbref_file = files[0]
            sbbase = os.path.basename(sbref_file)
            if len(files) > 1:
                LOGGER.warning(
                    "Multiple single-band reference files found for {}; using "
                    "{}".format(refbase, sbbase))
            else:
                LOGGER.log(25, "Using single-band reference file {}".format(sbbase))
        else:
            LOGGER.log(25, "No single-band-reference found for {}".format(refbase))

        metadata = layout.get_metadata(ref_file)

        # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
        fmaps = []
        if 'fieldmaps' not in ignore:
            for fmap in layout.get_fieldmap(ref_file, return_list=True):
                if fmap['suffix'] == 'phase':
                    LOGGER.warning("""\
Found phase1/2 type of fieldmaps, which are not currently supported. \
fMRIPrep will discard them for susceptibility distortion correction. \
Please, follow up on this issue at \
https://github.com/poldracklab/fmriprep/issues/1655.""")
                else:
                    fmap['metadata'] = layout.get_metadata(fmap[fmap['suffix']])
                    fmaps.append(fmap)

        # Run SyN if forced or in the absence of fieldmap correction
        if force_syn or (use_syn and not fmaps):
            fmaps.append({'suffix': 'syn'})

        # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
        run_stc = ("SliceTiming" in metadata and
                   'slicetiming' not in ignore and
                   (_get_series_len(ref_file) > 4 or "TooShort"))

    # Check if MEEPI for T2* coregistration target
    if t2s_coreg and not multiecho:
        LOGGER.warning("No multiecho BOLD images found for T2* coregistration. "
                       "Using standard EPI-T1 coregistration.")
        t2s_coreg = False

    # By default, force-bbr for t2s_coreg unless user specifies otherwise
    if t2s_coreg and use_bbr is None:
        use_bbr = True

    # Build workflow
    workflow = Workflow(name=wf_name)
    workflow.__desc__ = """
Functional data preprocessing

: For each of the {num_bold} BOLD runs found per subject (across all
tasks and sessions), the following preprocessing was performed.
""".format(num_bold=num_bold)

    workflow.__postdesc__ = """\
All resamplings can be performed with *a single interpolation
step* by composing all the pertinent transformations (i.e. head-motion
transform matrices, susceptibility distortion correction when available,
and co-registrations to anatomical and output spaces).
Gridded (volumetric) resamplings were performed using `antsApplyTransforms` (ANTs),
configured with Lanczos interpolation to minimize the smoothing
effects of other kernels [@lanczos].
Non-gridded (surface) resamplings were performed using `mri_vol2surf`
(FreeSurfer).
"""

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_file', 'subjects_dir', 'subject_id',
                't1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms',
                't1_aseg', 't1_aparc',
                'anat2std_xfm', 'std2anat_xfm', 'template',
                'joint_anat2std_xfm', 'joint_std2anat_xfm', 'joint_template',
                't1_2_fsnative_forward_transform',
                't1_2_fsnative_reverse_transform']),
        name='inputnode')
    inputnode.inputs.bold_file = bold_file
    if sbref_file is not None:
        from niworkflows.interfaces.images import ValidateImage
        val_sbref = pe.Node(ValidateImage(in_file=sbref_file), name='val_sbref')

    # FIX: 'bold_std_ref' and 'bold_mask_std' were fused by a missing comma
    # (implicit string concatenation), so downstream connects to those
    # outputnode ports would fail when volumetric standard spaces are requested.
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_t1', 'bold_t1_ref', 'bold_mask_t1',
                'bold_aseg_t1', 'bold_aparc_t1',
                'bold_std', 'bold_std_ref', 'bold_mask_std',
                'bold_aseg_std', 'bold_aparc_std',
                'bold_native', 'bold_cifti', 'cifti_variant', 'cifti_variant_key',
                'surfaces', 'confounds', 'aroma_noise_ics', 'melodic_mix',
                'nonaggr_denoised_file', 'confounds_metadata']),
        name='outputnode')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']), name='boldbuffer')

    summary = pe.Node(
        FunctionalSummary(
            slice_timing=run_stc,
            registration=('FSL', 'FreeSurfer')[freesurfer],
            registration_dof=bold2t1w_dof,
            pe_direction=metadata.get("PhaseEncodingDirection"),
            tr=metadata.get("RepetitionTime")),
        name='summary', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True)
    summary.inputs.dummy_scans = dummy_scans

    # CIfTI output: currently, we only support fsaverage{5,6}
    cifti_spaces = set(s for s in output_spaces.keys()
                       if s in ('fsaverage5', 'fsaverage6'))
    fsaverage_den = output_spaces.get('fsaverage', {}).get('den')
    if fsaverage_den:
        cifti_spaces.add(FSAVERAGE_DENSITY[fsaverage_den])
    cifti_output = cifti_output and cifti_spaces
    func_derivatives_wf = init_func_derivatives_wf(
        bids_root=layout.root,
        cifti_output=cifti_output,
        freesurfer=freesurfer,
        metadata=metadata,
        output_dir=output_dir,
        output_spaces=output_spaces,
        standard_spaces=list(std_spaces.keys()),
        use_aroma=use_aroma,
    )

    workflow.connect([
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_t1_ref', 'inputnode.bold_t1_ref'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_native', 'inputnode.bold_native'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surfaces'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_variant_key', 'inputnode.cifti_variant_key'),
            ('confounds_metadata', 'inputnode.confounds_metadata'),
        ]),
    ])

    # Generate a tentative boldref
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)
    bold_reference_wf.inputs.inputnode.dummy_scans = dummy_scans
    if sbref_file is not None:
        workflow.connect([
            (val_sbref, bold_reference_wf, [('out_file', 'inputnode.sbref_file')]),
        ])

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'), name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # calculate BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=use_bbr,
                                   bold2t1w_dof=bold2t1w_dof,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False)

    # apply BOLD registration to T1w
    bold_t1_trans_wf = init_bold_t1_trans_wf(name='bold_t1_trans_wf',
                                             freesurfer=freesurfer,
                                             use_fieldwarp=(fmaps is not None or use_syn),
                                             multiecho=multiecho,
                                             mem_gb=mem_gb['resampled'],
                                             omp_nthreads=omp_nthreads,
                                             use_compression=False)

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        regressors_all_comps=regressors_all_comps,
        regressors_fd_th=regressors_fd_th,
        regressors_dvars_th=regressors_dvars_th,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not low_mem,
        use_fieldwarp=(fmaps is not None or use_syn),
        name='bold_bold_trans_wf')
    bold_bold_trans_wf.inputs.inputnode.name_source = ref_file

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [
                ('outputnode.skip_vols', 'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
        if not multiecho:
            workflow.connect([
                (bold_reference_wf, bold_stc_wf, [
                    ('outputnode.bold_file', 'inputnode.bold_file')])])
        else:  # for meepi, iterate through stc_wf for all workflows
            meepi_echos = boldbuffer.clone(name='meepi_echos')
            meepi_echos.iterables = ('bold_file', bold_file)
            workflow.connect([
                (meepi_echos, bold_stc_wf, [('bold_file', 'inputnode.bold_file')])])
    elif not multiecho:  # STC is too short or False
        # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([
            (bold_reference_wf, boldbuffer, [('outputnode.bold_file', 'bold_file')])])
    else:
        # for meepi, iterate over all meepi echos to boldbuffer
        boldbuffer.iterables = ('bold_file', bold_file)

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_wf(
        fmaps, metadata, omp_nthreads=omp_nthreads,
        debug=debug, fmap_demean=fmap_demean, fmap_bspline=fmap_bspline)
    # If no standard space is given, use the default for SyN-SDC
    if not volume_std_spaces or 'MNI152NLin2009cAsym' in volume_std_spaces:
        bold_sdc_wf.inputs.inputnode.template = 'MNI152NLin2009cAsym'
    else:
        bold_sdc_wf.inputs.inputnode.template = next(iter(volume_std_spaces))

    if not fmaps:
        LOGGER.warning('SDC: no fieldmaps found or they were ignored (%s).',
                       ref_file)
    elif fmaps[0]['suffix'] == 'syn':
        LOGGER.warning(
            'SDC: no fieldmaps found or they were ignored. '
            'Using EXPERIMENTAL "fieldmap-less SyN" correction '
            'for dataset %s.', ref_file)
    else:
        LOGGER.log(25, 'SDC: fieldmap estimation of type "%s" intended for %s found.',
                   fmaps[0]['suffix'], ref_file)

    # Overwrite ``out_path_base`` of sdcflows' DataSinks
    for node in bold_sdc_wf.list_node_names():
        if node.split('.')[-1].startswith('ds_'):
            bold_sdc_wf.get_node(node).interface.out_path_base = 'fmriprep'

    # MULTI-ECHO EPI DATA #############################################
    if multiecho:
        from .util import init_skullstrip_bold_wf
        skullstrip_bold_wf = init_skullstrip_bold_wf(name='skullstrip_bold_wf')

        inputnode.inputs.bold_file = ref_file  # Replace reference w first echo

        join_echos = pe.JoinNode(
            niu.IdentityInterface(fields=['bold_files']),
            joinsource=('meepi_echos' if run_stc is True else 'boldbuffer'),
            joinfield=['bold_files'],
            name='join_echos')

        # create optimal combination, adaptive T2* map
        bold_t2s_wf = init_bold_t2s_wf(echo_times=tes,
                                       mem_gb=mem_gb['resampled'],
                                       omp_nthreads=omp_nthreads,
                                       t2s_coreg=t2s_coreg,
                                       name='bold_t2smap_wf')

        workflow.connect([
            (skullstrip_bold_wf, join_echos, [
                ('outputnode.skull_stripped_file', 'bold_files')]),
            (join_echos, bold_t2s_wf, [
                ('bold_files', 'inputnode.bold_file')]),
        ])

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf, [
            ('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
            ('outputnode.bold_file', 'inputnode.bold_file')]),
        (bold_reference_wf, summary, [
            ('outputnode.algo_dummy_scans', 'algo_dummy_scans')]),
        # EPI-T1 registration workflow
        (inputnode, bold_reg_wf, [
            ('t1_brain', 'inputnode.t1_brain'),
            ('t1_seg', 'inputnode.t1_seg'),
            # Undefined if --no-freesurfer, but this is safe
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id'),
            ('t1_2_fsnative_reverse_transform',
             'inputnode.t1_2_fsnative_reverse_transform')]),
        (inputnode, bold_t1_trans_wf, [
            ('bold_file', 'inputnode.name_source'),
            ('t1_brain', 'inputnode.t1_brain'),
            ('t1_mask', 'inputnode.t1_mask'),
            ('t1_aseg', 'inputnode.t1_aseg'),
            ('t1_aparc', 'inputnode.t1_aparc')]),
        # unused if multiecho, but this is safe
        (bold_hmc_wf, bold_t1_trans_wf, [
            ('outputnode.xforms', 'inputnode.hmc_xforms')]),
        (bold_reg_wf, bold_t1_trans_wf, [
            ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
        (bold_t1_trans_wf, outputnode, [
            ('outputnode.bold_t1', 'bold_t1'),
            ('outputnode.bold_t1_ref', 'bold_t1_ref'),
            ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
            ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (inputnode, bold_sdc_wf, [
            ('joint_template', 'inputnode.templates'),
            ('joint_std2anat_xfm', 'inputnode.std2anat_xfm')]),
        (inputnode, bold_sdc_wf, [('t1_brain', 'inputnode.t1_brain')]),
        (bold_reference_wf, bold_sdc_wf, [
            ('outputnode.ref_image', 'inputnode.bold_ref'),
            ('outputnode.ref_image_brain', 'inputnode.bold_ref_brain'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        # For t2s_coreg, replace EPI-to-T1w registration inputs
        (bold_sdc_wf if not t2s_coreg else bold_t2s_wf, bold_reg_wf, [
            ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain')]),
        (bold_sdc_wf if not t2s_coreg else bold_t2s_wf, bold_t1_trans_wf, [
            ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
            ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
        (bold_sdc_wf, bold_t1_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction')]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [
            ('t1_tpms', 'inputnode.t1_tpms'),
            ('t1_mask', 'inputnode.t1_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [
            ('outputnode.movpar_file', 'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [
            ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
        (bold_reference_wf, bold_confounds_wf, [
            ('outputnode.skip_vols', 'inputnode.skip_vols')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_metadata', 'confounds_metadata'),
        ]),
        # Connect bold_bold_trans_wf
        (bold_split, bold_bold_trans_wf, [
            ('out_files', 'inputnode.bold_file')]),
        (bold_hmc_wf, bold_bold_trans_wf, [
            ('outputnode.xforms', 'inputnode.hmc_xforms')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])

    # for standard EPI data, pass along correct file
    if not multiecho:
        workflow.connect([
            (inputnode, func_derivatives_wf, [
                ('bold_file', 'inputnode.source_file')]),
            (bold_bold_trans_wf, bold_confounds_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_split, bold_t1_trans_wf, [
                ('out_files', 'inputnode.bold_split')]),
        ])
    else:  # for meepi, create and use optimal combination
        workflow.connect([
            # update name source for optimal combination
            (inputnode, func_derivatives_wf, [
                (('bold_file', combine_meepi_source), 'inputnode.source_file')]),
            (bold_bold_trans_wf, skullstrip_bold_wf, [
                ('outputnode.bold', 'inputnode.in_file')]),
            (bold_t2s_wf, bold_confounds_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_t2s_wf, bold_t1_trans_wf, [
                ('outputnode.bold', 'inputnode.bold_split')]),
        ])

    if fmaps:
        from ..fieldmap.unwarp import init_fmap_unwarp_report_wf
        # Report on BOLD correction
        fmap_unwarp_report_wf = init_fmap_unwarp_report_wf()
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [
                ('t1_seg', 'inputnode.in_seg')]),
            (bold_reference_wf, fmap_unwarp_report_wf, [
                ('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [
                ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [
                ('outputnode.bold_ref', 'inputnode.in_post')]),
        ])

        # Overwrite ``out_path_base`` of unwarping DataSinks
        for node in fmap_unwarp_report_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                fmap_unwarp_report_wf.get_node(
                    node).interface.out_path_base = 'fmriprep'

        if force_syn and fmaps[0]['suffix'] != 'syn':
            syn_unwarp_report_wf = init_fmap_unwarp_report_wf(
                name='syn_unwarp_report_wf', forcedsyn=True)
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [
                    ('t1_seg', 'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf, [
                    ('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf, [
                    ('outputnode.syn_bold_ref', 'inputnode.in_post')]),
            ])

            # Overwrite ``out_path_base`` of unwarping DataSinks
            for node in syn_unwarp_report_wf.list_node_names():
                if node.split('.')[-1].startswith('ds_'):
                    syn_unwarp_report_wf.get_node(
                        node).interface.out_path_base = 'fmriprep'

    # Map final BOLD mask into T1w space (if required)
    if 'T1w' in output_spaces or 'anat' in output_spaces:
        from niworkflows.interfaces.fixes import (
            FixHeaderApplyTransforms as ApplyTransforms)

        boldmask_to_t1w = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True),
            name='boldmask_to_t1w', mem_gb=0.1)
        workflow.connect([
            (bold_reg_wf, boldmask_to_t1w, [
                ('outputnode.itk_bold_to_t1', 'transforms')]),
            (bold_t1_trans_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask_t1', 'reference_image')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask', 'input_image')]),
            (boldmask_to_t1w, outputnode, [
                ('output_image', 'bold_mask_t1')]),
        ])

    if set(['func', 'run', 'bold', 'boldref', 'sbref']).intersection(output_spaces):
        workflow.connect([
            (bold_bold_trans_wf, outputnode, [
                ('outputnode.bold', 'bold_native')]),
            (bold_bold_trans_wf, func_derivatives_wf, [
                ('outputnode.bold_ref', 'inputnode.bold_native_ref'),
                ('outputnode.bold_mask', 'inputnode.bold_mask_native')]),
        ])

    if volume_std_spaces:
        # Apply transforms in 1 shot
        # Only use uncompressed output if AROMA is to be run
        bold_std_trans_wf = init_bold_std_trans_wf(
            freesurfer=freesurfer,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            standard_spaces=volume_std_spaces,
            name='bold_std_trans_wf',
            use_compression=not low_mem,
            use_fieldwarp=fmaps is not None,
        )
        workflow.connect([
            (inputnode, bold_std_trans_wf, [
                ('joint_template', 'inputnode.templates'),
                ('joint_anat2std_xfm', 'inputnode.anat2std_xfm'),
                ('bold_file', 'inputnode.name_source'),
                ('t1_aseg', 'inputnode.bold_aseg'),
                ('t1_aparc', 'inputnode.bold_aparc')]),
            (bold_hmc_wf, bold_std_trans_wf, [
                ('outputnode.xforms', 'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_std_trans_wf, [
                ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf, bold_std_trans_wf, [
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_std_trans_wf, [
                ('outputnode.out_warp', 'inputnode.fieldwarp')]),
            (bold_std_trans_wf, outputnode, [
                ('outputnode.bold_std', 'bold_std'),
                ('outputnode.bold_std_ref', 'bold_std_ref'),
                ('outputnode.bold_mask_std', 'bold_mask_std')]),
        ])

        if freesurfer:
            workflow.connect([
                (bold_std_trans_wf, func_derivatives_wf, [
                    ('poutputnode.bold_aseg_std', 'inputnode.bold_aseg_std'),
                    ('poutputnode.bold_aparc_std', 'inputnode.bold_aparc_std'),
                ]),
                (bold_std_trans_wf, outputnode, [
                    ('outputnode.bold_aseg_std', 'bold_aseg_std'),
                    ('outputnode.bold_aparc_std', 'bold_aparc_std')]),
            ])

        if 'MNI152NLin2009cAsym' in std_spaces:
            carpetplot_wf = init_carpetplot_wf(standard_spaces=std_spaces,
                                               mem_gb=mem_gb['resampled'],
                                               metadata=metadata,
                                               name='carpetplot_wf')
            workflow.connect([
                (inputnode, carpetplot_wf, [
                    ('joint_std2anat_xfm', 'inputnode.std2anat_xfm')]),
                (bold_bold_trans_wf if not multiecho else bold_t2s_wf, carpetplot_wf, [
                    ('outputnode.bold', 'inputnode.bold'),
                    ('outputnode.bold_mask', 'inputnode.bold_mask')]),
                (bold_reg_wf, carpetplot_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
                (bold_confounds_wf, carpetplot_wf, [
                    ('outputnode.confounds_file', 'inputnode.confounds_file')]),
            ])

        if not multiecho:
            workflow.connect([
                (bold_split, bold_std_trans_wf, [
                    ('out_files', 'inputnode.bold_split')])])
        else:
            split_opt_comb = bold_split.clone(name='split_opt_comb')
            workflow.connect([
                (bold_t2s_wf, split_opt_comb, [
                    ('outputnode.bold', 'in_file')]),
                (split_opt_comb, bold_std_trans_wf, [
                    ('out_files', 'inputnode.bold_split')]),
            ])

        # Artifacts resampled in MNI space can only be sinked if they
        # were actually generated. See #1348.
        # Uses the parameterized outputnode to generate all outputs
        workflow.connect([
            (bold_std_trans_wf, func_derivatives_wf, [
                ('poutputnode.templates', 'inputnode.template'),
                ('poutputnode.bold_std_ref', 'inputnode.bold_std_ref'),
                ('poutputnode.bold_std', 'inputnode.bold_std'),
                ('poutputnode.bold_mask_std', 'inputnode.bold_mask_std'),
            ]),
        ])

        if use_aroma and 'MNI152NLin6Asym' in std_spaces:  # ICA-AROMA workflow
            from .confounds import init_ica_aroma_wf
            ica_aroma_wf = init_ica_aroma_wf(
                metadata=metadata,
                mem_gb=mem_gb['resampled'],
                omp_nthreads=omp_nthreads,
                use_fieldwarp=fmaps is not None,
                err_on_aroma_warn=err_on_aroma_warn,
                aroma_melodic_dim=aroma_melodic_dim,
                name='ica_aroma_wf')

            join = pe.Node(niu.Function(output_names=["out_file"],
                                        function=_to_join),
                           name='aroma_confounds')

            mrg_conf_metadata = pe.Node(niu.Merge(2), name='merge_confound_metadata',
                                        run_without_submitting=True)
            mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2',
                                         run_without_submitting=True)
            # AROMA replaces the plain confounds outputs with merged versions
            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_metadata', 'confounds_metadata'),
                ]),
            ])
            workflow.connect([
                (bold_std_trans_wf, ica_aroma_wf, [
                    ('outputnode.bold_std', 'inputnode.bold_std'),
                    ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
                    ('outputnode.templates', 'inputnode.templates')]),
                (inputnode, ica_aroma_wf, [
                    ('bold_file', 'inputnode.name_source')]),
                (bold_hmc_wf, ica_aroma_wf, [
                    ('outputnode.movpar_file', 'inputnode.movpar_file')]),
                (bold_reference_wf, ica_aroma_wf, [
                    ('outputnode.skip_vols', 'inputnode.skip_vols')]),
                (bold_confounds_wf, join, [
                    ('outputnode.confounds_file', 'in_file')]),
                (bold_confounds_wf, mrg_conf_metadata, [
                    ('outputnode.confounds_metadata', 'in1')]),
                (ica_aroma_wf, join, [
                    ('outputnode.aroma_confounds', 'join_file')]),
                (ica_aroma_wf, mrg_conf_metadata, [
                    ('outputnode.aroma_metadata', 'in2')]),
                (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
                (ica_aroma_wf, outputnode, [
                    ('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                    ('outputnode.melodic_mix', 'melodic_mix'),
                    ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')]),
                (join, outputnode, [('out_file', 'confounds')]),
                (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
            ])

    # SURFACES ##################################################################
    surface_spaces = [space for space in output_spaces.keys()
                      if space.startswith('fs')]
    if freesurfer and surface_spaces:
        LOGGER.log(25, 'Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(mem_gb=mem_gb['resampled'],
                                         output_spaces=surface_spaces,
                                         medial_surface_nan=medial_surface_nan,
                                         name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf, [
                ('t1_preproc', 'inputnode.t1_preproc'),
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id'),
                ('t1_2_fsnative_forward_transform',
                 'inputnode.t1_2_fsnative_forward_transform')]),
            (bold_t1_trans_wf, bold_surf_wf, [
                ('outputnode.bold_t1', 'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        ])

        if cifti_output:
            from niworkflows.interfaces.utility import KeySelect
            bold_surf_wf.__desc__ += """\
*Grayordinates* files [@hcppipelines], which combine surface-sampled
data and volume-sampled data, were also generated.
"""
            select_std = pe.Node(KeySelect(fields=['bold_std']),
                                 name='select_std', run_without_submitting=True)
            select_std.inputs.key = 'MNI152NLin2009cAsym'

            gen_cifti = pe.MapNode(GenerateCifti(),
                                   iterfield=["surface_target", "gifti_files"],
                                   name="gen_cifti")
            gen_cifti.inputs.TR = metadata.get("RepetitionTime")
            gen_cifti.inputs.surface_target = list(cifti_spaces)

            workflow.connect([
                (bold_std_trans_wf, select_std, [
                    ('outputnode.templates', 'keys'),
                    ('outputnode.bold_std', 'bold_std')]),
                (bold_surf_wf, gen_cifti, [
                    ('outputnode.surfaces', 'gifti_files')]),
                (inputnode, gen_cifti, [('subjects_dir', 'subjects_dir')]),
                (select_std, gen_cifti, [('bold_std', 'bold_file')]),
                (gen_cifti, outputnode, [
                    ('out_file', 'bold_cifti'),
                    ('variant', 'cifti_variant'),
                    ('variant_key', 'cifti_variant_key')]),
            ])

    # REPORTING ############################################################
    ds_report_summary = pe.Node(
        DerivativesDataSink(desc='summary', keep_dtype=True),
        name='ds_report_summary', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            desc='validation', keep_dtype=True),
        name='ds_report_validation', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation, [
            ('outputnode.validation_report', 'in_file')]),
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
def init_bold_confs_wf(
    mem_gb,
    metadata,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    freesurfer=False,
    name="bold_confs_wf",
):
    """
    Build a workflow to generate and write out confounding signals.

    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.
    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)

    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.bold.confounds import init_bold_confs_wf
            wf = init_bold_confs_wf(
                mem_gb=1,
                metadata={},
                regressors_all_comps=False,
                regressors_dvars_th=1.5,
                regressors_fd_th=0.5,
            )

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of BOLD file in GB - please note that this size
        should be calculated after resamplings that may extend
        the FoV
    metadata : :obj:`dict`
        BIDS metadata for BOLD file
    regressors_all_comps : :obj:`bool`
        Indicates whether CompCor decompositions should return all
        components instead of the minimal number of components necessary
        to explain 50 percent of the variance in the decomposition mask.
    regressors_dvars_th : :obj:`float`
        Criterion for flagging DVARS outliers
    regressors_fd_th : :obj:`float`
        Criterion for flagging framewise displacement outliers
    freesurfer : :obj:`bool`
        When ``True``, the tissue inputs are treated as a FreeSurfer *aseg*
        segmentation (passed through to ``aCompCorMasks(is_aseg=...)``) and the
        boilerplate describes the GM mask accordingly.
    name : :obj:`str`
        Name of workflow (default: ``bold_confs_wf``)

    Inputs
    ------
    bold
        BOLD image, after the prescribed corrections (STC, HMC and SDC)
        when available.
    bold_mask
        BOLD series mask
    movpar_file
        SPM-formatted motion parameters file
    rmsd_file
        Framewise displacement as measured by ``fsl_motion_outliers``.
    skip_vols
        number of non steady state volumes
    t1w_mask
        Mask of the skull-stripped template image
    t1w_tpms
        List of tissue probability maps in T1w space
    t1_bold_xform
        Affine matrix that maps the T1w space into alignment with
        the native BOLD space

    Outputs
    -------
    confounds_file
        TSV of all aggregated confounds
    confounds_metadata
        Confounds metadata dictionary.
    acompcor_masks
        Binarized aCompCor masks (CSF, WM, combined), resampled in BOLD space.
    tcompcor_mask
        The high-variance voxel mask used by tCompCor.

    Note: the white-matter/CSF/tCompCor ROI reportlet is not an output of this
    workflow; it is written directly through the ``ds_report_bold_rois`` sink.

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.confounds import ExpandModel, SpikeRegressors
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.images import SignalExtraction
    from niworkflows.interfaces.masks import ROIsPlot
    from niworkflows.interfaces.nibabel import ApplyMask, Binarize
    from niworkflows.interfaces.patches import (
        RobustACompCor as ACompCor,
        RobustTCompCor as TCompCor,
    )
    from niworkflows.interfaces.plotting import (CompCorVariancePlot,
                                                 ConfoundsCorrelationPlot)
    from niworkflows.interfaces.utils import (AddTSVHeader, TSV2JSON, DictMerge)
    from ...interfaces.confounds import aCompCorMasks

    # Boilerplate text varies with how the GM mask was derived.
    gm_desc = (
        "dilating a GM mask extracted from the FreeSurfer's *aseg* segmentation"
        if freesurfer else
        "thresholding the corresponding partial volume map at 0.05")

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""\
Several confounding time-series were calculated based on the
*preprocessed BOLD*: framewise displacement (FD), DVARS and
three region-wise global signals.
FD was computed using two formulations following Power (absolute sum of
relative motions, @power_fd_dvars) and Jenkinson (relative root mean square
displacement between affines, @mcflirt).
FD and DVARS are calculated for each functional run, both using their
implementations in *Nipype* [following the definitions by @power_fd_dvars].
The three global signals are extracted within the CSF, the WM, and
the whole-brain masks.
Additionally, a set of physiological regressors were extracted to
allow for component-based noise correction [*CompCor*, @compcor].
Principal components are estimated after high-pass filtering the
*preprocessed BOLD* time-series (using a discrete cosine filter with
128s cut-off) for the two *CompCor* variants: temporal (tCompCor)
and anatomical (aCompCor).
tCompCor components are then calculated from the top 2% variable
voxels within the brain mask.
For aCompCor, three probabilistic masks (CSF, WM and combined CSF+WM)
are generated in anatomical space.
The implementation differs from that of Behzadi et al. in that instead
of eroding the masks by 2 pixels on BOLD space, the aCompCor masks are
subtracted a mask of pixels that likely contain a volume fraction of GM.
This mask is obtained by {gm_desc}, and it ensures components are not extracted
from voxels containing a minimal fraction of GM.
Finally, these masks are resampled into BOLD space and binarized by
thresholding at 0.99 (as in the original implementation).
Components are also calculated separately within the WM and CSF masks.
For each CompCor decomposition, the *k* components with the largest singular
values are retained, such that the retained components' time series are
sufficient to explain 50 percent of variance across the nuisance mask (CSF,
WM, combined, or temporal). The remaining components are dropped from
consideration.
The head-motion estimates calculated in the correction step were also
placed within the corresponding confounds file.
The confound time series derived from head motion estimates and global
signals were expanded with the inclusion of temporal derivatives and
quadratic terms for each [@confounds_satterthwaite_2013].
Frames that exceeded a threshold of {regressors_fd_th} mm FD or
{regressors_dvars_th} standardised DVARS were annotated as motion outliers.
"""
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'rmsd_file',
        'skip_vols', 't1w_mask', 't1w_tpms', 't1_bold_xform'
    ]), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'confounds_file', 'confounds_metadata', 'acompcor_masks', 'tcompcor_mask'
    ]), name='outputnode')

    # DVARS: both non-standardized and standardized variants are kept.
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True, save_std=True,
                                     remove_zerovariance=True),
                    name="dvars", mem_gb=mem_gb)

    # Framewise displacement from SPM-formatted motion parameters.
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp", mem_gb=mem_gb)

    # Generate aCompCor probseg maps (CSF, WM, combined) in anatomical space.
    acc_masks = pe.Node(aCompCorMasks(is_aseg=freesurfer), name="acc_masks")

    # Resample probseg maps into BOLD space via the T1w-to-BOLD transform,
    # restrict to the brain mask, then binarize at 0.99 (see __desc__ above).
    acc_msk_tfm = pe.MapNode(ApplyTransforms(interpolation='Gaussian',
                                             float=False),
                             iterfield=["input_image"],
                             name='acc_msk_tfm', mem_gb=0.1)
    acc_msk_brain = pe.MapNode(ApplyMask(), name="acc_msk_brain",
                               iterfield=["in_file"])
    acc_msk_bin = pe.MapNode(Binarize(thresh_low=0.99), name='acc_msk_bin',
                             iterfield=["in_file"])
    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                mask_names=['CSF', 'WM', 'combined'],
                                merge_method='none',
                                failure_mode='NaN'),
                       name="acompcor", mem_gb=mem_gb)

    # tCompCor: top 2% most variable voxels within the brain mask.
    tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv',
                                header_prefix='t_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                percentile_threshold=.02,
                                failure_mode='NaN'),
                       name="tcompcor", mem_gb=mem_gb)

    # Set number of components: either all, or enough to explain 50% variance.
    if regressors_all_comps:
        acompcor.inputs.num_components = 'all'
        tcompcor.inputs.num_components = 'all'
    else:
        acompcor.inputs.variance_threshold = 0.5
        tcompcor.inputs.variance_threshold = 0.5

    # Set TR if present (needed for the cosine high-pass basis).
    if 'RepetitionTime' in metadata:
        tcompcor.inputs.repetition_time = metadata['RepetitionTime']
        acompcor.inputs.repetition_time = metadata['RepetitionTime']

    # Global and segment regressors. Label order must match the order of
    # label_files assembled by merge_rois below (brain mask, then the three
    # raveled aCompCor masks, then the tCompCor mask).
    signals_class_labels = [
        "global_signal", "csf", "white_matter", "csf_wm", "tcompcor",
    ]
    merge_rois = pe.Node(niu.Merge(3, ravel_inputs=True),
                         name='merge_rois', run_without_submitting=True)
    signals = pe.Node(SignalExtraction(class_labels=signals_class_labels),
                      name="signals", mem_gb=mem_gb)

    # Arrange confounds: give each single-column TSV its header, then gather.
    add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]),
                               name="add_dvars_header", mem_gb=0.01,
                               run_without_submitting=True)
    add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]),
                                   name="add_std_dvars_header", mem_gb=0.01,
                                   run_without_submitting=True)
    add_motion_headers = pe.Node(AddTSVHeader(
        columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
        name="add_motion_headers", mem_gb=0.01, run_without_submitting=True)
    add_rmsd_header = pe.Node(AddTSVHeader(columns=["rmsd"]),
                              name="add_rmsd_header", mem_gb=0.01,
                              run_without_submitting=True)
    concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01,
                     run_without_submitting=True)

    # CompCor metadata: convert the per-component TSVs to JSON dictionaries
    # and merge them with per-signal {'Method': 'Mean'} entries.
    tcc_metadata_fmt = pe.Node(TSV2JSON(index_column='component',
                                        drop_columns=['mask'],
                                        output=None,
                                        additional_metadata={'Method': 'tCompCor'},
                                        enforce_case=True),
                               name='tcc_metadata_fmt')
    acc_metadata_fmt = pe.Node(TSV2JSON(index_column='component',
                                        output=None,
                                        additional_metadata={'Method': 'aCompCor'},
                                        enforce_case=True),
                               name='acc_metadata_fmt')
    mrg_conf_metadata = pe.Node(niu.Merge(3), name='merge_confound_metadata',
                                run_without_submitting=True)
    mrg_conf_metadata.inputs.in3 = {label: {'Method': 'Mean'}
                                    for label in signals_class_labels}
    mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2',
                                 run_without_submitting=True)

    # Expand model to include derivatives and quadratics.
    model_expand = pe.Node(
        ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'),
        name='model_expansion')

    # Add spike regressors (one column per flagged outlier frame).
    spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th,
                                            dvars_thresh=regressors_dvars_th),
                            name='spike_regressors')

    # Generate reportlet (ROIs)
    mrg_compcor = pe.Node(niu.Merge(2, ravel_inputs=True),
                          name='mrg_compcor', run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'], generate_report=True),
                        name='rois_plot', mem_gb=mem_gb)
    ds_report_bold_rois = pe.Node(DerivativesDataSink(
        desc='rois', datatype="figures", dismiss_entities=("echo",)),
        name='ds_report_bold_rois', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (CompCor)
    mrg_cc_metadata = pe.Node(niu.Merge(2), name='merge_compcor_metadata',
                              run_without_submitting=True)
    compcor_plot = pe.Node(CompCorVariancePlot(
        variance_thresholds=(0.5, 0.7, 0.9),
        metadata_sources=['tCompCor', 'aCompCor']),
        name='compcor_plot')
    ds_report_compcor = pe.Node(DerivativesDataSink(
        desc='compcorvar', datatype="figures", dismiss_entities=("echo",)),
        name='ds_report_compcor', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (Confound correlation)
    conf_corr_plot = pe.Node(ConfoundsCorrelationPlot(
        reference_column='global_signal', max_dim=20),
        name='conf_corr_plot')
    ds_report_conf_corr = pe.Node(DerivativesDataSink(
        desc='confoundcorr', datatype="figures", dismiss_entities=("echo",)),
        name='ds_report_conf_corr', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    def _last(inlist):
        # The 'combined' aCompCor mask is the last of the MapNode outputs.
        return inlist[-1]

    def _select_cols(table):
        # Keep the correlation plot readable: drop CompCor components and
        # standardized DVARS from the plotted columns.
        import pandas as pd
        return [
            col for col in pd.read_table(table, nrows=2).columns
            if not col.startswith(("a_comp_cor_", "t_comp_cor_", "std_dvars"))
        ]

    workflow.connect([
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'),
                            ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),
        # aCompCor
        (inputnode, acompcor, [("bold", "realigned_file"),
                               ("skip_vols", "ignore_initial_volumes")]),
        (inputnode, acc_masks, [("t1w_tpms", "in_vfs"),
                                (("bold", _get_zooms), "bold_zooms")]),
        (inputnode, acc_msk_tfm, [("t1_bold_xform", "transforms"),
                                  ("bold_mask", "reference_image")]),
        (inputnode, acc_msk_brain, [("bold_mask", "in_mask")]),
        (acc_masks, acc_msk_tfm, [("out_masks", "input_image")]),
        (acc_msk_tfm, acc_msk_brain, [("output_image", "in_file")]),
        (acc_msk_brain, acc_msk_bin, [("out_file", "in_file")]),
        (acc_msk_bin, acompcor, [("out_file", "mask_files")]),
        # tCompCor
        (inputnode, tcompcor, [("bold", "realigned_file"),
                               ("skip_vols", "ignore_initial_volumes"),
                               ("bold_mask", "mask_files")]),
        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (inputnode, merge_rois, [('bold_mask', 'in1')]),
        (acc_msk_bin, merge_rois, [('out_file', 'in2')]),
        (tcompcor, merge_rois, [('high_variance_masks', 'in3')]),
        (merge_rois, signals, [('out', 'label_files')]),
        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (inputnode, add_rmsd_header, [('rmsd_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_rmsd_header, concat, [('out_file', 'rmsd')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),
        # Confounds metadata
        (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]),
        (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]),
        (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
        # Expand the model with derivatives, quadratics, and spikes
        (concat, model_expand, [('confounds_file', 'confounds_file')]),
        (model_expand, spike_regress, [('confounds_file', 'confounds_file')]),
        # Set outputs
        (spike_regress, outputnode, [('confounds_file', 'confounds_file')]),
        (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
        (tcompcor, outputnode, [("high_variance_masks", "tcompcor_mask")]),
        (acc_msk_bin, outputnode, [("out_file", "acompcor_masks")]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk_bin, mrg_compcor, [(('out_file', _last), 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
        (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]),
        (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]),
        (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]),
        (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]),
        (concat, conf_corr_plot, [('confounds_file', 'confounds_file'),
                                  (('confounds_file', _select_cols), 'columns')]),
        (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]),
    ])

    return workflow
# NOTE(review): this module defines ``init_bold_confs_wf`` twice; this later
# definition SHADOWS the one above at import time. They implement different
# aCompCor mask strategies (TPM2ROI erosion here vs. aCompCorMasks above) —
# confirm which version is intended to be live, and remove the other.
def init_bold_confs_wf(
    mem_gb,
    metadata,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    name="bold_confs_wf",
):
    """
    Build a workflow to generate and write out confounding signals.

    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.
    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)

    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.bold.confounds import init_bold_confs_wf
            wf = init_bold_confs_wf(
                mem_gb=1,
                metadata={},
                regressors_all_comps=False,
                regressors_dvars_th=1.5,
                regressors_fd_th=0.5,
            )

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of BOLD file in GB - please note that this size
        should be calculated after resamplings that may extend
        the FoV
    metadata : :obj:`dict`
        BIDS metadata for BOLD file
    name : :obj:`str`
        Name of workflow (default: ``bold_confs_wf``)
    regressors_all_comps : :obj:`bool`
        Indicates whether CompCor decompositions should return all
        components instead of the minimal number of components necessary
        to explain 50 percent of the variance in the decomposition mask.
    regressors_dvars_th : :obj:`float`
        Criterion for flagging DVARS outliers
    regressors_fd_th : :obj:`float`
        Criterion for flagging framewise displacement outliers

    Inputs
    ------
    bold
        BOLD image, after the prescribed corrections (STC, HMC and SDC)
        when available.
    bold_mask
        BOLD series mask
    movpar_file
        SPM-formatted motion parameters file
    rmsd_file
        Framewise displacement as measured by ``fsl_motion_outliers``.
    skip_vols
        number of non steady state volumes
    t1w_mask
        Mask of the skull-stripped template image
    t1w_tpms
        List of tissue probability maps in T1w space
    t1_bold_xform
        Affine matrix that maps the T1w space into alignment with
        the native BOLD space

    Outputs
    -------
    confounds_file
        TSV of all aggregated confounds
    confounds_metadata
        Confounds metadata dictionary.

    Note: the white-matter/CSF/tCompCor ROI reportlet is not an output of this
    workflow; it is written directly through the ``ds_report_bold_rois`` sink.

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.confounds import ExpandModel, SpikeRegressors
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.images import SignalExtraction
    from niworkflows.interfaces.masks import ROIsPlot
    from niworkflows.interfaces.patches import (
        RobustACompCor as ACompCor,
        RobustTCompCor as TCompCor,
    )
    from niworkflows.interfaces.plotting import (CompCorVariancePlot,
                                                 ConfoundsCorrelationPlot)
    from niworkflows.interfaces.utils import (TPM2ROI, AddTPMs, AddTSVHeader,
                                              TSV2JSON, DictMerge)

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
Several confounding time-series were calculated based on the
*preprocessed BOLD*: framewise displacement (FD), DVARS and
three region-wise global signals.
FD was computed using two formulations following Power (absolute sum of
relative motions, @power_fd_dvars) and Jenkinson (relative root mean square
displacement between affines, @mcflirt).
FD and DVARS are calculated for each functional run, both using their
implementations in *Nipype* [following the definitions by @power_fd_dvars].
The three global signals are extracted within the CSF, the WM, and
the whole-brain masks.
Additionally, a set of physiological regressors were extracted to
allow for component-based noise correction [*CompCor*, @compcor].
Principal components are estimated after high-pass filtering the
*preprocessed BOLD* time-series (using a discrete cosine filter with
128s cut-off) for the two *CompCor* variants: temporal (tCompCor)
and anatomical (aCompCor).
tCompCor components are then calculated from the top 5% variable
voxels within a mask covering the subcortical regions.
This subcortical mask is obtained by heavily eroding the brain mask,
which ensures it does not include cortical GM regions.
For aCompCor, components are calculated within the intersection of
the aforementioned mask and the union of CSF and WM masks calculated
in T1w space, after their projection to the native space of each
functional run (using the inverse BOLD-to-T1w transformation).
Components are also calculated separately within the WM and CSF masks.
For each CompCor decomposition, the *k* components with the largest singular
values are retained, such that the retained components' time series are
sufficient to explain 50 percent of variance across the nuisance mask (CSF,
WM, combined, or temporal). The remaining components are dropped from
consideration.
The head-motion estimates calculated in the correction step were also
placed within the corresponding confounds file.
The confound time series derived from head motion estimates and global
signals were expanded with the inclusion of temporal derivatives and
quadratic terms for each [@confounds_satterthwaite_2013].
Frames that exceeded a threshold of {fd} mm FD or
{dv} standardised DVARS were annotated as motion outliers.
""".format(fd=regressors_fd_th, dv=regressors_dvars_th)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'rmsd_file',
        'skip_vols', 't1w_mask', 't1w_tpms', 't1_bold_xform'
    ]), name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['confounds_file', 'confounds_metadata']),
        name='outputnode')

    # Get masks ready in T1w space
    acc_tpm = pe.Node(
        AddTPMs(indices=[1, 2]),  # BIDS convention (WM=1, CSF=2)
        name='acc_tpm')  # acc stands for aCompCor
    csf_roi = pe.Node(TPM2ROI(erode_mm=0, mask_erode_mm=30), name='csf_roi')
    wm_roi = pe.Node(
        TPM2ROI(erode_prop=0.6,
                mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='wm_roi')
    acc_roi = pe.Node(
        TPM2ROI(erode_prop=0.6,
                mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='acc_roi')

    # Map ROIs in T1w space into BOLD space
    csf_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='csf_tfm', mem_gb=0.1)
    wm_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                     float=True),
                     name='wm_tfm', mem_gb=0.1)
    acc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='acc_tfm', mem_gb=0.1)
    tcc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='tcc_tfm', mem_gb=0.1)

    # Ensure ROIs don't go off-limits (reduced FoV); _maskroi is a
    # module-level helper defined elsewhere in this file.
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True, save_std=True,
                                     remove_zerovariance=True),
                    name="dvars", mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp", mem_gb=mem_gb)

    # a/t-CompCor
    mrg_lbl_cc = pe.Node(niu.Merge(3), name='merge_rois_cc',
                         run_without_submitting=True)

    # tCompCor: top 5% most variable voxels (see __desc__ above).
    tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv',
                                header_prefix='t_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                percentile_threshold=.05,
                                failure_mode='NaN'),
                       name="tcompcor", mem_gb=mem_gb)

    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                mask_names=['combined', 'CSF', 'WM'],
                                merge_method='none',
                                failure_mode='NaN'),
                       name="acompcor", mem_gb=mem_gb)

    # Set number of components: either all, or enough to explain 50% variance.
    if regressors_all_comps:
        acompcor.inputs.num_components = 'all'
        tcompcor.inputs.num_components = 'all'
    else:
        acompcor.inputs.variance_threshold = 0.5
        tcompcor.inputs.variance_threshold = 0.5

    # Set TR if present (needed for the cosine high-pass basis).
    if 'RepetitionTime' in metadata:
        tcompcor.inputs.repetition_time = metadata['RepetitionTime']
        acompcor.inputs.repetition_time = metadata['RepetitionTime']

    # Global and segment regressors. Label order must match the order of
    # label_files assembled by mrg_lbl below (CSF, WM, whole-brain mask).
    signals_class_labels = ["csf", "white_matter", "global_signal"]
    mrg_lbl = pe.Node(niu.Merge(3), name='merge_rois',
                      run_without_submitting=True)
    signals = pe.Node(SignalExtraction(class_labels=signals_class_labels),
                      name="signals", mem_gb=mem_gb)

    # Arrange confounds: give each single-column TSV its header, then gather.
    add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]),
                               name="add_dvars_header", mem_gb=0.01,
                               run_without_submitting=True)
    add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]),
                                   name="add_std_dvars_header", mem_gb=0.01,
                                   run_without_submitting=True)
    add_motion_headers = pe.Node(AddTSVHeader(
        columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
        name="add_motion_headers", mem_gb=0.01, run_without_submitting=True)
    add_rmsd_header = pe.Node(AddTSVHeader(columns=["rmsd"]),
                              name="add_rmsd_header", mem_gb=0.01,
                              run_without_submitting=True)
    concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01,
                     run_without_submitting=True)

    # CompCor metadata: convert the per-component TSVs to JSON dictionaries
    # and merge them with per-signal {'Method': 'Mean'} entries.
    tcc_metadata_fmt = pe.Node(TSV2JSON(index_column='component',
                                        drop_columns=['mask'],
                                        output=None,
                                        additional_metadata={'Method': 'tCompCor'},
                                        enforce_case=True),
                               name='tcc_metadata_fmt')
    acc_metadata_fmt = pe.Node(TSV2JSON(index_column='component',
                                        output=None,
                                        additional_metadata={'Method': 'aCompCor'},
                                        enforce_case=True),
                               name='acc_metadata_fmt')
    mrg_conf_metadata = pe.Node(niu.Merge(3), name='merge_confound_metadata',
                                run_without_submitting=True)
    mrg_conf_metadata.inputs.in3 = {label: {'Method': 'Mean'}
                                    for label in signals_class_labels}
    mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2',
                                 run_without_submitting=True)

    # Expand model to include derivatives and quadratics
    model_expand = pe.Node(
        ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'),
        name='model_expansion')

    # Add spike regressors (one column per flagged outlier frame).
    spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th,
                                            dvars_thresh=regressors_dvars_th),
                            name='spike_regressors')

    # Generate reportlet (ROIs)
    mrg_compcor = pe.Node(niu.Merge(2), name='merge_compcor',
                          run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'], generate_report=True),
                        name='rois_plot', mem_gb=mem_gb)
    ds_report_bold_rois = pe.Node(DerivativesDataSink(
        desc='rois', datatype="figures", dismiss_entities=("echo",)),
        name='ds_report_bold_rois', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (CompCor)
    mrg_cc_metadata = pe.Node(niu.Merge(2), name='merge_compcor_metadata',
                              run_without_submitting=True)
    compcor_plot = pe.Node(CompCorVariancePlot(
        variance_thresholds=(0.5, 0.7, 0.9),
        metadata_sources=['tCompCor', 'aCompCor']),
        name='compcor_plot')
    ds_report_compcor = pe.Node(DerivativesDataSink(
        desc='compcorvar', datatype="figures", dismiss_entities=("echo",)),
        name='ds_report_compcor', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (Confound correlation)
    conf_corr_plot = pe.Node(ConfoundsCorrelationPlot(
        reference_column='global_signal', max_dim=70),
        name='conf_corr_plot')
    ds_report_conf_corr = pe.Node(DerivativesDataSink(
        desc='confoundcorr', datatype="figures", dismiss_entities=("echo",)),
        name='ds_report_conf_corr', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    def _pick_csf(files):
        return files[2]  # after smriprep#189, this is BIDS-compliant.

    def _pick_wm(files):
        return files[1]  # after smriprep#189, this is BIDS-compliant.

    workflow.connect([
        # Massage ROIs (in T1w space)
        (inputnode, acc_tpm, [('t1w_tpms', 'in_files')]),
        (inputnode, csf_roi, [(('t1w_tpms', _pick_csf), 'in_tpm'),
                              ('t1w_mask', 'in_mask')]),
        (inputnode, wm_roi, [(('t1w_tpms', _pick_wm), 'in_tpm'),
                             ('t1w_mask', 'in_mask')]),
        (inputnode, acc_roi, [('t1w_mask', 'in_mask')]),
        (acc_tpm, acc_roi, [('out_file', 'in_tpm')]),
        # Map ROIs to BOLD
        (inputnode, csf_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, csf_tfm, [('roi_file', 'input_image')]),
        (inputnode, wm_tfm, [('bold_mask', 'reference_image'),
                             ('t1_bold_xform', 'transforms')]),
        (wm_roi, wm_tfm, [('roi_file', 'input_image')]),
        (inputnode, acc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (acc_roi, acc_tfm, [('roi_file', 'input_image')]),
        (inputnode, tcc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, tcc_tfm, [('eroded_mask', 'input_image')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'),
                            ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),
        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (tcc_tfm, tcc_msk, [('output_image', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),
        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (acc_tfm, acc_msk, [('output_image', 'roi_file')]),
        (acc_msk, mrg_lbl_cc, [('out', 'in1')]),
        (csf_msk, mrg_lbl_cc, [('out', 'in2')]),
        (wm_msk, mrg_lbl_cc, [('out', 'in3')]),
        (mrg_lbl_cc, acompcor, [('out', 'mask_files')]),
        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (csf_tfm, csf_msk, [('output_image', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (wm_tfm, wm_msk, [('output_image', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),
        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (inputnode, add_rmsd_header, [('rmsd_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_rmsd_header, concat, [('out_file', 'rmsd')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),
        # Confounds metadata
        (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]),
        (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]),
        (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
        # Expand the model with derivatives, quadratics, and spikes
        (concat, model_expand, [('confounds_file', 'confounds_file')]),
        (model_expand, spike_regress, [('confounds_file', 'confounds_file')]),
        # Set outputs
        (spike_regress, outputnode, [('confounds_file', 'confounds_file')]),
        (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
        (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]),
        (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]),
        (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]),
        (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]),
        (concat, conf_corr_plot, [('confounds_file', 'confounds_file')]),
        (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]),
    ])

    return workflow
def init_timeseries_wf(
        out_dir,
        out_path_base,
        source_file,
        dt,
        work_dir=None,
        name='timeseries_wf',
):
    """
    Calculate timeseries of interest for a bold image in standard space.

    Combines the anatomically-constrained confounds workflow
    (:py:func:`init_bold_confs_wf`) with ICA-AROMA, merges their confound
    TSVs and metadata, and writes the results as derivatives.

    Parameters
    ----------
    out_dir: str
        the output directory
    out_path_base: str
        the new directory for the output, to be created within out_dir
    source_file: str
        a filename for output naming purposes
    dt: float
        repetition time
    work_dir: str
        the working directory for the workflow
    name: str
        the workflow name

    Returns
    -------
    workflow: nipype workflow

    Inputs
    ------
    bold_std
        BOLD series NIfTI file in MNI152NLin6Asym space
    bold_mask_std
        BOLD mask for MNI152NLin6Asym space
    movpar_file
        movement parameter file
    skip_vols
        number of non steady state volumes
    csf_mask
        csf mask in MNI 2mm space
    wm_mask
        wm mask in MNI 2mm space
    cortical_gm_mask
        gm mask in MNI 2mm space

    Outputs
    -------
    NONE
    """
    # NOTE: this mutates a *class* attribute, so it affects every
    # DerivativesDataSink instance process-wide, not just this workflow.
    DerivativesDataSink.out_path_base = out_path_base

    workflow = Workflow(name=name, base_dir=work_dir)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_std', 'bold_mask_std', 'movpar_file', 'skip_vols',
        'csf_mask', 'wm_mask', 'cortical_gm_mask']),
        name='inputnode')

    # Anatomically-informed confounds (CompCor, DVARS, FD, signals, ...)
    bold_confs_wf = init_bold_confs_wf(out_dir,
                                       out_path_base,
                                       source_file,
                                       mem_gb=1,
                                       regressors_all_comps=False,
                                       regressors_dvars_th=1.5,
                                       regressors_fd_th=0.5)
    # ICA-AROMA denoising components; err_on_aroma_warn makes AROMA
    # warnings fatal rather than silently degrading outputs.
    ica_aroma_wf = init_ica_aroma_wf(dt, err_on_aroma_warn=True)

    # Horizontally join the confounds TSV with the AROMA confounds TSV
    join = pe.Node(niu.Function(output_names=["out_file"],
                                function=_to_join),
                   name='aroma_confounds')

    # Merge the two metadata dictionaries into one
    merge_metadata = pe.Node(niu.Merge(2),
                             name='merge_metadata',
                             run_without_submitting=True)
    merge_metadata2 = pe.Node(DictMerge(),
                              name='merge_metadata2',
                              run_without_submitting=True)

    # Derivative sinks
    ds_timeseries = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                desc='confounds',
                                                source_file=source_file,
                                                suffix='timeseries'),
                            name='ds_confounds')
    ds_aroma_noise_ics = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                     source_file=source_file,
                                                     suffix='AROMAnoiseICs'),
                                 name='ds_aroma_noise_ics')
    ds_melodic_mix = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                 desc='MELODIC',
                                                 source_file=source_file,
                                                 suffix='mixing'),
                             name='ds_melodic_mix')
    # NOTE(review): desc='mixing' duplicates the melodic-mix description for
    # what is a reportlet; possibly intended to be e.g. 'aroma' — confirm
    # against the expected derivative naming before changing.
    ds_aroma_report = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                  desc='mixing',
                                                  source_file=source_file,
                                                  suffix='reportlet'),
                              name='ds_aroma_report')

    workflow.connect([
        (inputnode, bold_confs_wf, [
            ('bold_std', 'inputnode.bold'),
            ('bold_mask_std', 'inputnode.bold_mask'),
            ('movpar_file', 'inputnode.movpar_file'),
            ('skip_vols', 'inputnode.skip_vols'),
            ('csf_mask', 'inputnode.csf_mask'),
            ('wm_mask', 'inputnode.wm_mask'),
            ('cortical_gm_mask', 'inputnode.cortical_gm_mask')]),
        (inputnode, ica_aroma_wf, [
            ('bold_std', 'inputnode.bold_std'),
            ('bold_mask_std', 'inputnode.bold_mask_std'),
            ('movpar_file', 'inputnode.movpar_file'),
            ('skip_vols', 'inputnode.skip_vols')]),
        # merge tsvs
        (bold_confs_wf, join, [('outputnode.confounds_file', 'in_file')]),
        (ica_aroma_wf, join, [('outputnode.aroma_confounds', 'join_file')]),
        # merge metadata
        (bold_confs_wf, merge_metadata, [('outputnode.confounds_metadata', 'in1')]),
        (ica_aroma_wf, merge_metadata, [('outputnode.aroma_metadata', 'in2')]),
        (merge_metadata, merge_metadata2, [('out', 'in_dicts')]),
        # derivatives
        (join, ds_timeseries, [('out_file', 'in_file')]),
        (merge_metadata2, ds_timeseries, [('out_dict', 'meta_dict')]),
        (ica_aroma_wf, ds_aroma_noise_ics, [('outputnode.aroma_noise_ics',
                                             'in_file')]),
        (ica_aroma_wf, ds_melodic_mix, [('outputnode.melodic_mix',
                                         'in_file')]),
        (ica_aroma_wf, ds_aroma_report, [('outputnode.out_report',
                                          'in_file')]),
    ])
    return workflow
def init_bold_confs_wf(
    out_dir,
    out_path_base,
    source_file,
    mem_gb,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    dt=None,
    work_dir=None,
    name="bold_confs_wf",
):
    """
    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.

    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)

    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_bold_confs_wf
        wf = init_bold_confs_wf(
            out_dir='.',
            out_path_base='fmriprep',
            source_file='bold.nii.gz',
            mem_gb=1,
            regressors_all_comps=False,
            regressors_dvars_th=1.5,
            regressors_fd_th=0.5,
            dt=2.0,
        )

    **Parameters**

        out_dir : str
            output directory for derivative files and reportlets
        out_path_base : str
            the new directory for the output, to be created within out_dir
        source_file : str
            a filename for output naming purposes
        mem_gb : float
            Size of BOLD file in GB - please note that this size
            should be calculated after resamplings that may extend
            the FoV
        regressors_all_comps: bool
            Indicates whether CompCor decompositions should return all
            components instead of the minimal number of components necessary
            to explain 50 percent of the variance in the decomposition mask.
        regressors_dvars_th
            Criterion for flagging DVARS outliers
        regressors_fd_th
            Criterion for flagging framewise displacement outliers
        dt: float
            repetition time
        work_dir : str
            the working directory for the workflow
        name : str
            Name of workflow (default: ``bold_confs_wf``)

    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        movpar_file
            SPM-formatted motion parameters file
        skip_vols
            number of non steady state volumes
        csf_mask
            csf mask in MNI 2mm space
        wm_mask
            wm mask in MNI 2mm space
        cortical_gm_mask
            gm mask in MNI 2mm space

    **Outputs**

        confounds_file
            TSV of all aggregated confounds
        confounds_metadata
            Confounds metadata dictionary.

    """
    # NOTE: mutating the *class* attribute affects every DerivativesDataSink
    # instance process-wide, not just this workflow's sinks.
    DerivativesDataSink.out_path_base = out_path_base

    workflow = Workflow(name=name, base_dir=work_dir)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'skip_vols',
        'csf_mask', 'wm_mask', 'cortical_gm_mask'
    ]), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['confounds_file', 'confounds_metadata']),
        name='outputnode')

    # create tcc mask: fslmaths cortical_gm_mask -dilD -mul -1 -add bold_mask -bin
    # (i.e. the brain mask minus dilated cortical GM — tCompCor should sample
    # high-variance voxels outside gray matter)
    tcc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-dilD -mul -1 -add',
                                           args='-bin'),
                      name='tcc_roi')
    # create acc mask fslmaths wm_mask -add csf_mask
    acc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-add'),
                      name='acc_roi')

    # Ensure ROIs don't go off-limits (reduced FoV): every ROI is
    # intersected with the BOLD brain mask via _maskroi
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True, save_std=True,
                                     remove_zerovariance=True),
                    name="dvars", mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp", mem_gb=mem_gb)

    # a/t-Compcor
    mrg_lbl_cc = pe.Node(niu.Merge(3), name='merge_rois_cc',
                         run_without_submitting=True)

    tcompcor = pe.Node(TCompCor(
        components_file='tcompcor.tsv', header_prefix='t_comp_cor_',
        pre_filter='cosine', save_pre_filter=True, save_metadata=True,
        percentile_threshold=.05, failure_mode='NaN'),
        name="tcompcor", mem_gb=mem_gb)

    acompcor = pe.Node(ACompCor(
        components_file='acompcor.tsv', header_prefix='a_comp_cor_',
        pre_filter='cosine', save_pre_filter=True, save_metadata=True,
        mask_names=['combined', 'CSF', 'WM'], merge_method='none',
        failure_mode='NaN'),
        name="acompcor", mem_gb=mem_gb)

    # Set number of components
    if regressors_all_comps:
        acompcor.inputs.num_components = 'all'
        tcompcor.inputs.num_components = 'all'
    else:
        # keep only enough components to explain 50% of the variance
        acompcor.inputs.variance_threshold = 0.5
        tcompcor.inputs.variance_threshold = 0.5

    # Set TR if present (needed for the cosine high-pass basis)
    if dt:
        tcompcor.inputs.repetition_time = dt
        acompcor.inputs.repetition_time = dt

    # Global and segment regressors
    mrg_lbl = pe.Node(niu.Merge(3), name='merge_rois',
                      run_without_submitting=True)
    signals = pe.Node(SignalExtraction(
        class_labels=["csf", "white_matter", "global_signal"]),
        name="signals", mem_gb=mem_gb)

    # Arrange confounds
    add_dvars_header = pe.Node(
        AddTSVHeader(columns=["dvars"]),
        name="add_dvars_header", mem_gb=0.01, run_without_submitting=True)
    add_std_dvars_header = pe.Node(
        AddTSVHeader(columns=["std_dvars"]),
        name="add_std_dvars_header", mem_gb=0.01, run_without_submitting=True)
    add_motion_headers = pe.Node(
        AddTSVHeader(columns=["trans_x", "trans_y", "trans_z",
                              "rot_x", "rot_y", "rot_z"]),
        name="add_motion_headers", mem_gb=0.01, run_without_submitting=True)
    concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01,
                     run_without_submitting=True)

    # CompCor metadata
    tcc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component', drop_columns=['mask'], output=None,
        additional_metadata={'Method': 'tCompCor'}, enforce_case=True),
        name='tcc_metadata_fmt')
    acc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component', output=None,
        additional_metadata={'Method': 'aCompCor'}, enforce_case=True),
        name='acc_metadata_fmt')
    mrg_conf_metadata = pe.Node(niu.Merge(2), name='merge_confound_metadata',
                                run_without_submitting=True)
    mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2',
                                 run_without_submitting=True)

    # Expand model to include derivatives and quadratics
    model_expand = pe.Node(
        ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'),
        name='model_expansion')

    # Add spike regressors
    spike_regress = pe.Node(
        SpikeRegressors(fd_thresh=regressors_fd_th,
                        dvars_thresh=regressors_dvars_th),
        name='spike_regressors')

    # Generate reportlet (ROIs)
    mrg_compcor = pe.Node(niu.Merge(2), name='merge_compcor',
                          run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'],
                                 generate_report=True),
                        name='rois_plot', mem_gb=mem_gb)
    ds_report_bold_rois = pe.Node(
        DerivativesDataSink(base_directory=out_dir, desc='rois',
                            source_file=source_file, suffix='reportlet',
                            keep_dtype=True),
        name='ds_report_bold_rois', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (CompCor)
    mrg_cc_metadata = pe.Node(niu.Merge(2), name='merge_compcor_metadata',
                              run_without_submitting=True)
    compcor_plot = pe.Node(
        CompCorVariancePlot(metadata_sources=['tCompCor', 'aCompCor']),
        name='compcor_plot')
    ds_report_compcor = pe.Node(
        DerivativesDataSink(base_directory=out_dir, desc='compcorvar',
                            source_file=source_file, keep_dtype=True),
        name='ds_report_compcor', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (Confound correlation)
    conf_corr_plot = pe.Node(ConfoundsCorrelationPlot(
        reference_column='global_signal', max_dim=70),
        name='conf_corr_plot')
    ds_report_conf_corr = pe.Node(
        DerivativesDataSink(base_directory=out_dir, desc='confoundcorr',
                            source_file=source_file, keep_dtype=True),
        name='ds_report_conf_corr', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        # generate tcc and acc rois
        (inputnode, tcc_roi, [('cortical_gm_mask', 'in_file'),
                              ('bold_mask', 'in_file2')]),
        (inputnode, acc_roi, [('wm_mask', 'in_file'),
                              ('csf_mask', 'in_file2')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'),
                            ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),
        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (tcc_roi, tcc_msk, [('out_file', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),
        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (acc_roi, acc_msk, [('out_file', 'roi_file')]),
        (acc_msk, mrg_lbl_cc, [('out', 'in1')]),
        (inputnode, mrg_lbl_cc, [('csf_mask', 'in2')]),
        (inputnode, mrg_lbl_cc, [('wm_mask', 'in3')]),
        (mrg_lbl_cc, acompcor, [('out', 'mask_files')]),
        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (inputnode, csf_msk, [('csf_mask', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (inputnode, wm_msk, [('wm_mask', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),
        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),
        # Confounds metadata
        (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]),
        (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]),
        (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
        # Expand the model with derivatives, quadratics, and spikes
        (concat, model_expand, [('confounds_file', 'confounds_file')]),
        (model_expand, spike_regress, [('confounds_file', 'confounds_file')]),
        # Set outputs
        (spike_regress, outputnode, [('confounds_file', 'confounds_file')]),
        (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
        (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]),
        (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]),
        (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]),
        (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]),
        (concat, conf_corr_plot, [('confounds_file', 'confounds_file')]),
        (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]),
    ])
    return workflow