def init_ica_aroma_wf( mem_gb, metadata, omp_nthreads, aroma_melodic_dim=-200, err_on_aroma_warn=False, name='ica_aroma_wf', susan_fwhm=6.0, ): """ Build a workflow that runs `ICA-AROMA`_. This workflow wraps `ICA-AROMA`_ to identify and remove motion-related independent components from a BOLD time series. The following steps are performed: #. Remove non-steady state volumes from the bold series. #. Smooth data using FSL `susan`, with a kernel width FWHM=6.0mm. #. Run FSL `melodic` outside of ICA-AROMA to generate the report #. Run ICA-AROMA #. Aggregate identified motion components (aggressive) to TSV #. Return ``classified_motion_ICs`` and ``melodic_mix`` for user to complete non-aggressive denoising in T1w space #. Calculate ICA-AROMA-identified noise components (columns named ``AROMAAggrCompXX``) Additionally, non-aggressive denoising is performed on the BOLD series resampled into MNI space. There is a current discussion on whether other confounds should be extracted before or after denoising `here <http://nbviewer.jupyter.org/github/poldracklab/fmriprep-notebooks/blob/922e436429b879271fa13e76767a6e73443e74d9/issue-817_aroma_confounds.ipynb>`__. .. _ICA-AROMA: https://github.com/maartenmennes/ICA-AROMA Workflow Graph .. workflow:: :graph2use: orig :simple_form: yes from fmriprep.workflows.bold.confounds import init_ica_aroma_wf wf = init_ica_aroma_wf( mem_gb=3, metadata={'RepetitionTime': 1.0}, omp_nthreads=1) Parameters ---------- metadata : :obj:`dict` BIDS metadata for BOLD file mem_gb : :obj:`float` Size of BOLD file in GB omp_nthreads : :obj:`int` Maximum number of threads an individual process may use name : :obj:`str` Name of workflow (default: ``bold_tpl_trans_wf``) susan_fwhm : :obj:`float` Kernel width (FWHM in mm) for the smoothing step with FSL ``susan`` (default: 6.0mm) err_on_aroma_warn : :obj:`bool` Do not fail on ICA-AROMA errors aroma_melodic_dim : :obj:`int` Set the dimensionality of the MELODIC ICA decomposition. Negative numbers set a maximum on automatic dimensionality estimation. Positive numbers set an exact number of components to extract. (default: -200, i.e., estimate <=200 components) Inputs ------ itk_bold_to_t1 Affine transform from ``ref_bold_brain`` to T1 space (ITK format) anat2std_xfm ANTs-compatible affine-and-warp transform file name_source BOLD series NIfTI file Used to recover original information lost during processing skip_vols number of non steady state volumes bold_split Individual 3D BOLD volumes, not motion corrected bold_mask BOLD series mask in template space hmc_xforms List of affine transforms aligning each volume to ``ref_image`` in ITK format movpar_file SPM-formatted motion parameters file Outputs ------- aroma_confounds TSV of confounds identified as noise by ICA-AROMA aroma_noise_ics CSV of noise components identified by ICA-AROMA melodic_mix FSL MELODIC mixing matrix nonaggr_denoised_file BOLD series with non-aggressive ICA-AROMA denoising applied """ from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.segmentation import ICA_AROMARPT from niworkflows.interfaces.utility import KeySelect from niworkflows.interfaces.utils import TSV2JSON workflow = Workflow(name=name) workflow.__postdesc__ = """\ Automatic removal of motion artifacts using independent component analysis [ICA-AROMA, @aroma] was performed on the *preprocessed BOLD on MNI space* time-series after removal of non-steady state volumes and spatial smoothing with an isotropic, Gaussian kernel of 6mm FWHM (full-width half-maximum). 
Corresponding "non-aggresively" denoised runs were produced after such smoothing. Additionally, the "aggressive" noise-regressors were collected and placed in the corresponding confounds file. """ inputnode = pe.Node(niu.IdentityInterface(fields=[ 'bold_std', 'bold_mask_std', 'movpar_file', 'name_source', 'skip_vols', 'spatial_reference', ]), name='inputnode') outputnode = pe.Node(niu.IdentityInterface(fields=[ 'aroma_confounds', 'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file', 'aroma_metadata' ]), name='outputnode') # extract out to BOLD base select_std = pe.Node(KeySelect(fields=['bold_mask_std', 'bold_std']), name='select_std', run_without_submitting=True) select_std.inputs.key = 'MNI152NLin6Asym_res-2' rm_non_steady_state = pe.Node(niu.Function(function=_remove_volumes, output_names=['bold_cut']), name='rm_nonsteady') calc_median_val = pe.Node(fsl.ImageStats(op_string='-k %s -p 50'), name='calc_median_val') calc_bold_mean = pe.Node(fsl.MeanImage(), name='calc_bold_mean') def _getusans_func(image, thresh): return [tuple([image, thresh])] getusans = pe.Node(niu.Function(function=_getusans_func, output_names=['usans']), name='getusans', mem_gb=0.01) smooth = pe.Node(fsl.SUSAN(fwhm=susan_fwhm), name='smooth') # melodic node melodic = pe.Node(fsl.MELODIC(no_bet=True, tr_sec=float(metadata['RepetitionTime']), mm_thresh=0.5, out_stats=True, dim=aroma_melodic_dim), name="melodic") # ica_aroma node ica_aroma = pe.Node(ICA_AROMARPT(denoise_type='nonaggr', generate_report=True, TR=metadata['RepetitionTime'], args='-np'), name='ica_aroma') add_non_steady_state = pe.Node(niu.Function(function=_add_volumes, output_names=['bold_add']), name='add_nonsteady') # extract the confound ICs from the results ica_aroma_confound_extraction = pe.Node( ICAConfounds(err_on_aroma_warn=err_on_aroma_warn), name='ica_aroma_confound_extraction') ica_aroma_metadata_fmt = pe.Node(TSV2JSON(index_column='IC', output=None, enforce_case=True, additional_metadata={ 'Method': { 'Name': 'ICA-AROMA', 'Version': getenv( 'AROMA_VERSION', 'n/a') } }), name='ica_aroma_metadata_fmt') ds_report_ica_aroma = pe.Node(DerivativesDataSink( desc='aroma', datatype="figures", dismiss_entities=("echo", )), name='ds_report_ica_aroma', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) def _getbtthresh(medianval): return 0.75 * medianval # connect the nodes workflow.connect([ (inputnode, select_std, [('spatial_reference', 'keys'), ('bold_std', 'bold_std'), ('bold_mask_std', 'bold_mask_std')]), (inputnode, ica_aroma, [('movpar_file', 'motion_parameters')]), (inputnode, rm_non_steady_state, [('skip_vols', 'skip_vols')]), (select_std, rm_non_steady_state, [('bold_std', 'bold_file')]), (select_std, calc_median_val, [('bold_mask_std', 'mask_file')]), (rm_non_steady_state, calc_median_val, [('bold_cut', 'in_file')]), (rm_non_steady_state, calc_bold_mean, [('bold_cut', 'in_file')]), (calc_bold_mean, getusans, [('out_file', 'image')]), (calc_median_val, getusans, [('out_stat', 'thresh')]), # Connect input nodes to complete smoothing (rm_non_steady_state, smooth, [('bold_cut', 'in_file')]), (getusans, smooth, [('usans', 'usans')]), (calc_median_val, smooth, [(('out_stat', _getbtthresh), 'brightness_threshold')]), # connect smooth to melodic (smooth, melodic, [('smoothed_file', 'in_files')]), (select_std, melodic, [('bold_mask_std', 'mask')]), # connect nodes to ICA-AROMA (smooth, ica_aroma, [('smoothed_file', 'in_file')]), (select_std, ica_aroma, [('bold_mask_std', 'report_mask'), ('bold_mask_std', 'mask')]), (melodic, ica_aroma, 
[('out_dir', 'melodic_dir')]), # generate tsvs from ICA-AROMA (ica_aroma, ica_aroma_confound_extraction, [('out_dir', 'in_directory') ]), (inputnode, ica_aroma_confound_extraction, [('skip_vols', 'skip_vols') ]), (ica_aroma_confound_extraction, ica_aroma_metadata_fmt, [('aroma_metadata', 'in_file')]), # output for processing and reporting (ica_aroma_confound_extraction, outputnode, [('aroma_confounds', 'aroma_confounds'), ('aroma_noise_ics', 'aroma_noise_ics'), ('melodic_mix', 'melodic_mix')]), (ica_aroma_metadata_fmt, outputnode, [('output', 'aroma_metadata')]), (ica_aroma, add_non_steady_state, [('nonaggr_denoised_file', 'bold_cut_file')]), (select_std, add_non_steady_state, [('bold_std', 'bold_file')]), (inputnode, add_non_steady_state, [('skip_vols', 'skip_vols')]), (add_non_steady_state, outputnode, [('bold_add', 'nonaggr_denoised_file')]), (ica_aroma, ds_report_ica_aroma, [('out_report', 'in_file')]), ]) return workflow
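
# NOTE: the ``rm_nonsteady``/``add_nonsteady`` nodes above wrap the module-level
# helpers ``_remove_volumes`` and ``_add_volumes``, which are not shown in this
# section. The two functions below are a minimal, purely illustrative sketch of
# the expected behavior (trim the initial non-steady-state volumes before
# ICA-AROMA, then prepend them back onto the denoised series); the ``*_sketch``
# names and output filenames are assumptions, not part of the original module.
def _remove_volumes_sketch(bold_file, skip_vols):
    """Drop the first ``skip_vols`` volumes of a 4D BOLD series (sketch)."""
    import os
    import nibabel as nb

    if skip_vols == 0:
        return bold_file
    out = os.path.abspath('bold_cut.nii.gz')
    img = nb.load(bold_file)
    img.__class__(img.dataobj[..., skip_vols:], img.affine, img.header).to_filename(out)
    return out


def _add_volumes_sketch(bold_file, bold_cut_file, skip_vols):
    """Prepend the original initial volumes back onto a trimmed series (sketch)."""
    import os
    import nibabel as nb
    import numpy as np

    if skip_vols == 0:
        return bold_cut_file
    out = os.path.abspath('bold_add.nii.gz')
    bold_img = nb.load(bold_file)
    cut_img = nb.load(bold_cut_file)
    data = np.concatenate(
        (bold_img.dataobj[..., :skip_vols], np.asanyarray(cut_img.dataobj)), axis=3)
    bold_img.__class__(data, bold_img.affine, bold_img.header).to_filename(out)
    return out
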
def init_bold_confs_wf( mem_gb, metadata, regressors_all_comps, regressors_dvars_th, regressors_fd_th, freesurfer=False, name="bold_confs_wf", ): """ Build a workflow to generate and write out confounding signals. This workflow calculates confounds for a BOLD series, and aggregates them into a :abbr:`TSV (tab-separated value)` file, for use as nuisance regressors in a :abbr:`GLM (general linear model)`. The following confounds are calculated, with column headings in parentheses: #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``) #. DVARS - original and standardized variants (``dvars``, ``std_dvars``) #. Framewise displacement, based on head-motion parameters (``framewise_displacement``) #. Temporal CompCor (``t_comp_cor_XX``) #. Anatomical CompCor (``a_comp_cor_XX``) #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off (``cosine_XX``) #. Non-steady-state volumes (``non_steady_state_XX``) #. Estimated head-motion parameters, in mm and rad (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``) Prior to estimating aCompCor and tCompCor, non-steady-state volumes are censored and high-pass filtered using a :abbr:`DCT (discrete cosine transform)` basis. The cosine basis, as well as one regressor per censored volume, are included for convenience. Workflow Graph .. workflow:: :graph2use: orig :simple_form: yes from fmriprep.workflows.bold.confounds import init_bold_confs_wf wf = init_bold_confs_wf( mem_gb=1, metadata={}, regressors_all_comps=False, regressors_dvars_th=1.5, regressors_fd_th=0.5, ) Parameters ---------- mem_gb : :obj:`float` Size of BOLD file in GB - please note that this size should be calculated after resamplings that may extend the FoV metadata : :obj:`dict` BIDS metadata for BOLD file name : :obj:`str` Name of workflow (default: ``bold_confs_wf``) regressors_all_comps : :obj:`bool` Indicates whether CompCor decompositions should return all components instead of the minimal number of components necessary to explain 50 percent of the variance in the decomposition mask. regressors_dvars_th : :obj:`float` Criterion for flagging DVARS outliers regressors_fd_th : :obj:`float` Criterion for flagging framewise displacement outliers Inputs ------ bold BOLD image, after the prescribed corrections (STC, HMC and SDC) when available. bold_mask BOLD series mask movpar_file SPM-formatted motion parameters file rmsd_file Framewise displacement as measured by ``fsl_motion_outliers``. skip_vols number of non steady state volumes t1w_mask Mask of the skull-stripped template image t1w_tpms List of tissue probability maps in T1w space t1_bold_xform Affine matrix that maps the T1w space into alignment with the native BOLD space Outputs ------- confounds_file TSV of all aggregated confounds rois_report Reportlet visualizing white-matter/CSF mask used for aCompCor, the ROI for tCompCor and the BOLD brain mask. confounds_metadata Confounds metadata dictionary. 
""" from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.confounds import ExpandModel, SpikeRegressors from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.images import SignalExtraction from niworkflows.interfaces.masks import ROIsPlot from niworkflows.interfaces.nibabel import ApplyMask, Binarize from niworkflows.interfaces.patches import ( RobustACompCor as ACompCor, RobustTCompCor as TCompCor, ) from niworkflows.interfaces.plotting import (CompCorVariancePlot, ConfoundsCorrelationPlot) from niworkflows.interfaces.utils import (AddTSVHeader, TSV2JSON, DictMerge) from ...interfaces.confounds import aCompCorMasks gm_desc = ( "dilating a GM mask extracted from the FreeSurfer's *aseg* segmentation" if freesurfer else "thresholding the corresponding partial volume map at 0.05") workflow = Workflow(name=name) workflow.__desc__ = f"""\ Several confounding time-series were calculated based on the *preprocessed BOLD*: framewise displacement (FD), DVARS and three region-wise global signals. FD was computed using two formulations following Power (absolute sum of relative motions, @power_fd_dvars) and Jenkinson (relative root mean square displacement between affines, @mcflirt). FD and DVARS are calculated for each functional run, both using their implementations in *Nipype* [following the definitions by @power_fd_dvars]. The three global signals are extracted within the CSF, the WM, and the whole-brain masks. Additionally, a set of physiological regressors were extracted to allow for component-based noise correction [*CompCor*, @compcor]. Principal components are estimated after high-pass filtering the *preprocessed BOLD* time-series (using a discrete cosine filter with 128s cut-off) for the two *CompCor* variants: temporal (tCompCor) and anatomical (aCompCor). tCompCor components are then calculated from the top 2% variable voxels within the brain mask. For aCompCor, three probabilistic masks (CSF, WM and combined CSF+WM) are generated in anatomical space. The implementation differs from that of Behzadi et al. in that instead of eroding the masks by 2 pixels on BOLD space, the aCompCor masks are subtracted a mask of pixels that likely contain a volume fraction of GM. This mask is obtained by {gm_desc}, and it ensures components are not extracted from voxels containing a minimal fraction of GM. Finally, these masks are resampled into BOLD space and binarized by thresholding at 0.99 (as in the original implementation). Components are also calculated separately within the WM and CSF masks. For each CompCor decomposition, the *k* components with the largest singular values are retained, such that the retained components' time series are sufficient to explain 50 percent of variance across the nuisance mask (CSF, WM, combined, or temporal). The remaining components are dropped from consideration. The head-motion estimates calculated in the correction step were also placed within the corresponding confounds file. The confound time series derived from head motion estimates and global signals were expanded with the inclusion of temporal derivatives and quadratic terms for each [@confounds_satterthwaite_2013]. Frames that exceeded a threshold of {regressors_fd_th} mm FD or {regressors_dvars_th} standardised DVARS were annotated as motion outliers. 
""" inputnode = pe.Node(niu.IdentityInterface(fields=[ 'bold', 'bold_mask', 'movpar_file', 'rmsd_file', 'skip_vols', 't1w_mask', 't1w_tpms', 't1_bold_xform' ]), name='inputnode') outputnode = pe.Node(niu.IdentityInterface(fields=[ 'confounds_file', 'confounds_metadata', 'acompcor_masks', 'tcompcor_mask' ]), name='outputnode') # DVARS dvars = pe.Node(nac.ComputeDVARS(save_nstd=True, save_std=True, remove_zerovariance=True), name="dvars", mem_gb=mem_gb) # Frame displacement fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"), name="fdisp", mem_gb=mem_gb) # Generate aCompCor probseg maps acc_masks = pe.Node(aCompCorMasks(is_aseg=freesurfer), name="acc_masks") # Resample probseg maps in BOLD space via T1w-to-BOLD transform acc_msk_tfm = pe.MapNode(ApplyTransforms(interpolation='Gaussian', float=False), iterfield=["input_image"], name='acc_msk_tfm', mem_gb=0.1) acc_msk_brain = pe.MapNode(ApplyMask(), name="acc_msk_brain", iterfield=["in_file"]) acc_msk_bin = pe.MapNode(Binarize(thresh_low=0.99), name='acc_msk_bin', iterfield=["in_file"]) acompcor = pe.Node(ACompCor(components_file='acompcor.tsv', header_prefix='a_comp_cor_', pre_filter='cosine', save_pre_filter=True, save_metadata=True, mask_names=['CSF', 'WM', 'combined'], merge_method='none', failure_mode='NaN'), name="acompcor", mem_gb=mem_gb) tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv', header_prefix='t_comp_cor_', pre_filter='cosine', save_pre_filter=True, save_metadata=True, percentile_threshold=.02, failure_mode='NaN'), name="tcompcor", mem_gb=mem_gb) # Set number of components if regressors_all_comps: acompcor.inputs.num_components = 'all' tcompcor.inputs.num_components = 'all' else: acompcor.inputs.variance_threshold = 0.5 tcompcor.inputs.variance_threshold = 0.5 # Set TR if present if 'RepetitionTime' in metadata: tcompcor.inputs.repetition_time = metadata['RepetitionTime'] acompcor.inputs.repetition_time = metadata['RepetitionTime'] # Global and segment regressors signals_class_labels = [ "global_signal", "csf", "white_matter", "csf_wm", "tcompcor", ] merge_rois = pe.Node(niu.Merge(3, ravel_inputs=True), name='merge_rois', run_without_submitting=True) signals = pe.Node(SignalExtraction(class_labels=signals_class_labels), name="signals", mem_gb=mem_gb) # Arrange confounds add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]), name="add_dvars_header", mem_gb=0.01, run_without_submitting=True) add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]), name="add_std_dvars_header", mem_gb=0.01, run_without_submitting=True) add_motion_headers = pe.Node(AddTSVHeader( columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]), name="add_motion_headers", mem_gb=0.01, run_without_submitting=True) add_rmsd_header = pe.Node(AddTSVHeader(columns=["rmsd"]), name="add_rmsd_header", mem_gb=0.01, run_without_submitting=True) concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01, run_without_submitting=True) # CompCor metadata tcc_metadata_fmt = pe.Node(TSV2JSON( index_column='component', drop_columns=['mask'], output=None, additional_metadata={'Method': 'tCompCor'}, enforce_case=True), name='tcc_metadata_fmt') acc_metadata_fmt = pe.Node(TSV2JSON( index_column='component', output=None, additional_metadata={'Method': 'aCompCor'}, enforce_case=True), name='acc_metadata_fmt') mrg_conf_metadata = pe.Node(niu.Merge(3), name='merge_confound_metadata', run_without_submitting=True) mrg_conf_metadata.inputs.in3 = { label: { 'Method': 'Mean' } for label in signals_class_labels } mrg_conf_metadata2 = 
pe.Node(DictMerge(), name='merge_confound_metadata2', run_without_submitting=True) # Expand model to include derivatives and quadratics model_expand = pe.Node( ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'), name='model_expansion') # Add spike regressors spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th, dvars_thresh=regressors_dvars_th), name='spike_regressors') # Generate reportlet (ROIs) mrg_compcor = pe.Node(niu.Merge(2, ravel_inputs=True), name='mrg_compcor', run_without_submitting=True) rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'], generate_report=True), name='rois_plot', mem_gb=mem_gb) ds_report_bold_rois = pe.Node(DerivativesDataSink( desc='rois', datatype="figures", dismiss_entities=("echo", )), name='ds_report_bold_rois', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) # Generate reportlet (CompCor) mrg_cc_metadata = pe.Node(niu.Merge(2), name='merge_compcor_metadata', run_without_submitting=True) compcor_plot = pe.Node(CompCorVariancePlot( variance_thresholds=(0.5, 0.7, 0.9), metadata_sources=['tCompCor', 'aCompCor']), name='compcor_plot') ds_report_compcor = pe.Node(DerivativesDataSink( desc='compcorvar', datatype="figures", dismiss_entities=("echo", )), name='ds_report_compcor', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) # Generate reportlet (Confound correlation) conf_corr_plot = pe.Node(ConfoundsCorrelationPlot( reference_column='global_signal', max_dim=20), name='conf_corr_plot') ds_report_conf_corr = pe.Node(DerivativesDataSink( desc='confoundcorr', datatype="figures", dismiss_entities=("echo", )), name='ds_report_conf_corr', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) def _last(inlist): return inlist[-1] def _select_cols(table): import pandas as pd return [ col for col in pd.read_table(table, nrows=2).columns if not col.startswith(("a_comp_cor_", "t_comp_cor_", "std_dvars")) ] workflow.connect([ # connect inputnode to each non-anatomical confound node (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]), (inputnode, fdisp, [('movpar_file', 'in_file')]), # aCompCor (inputnode, acompcor, [("bold", "realigned_file"), ("skip_vols", "ignore_initial_volumes")]), (inputnode, acc_masks, [("t1w_tpms", "in_vfs"), (("bold", _get_zooms), "bold_zooms")]), (inputnode, acc_msk_tfm, [("t1_bold_xform", "transforms"), ("bold_mask", "reference_image")]), (inputnode, acc_msk_brain, [("bold_mask", "in_mask")]), (acc_masks, acc_msk_tfm, [("out_masks", "input_image")]), (acc_msk_tfm, acc_msk_brain, [("output_image", "in_file")]), (acc_msk_brain, acc_msk_bin, [("out_file", "in_file")]), (acc_msk_bin, acompcor, [("out_file", "mask_files")]), # tCompCor (inputnode, tcompcor, [("bold", "realigned_file"), ("skip_vols", "ignore_initial_volumes"), ("bold_mask", "mask_files")]), # Global signals extraction (constrained by anatomy) (inputnode, signals, [('bold', 'in_file')]), (inputnode, merge_rois, [('bold_mask', 'in1')]), (acc_msk_bin, merge_rois, [('out_file', 'in2')]), (tcompcor, merge_rois, [('high_variance_masks', 'in3')]), (merge_rois, signals, [('out', 'label_files')]), # Collate computed confounds together (inputnode, add_motion_headers, [('movpar_file', 'in_file')]), (inputnode, add_rmsd_header, [('rmsd_file', 'in_file')]), (dvars, add_dvars_header, [('out_nstd', 'in_file')]), (dvars, add_std_dvars_header, [('out_std', 'in_file')]), (signals, concat, [('out_file', 'signals')]), (fdisp, concat, [('out_file', 'fd')]), (tcompcor, concat, [('components_file', 'tcompcor'), ('pre_filter_file', 
'cos_basis')]), (acompcor, concat, [('components_file', 'acompcor')]), (add_motion_headers, concat, [('out_file', 'motion')]), (add_rmsd_header, concat, [('out_file', 'rmsd')]), (add_dvars_header, concat, [('out_file', 'dvars')]), (add_std_dvars_header, concat, [('out_file', 'std_dvars')]), # Confounds metadata (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]), (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]), (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]), (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]), (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]), # Expand the model with derivatives, quadratics, and spikes (concat, model_expand, [('confounds_file', 'confounds_file')]), (model_expand, spike_regress, [('confounds_file', 'confounds_file')]), # Set outputs (spike_regress, outputnode, [('confounds_file', 'confounds_file')]), (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]), (tcompcor, outputnode, [("high_variance_masks", "tcompcor_mask")]), (acc_msk_bin, outputnode, [("out_file", "acompcor_masks")]), (inputnode, rois_plot, [('bold', 'in_file'), ('bold_mask', 'in_mask')]), (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]), (acc_msk_bin, mrg_compcor, [(('out_file', _last), 'in2')]), (mrg_compcor, rois_plot, [('out', 'in_rois')]), (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]), (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]), (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]), (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]), (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]), (concat, conf_corr_plot, [('confounds_file', 'confounds_file'), (('confounds_file', _select_cols), 'columns') ]), (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]), ]) return workflow
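
# Illustrative usage sketch (not part of the original module): how this
# confounds workflow might be instantiated and fed standalone, outside the full
# pipeline. The function name, file paths, and parameter values below are
# placeholders/assumptions.
def _example_bold_confs_wf():
    """Build ``bold_confs_wf`` with example parameters and placeholder inputs."""
    wf = init_bold_confs_wf(
        mem_gb=1,
        metadata={'RepetitionTime': 2.0},
        regressors_all_comps=False,
        regressors_dvars_th=1.5,
        regressors_fd_th=0.5,
        freesurfer=False,
    )
    # Inputs would normally be wired from upstream workflows; here they are
    # set directly on the inputnode for illustration.
    wf.inputs.inputnode.bold = 'sub-01_task-rest_desc-preproc_bold.nii.gz'
    wf.inputs.inputnode.bold_mask = 'sub-01_task-rest_desc-brain_mask.nii.gz'
    wf.inputs.inputnode.movpar_file = 'movpar.txt'
    wf.inputs.inputnode.rmsd_file = 'rmsd.txt'
    wf.inputs.inputnode.skip_vols = 4
    wf.inputs.inputnode.t1w_mask = 'sub-01_desc-brain_mask.nii.gz'
    wf.inputs.inputnode.t1w_tpms = [
        'gm_probseg.nii.gz', 'wm_probseg.nii.gz', 'csf_probseg.nii.gz']
    wf.inputs.inputnode.t1_bold_xform = 'identity'  # or a path to an ITK transform
    return wf
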
def init_bold_confs_wf( mem_gb, metadata, regressors_all_comps, regressors_dvars_th, regressors_fd_th, name="bold_confs_wf", ): """ Build a workflow to generate and write out confounding signals. This workflow calculates confounds for a BOLD series, and aggregates them into a :abbr:`TSV (tab-separated value)` file, for use as nuisance regressors in a :abbr:`GLM (general linear model)`. The following confounds are calculated, with column headings in parentheses: #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``) #. DVARS - original and standardized variants (``dvars``, ``std_dvars``) #. Framewise displacement, based on head-motion parameters (``framewise_displacement``) #. Temporal CompCor (``t_comp_cor_XX``) #. Anatomical CompCor (``a_comp_cor_XX``) #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off (``cosine_XX``) #. Non-steady-state volumes (``non_steady_state_XX``) #. Estimated head-motion parameters, in mm and rad (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``) Prior to estimating aCompCor and tCompCor, non-steady-state volumes are censored and high-pass filtered using a :abbr:`DCT (discrete cosine transform)` basis. The cosine basis, as well as one regressor per censored volume, are included for convenience. Workflow Graph .. workflow:: :graph2use: orig :simple_form: yes from fmriprep.workflows.bold.confounds import init_bold_confs_wf wf = init_bold_confs_wf( mem_gb=1, metadata={}, regressors_all_comps=False, regressors_dvars_th=1.5, regressors_fd_th=0.5, ) Parameters ---------- mem_gb : :obj:`float` Size of BOLD file in GB - please note that this size should be calculated after resamplings that may extend the FoV metadata : :obj:`dict` BIDS metadata for BOLD file name : :obj:`str` Name of workflow (default: ``bold_confs_wf``) regressors_all_comps : :obj:`bool` Indicates whether CompCor decompositions should return all components instead of the minimal number of components necessary to explain 50 percent of the variance in the decomposition mask. regressors_dvars_th : :obj:`float` Criterion for flagging DVARS outliers regressors_fd_th : :obj:`float` Criterion for flagging framewise displacement outliers Inputs ------ bold BOLD image, after the prescribed corrections (STC, HMC and SDC) when available. bold_mask BOLD series mask movpar_file SPM-formatted motion parameters file rmsd_file Framewise displacement as measured by ``fsl_motion_outliers``. skip_vols number of non steady state volumes t1w_mask Mask of the skull-stripped template image t1w_tpms List of tissue probability maps in T1w space t1_bold_xform Affine matrix that maps the T1w space into alignment with the native BOLD space Outputs ------- confounds_file TSV of all aggregated confounds rois_report Reportlet visualizing white-matter/CSF mask used for aCompCor, the ROI for tCompCor and the BOLD brain mask. confounds_metadata Confounds metadata dictionary. 
""" from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.confounds import ExpandModel, SpikeRegressors from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.images import SignalExtraction from niworkflows.interfaces.masks import ROIsPlot from niworkflows.interfaces.patches import ( RobustACompCor as ACompCor, RobustTCompCor as TCompCor, ) from niworkflows.interfaces.plotting import (CompCorVariancePlot, ConfoundsCorrelationPlot) from niworkflows.interfaces.utils import (TPM2ROI, AddTPMs, AddTSVHeader, TSV2JSON, DictMerge) workflow = Workflow(name=name) workflow.__desc__ = """\ Several confounding time-series were calculated based on the *preprocessed BOLD*: framewise displacement (FD), DVARS and three region-wise global signals. FD was computed using two formulations following Power (absolute sum of relative motions, @power_fd_dvars) and Jenkinson (relative root mean square displacement between affines, @mcflirt). FD and DVARS are calculated for each functional run, both using their implementations in *Nipype* [following the definitions by @power_fd_dvars]. The three global signals are extracted within the CSF, the WM, and the whole-brain masks. Additionally, a set of physiological regressors were extracted to allow for component-based noise correction [*CompCor*, @compcor]. Principal components are estimated after high-pass filtering the *preprocessed BOLD* time-series (using a discrete cosine filter with 128s cut-off) for the two *CompCor* variants: temporal (tCompCor) and anatomical (aCompCor). tCompCor components are then calculated from the top 5% variable voxels within a mask covering the subcortical regions. This subcortical mask is obtained by heavily eroding the brain mask, which ensures it does not include cortical GM regions. For aCompCor, components are calculated within the intersection of the aforementioned mask and the union of CSF and WM masks calculated in T1w space, after their projection to the native space of each functional run (using the inverse BOLD-to-T1w transformation). Components are also calculated separately within the WM and CSF masks. For each CompCor decomposition, the *k* components with the largest singular values are retained, such that the retained components' time series are sufficient to explain 50 percent of variance across the nuisance mask (CSF, WM, combined, or temporal). The remaining components are dropped from consideration. The head-motion estimates calculated in the correction step were also placed within the corresponding confounds file. The confound time series derived from head motion estimates and global signals were expanded with the inclusion of temporal derivatives and quadratic terms for each [@confounds_satterthwaite_2013]. Frames that exceeded a threshold of {fd} mm FD or {dv} standardised DVARS were annotated as motion outliers. 
""".format(fd=regressors_fd_th, dv=regressors_dvars_th) inputnode = pe.Node(niu.IdentityInterface(fields=[ 'bold', 'bold_mask', 'movpar_file', 'rmsd_file', 'skip_vols', 't1w_mask', 't1w_tpms', 't1_bold_xform' ]), name='inputnode') outputnode = pe.Node( niu.IdentityInterface(fields=['confounds_file', 'confounds_metadata']), name='outputnode') # Get masks ready in T1w space acc_tpm = pe.Node( AddTPMs(indices=[1, 2]), # BIDS convention (WM=1, CSF=2) name='acc_tpm') # acc stands for aCompCor csf_roi = pe.Node(TPM2ROI(erode_mm=0, mask_erode_mm=30), name='csf_roi') wm_roi = pe.Node( TPM2ROI(erode_prop=0.6, mask_erode_prop=0.6**3), # 0.6 = radius; 0.6^3 = volume name='wm_roi') acc_roi = pe.Node( TPM2ROI(erode_prop=0.6, mask_erode_prop=0.6**3), # 0.6 = radius; 0.6^3 = volume name='acc_roi') # Map ROIs in T1w space into BOLD space csf_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True), name='csf_tfm', mem_gb=0.1) wm_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True), name='wm_tfm', mem_gb=0.1) acc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True), name='acc_tfm', mem_gb=0.1) tcc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True), name='tcc_tfm', mem_gb=0.1) # Ensure ROIs don't go off-limits (reduced FoV) csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk') wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk') acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk') tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk') # DVARS dvars = pe.Node(nac.ComputeDVARS(save_nstd=True, save_std=True, remove_zerovariance=True), name="dvars", mem_gb=mem_gb) # Frame displacement fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"), name="fdisp", mem_gb=mem_gb) # a/t-CompCor mrg_lbl_cc = pe.Node(niu.Merge(3), name='merge_rois_cc', run_without_submitting=True) tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv', header_prefix='t_comp_cor_', pre_filter='cosine', save_pre_filter=True, save_metadata=True, percentile_threshold=.05, failure_mode='NaN'), name="tcompcor", mem_gb=mem_gb) acompcor = pe.Node(ACompCor(components_file='acompcor.tsv', header_prefix='a_comp_cor_', pre_filter='cosine', save_pre_filter=True, save_metadata=True, mask_names=['combined', 'CSF', 'WM'], merge_method='none', failure_mode='NaN'), name="acompcor", mem_gb=mem_gb) # Set number of components if regressors_all_comps: acompcor.inputs.num_components = 'all' tcompcor.inputs.num_components = 'all' else: acompcor.inputs.variance_threshold = 0.5 tcompcor.inputs.variance_threshold = 0.5 # Set TR if present if 'RepetitionTime' in metadata: tcompcor.inputs.repetition_time = metadata['RepetitionTime'] acompcor.inputs.repetition_time = metadata['RepetitionTime'] # Global and segment regressors signals_class_labels = ["csf", "white_matter", "global_signal"] mrg_lbl = pe.Node(niu.Merge(3), name='merge_rois', run_without_submitting=True) signals = pe.Node(SignalExtraction(class_labels=signals_class_labels), name="signals", mem_gb=mem_gb) # Arrange confounds add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]), name="add_dvars_header", mem_gb=0.01, run_without_submitting=True) add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]), name="add_std_dvars_header", mem_gb=0.01, run_without_submitting=True) add_motion_headers = pe.Node(AddTSVHeader( columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]), name="add_motion_headers", mem_gb=0.01, 
run_without_submitting=True) add_rmsd_header = pe.Node(AddTSVHeader(columns=["rmsd"]), name="add_rmsd_header", mem_gb=0.01, run_without_submitting=True) concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01, run_without_submitting=True) # CompCor metadata tcc_metadata_fmt = pe.Node(TSV2JSON( index_column='component', drop_columns=['mask'], output=None, additional_metadata={'Method': 'tCompCor'}, enforce_case=True), name='tcc_metadata_fmt') acc_metadata_fmt = pe.Node(TSV2JSON( index_column='component', output=None, additional_metadata={'Method': 'aCompCor'}, enforce_case=True), name='acc_metadata_fmt') mrg_conf_metadata = pe.Node(niu.Merge(3), name='merge_confound_metadata', run_without_submitting=True) mrg_conf_metadata.inputs.in3 = { label: { 'Method': 'Mean' } for label in signals_class_labels } mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2', run_without_submitting=True) # Expand model to include derivatives and quadratics model_expand = pe.Node( ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'), name='model_expansion') # Add spike regressors spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th, dvars_thresh=regressors_dvars_th), name='spike_regressors') # Generate reportlet (ROIs) mrg_compcor = pe.Node(niu.Merge(2), name='merge_compcor', run_without_submitting=True) rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'], generate_report=True), name='rois_plot', mem_gb=mem_gb) ds_report_bold_rois = pe.Node(DerivativesDataSink( desc='rois', datatype="figures", dismiss_entities=("echo", )), name='ds_report_bold_rois', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) # Generate reportlet (CompCor) mrg_cc_metadata = pe.Node(niu.Merge(2), name='merge_compcor_metadata', run_without_submitting=True) compcor_plot = pe.Node(CompCorVariancePlot( variance_thresholds=(0.5, 0.7, 0.9), metadata_sources=['tCompCor', 'aCompCor']), name='compcor_plot') ds_report_compcor = pe.Node(DerivativesDataSink( desc='compcorvar', datatype="figures", dismiss_entities=("echo", )), name='ds_report_compcor', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) # Generate reportlet (Confound correlation) conf_corr_plot = pe.Node(ConfoundsCorrelationPlot( reference_column='global_signal', max_dim=70), name='conf_corr_plot') ds_report_conf_corr = pe.Node(DerivativesDataSink( desc='confoundcorr', datatype="figures", dismiss_entities=("echo", )), name='ds_report_conf_corr', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) def _pick_csf(files): return files[2] # after smriprep#189, this is BIDS-compliant. def _pick_wm(files): return files[1] # after smriprep#189, this is BIDS-compliant. 
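
    # NOTE: the index-based selectors above assume sMRIPrep's BIDS-ordered
    # ``t1w_tpms`` list, i.e. [GM, WM, CSF] (see the smriprep#189 comments);
    # hence ``_pick_wm`` returns ``files[1]`` and ``_pick_csf`` returns ``files[2]``.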
workflow.connect([ # Massage ROIs (in T1w space) (inputnode, acc_tpm, [('t1w_tpms', 'in_files')]), (inputnode, csf_roi, [(('t1w_tpms', _pick_csf), 'in_tpm'), ('t1w_mask', 'in_mask')]), (inputnode, wm_roi, [(('t1w_tpms', _pick_wm), 'in_tpm'), ('t1w_mask', 'in_mask')]), (inputnode, acc_roi, [('t1w_mask', 'in_mask')]), (acc_tpm, acc_roi, [('out_file', 'in_tpm')]), # Map ROIs to BOLD (inputnode, csf_tfm, [('bold_mask', 'reference_image'), ('t1_bold_xform', 'transforms')]), (csf_roi, csf_tfm, [('roi_file', 'input_image')]), (inputnode, wm_tfm, [('bold_mask', 'reference_image'), ('t1_bold_xform', 'transforms')]), (wm_roi, wm_tfm, [('roi_file', 'input_image')]), (inputnode, acc_tfm, [('bold_mask', 'reference_image'), ('t1_bold_xform', 'transforms')]), (acc_roi, acc_tfm, [('roi_file', 'input_image')]), (inputnode, tcc_tfm, [('bold_mask', 'reference_image'), ('t1_bold_xform', 'transforms')]), (csf_roi, tcc_tfm, [('eroded_mask', 'input_image')]), # Mask ROIs with bold_mask (inputnode, csf_msk, [('bold_mask', 'in_mask')]), (inputnode, wm_msk, [('bold_mask', 'in_mask')]), (inputnode, acc_msk, [('bold_mask', 'in_mask')]), (inputnode, tcc_msk, [('bold_mask', 'in_mask')]), # connect inputnode to each non-anatomical confound node (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]), (inputnode, fdisp, [('movpar_file', 'in_file')]), # tCompCor (inputnode, tcompcor, [('bold', 'realigned_file')]), (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]), (tcc_tfm, tcc_msk, [('output_image', 'roi_file')]), (tcc_msk, tcompcor, [('out', 'mask_files')]), # aCompCor (inputnode, acompcor, [('bold', 'realigned_file')]), (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]), (acc_tfm, acc_msk, [('output_image', 'roi_file')]), (acc_msk, mrg_lbl_cc, [('out', 'in1')]), (csf_msk, mrg_lbl_cc, [('out', 'in2')]), (wm_msk, mrg_lbl_cc, [('out', 'in3')]), (mrg_lbl_cc, acompcor, [('out', 'mask_files')]), # Global signals extraction (constrained by anatomy) (inputnode, signals, [('bold', 'in_file')]), (csf_tfm, csf_msk, [('output_image', 'roi_file')]), (csf_msk, mrg_lbl, [('out', 'in1')]), (wm_tfm, wm_msk, [('output_image', 'roi_file')]), (wm_msk, mrg_lbl, [('out', 'in2')]), (inputnode, mrg_lbl, [('bold_mask', 'in3')]), (mrg_lbl, signals, [('out', 'label_files')]), # Collate computed confounds together (inputnode, add_motion_headers, [('movpar_file', 'in_file')]), (inputnode, add_rmsd_header, [('rmsd_file', 'in_file')]), (dvars, add_dvars_header, [('out_nstd', 'in_file')]), (dvars, add_std_dvars_header, [('out_std', 'in_file')]), (signals, concat, [('out_file', 'signals')]), (fdisp, concat, [('out_file', 'fd')]), (tcompcor, concat, [('components_file', 'tcompcor'), ('pre_filter_file', 'cos_basis')]), (acompcor, concat, [('components_file', 'acompcor')]), (add_motion_headers, concat, [('out_file', 'motion')]), (add_rmsd_header, concat, [('out_file', 'rmsd')]), (add_dvars_header, concat, [('out_file', 'dvars')]), (add_std_dvars_header, concat, [('out_file', 'std_dvars')]), # Confounds metadata (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]), (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]), (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]), (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]), (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]), # Expand the model with derivatives, quadratics, and spikes (concat, model_expand, [('confounds_file', 'confounds_file')]), (model_expand, spike_regress, [('confounds_file', 'confounds_file')]), # Set 
outputs (spike_regress, outputnode, [('confounds_file', 'confounds_file')]), (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]), (inputnode, rois_plot, [('bold', 'in_file'), ('bold_mask', 'in_mask')]), (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]), (acc_msk, mrg_compcor, [('out', 'in2')]), (mrg_compcor, rois_plot, [('out', 'in_rois')]), (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]), (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]), (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]), (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]), (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]), (concat, conf_corr_plot, [('confounds_file', 'confounds_file')]), (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]), ]) return workflow
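
# NOTE: the ``_maskroi`` helper wired into the ``csf_msk``/``wm_msk``/``acc_msk``/
# ``tcc_msk`` Function nodes above is defined elsewhere in this module. The
# function below is a minimal, purely illustrative sketch of the expected
# behavior (zero out ROI voxels that fall outside the BOLD brain mask); the
# ``_sketch`` name and output suffix are assumptions.
def _maskroi_sketch(in_mask, roi_file):
    """Restrict an ROI to the BOLD brain mask and return the new file (sketch)."""
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix

    roi = nb.load(roi_file)
    roidata = np.asanyarray(roi.dataobj).astype(np.uint8)
    msk = np.asanyarray(nb.load(in_mask).dataobj) > 0
    roidata[~msk] = 0  # keep only ROI voxels inside the BOLD mask

    out = fname_presuffix(roi_file, suffix='_boldmsk', newpath='.')
    roi.__class__(roidata, roi.affine, roi.header).to_filename(out)
    return out
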
def init_ica_aroma_wf(
    dt,
    aroma_melodic_dim=-200,
    err_on_aroma_warn=False,
    susan_fwhm=6.0,
    name='ica_aroma_wf',
):
    """
    Build a workflow that runs `ICA-AROMA`_.

    This workflow wraps `ICA-AROMA`_ to identify and remove motion-related
    independent components from a BOLD time series.

    The following steps are performed:

    #. Remove non-steady state volumes from the bold series.
    #. Smooth data using FSL `susan`, with a kernel width FWHM=6.0mm.
    #. Run FSL `melodic` outside of ICA-AROMA to generate the report
    #. Run ICA-AROMA
    #. Aggregate identified motion components (aggressive) to TSV
    #. Return ``classified_motion_ICs`` and ``melodic_mix`` for user to complete
       non-aggressive denoising in T1w space
    #. Calculate ICA-AROMA-identified noise components
       (columns named ``AROMAAggrCompXX``)

    There is a current discussion on whether other confounds should be extracted
    before or after denoising `here
    <http://nbviewer.jupyter.org/github/nipreps/fmriprep-notebooks/blob/922e436429b879271fa13e76767a6e73443e74d9/issue-817_aroma_confounds.ipynb>`__.

    .. _ICA-AROMA: https://github.com/maartenmennes/ICA-AROMA

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from ecp.workflows.confounds import init_ica_aroma_wf
            wf = init_ica_aroma_wf(dt=1.0)

    Parameters
    ----------
    dt : :obj:`float`
        BOLD repetition time (TR), in seconds
    aroma_melodic_dim : :obj:`int`
        Set the dimensionality of the MELODIC ICA decomposition.
        Negative numbers set a maximum on automatic dimensionality estimation.
        Positive numbers set an exact number of components to extract.
        (default: -200, i.e., estimate <=200 components)
    err_on_aroma_warn : :obj:`bool`
        Do not fail on ICA-AROMA errors
    susan_fwhm : :obj:`float`
        Kernel width (FWHM in mm) for the smoothing step with
        FSL ``susan`` (default: 6.0mm)
    name : :obj:`str`
        Name of workflow (default: ``ica_aroma_wf``)

    Inputs
    ------
    bold_std
        BOLD series NIfTI file in MNI152NLin6Asym space
    bold_mask_std
        BOLD mask for MNI152NLin6Asym space
    movpar_file
        movement parameter file
    skip_vols
        number of non steady state volumes

    Outputs
    -------
    aroma_confounds
        TSV of confounds identified as noise by ICA-AROMA
    aroma_noise_ics
        CSV of noise components identified by ICA-AROMA
    melodic_mix
        FSL MELODIC mixing matrix
    aroma_metadata
        Metadata for the components identified as noise by ICA-AROMA
    out_report
        ICA-AROMA report

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.segmentation import ICA_AROMARPT
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import TSV2JSON

    workflow = Workflow(name=name)
    workflow.__postdesc__ = """\
Automatic removal of motion artifacts using independent component analysis
[ICA-AROMA, @aroma] was performed on the *preprocessed BOLD on MNI space*
time-series after removal of non-steady state volumes and spatial smoothing
with an isotropic, Gaussian kernel of 6mm FWHM (full-width half-maximum).
The "aggressive" noise-regressors were collected and placed in the corresponding
confounds file.
""" inputnode = pe.Node(niu.IdentityInterface(fields=[ 'bold_std', 'bold_mask_std', 'movpar_file', 'skip_vols', ]), name='inputnode') outputnode = pe.Node(niu.IdentityInterface(fields=[ 'aroma_confounds', 'aroma_noise_ics', 'melodic_mix', 'aroma_metadata', 'out_report' ]), name='outputnode') # extract out to BOLD base rm_non_steady_state = pe.Node(Trim(), name='rm_nonsteady') trim_movement = pe.Node(TrimMovement(), name='trim_movement') calc_median_val = pe.Node(fsl.ImageStats(op_string='-k %s -p 50'), name='calc_median_val') calc_bold_mean = pe.Node(fsl.MeanImage(), name='calc_bold_mean') def _getusans_func(image, thresh): return [tuple([image, thresh])] getusans = pe.Node(niu.Function(function=_getusans_func, output_names=['usans']), name='getusans', mem_gb=0.01) smooth = pe.Node(fsl.SUSAN(fwhm=susan_fwhm), name='smooth') # melodic node melodic = pe.Node(fsl.MELODIC(no_bet=True, tr_sec=dt, mm_thresh=0.5, out_stats=True, dim=aroma_melodic_dim), name="melodic") # ica_aroma node ica_aroma = pe.Node(ICA_AROMARPT(denoise_type='no', generate_report=True, TR=dt, args='-np'), name='ica_aroma') # extract the confound ICs from the results ica_aroma_confound_extraction = pe.Node( ICAConfounds(err_on_aroma_warn=err_on_aroma_warn), name='ica_aroma_confound_extraction') ica_aroma_metadata_fmt = pe.Node(TSV2JSON(index_column='IC', output=None, enforce_case=True, additional_metadata={ 'Method': { 'Name': 'ICA-AROMA', 'Version': getenv( 'AROMA_VERSION', 'n/a') } }), name='ica_aroma_metadata_fmt') def _getbtthresh(medianval): return 0.75 * medianval # connect the nodes workflow.connect([ (inputnode, ica_aroma, [('movpar_file', 'motion_parameters')]), (inputnode, rm_non_steady_state, [('skip_vols', 'begin_index')]), (inputnode, rm_non_steady_state, [('bold_std', 'in_file')]), (inputnode, calc_median_val, [('bold_mask_std', 'mask_file')]), (inputnode, trim_movement, [('movpar_file', 'movpar_file')]), (inputnode, trim_movement, [('skip_vols', 'skip_vols')]), (rm_non_steady_state, calc_median_val, [('out_file', 'in_file')]), (rm_non_steady_state, calc_bold_mean, [('out_file', 'in_file')]), (calc_bold_mean, getusans, [('out_file', 'image')]), (calc_median_val, getusans, [('out_stat', 'thresh')]), # Connect input nodes to complete smoothing (rm_non_steady_state, smooth, [('out_file', 'in_file')]), (getusans, smooth, [('usans', 'usans')]), (calc_median_val, smooth, [(('out_stat', _getbtthresh), 'brightness_threshold')]), # connect smooth to melodic (smooth, melodic, [('smoothed_file', 'in_files')]), (inputnode, melodic, [('bold_mask_std', 'mask')]), # connect nodes to ICA-AROMA (smooth, ica_aroma, [('smoothed_file', 'in_file')]), (inputnode, ica_aroma, [('bold_mask_std', 'report_mask'), ('bold_mask_std', 'mask')]), (melodic, ica_aroma, [('out_dir', 'melodic_dir')]), # generate tsvs from ICA-AROMA (ica_aroma, ica_aroma_confound_extraction, [('out_dir', 'in_directory') ]), (inputnode, ica_aroma_confound_extraction, [('skip_vols', 'skip_vols') ]), (ica_aroma_confound_extraction, ica_aroma_metadata_fmt, [('aroma_metadata', 'in_file')]), # output for processing and reporting (ica_aroma_confound_extraction, outputnode, [('aroma_confounds', 'aroma_confounds'), ('aroma_noise_ics', 'aroma_noise_ics'), ('melodic_mix', 'melodic_mix')]), (ica_aroma_metadata_fmt, outputnode, [('output', 'aroma_metadata')]), (ica_aroma, outputnode, [('out_report', 'out_report')]), ]) return workflow
def init_bold_confs_wf(
    out_dir,
    out_path_base,
    source_file,
    mem_gb,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    dt=None,
    work_dir=None,
    name="bold_confs_wf",
):
    """
    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.

    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)

    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_bold_confs_wf
        wf = init_bold_confs_wf(
            mem_gb=1,
            regressors_all_comps=False,
            regressors_dvars_th=1.5,
            regressors_fd_th=0.5,
            dt=2.0,
        )

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB - please note that this size should be
            calculated after resamplings that may extend the FoV
        regressors_all_comps : bool
            Indicates whether CompCor decompositions should return all
            components instead of the minimal number of components necessary
            to explain 50 percent of the variance in the decomposition mask.
        regressors_dvars_th
            Criterion for flagging DVARS outliers
        regressors_fd_th
            Criterion for flagging framewise displacement outliers
        dt : float
            BOLD repetition time (TR), in seconds
        name : str
            Name of workflow (default: ``bold_confs_wf``)

    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        movpar_file
            SPM-formatted motion parameters file
        skip_vols
            number of non steady state volumes
        csf_mask
            CSF mask in MNI 2mm space
        wm_mask
            WM mask in MNI 2mm space
        cortical_gm_mask
            cortical GM mask in MNI 2mm space

    **Outputs**

        confounds_file
            TSV of all aggregated confounds
        confounds_metadata
            Confounds metadata dictionary.
""" DerivativesDataSink.out_path_base = out_path_base workflow = Workflow(name=name, base_dir=work_dir) inputnode = pe.Node(niu.IdentityInterface(fields=[ 'bold', 'bold_mask', 'movpar_file', 'skip_vols', 'csf_mask', 'wm_mask', 'cortical_gm_mask' ]), name='inputnode') outputnode = pe.Node( niu.IdentityInterface(fields=['confounds_file', 'confounds_metadata']), name='outputnode') # create tcc mask: fslmaths cortical_gm_mask -dilD -mul -1 -add bold_mask -bin tcc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-dilD -mul -1 -add', args='-bin'), name='tcc_roi') # create acc mask fslmaths wm_mask -add csf_mask acc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-add'), name='acc_roi') # Ensure ROIs don't go off-limits (reduced FoV) csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk') wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk') acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk') tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk') # DVARS dvars = pe.Node(nac.ComputeDVARS(save_nstd=True, save_std=True, remove_zerovariance=True), name="dvars", mem_gb=mem_gb) # Frame displacement fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"), name="fdisp", mem_gb=mem_gb) # a/t-Compcor mrg_lbl_cc = pe.Node(niu.Merge(3), name='merge_rois_cc', run_without_submitting=True) tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv', header_prefix='t_comp_cor_', pre_filter='cosine', save_pre_filter=True, save_metadata=True, percentile_threshold=.05, failure_mode='NaN'), name="tcompcor", mem_gb=mem_gb) acompcor = pe.Node(ACompCor(components_file='acompcor.tsv', header_prefix='a_comp_cor_', pre_filter='cosine', save_pre_filter=True, save_metadata=True, mask_names=['combined', 'CSF', 'WM'], merge_method='none', failure_mode='NaN'), name="acompcor", mem_gb=mem_gb) # Set number of components if regressors_all_comps: acompcor.inputs.num_components = 'all' tcompcor.inputs.num_components = 'all' else: acompcor.inputs.variance_threshold = 0.5 tcompcor.inputs.variance_threshold = 0.5 # Set TR if present if dt: tcompcor.inputs.repetition_time = dt acompcor.inputs.repetition_time = dt # Global and segment regressors mrg_lbl = pe.Node(niu.Merge(3), name='merge_rois', run_without_submitting=True) signals = pe.Node(SignalExtraction( class_labels=["csf", "white_matter", "global_signal"]), name="signals", mem_gb=mem_gb) # Arrange confounds add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]), name="add_dvars_header", mem_gb=0.01, run_without_submitting=True) add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]), name="add_std_dvars_header", mem_gb=0.01, run_without_submitting=True) add_motion_headers = pe.Node(AddTSVHeader( columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]), name="add_motion_headers", mem_gb=0.01, run_without_submitting=True) concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01, run_without_submitting=True) # CompCor metadata tcc_metadata_fmt = pe.Node(TSV2JSON( index_column='component', drop_columns=['mask'], output=None, additional_metadata={'Method': 'tCompCor'}, enforce_case=True), name='tcc_metadata_fmt') acc_metadata_fmt = pe.Node(TSV2JSON( index_column='component', output=None, additional_metadata={'Method': 'aCompCor'}, enforce_case=True), name='acc_metadata_fmt') mrg_conf_metadata = pe.Node(niu.Merge(2), name='merge_confound_metadata', run_without_submitting=True) mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2', run_without_submitting=True) # Expand model to 
include derivatives and quadratics model_expand = pe.Node( ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'), name='model_expansion') # Add spike regressors spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th, dvars_thresh=regressors_dvars_th), name='spike_regressors') # Generate reportlet (ROIs) mrg_compcor = pe.Node(niu.Merge(2), name='merge_compcor', run_without_submitting=True) rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'], generate_report=True), name='rois_plot', mem_gb=mem_gb) ds_report_bold_rois = pe.Node(DerivativesDataSink(base_directory=out_dir, desc='rois', source_file=source_file, suffix='reportlet', keep_dtype=True), name='ds_report_bold_rois', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) # Generate reportlet (CompCor) mrg_cc_metadata = pe.Node(niu.Merge(2), name='merge_compcor_metadata', run_without_submitting=True) compcor_plot = pe.Node( CompCorVariancePlot(metadata_sources=['tCompCor', 'aCompCor']), name='compcor_plot') ds_report_compcor = pe.Node(DerivativesDataSink(base_directory=out_dir, desc='compcorvar', source_file=source_file, keep_dtype=True), name='ds_report_compcor', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) # Generate reportlet (Confound correlation) conf_corr_plot = pe.Node(ConfoundsCorrelationPlot( reference_column='global_signal', max_dim=70), name='conf_corr_plot') ds_report_conf_corr = pe.Node(DerivativesDataSink(base_directory=out_dir, desc='confoundcorr', source_file=source_file, keep_dtype=True), name='ds_report_conf_corr', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) workflow.connect([ # generate tcc and acc rois (inputnode, tcc_roi, [('cortical_gm_mask', 'in_file'), ('bold_mask', 'in_file2')]), (inputnode, acc_roi, [('wm_mask', 'in_file'), ('csf_mask', 'in_file2')]), # Mask ROIs with bold_mask (inputnode, csf_msk, [('bold_mask', 'in_mask')]), (inputnode, wm_msk, [('bold_mask', 'in_mask')]), (inputnode, acc_msk, [('bold_mask', 'in_mask')]), (inputnode, tcc_msk, [('bold_mask', 'in_mask')]), # connect inputnode to each non-anatomical confound node (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]), (inputnode, fdisp, [('movpar_file', 'in_file')]), # tCompCor (inputnode, tcompcor, [('bold', 'realigned_file')]), (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]), (tcc_roi, tcc_msk, [('out_file', 'roi_file')]), (tcc_msk, tcompcor, [('out', 'mask_files')]), # aCompCor (inputnode, acompcor, [('bold', 'realigned_file')]), (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]), (acc_roi, acc_msk, [('out_file', 'roi_file')]), (acc_msk, mrg_lbl_cc, [('out', 'in1')]), (inputnode, mrg_lbl_cc, [('csf_mask', 'in2')]), (inputnode, mrg_lbl_cc, [('wm_mask', 'in3')]), (mrg_lbl_cc, acompcor, [('out', 'mask_files')]), # Global signals extraction (constrained by anatomy) (inputnode, signals, [('bold', 'in_file')]), (inputnode, csf_msk, [('csf_mask', 'roi_file')]), (csf_msk, mrg_lbl, [('out', 'in1')]), (inputnode, wm_msk, [('wm_mask', 'roi_file')]), (wm_msk, mrg_lbl, [('out', 'in2')]), (inputnode, mrg_lbl, [('bold_mask', 'in3')]), (mrg_lbl, signals, [('out', 'label_files')]), # Collate computed confounds together (inputnode, add_motion_headers, [('movpar_file', 'in_file')]), (dvars, add_dvars_header, [('out_nstd', 'in_file')]), (dvars, add_std_dvars_header, [('out_std', 'in_file')]), (signals, concat, [('out_file', 'signals')]), (fdisp, concat, [('out_file', 'fd')]), (tcompcor, concat, [('components_file', 'tcompcor'), ('pre_filter_file', 
'cos_basis')]), (acompcor, concat, [('components_file', 'acompcor')]), (add_motion_headers, concat, [('out_file', 'motion')]), (add_dvars_header, concat, [('out_file', 'dvars')]), (add_std_dvars_header, concat, [('out_file', 'std_dvars')]), # Confounds metadata (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]), (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]), (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]), (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]), (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]), # Expand the model with derivatives, quadratics, and spikes (concat, model_expand, [('confounds_file', 'confounds_file')]), (model_expand, spike_regress, [('confounds_file', 'confounds_file')]), # Set outputs (spike_regress, outputnode, [('confounds_file', 'confounds_file')]), (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]), (inputnode, rois_plot, [('bold', 'in_file'), ('bold_mask', 'in_mask')]), (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]), (acc_msk, mrg_compcor, [('out', 'in2')]), (mrg_compcor, rois_plot, [('out', 'in_rois')]), (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]), (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]), (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]), (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]), (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]), (concat, conf_corr_plot, [('confounds_file', 'confounds_file')]), (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]), ]) return workflow
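
# For reference: the ``tcc_roi`` and ``acc_roi`` ImageMaths nodes in the
# workflow above implement the mask algebra spelled out in their inline
# comments (``fslmaths cortical_gm_mask -dilD -mul -1 -add bold_mask -bin`` and
# ``fslmaths wm_mask -add csf_mask``). The function below is a purely
# illustrative numpy/nibabel restatement of that algebra -- the tCompCor ROI is
# the BOLD brain mask minus a dilated cortical GM mask, and the aCompCor ROI is
# the union of WM and CSF -- not the implementation used by the workflow; the
# name is an assumption and the dilation kernel only approximates FSL's.
def _compcor_rois_sketch(cortical_gm_mask, bold_mask, wm_mask, csf_mask):
    """Return (tcc_roi, acc_roi) boolean arrays mirroring the fslmaths steps."""
    import numpy as np
    import nibabel as nb
    from scipy.ndimage import binary_dilation

    gm = np.asanyarray(nb.load(cortical_gm_mask).dataobj) > 0
    brain = np.asanyarray(nb.load(bold_mask).dataobj) > 0
    wm = np.asanyarray(nb.load(wm_mask).dataobj) > 0
    csf = np.asanyarray(nb.load(csf_mask).dataobj) > 0

    tcc_roi = brain & ~binary_dilation(gm)  # brain mask minus dilated cortical GM
    acc_roi = wm | csf                      # union of WM and CSF
    return tcc_roi, acc_roi
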
def init_confound_wf(t1, t1_mask, wm_tpm, csf_tpm, bold, bold_mask, tr, skipvols):
    ''' Initialize the confound extraction workflow '''
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 't1w_mask', 't1w', 'wm_tpm', 'csf_tpm', 'tpms',
        'skip_vols',  # must be declared so it can feed aCompCor below
    ]), name='inputnode')
    inputnode.inputs.bold = bold
    inputnode.inputs.bold_mask = bold_mask
    inputnode.inputs.t1w_mask = t1_mask
    inputnode.inputs.t1w = t1
    inputnode.inputs.wm_tpm = wm_tpm
    inputnode.inputs.csf_tpm = csf_tpm
    inputnode.inputs.tpms = [wm_tpm, csf_tpm]
    inputnode.inputs.skip_vols = skipvols

    # WM Inputs
    wm_roi = pe.Node(TPM2ROI(erode_prop=0.6, mask_erode_prop=0.6**3),
                     name='wm_roi')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    resample_wm_roi = pe.Node(ResampleTPM(), name='resampled_wm_roi')

    # CSF inputs
    csf_roi = pe.Node(TPM2ROI(erode_mm=0, mask_erode_mm=30), name='csf_roi')
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    resample_csf_roi = pe.Node(ResampleTPM(), name='resampled_csf_roi')

    # Nodes for aCompCor
    merge_label = pe.Node(niu.Merge(2), name='merge_rois',
                          run_without_submitting=True)

    # Set up aCompCor
    acc_tpm = pe.Node(AddTPMs(indices=[0, 1]), name='tpms_add_csf_wm')
    acc_roi = pe.Node(TPM2ROI(erode_prop=0.6, mask_erode_prop=0.6**3),
                      name='acc_roi')
    resample_acc_roi = pe.Node(ResampleTPM(), name='resampled_acc_roi')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                repetition_time=tr,
                                save_pre_filter=True,
                                save_metadata=True,
                                merge_method='none',
                                mask_names=["combined", "CSF", "WM"],
                                failure_mode="NaN"),
                       name='acompcor')
    acompcor.inputs.variance_threshold = 0.5

    # aCompCor metadata extraction
    acc_metadata_fmt = pe.Node(TSV2JSON(index_column="component",
                                        output=None,
                                        additional_metadata={'Method': 'aCompCor'},
                                        enforce_case=True),
                               name='acc_metadata_fmt')
    acc_meta2json = pe.Node(niu.Function(function=_dict2json,
                                         output_names=['out_meta']),
                            name='acc_meta2json')

    mrg_lbl_cc = pe.Node(niu.Merge(3), name='merge_rois_acc')

    signals_class_labels = ["white_matter", "csf"]
    signals = pe.Node(SignalExtraction(class_labels=signals_class_labels),
                      name="signals")

    # Nodes to join signal extraction and aCompCor components
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'signals', 'wm_roi', 'csf_roi', 'acc_roi', 'components_file',
        'confounds_file', 'confounds_metadata'
    ]), name='outputnode')

    wf = pe.Workflow(name='confound_wf')
    wf.config['execution']['crashfile_format'] = 'txt'

    # WM workflow
    wf.connect([(inputnode, wm_roi, [('wm_tpm', 'in_tpm'),
                                     ('t1w_mask', 'in_mask')]),
                (inputnode, resample_wm_roi, [('bold_mask', 'fixed_file')]),
                (wm_roi, resample_wm_roi, [('roi_file', 'moving_file')]),
                (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
                (resample_wm_roi, wm_msk, [('out_file', 'roi_file')])])

    # CSF workflow
    wf.connect([(inputnode, csf_roi, [('csf_tpm', 'in_tpm'),
                                      ('t1w_mask', 'in_mask')]),
                (inputnode, resample_csf_roi, [('bold_mask', 'fixed_file')]),
                (csf_roi, resample_csf_roi, [('roi_file', 'moving_file')]),
                (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
                (resample_csf_roi, csf_msk, [('out_file', 'roi_file')])])

    # Signal extraction workflow
    wf.connect([(wm_msk, merge_label, [('out', 'in1')]),
                (csf_msk, merge_label, [('out', 'in2')]),
                (inputnode, signals, [('bold', 'in_file')]),
                (merge_label, signals, [('out', 'label_files')]),
                (signals, outputnode, [('out_file', 'signals')]),
                (wm_roi, outputnode, [('roi_file', 'wm_roi')]),
                (csf_roi, outputnode, [('roi_file', 'csf_roi')])])

    # ACC workflow
    wf.connect([
        (inputnode, acc_tpm, [('tpms', 'in_files')]),
        (inputnode, acc_roi, [('t1w_mask', 'in_mask')]),
        (acc_tpm, acc_roi, [('out_file', 'in_tpm')]),
        (inputnode, resample_acc_roi, [('bold_mask', 'fixed_file')]),
        (acc_roi, resample_acc_roi, [('roi_file', 'moving_file')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (resample_acc_roi, acc_msk, [('out_file', 'roi_file')]),
        (acc_msk, mrg_lbl_cc, [('out', 'in1')]),
        (csf_msk, mrg_lbl_cc, [('out', 'in2')]),
        (wm_msk, mrg_lbl_cc, [('out', 'in3')]),
        (mrg_lbl_cc, acompcor, [('out', 'mask_files')]),
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (acc_roi, outputnode, [('roi_file', 'acc_roi')]),
        (acompcor, outputnode, [('components_file', 'components_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acc_metadata_fmt, acc_meta2json, [('output', 'in_dict')]),
        (acc_meta2json, outputnode, [('out_meta', 'confounds_metadata')])
    ])

    concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01,
                     run_without_submitting=True)

    # Join ACC and WM/CSF signal extraction workflow TSVs
    wf.connect([(signals, concat, [('out_file', 'signals')]),
                (acompcor, concat, [('components_file', 'acompcor')]),
                (concat, outputnode, [('confounds_file', 'confounds_file')])])

    return wf
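
# NOTE: the ``_dict2json`` helper wired into the ``acc_meta2json`` node above is
# not shown in this section. The function below is a minimal, purely
# illustrative sketch of the expected behavior (serialize the aCompCor metadata
# dictionary to a JSON file and return its path); the ``_sketch`` name and
# output filename are assumptions.
def _dict2json_sketch(in_dict):
    """Write ``in_dict`` to a JSON file and return the file path (sketch)."""
    import json
    import os

    out_meta = os.path.abspath('confounds_metadata.json')
    with open(out_meta, 'w') as fobj:
        json.dump(in_dict, fobj, indent=2, sort_keys=True)
    return out_meta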