Example #1
def _applytfms(args):
    """
    Applies ANTs' antsApplyTransforms to the input image.
    All inputs are zipped in one tuple to make it digestible by
    multiprocessing's map
    """
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms

    in_file, in_xform, ifargs, index, newpath = args
    out_file = fname_presuffix(in_file,
                               suffix='_xform-%05d' % index,
                               newpath=newpath,
                               use_ext=True)

    copy_dtype = ifargs.pop('copy_dtype', False)
    xfm = ApplyTransforms(input_image=in_file,
                          transforms=in_xform,
                          output_image=out_file,
                          **ifargs)
    xfm.terminal_output = 'allatonce'
    xfm.resource_monitor = False
    runtime = xfm.run().runtime

    if copy_dtype:
        nii = nb.load(out_file, mmap=False)
        in_dtype = nb.load(in_file).get_data_dtype()

        # Overwrite only iff dtypes don't match
        if in_dtype != nii.get_data_dtype():
            nii.set_data_dtype(in_dtype)
            nii.to_filename(out_file)

    return (out_file, runtime.cmdline)
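# --- Usage sketch (not part of the original function) ---
# The docstring above notes that all inputs are zipped into one tuple so the
# helper can be dispatched with multiprocessing's map. A minimal, hypothetical
# driver could look like the following; the file names, transform files and
# ``ifargs`` settings are placeholders, and ANTs must be available for the
# underlying command to actually run.
def _run_applytfms_example():
    from multiprocessing import Pool

    ifargs = {'interpolation': 'LanczosWindowedSinc', 'float': True, 'copy_dtype': True}
    volumes = ['vol0000.nii.gz', 'vol0001.nii.gz']   # hypothetical split BOLD volumes
    xforms = ['xfm0000.tfm', 'xfm0001.tfm']          # hypothetical per-volume transforms
    tasks = [
        # each task carries its own copy of ifargs plus an index and output dir (None = same dir)
        (vol, xfm, dict(ifargs), idx, None)
        for idx, (vol, xfm) in enumerate(zip(volumes, xforms))
    ]
    with Pool(processes=2) as pool:
        # returns a list of (out_file, cmdline) tuples, one per input volume
        return pool.map(_applytfms, tasks)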
Example #2
def init_anat_seg_wf(
    age_months=None,
    anat_modality="T1w",
    template_dir=None,
    sloppy=False,
    omp_nthreads=1,
    name="anat_seg_wf",
):
    """
    Calculate brain tissue segmentations from either:
     A) a collection of manually segmented templates
     B) FSL's FAST
    """

    if anat_modality != "T1w":
        raise NotImplementedError(
            "Only T1w images are currently accepted for the segmentation workflow."
        )

    wf = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=["anat_brain"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["anat_aseg", "anat_dseg", "anat_tpms"]),
        name="outputnode",
    )

    # Coerce segmentation labels to BIDS
    lut_anat_dseg = pe.Node(niu.Function(function=_apply_bids_lut),
                            name="lut_anat_dseg")

    if template_dir is None:
        # Use FSL FAST for segmentations
        anat_dseg = pe.Node(fsl.FAST(segments=True,
                                     no_bias=True,
                                     probability_maps=True),
                            name='anat_dseg',
                            mem_gb=3)
        lut_anat_dseg.inputs.lut = (0, 3, 1, 2)  # Maps: 0 -> 0, 3 -> 1, 1 -> 2, 2 -> 3.
        fast2bids = pe.Node(niu.Function(function=_probseg_fast2bids),
                            name="fast2bids",
                            run_without_submitting=True)

        wf.connect([
            (inputnode, anat_dseg, [
                ('anat_brain', 'in_files'),
            ]),
            (anat_dseg, lut_anat_dseg, [
                ('partial_volume_map', 'in_dseg'),
            ]),
            (lut_anat_dseg, outputnode, [
                ('out', 'anat_dseg'),
            ]),
            (anat_dseg, fast2bids, [
                ('partial_volume_files', 'inlist'),
            ]),
            (fast2bids, outputnode, [
                ('out', 'anat_tpms'),
            ]),
        ])
        return wf

    # Otherwise, register to templates and run ANTs JointFusion
    lut_anat_dseg.inputs.lut = _aseg_to_three()
    tmpl_anats, tmpl_segs = _parse_segmentation_atlases(
        anat_modality, template_dir)

    # Register each template anatomical (moving) to the subject's anatomy (fixed)
    ants_params = "testing" if sloppy else "precise"
    norm = pe.MapNode(
        Registration(from_file=pkgr_fn(
            "niworkflows.data", f"antsBrainExtraction_{ants_params}.json")),
        name="norm",
        iterfield=["moving_image"],
        n_procs=omp_nthreads,
        mem_gb=DEFAULT_MEMORY_MIN_GB,
    )
    norm.inputs.moving_image = tmpl_anats
    norm.inputs.float = True

    apply_atlas = pe.MapNode(
        ApplyTransforms(
            dimension=3,
            interpolation="NearestNeighbor",
            float=True,
        ),
        iterfield=["transforms", "input_image"],
        name="apply_atlas",
    )
    apply_atlas.inputs.input_image = tmpl_anats

    apply_seg = pe.MapNode(
        ApplyTransforms(dimension=3,
                        interpolation="MultiLabel"),  # NearestNeighbor?
        name="apply_seg",
        iterfield=["transforms", "input_image"],
    )
    apply_seg.inputs.input_image = tmpl_segs

    jointfusion = pe.Node(
        JointFusion(
            dimension=3,
            out_label_fusion="fusion_labels.nii.gz",
            num_threads=omp_nthreads,
        ),
        name="jointfusion",
    )

    jf_label = pe.Node(niu.Function(function=_to_dtype), name="jf_label")

    # split each tissue into individual masks
    split_seg = pe.Node(niu.Function(function=_split_segments),
                        name="split_seg")
    to_list = pe.Node(niu.Function(function=_to_list), name='to_list')

    # fmt: off
    wf.connect([
        (inputnode, norm, [('anat_brain', 'fixed_image')]),
        (norm, apply_atlas, [('forward_transforms', 'transforms')]),
        (inputnode, apply_atlas, [('anat_brain', 'reference_image')]),
        (norm, apply_seg, [('forward_transforms', 'transforms')]),
        (inputnode, apply_seg, [('anat_brain', 'reference_image')]),
        (inputnode, to_list, [('anat_brain', 'in_file')]),
        (to_list, jointfusion, [('out', 'target_image')]),
        (apply_atlas, jointfusion, [('output_image', 'atlas_image')]),
        (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]),
        (jointfusion, jf_label, [('out_label_fusion', 'in_file')]),
        (jf_label, outputnode, [('out', 'anat_aseg')]),
        (jf_label, lut_anat_dseg, [('out', 'in_dseg')]),
        (lut_anat_dseg, outputnode, [('out', 'anat_dseg')]),
        (lut_anat_dseg, split_seg, [('out', 'in_file')]),
        (split_seg, outputnode, [('out', 'anat_tpms')]),
    ])
    # fmt: on

    return wf
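# --- Usage sketch (not part of the original module) ---
# Instantiating the workflow in its FSL FAST branch (``template_dir=None``);
# the brain-extracted T1w path below is a placeholder and the workflow is not
# executed here.
def _build_anat_seg_example():
    seg_wf = init_anat_seg_wf(
        anat_modality="T1w",
        template_dir=None,   # no segmented templates -> falls back to FSL FAST
        sloppy=True,
        omp_nthreads=2,
        name="anat_seg_wf",
    )
    seg_wf.inputs.inputnode.anat_brain = "sub-01_desc-brain_T1w.nii.gz"
    # seg_wf.run() would populate outputnode.anat_dseg and outputnode.anat_tpms
    return seg_wf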
Example #3
def init_bold_std_trans_wf(freesurfer,
                           mem_gb,
                           omp_nthreads,
                           standard_spaces,
                           name='bold_std_trans_wf',
                           use_compression=True,
                           use_fieldwarp=False):
    """
    This workflow samples functional images into standard space with a single
    resampling of the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from collections import OrderedDict
        from fmriprep.workflows.bold import init_bold_std_trans_wf
        wf = init_bold_std_trans_wf(
            freesurfer=True,
            mem_gb=3,
            omp_nthreads=1,
            standard_spaces=OrderedDict([('MNI152Lin', {}),
                                         ('fsaverage', {'density': '10k'})]),
        )

    **Parameters**

        freesurfer : bool
            Whether to generate FreeSurfer's aseg/aparc segmentations in template space
            (at the BOLD resolution).
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        standard_spaces : OrderedDict
            Ordered dictionary where keys are TemplateFlow ID strings (e.g.,
            ``MNI152Lin``, ``MNI152NLin6Asym``, ``MNI152NLin2009cAsym``, or ``fsLR``),
            or paths pointing to custom templates organized in a TemplateFlow-like structure.
            Values of the dictionary aggregate modifiers (e.g., the value for the key ``MNI152Lin``
            could be ``{'resolution': 2}`` if one wants the resampling to be done on the 2mm
            resolution version of the selected template).
        name : str
            Name of workflow (default: ``bold_std_trans_wf``)
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI

    **Inputs**

        anat2std_xfm
            List of anatomical-to-standard space transforms generated during
            spatial normalization.
        bold_aparc
            FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_aseg
            FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_mask
            Skull-stripping mask of reference image
        bold_split
            Individual 3D volumes, not motion corrected
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        templates
            List of templates that were applied as targets during
            spatial normalization.

    **Outputs**

    Two output nodes are available. One (``poutputnode``) will be parameterized in a
    Nipype sense (see `Nipype iterables
    <https://miykael.github.io/nipype_tutorial/notebooks/basic_iteration.html>`__), and a
    second node (``outputnode``) will collapse the parameterized outputs into synchronous
    lists of the following fields:

        bold_std
            BOLD series, resampled to template space
        bold_std_ref
            Reference, contrast-enhanced summary of the BOLD series, resampled to template space
        bold_mask_std
            BOLD series mask in template space
        bold_aseg_std
            FreeSurfer's ``aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)
        bold_aparc_std
            FreeSurfer's ``aparc+aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)
        templates
            Template identifiers synchronized correspondingly to previously
            described outputs.

    """

    # Filter ``standard_spaces``
    vol_std_spaces = [
        k for k in standard_spaces.keys() if not k.startswith('fs')
    ]

    workflow = Workflow(name=name)

    if len(vol_std_spaces) == 1:
        workflow.__desc__ = """\
The BOLD time-series were resampled into standard space,
generating a *preprocessed BOLD run in {tpl} space*.
""".format(tpl=vol_std_spaces)
    else:
        workflow.__desc__ = """\
The BOLD time-series were resampled into several standard spaces,
correspondingly generating the following *spatially-normalized,
preprocessed BOLD runs*: {tpl}.
""".format(tpl=', '.join(vol_std_spaces))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'anat2std_xfm',
        'bold_aparc',
        'bold_aseg',
        'bold_mask',
        'bold_split',
        'fieldwarp',
        'hmc_xforms',
        'itk_bold_to_t1',
        'name_source',
        'templates',
    ]),
                        name='inputnode')

    select_std = pe.Node(KeySelect(fields=['resolution', 'anat2std_xfm']),
                         name='select_std',
                         run_without_submitting=True)

    select_std.inputs.resolution = [
        v.get('resolution') or v.get('res') or 'native'
        for k, v in list(standard_spaces.items()) if k in vol_std_spaces
    ]
    select_std.iterables = ('key', vol_std_spaces)

    select_tpl = pe.Node(niu.Function(function=_select_template),
                         name='select_tpl',
                         run_without_submitting=True)
    select_tpl.inputs.template_specs = standard_spaces

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB

    mask_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_std_tfm',
                           mem_gb=1)

    # Merge the anatomical-to-standard and BOLD-to-T1w transforms to resample the mask
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, select_std, [('templates', 'keys'),
                                 ('anat2std_xfm', 'anat2std_xfm')]),
        (inputnode, mask_std_tfm, [('bold_mask', 'input_image')]),
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, mask_merge_tfms, [(('itk_bold_to_t1', _aslist), 'in2')]),
        (select_std, select_tpl, [('key', 'template')]),
        (select_std, mask_merge_tfms, [('anat2std_xfm', 'in1')]),
        (select_std, gen_ref, [(('resolution', _is_native), 'keep_native')]),
        (select_tpl, gen_ref, [('out', 'fixed_image')]),
        (mask_merge_tfms, mask_std_tfm, [('out', 'transforms')]),
        (gen_ref, mask_std_tfm, [('out_file', 'reference_image')]),
    ])

    nxforms = 4 if use_fieldwarp else 3
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms', 'in%d' % nxforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    bold_to_std_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_std_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a reference on the target standard space
    gen_final_ref = init_bold_reference_wf(omp_nthreads=omp_nthreads,
                                           pre_mask=True)

    workflow.connect([
        (inputnode, merge_xforms, [(('itk_bold_to_t1', _aslist), 'in2')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, bold_to_std_transform, [('bold_split', 'input_image')]),
        (select_std, merge_xforms, [('anat2std_xfm', 'in1')]),
        (merge_xforms, bold_to_std_transform, [('out', 'transforms')]),
        (gen_ref, bold_to_std_transform, [('out_file', 'reference_image')]),
        (bold_to_std_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]),
        (mask_std_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')]),
    ])

    # Connect output nodes
    output_names = ['bold_std', 'bold_std_ref', 'bold_mask_std', 'templates']
    if freesurfer:
        output_names += ['bold_aseg_std', 'bold_aparc_std']

    # poutputnode - parametric output node
    poutputnode = pe.Node(niu.IdentityInterface(fields=output_names),
                          name='poutputnode')

    workflow.connect([
        (gen_final_ref, poutputnode, [('outputnode.ref_image', 'bold_std_ref')]),
        (merge, poutputnode, [('out_file', 'bold_std')]),
        (mask_std_tfm, poutputnode, [('output_image', 'bold_mask_std')]),
        (select_std, poutputnode, [('key', 'templates')]),
    ])

    if freesurfer:
        # Sample the parcellation files into template space (at the BOLD resolution)
        aseg_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                               float=True),
                               name='aseg_std_tfm',
                               mem_gb=1)
        aparc_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                float=True),
                                name='aparc_std_tfm',
                                mem_gb=1)

        workflow.connect([
            (inputnode, aseg_std_tfm, [('bold_aseg', 'input_image')]),
            (inputnode, aparc_std_tfm, [('bold_aparc', 'input_image')]),
            (select_std, aseg_std_tfm, [('anat2std_xfm', 'transforms')]),
            (select_std, aparc_std_tfm, [('anat2std_xfm', 'transforms')]),
            (gen_ref, aseg_std_tfm, [('out_file', 'reference_image')]),
            (gen_ref, aparc_std_tfm, [('out_file', 'reference_image')]),
            (aseg_std_tfm, poutputnode, [('output_image', 'bold_aseg_std')]),
            (aparc_std_tfm, poutputnode, [('output_image', 'bold_aparc_std')]),
        ])

    # Connect outputnode to the parameterized outputnode
    outputnode = pe.JoinNode(niu.IdentityInterface(fields=output_names),
                             name='outputnode',
                             joinsource='select_std')
    workflow.connect([(poutputnode, outputnode, [(f, f)
                                                 for f in output_names])])

    return workflow
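# --- Usage sketch (not part of the original module) ---
# The workflow iterates over the volumetric keys of ``standard_spaces`` via
# ``select_std.iterables``, and the JoinNode ``outputnode`` collapses the
# per-template results into synchronous lists. The template names below follow
# the docstring example; upstream nodes are assumed to feed ``inputnode``.
def _build_bold_std_trans_example():
    from collections import OrderedDict

    std_wf = init_bold_std_trans_wf(
        freesurfer=False,
        mem_gb=2,
        omp_nthreads=2,
        standard_spaces=OrderedDict([('MNI152NLin2009cAsym', {'resolution': 2}),
                                     ('MNI152NLin6Asym', {})]),
    )
    # ``inputnode.templates`` must list the same keys so that KeySelect can pick
    # the matching ``anat2std_xfm`` for every iteration.
    return std_wf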
Example #4
def init_func_preproc_wf(bold_file, ignore, freesurfer,
                         use_bbr, t2s_coreg, bold2t1w_dof, reportlets_dir,
                         output_spaces, template, output_dir, omp_nthreads,
                         fmap_bspline, fmap_demean, use_syn, force_syn,
                         use_aroma, ignore_aroma_err, aroma_melodic_dim,
                         medial_surface_nan, cifti_output,
                         debug, low_mem, template_out_grid, layout=None):
    """
    This workflow controls the functional preprocessing stages of FMRIPREP.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold import init_func_preproc_wf
        wf = init_func_preproc_wf('/completely/made/up/path/sub-01_task-nback_bold.nii.gz',
                                  omp_nthreads=1,
                                  ignore=[],
                                  freesurfer=True,
                                  reportlets_dir='.',
                                  output_dir='.',
                                  template='MNI152NLin2009cAsym',
                                  output_spaces=['T1w', 'fsnative',
                                                 'template', 'fsaverage5'],
                                  debug=False,
                                  use_bbr=True,
                                  t2s_coreg=False,
                                  bold2t1w_dof=9,
                                  fmap_bspline=True,
                                  fmap_demean=True,
                                  use_syn=True,
                                  force_syn=True,
                                  low_mem=False,
                                  template_out_grid='native',
                                  medial_surface_nan=False,
                                  cifti_output=False,
                                  use_aroma=False,
                                  ignore_aroma_err=False,
                                  aroma_melodic_dim=None)

    **Parameters**

        bold_file : str
            BOLD series NIfTI file
        ignore : list
            Preprocessing steps to skip (may include "slicetiming", "fieldmaps")
        freesurfer : bool
            Enable FreeSurfer functional registration (bbregister) and resampling
            BOLD series to FreeSurfer surface meshes.
        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
        t2s_coreg : bool
            Use multiple BOLD echoes to create T2*-map for T2*-driven coregistration
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        reportlets_dir : str
            Directory in which to save reportlets
        output_spaces : list
            List of output spaces functional images are to be resampled to.
            Some parts of pipeline will only be instantiated for some output spaces.

            Valid spaces:

                - T1w
                - template
                - fsnative
                - fsaverage (or other pre-existing FreeSurfer templates)
        template : str
            Name of template targeted by ``template`` output space
        output_dir : str
            Directory in which to save derivatives
        omp_nthreads : int
            Maximum number of threads an individual process may use
        fmap_bspline : bool
            **Experimental**: Fit B-Spline field using least-squares
        fmap_demean : bool
            Demean voxel-shift map during unwarp
        use_syn : bool
            **Experimental**: Enable ANTs SyN-based susceptibility distortion correction (SDC).
            If fieldmaps are present and enabled, this is not run by default.
        force_syn : bool
            **Temporary**: Always run SyN-based SDC
        use_aroma : bool
            Perform ICA-AROMA on MNI-resampled functional series
        ignore_aroma_err : bool
            Do not fail on ICA-AROMA errors
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files
        cifti_output : bool
            Generate a BOLD CIFTI file in output spaces
        debug : bool
            Enable debugging outputs
        low_mem : bool
            Write uncompressed .nii files in some cases to reduce memory usage
        template_out_grid : str
            Keyword ('native', '1mm' or '2mm') or path of custom reference
            image for normalization
        layout : BIDSLayout
            BIDSLayout structure to enable metadata retrieval

    **Inputs**

        bold_file
            BOLD series NIfTI file
        t1_preproc
            Bias-corrected structural template image
        t1_brain
            Skull-stripped ``t1_preproc``
        t1_mask
            Mask of the skull-stripped template image
        t1_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        t1_tpms
            List of tissue probability maps in T1w space
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        t1_2_mni_reverse_transform
            ANTs-compatible affine-and-warp transform file (inverse)
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w


    **Outputs**

        bold_t1
            BOLD series, resampled to T1w space
        bold_mask_t1
            BOLD series mask in T1w space
        bold_mni
            BOLD series, resampled to template space
        bold_mask_mni
            BOLD series mask in template space
        confounds
            TSV of confounds
        surfaces
            BOLD series, resampled to FreeSurfer surfaces
        aroma_noise_ics
            Noise components identified by ICA-AROMA
        melodic_mix
            FSL MELODIC mixing matrix
        bold_cifti
            BOLD CIFTI image
        cifti_variant
            combination of target spaces for `bold_cifti`


    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.util.init_bold_reference_wf`
        * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
        * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
        * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
        * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_mni_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.pepolar.init_pepolar_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_fmap_estimator_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_sdc_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_nonlinear_sdc_wf`

    """
    from ..fieldmap.base import init_sdc_wf  # Avoid circular dependency (#1066)

    ref_file = bold_file
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
    bold_tlen = 10
    multiecho = isinstance(bold_file, list)

    if multiecho:
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]

    if os.path.isfile(ref_file):
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    LOGGER.log(25, ('Creating bold processing workflow for "%s" (%.2f GB / %d TRs). '
                    'Memory resampled/largemem=%.2f/%.2f GB.'),
               ref_file, mem_gb['filesize'], bold_tlen, mem_gb['resampled'], mem_gb['largemem'])

    # For doc building purposes
    if layout is None or bold_file == 'bold_preprocesing':
        LOGGER.log(25, 'No valid layout: building empty workflow.')
        metadata = {
            'RepetitionTime': 2.0,
            'SliceTiming': [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
            'PhaseEncodingDirection': 'j',
        }
        fmaps = [{
            'type': 'phasediff',
            'phasediff': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_phasediff.nii.gz',
            'magnitude1': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude1.nii.gz',
            'magnitude2': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude2.nii.gz',
        }]
        run_stc = True
        multiecho = False
    else:
        metadata = layout.get_metadata(ref_file)

        # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
        fmaps = []
        if 'fieldmaps' not in ignore:
            fmaps = layout.get_fieldmap(ref_file, return_list=True)
            for fmap in fmaps:
                fmap['metadata'] = layout.get_metadata(fmap[fmap['type']])

        # Run SyN if forced or in the absence of fieldmap correction
        if force_syn or (use_syn and not fmaps):
            fmaps.append({'type': 'syn'})

        # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
        run_stc = ("SliceTiming" in metadata and
                   'slicetiming' not in ignore and
                   (_get_series_len(ref_file) > 4 or "TooShort"))

    # Use T2* as target for ME-EPI in co-registration
    if t2s_coreg and not multiecho:
        LOGGER.warning("No multiecho BOLD images found for T2* coregistration. "
                       "Using standard EPI-T1 coregistration.")
        t2s_coreg = False

    # Switch stc off
    if multiecho and run_stc is True:
        LOGGER.warning('Slice-timing correction is not available for '
                       'multiecho BOLD data (not implemented).')
        run_stc = False

    # Build workflow
    workflow = pe.Workflow(name=wf_name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_file', 'subjects_dir', 'subject_id',
                't1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms',
                't1_aseg', 't1_aparc',
                't1_2_mni_forward_transform', 't1_2_mni_reverse_transform',
                't1_2_fsnative_forward_transform', 't1_2_fsnative_reverse_transform']),
        name='inputnode')
    inputnode.inputs.bold_file = bold_file

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_t1', 'bold_mask_t1', 'bold_aseg_t1', 'bold_aparc_t1', 'cifti_variant',
                'bold_mni', 'bold_mask_mni', 'bold_cifti', 'confounds', 'surfaces',
                't2s_map', 'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file',
                'cifti_variant_key']),
        name='outputnode')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']), name='boldbuffer')

    summary = pe.Node(
        FunctionalSummary(output_spaces=output_spaces,
                          slice_timing=run_stc,
                          registration='FreeSurfer' if freesurfer else 'FSL',
                          registration_dof=bold2t1w_dof,
                          pe_direction=metadata.get("PhaseEncodingDirection")),
        name='summary', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True)

    func_derivatives_wf = init_func_derivatives_wf(output_dir=output_dir,
                                                   output_spaces=output_spaces,
                                                   template=template,
                                                   freesurfer=freesurfer,
                                                   use_aroma=use_aroma,
                                                   cifti_output=cifti_output)

    workflow.connect([
        (inputnode, func_derivatives_wf, [('bold_file', 'inputnode.source_file')]),
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_mni', 'inputnode.bold_mni'),
            ('bold_mask_mni', 'inputnode.bold_mask_mni'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surfaces'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_variant_key', 'inputnode.cifti_variant_key')
        ]),
    ])

    # The first reference uses T2 contrast enhancement
    bold_reference_wf = init_bold_reference_wf(
        omp_nthreads=omp_nthreads, enhance_t2=True)

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'), name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # mean BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=use_bbr,
                                   bold2t1w_dof=bold2t1w_dof,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False,
                                   use_fieldwarp=(fmaps is not None or use_syn))

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not low_mem,
        use_fieldwarp=(fmaps is not None or use_syn),
        name='bold_bold_trans_wf'
    )

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [('outputnode.bold_file', 'inputnode.bold_file'),
                                              ('outputnode.skip_vols', 'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
    else:  # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([
            (bold_reference_wf, boldbuffer, [
                ('outputnode.bold_file', 'bold_file')]),
        ])

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_wf(
        fmaps, metadata, omp_nthreads=omp_nthreads,
        debug=debug, fmap_demean=fmap_demean, fmap_bspline=fmap_bspline)
    bold_sdc_wf.inputs.inputnode.template = template

    if not fmaps:
        LOGGER.warning('SDC: no fieldmaps found or they were ignored (%s).',
                       ref_file)
    elif fmaps[0]['type'] == 'syn':
        LOGGER.warning(
            'SDC: no fieldmaps found or they were ignored. '
            'Using EXPERIMENTAL "fieldmap-less SyN" correction '
            'for dataset %s.', ref_file)
    else:
        LOGGER.log(25, 'SDC: fieldmap estimation of type "%s" intended for %s found.',
                   fmaps[0]['type'], ref_file)

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf, [
            ('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
            ('outputnode.bold_file', 'inputnode.bold_file')]),
        # EPI-T1 registration workflow
        (inputnode, bold_reg_wf, [
            ('bold_file', 'inputnode.name_source'),
            ('t1_preproc', 'inputnode.t1_preproc'),
            ('t1_brain', 'inputnode.t1_brain'),
            ('t1_mask', 'inputnode.t1_mask'),
            ('t1_seg', 'inputnode.t1_seg'),
            ('t1_aseg', 'inputnode.t1_aseg'),
            ('t1_aparc', 'inputnode.t1_aparc'),
            # Undefined if --no-freesurfer, but this is safe
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id'),
            ('t1_2_fsnative_reverse_transform', 'inputnode.t1_2_fsnative_reverse_transform')]),
        (bold_split, bold_reg_wf, [('out_files', 'inputnode.bold_split')]),
        (bold_hmc_wf, bold_reg_wf, [('outputnode.xforms', 'inputnode.hmc_xforms')]),
        (bold_reg_wf, outputnode, [('outputnode.bold_t1', 'bold_t1'),
                                   ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
                                   ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (inputnode, bold_sdc_wf, [
            ('t1_brain', 'inputnode.t1_brain'),
            ('t1_2_mni_reverse_transform', 'inputnode.t1_2_mni_reverse_transform')]),
        (bold_reference_wf, bold_sdc_wf, [
            ('outputnode.ref_image', 'inputnode.bold_ref'),
            ('outputnode.ref_image_brain', 'inputnode.bold_ref_brain'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, bold_reg_wf, [
            ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
            ('outputnode.bold_mask', 'inputnode.ref_bold_mask'),
            ('outputnode.out_warp', 'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction')]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [('t1_tpms', 'inputnode.t1_tpms'),
                                        ('t1_mask', 'inputnode.t1_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [
            ('outputnode.movpar_file', 'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [
            ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        # Connect bold_bold_trans_wf
        (inputnode, bold_bold_trans_wf, [
            ('bold_file', 'inputnode.name_source')]),
        (bold_split, bold_bold_trans_wf, [
            ('out_files', 'inputnode.bold_file')]),
        (bold_hmc_wf, bold_bold_trans_wf, [
            ('outputnode.xforms', 'inputnode.hmc_xforms')]),
        (bold_bold_trans_wf, bold_confounds_wf, [
            ('outputnode.bold', 'inputnode.bold'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])

    if fmaps:
        from ..fieldmap.unwarp import init_fmap_unwarp_report_wf
        sdc_type = fmaps[0]['type']

        # Report on BOLD correction
        fmap_unwarp_report_wf = init_fmap_unwarp_report_wf(
            suffix='sdc_%s' % sdc_type)
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [
                ('t1_seg', 'inputnode.in_seg')]),
            (bold_reference_wf, fmap_unwarp_report_wf, [
                ('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [
                ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [
                ('outputnode.bold_ref', 'inputnode.in_post')]),
        ])

        if force_syn and sdc_type != 'syn':
            syn_unwarp_report_wf = init_fmap_unwarp_report_wf(
                suffix='forcedsyn', name='syn_unwarp_report_wf')
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [
                    ('t1_seg', 'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf, [
                    ('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf, [
                    ('outputnode.syn_bold_ref', 'inputnode.in_post')]),
            ])

    # if multiecho data, select first echo for hmc correction
    if multiecho:
        inputnode.iterables = ('bold_file', bold_file)

        me_first_echo = pe.Node(FirstEcho(
            te_list=tes, in_files=bold_file, ref_imgs=bold_file),
            name='me_first_echo')
        # Replace reference with the echo selected with FirstEcho
        workflow.disconnect([
            (inputnode, bold_reference_wf, [
                ('bold_file', 'inputnode.bold_file')]),
            (bold_reference_wf, boldbuffer, [
                ('outputnode.bold_file', 'bold_file')]),
        ])
        workflow.connect([
            (me_first_echo, bold_reference_wf, [
                ('first_image', 'inputnode.bold_file')]),
            (inputnode, boldbuffer, [
                ('bold_file', 'bold_file')]),
        ])

        if t2s_coreg:
            # create a T2* map
            bold_t2s_wf = init_bold_t2s_wf(bold_echos=bold_file,
                                           echo_times=tes,
                                           mem_gb=mem_gb['resampled'],
                                           omp_nthreads=omp_nthreads,
                                           name='bold_t2s_wf')
            bold_t2s_wf.inputs.inputnode.name_source = ref_file

            # Replace EPI-to-T1w registration inputs
            workflow.disconnect([
                (bold_sdc_wf, bold_reg_wf, [
                    ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
                    ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
            ])
            workflow.connect([
                (bold_hmc_wf, bold_t2s_wf, [
                    ('outputnode.xforms', 'inputnode.hmc_xforms')]),
                (bold_t2s_wf, bold_reg_wf, [
                    ('outputnode.t2s_map', 'inputnode.ref_bold_brain'),
                    ('outputnode.oc_mask', 'inputnode.ref_bold_mask')]),
            ])

    # Map final BOLD mask into T1w space (if required)
    if 'T1w' in output_spaces:
        from niworkflows.interfaces.fixes import (
            FixHeaderApplyTransforms as ApplyTransforms
        )

        boldmask_to_t1w = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True),
            name='boldmask_to_t1w', mem_gb=0.1
        )
        workflow.connect([
            (bold_bold_trans_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask', 'input_image')]),
            (bold_reg_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask_t1', 'reference_image'),
                ('outputnode.itk_bold_to_t1', 'transforms')]),
            (boldmask_to_t1w, outputnode, [
                ('output_image', 'bold_mask_t1')]),
        ])

    if 'template' in output_spaces:
        # Apply transforms in 1 shot
        # Only use uncompressed output if AROMA is to be run
        bold_mni_trans_wf = init_bold_mni_trans_wf(
            template=template,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            template_out_grid=template_out_grid,
            use_compression=not low_mem,
            use_fieldwarp=fmaps is not None,
            name='bold_mni_trans_wf'
        )
        carpetplot_wf = init_carpetplot_wf(
            mem_gb=mem_gb['resampled'],
            metadata=metadata,
            name='carpetplot_wf')

        workflow.connect([
            (inputnode, bold_mni_trans_wf, [
                ('bold_file', 'inputnode.name_source'),
                ('t1_2_mni_forward_transform', 'inputnode.t1_2_mni_forward_transform')]),
            (bold_split, bold_mni_trans_wf, [
                ('out_files', 'inputnode.bold_split')]),
            (bold_hmc_wf, bold_mni_trans_wf, [
                ('outputnode.xforms', 'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_mni_trans_wf, [
                ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf, bold_mni_trans_wf, [
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_mni_trans_wf, [
                ('outputnode.out_warp', 'inputnode.fieldwarp')]),
            (bold_mni_trans_wf, outputnode, [('outputnode.bold_mni', 'bold_mni'),
                                             ('outputnode.bold_mask_mni', 'bold_mask_mni')]),
            (bold_bold_trans_wf, carpetplot_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (inputnode, carpetplot_wf, [
                ('t1_2_mni_reverse_transform', 'inputnode.t1_2_mni_reverse_transform')]),
            (bold_reg_wf, carpetplot_wf, [
                ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
            (bold_confounds_wf, carpetplot_wf, [
                ('outputnode.confounds_file', 'inputnode.confounds_file')]),
        ])

        if use_aroma:
            # ICA-AROMA workflow
            # Internally resamples to MNI152 Linear (2006)
            from .confounds import init_ica_aroma_wf
            from ...interfaces import JoinTSVColumns

            ica_aroma_wf = init_ica_aroma_wf(
                template=template,
                metadata=metadata,
                mem_gb=mem_gb['resampled'],
                omp_nthreads=omp_nthreads,
                use_fieldwarp=fmaps is not None,
                ignore_aroma_err=ignore_aroma_err,
                aroma_melodic_dim=aroma_melodic_dim,
                name='ica_aroma_wf')

            join = pe.Node(JoinTSVColumns(), name='aroma_confounds')

            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
            ])
            workflow.connect([
                (inputnode, ica_aroma_wf, [
                    ('bold_file', 'inputnode.name_source'),
                    ('t1_2_mni_forward_transform', 'inputnode.t1_2_mni_forward_transform')]),
                (bold_split, ica_aroma_wf, [
                    ('out_files', 'inputnode.bold_split')]),
                (bold_hmc_wf, ica_aroma_wf, [
                    ('outputnode.movpar_file', 'inputnode.movpar_file'),
                    ('outputnode.xforms', 'inputnode.hmc_xforms')]),
                (bold_reg_wf, ica_aroma_wf, [
                    ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
                (bold_bold_trans_wf, ica_aroma_wf, [
                    ('outputnode.bold_mask', 'inputnode.bold_mask')]),
                (bold_sdc_wf, ica_aroma_wf, [
                    ('outputnode.out_warp', 'inputnode.fieldwarp')]),
                (bold_confounds_wf, join, [
                    ('outputnode.confounds_file', 'in_file')]),
                (ica_aroma_wf, join,
                    [('outputnode.aroma_confounds', 'join_file')]),
                (ica_aroma_wf, outputnode,
                    [('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                     ('outputnode.melodic_mix', 'melodic_mix'),
                     ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')]),
                (join, outputnode, [('out_file', 'confounds')]),
            ])

    # SURFACES ##################################################################################
    if freesurfer and any(space.startswith('fs') for space in output_spaces):
        LOGGER.log(25, 'Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(mem_gb=mem_gb['resampled'],
                                         output_spaces=output_spaces,
                                         medial_surface_nan=medial_surface_nan,
                                         name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf, [
                ('t1_preproc', 'inputnode.t1_preproc'),
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id'),
                ('t1_2_fsnative_forward_transform', 'inputnode.t1_2_fsnative_forward_transform')]),
            (bold_reg_wf, bold_surf_wf, [('outputnode.bold_t1', 'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        ])

        # CIFTI output
        if cifti_output and 'template' in output_spaces:
            gen_cifti = pe.MapNode(GenerateCifti(), iterfield=["surface_target", "gifti_files"],
                                   name="gen_cifti")
            gen_cifti.inputs.TR = metadata.get("RepetitionTime")

            workflow.connect([
                (bold_surf_wf, gen_cifti, [
                    ('targets.out', 'surface_target'),
                    ('outputnode.surfaces', 'gifti_files')]),
                (inputnode, gen_cifti, [('subjects_dir', 'subjects_dir')]),
                (bold_mni_trans_wf, gen_cifti, [('outputnode.bold_mni', 'bold_file')]),
                (gen_cifti, outputnode, [('out_file', 'bold_cifti'),
                                         ('variant', 'cifti_variant'),
                                         ('variant_key', 'cifti_variant_key')]),
            ])

    # REPORTING ############################################################
    ds_report_summary = pe.Node(
        DerivativesDataSink(suffix='summary'),
        name='ds_report_summary', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='validation'),
        name='ds_report_validation', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation, [
            ('outputnode.validation_report', 'in_file')]),
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
Example #5
def init_bold_t1_trans_wf(freesurfer,
                          mem_gb,
                          omp_nthreads,
                          use_compression=True,
                          name='bold_t1_trans_wf'):
    """
    Co-register the reference BOLD image to T1w-space.

    The workflow uses :abbr:`BBR (boundary-based registration)`.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.bold.registration import init_bold_t1_trans_wf
            wf = init_bold_t1_trans_wf(freesurfer=True,
                                       mem_gb=3,
                                       omp_nthreads=1)

    Parameters
    ----------
    freesurfer : :obj:`bool`
        Enable FreeSurfer functional registration (bbregister)
    mem_gb : :obj:`float`
        Size of BOLD file in GB
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use
    use_compression : :obj:`bool`
        Save registered BOLD series as ``.nii.gz``
    name : :obj:`str`
        Name of workflow (default: ``bold_t1_trans_wf``)

    Inputs
    ------
    name_source
        BOLD series NIfTI file
        Used to recover original information lost during processing
    ref_bold_brain
        Reference image to which BOLD series is aligned
        If ``fieldwarp == True``, ``ref_bold_brain`` should be unwarped
    ref_bold_mask
        Skull-stripping mask of reference image
    t1w_brain
        Skull-stripped bias-corrected structural template image
    t1w_mask
        Mask of the skull-stripped template image
    t1w_aseg
        FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
        (only if ``recon-all`` was run).
    t1w_aparc
        FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
        (only if ``recon-all`` was run).
    bold_split
        Individual 3D BOLD volumes, not motion corrected
    hmc_xforms
        List of affine transforms aligning each volume to ``ref_image`` in ITK format
    itk_bold_to_t1
        Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
    fieldwarp
        a :abbr:`DFM (displacements field map)` in ITK format

    Outputs
    -------
    bold_t1
        Motion-corrected BOLD series in T1 space
    bold_t1_ref
        Reference, contrast-enhanced summary of the motion-corrected BOLD series in T1w space
    bold_mask_t1
        BOLD mask in T1 space
    bold_aseg_t1
        FreeSurfer's ``aseg.mgz`` atlas, in T1w-space at the BOLD resolution
        (only if ``recon-all`` was run).
    bold_aparc_t1
        FreeSurfer's ``aparc+aseg.mgz`` atlas, in T1w-space at the BOLD resolution
        (only if ``recon-all`` was run).

    See also
    --------
      * :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`
      * :py:func:`~fmriprep.workflows.bold.registration.init_fsl_bbr_wf`

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.func.util import init_bold_reference_wf
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.itk import MultiApplyTransforms
    from niworkflows.interfaces.nilearn import Merge
    from niworkflows.interfaces.utils import GenerateSamplingReference

    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'name_source', 'ref_bold_brain', 'ref_bold_mask', 't1w_brain',
        't1w_mask', 't1w_aseg', 't1w_aparc', 'bold_split', 'fieldwarp',
        'hmc_xforms', 'itk_bold_to_t1'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1',
        'bold_aparc_t1'
    ]),
                         name='outputnode')

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB

    mask_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'),
                           name='mask_t1w_tfm',
                           mem_gb=0.1)

    workflow.connect([
        (inputnode, gen_ref, [('ref_bold_brain', 'moving_image'),
                              ('t1w_brain', 'fixed_image'),
                              ('t1w_mask', 'fov_mask')]),
        (inputnode, mask_t1w_tfm, [('ref_bold_mask', 'input_image')]),
        (gen_ref, mask_t1w_tfm, [('out_file', 'reference_image')]),
        (inputnode, mask_t1w_tfm, [('itk_bold_to_t1', 'transforms')]),
        (mask_t1w_tfm, outputnode, [('output_image', 'bold_mask_t1')]),
    ])

    if freesurfer:
        # Resample aseg and aparc in T1w space (no transforms needed)
        aseg_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                               transforms='identity'),
                               name='aseg_t1w_tfm',
                               mem_gb=0.1)
        aparc_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                transforms='identity'),
                                name='aparc_t1w_tfm',
                                mem_gb=0.1)

        workflow.connect([
            (inputnode, aseg_t1w_tfm, [('t1w_aseg', 'input_image')]),
            (inputnode, aparc_t1w_tfm, [('t1w_aparc', 'input_image')]),
            (gen_ref, aseg_t1w_tfm, [('out_file', 'reference_image')]),
            (gen_ref, aparc_t1w_tfm, [('out_file', 'reference_image')]),
            (aseg_t1w_tfm, outputnode, [('output_image', 'bold_aseg_t1')]),
            (aparc_t1w_tfm, outputnode, [('output_image', 'bold_aparc_t1')]),
        ])

    bold_to_t1w_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_t1w_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    # merge 3D volumes into 4D timeseries
    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb)

    # Generate a reference on the target T1w space
    gen_final_ref = init_bold_reference_wf(omp_nthreads, pre_mask=True)

    # Merge transforms placing the head motion correction last
    merge_xforms = pe.Node(niu.Merge(3),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, merge_xforms, [
            ('hmc_xforms', 'in3'),     # May be 'identity' if HMC already applied
            ('fieldwarp', 'in2'),      # May be 'identity' if SDC already applied
            ('itk_bold_to_t1', 'in1')]),
        (inputnode, bold_to_t1w_transform, [('bold_split', 'input_image')]),
        (merge_xforms, bold_to_t1w_transform, [('out', 'transforms')]),
        (gen_ref, bold_to_t1w_transform, [('out_file', 'reference_image')]),
        (bold_to_t1w_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]),
        (mask_t1w_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')]),
        (merge, outputnode, [('out_file', 'bold_t1')]),
        (gen_final_ref, outputnode, [('outputnode.ref_image', 'bold_t1_ref')]),
    ])

    return workflow
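# --- Usage sketch (not part of the original module) ---
# Single-shot resampling into T1w space: ``merge_xforms`` stacks the transforms
# as in1=itk_bold_to_t1, in2=fieldwarp, in3=hmc_xforms (head motion last, per
# the comment above), and 'identity' may stand in for steps already applied.
# All input values below are placeholders.
def _build_bold_t1_trans_example():
    t1_trans_wf = init_bold_t1_trans_wf(freesurfer=False, mem_gb=2, omp_nthreads=2)
    t1_trans_wf.inputs.inputnode.bold_split = ['vol0000.nii.gz', 'vol0001.nii.gz']
    t1_trans_wf.inputs.inputnode.hmc_xforms = 'identity'   # HMC already applied upstream
    t1_trans_wf.inputs.inputnode.fieldwarp = 'identity'    # no SDC in this sketch
    t1_trans_wf.inputs.inputnode.itk_bold_to_t1 = 'bold_to_t1w_affine.txt'
    return t1_trans_wf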
Example #6
def init_func_preproc_wf(bold_file):
    """
    This workflow controls the functional preprocessing stages of *fMRIPrep*.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.tests import mock_config
            from fmriprep import config
            from fmriprep.workflows.bold.base import init_func_preproc_wf
            with mock_config():
                bold_file = config.execution.bids_dir / 'sub-01' / 'func' \
                    / 'sub-01_task-mixedgamblestask_run-01_bold.nii.gz'
                wf = init_func_preproc_wf(str(bold_file))

    Parameters
    ----------
    bold_file
        BOLD series NIfTI file

    Inputs
    ------
    bold_file
        BOLD series NIfTI file
    t1w_preproc
        Bias-corrected structural template image
    t1w_mask
        Mask of the skull-stripped template image
    t1w_dseg
        Segmentation of preprocessed structural image, including
        gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
    t1w_aseg
        Segmentation of structural image, done with FreeSurfer.
    t1w_aparc
        Parcellation of structural image, done with FreeSurfer.
    t1w_tpms
        List of tissue probability maps in T1w space
    template
        List of templates to target
    anat2std_xfm
        List of transform files, collated with templates
    std2anat_xfm
        List of inverse transform files, collated with templates
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w

    Outputs
    -------
    bold_t1
        BOLD series, resampled to T1w space
    bold_mask_t1
        BOLD series mask in T1w space
    bold_std
        BOLD series, resampled to template space
    bold_mask_std
        BOLD series mask in template space
    confounds
        TSV of confounds
    surfaces
        BOLD series, resampled to FreeSurfer surfaces
    aroma_noise_ics
        Noise components identified by ICA-AROMA
    melodic_mix
        FSL MELODIC mixing matrix
    bold_cifti
        BOLD CIFTI image
    cifti_variant
        combination of target spaces for `bold_cifti`

    See Also
    --------

    * :py:func:`~niworkflows.func.util.init_bold_reference_wf`
    * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
    * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
    * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_t1_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confs_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_std_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
    * :py:func:`~sdcflows.workflows.fmap.init_fmap_wf`
    * :py:func:`~sdcflows.workflows.pepolar.init_pepolar_unwarp_wf`
    * :py:func:`~sdcflows.workflows.phdiff.init_phdiff_wf`
    * :py:func:`~sdcflows.workflows.syn.init_syn_sdc_wf`
    * :py:func:`~sdcflows.workflows.unwarp.init_sdc_unwarp_wf`

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.func.util import init_bold_reference_wf
    from niworkflows.interfaces.nibabel import ApplyMask
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import DictMerge
    from sdcflows.workflows.base import init_sdc_estimate_wf, fieldmap_wrangler

    ref_file = bold_file
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
    bold_tlen = 10
    multiecho = isinstance(bold_file, list)

    # Have some options handy
    layout = config.execution.layout
    omp_nthreads = config.nipype.omp_nthreads
    freesurfer = config.workflow.run_reconall
    spaces = config.workflow.spaces

    if multiecho:
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]

    if os.path.isfile(ref_file):
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    config.loggers.workflow.debug(
        'Creating bold processing workflow for "%s" (%.2f GB / %d TRs). '
        'Memory resampled/largemem=%.2f/%.2f GB.', ref_file,
        mem_gb['filesize'], bold_tlen, mem_gb['resampled'], mem_gb['largemem'])

    sbref_file = None
    # Find associated sbref, if possible
    entities = layout.parse_file_entities(ref_file)
    entities['suffix'] = 'sbref'
    entities['extension'] = ['nii', 'nii.gz']  # Overwrite extensions
    files = layout.get(return_type='file', **entities)
    refbase = os.path.basename(ref_file)
    if 'sbref' in config.workflow.ignore:
        config.loggers.workflow.info("Single-band reference files ignored.")
    elif files and multiecho:
        config.loggers.workflow.warning(
            "Single-band reference found, but not supported in "
            "multi-echo workflows at this time. Ignoring.")
    elif files:
        sbref_file = files[0]
        sbbase = os.path.basename(sbref_file)
        if len(files) > 1:
            config.loggers.workflow.warning(
                "Multiple single-band reference files found for {}; using "
                "{}".format(refbase, sbbase))
        else:
            config.loggers.workflow.info(
                "Using single-band reference file %s.", sbbase)
    else:
        config.loggers.workflow.info("No single-band-reference found for %s.",
                                     refbase)

    metadata = layout.get_metadata(ref_file)

    # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
    fmaps = None
    if 'fieldmaps' not in config.workflow.ignore:
        fmaps = fieldmap_wrangler(layout,
                                  ref_file,
                                  use_syn=config.workflow.use_syn,
                                  force_syn=config.workflow.force_syn)
    elif config.workflow.use_syn or config.workflow.force_syn:
        # If fieldmaps are not enabled, activate SyN-SDC in unforced (False) mode
        fmaps = {'syn': False}

    # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
    run_stc = (bool(metadata.get("SliceTiming"))
               and 'slicetiming' not in config.workflow.ignore
               and (_get_series_len(ref_file) > 4 or "TooShort"))

    # Check if MEEPI for T2* coregistration target
    if config.workflow.t2s_coreg and not multiecho:
        config.loggers.workflow.warning(
            "No multiecho BOLD images found for T2* coregistration. "
            "Using standard EPI-T1 coregistration.")
        config.workflow.t2s_coreg = False

    # By default, force-bbr for t2s_coreg unless user specifies otherwise
    if config.workflow.t2s_coreg and config.workflow.use_bbr is None:
        config.workflow.use_bbr = True

    # Build workflow
    workflow = Workflow(name=wf_name)
    workflow.__postdesc__ = """\
All resamplings can be performed with *a single interpolation
step* by composing all the pertinent transformations (i.e. head-motion
transform matrices, susceptibility distortion correction when available,
and co-registrations to anatomical and output spaces).
Gridded (volumetric) resamplings were performed using `antsApplyTransforms` (ANTs),
configured with Lanczos interpolation to minimize the smoothing
effects of other kernels [@lanczos].
Non-gridded (surface) resamplings were performed using `mri_vol2surf`
(FreeSurfer).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_file', 'subjects_dir', 'subject_id', 't1w_preproc', 't1w_mask',
        't1w_dseg', 't1w_tpms', 't1w_aseg', 't1w_aparc', 'anat2std_xfm',
        'std2anat_xfm', 'template', 't1w2fsnative_xfm', 'fsnative2t1w_xfm'
    ]),
                        name='inputnode')
    inputnode.inputs.bold_file = bold_file
    if sbref_file is not None:
        from niworkflows.interfaces.images import ValidateImage
        val_sbref = pe.Node(ValidateImage(in_file=sbref_file),
                            name='val_sbref')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1',
        'bold_aparc_t1', 'bold_std', 'bold_std_ref', 'bold_mask_std',
        'bold_aseg_std', 'bold_aparc_std', 'bold_native', 'bold_cifti',
        'cifti_variant', 'cifti_metadata', 'cifti_density', 'surfaces',
        'confounds', 'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file',
        'confounds_metadata'
    ]),
                         name='outputnode')

    # Generate a brain-masked conversion of the t1w
    t1w_brain = pe.Node(ApplyMask(), name='t1w_brain')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']),
                         name='boldbuffer')

    summary = pe.Node(FunctionalSummary(
        slice_timing=run_stc,
        registration=('FSL', 'FreeSurfer')[freesurfer],
        registration_dof=config.workflow.bold2t1w_dof,
        registration_init=config.workflow.bold2t1w_init,
        pe_direction=metadata.get("PhaseEncodingDirection"),
        tr=metadata.get("RepetitionTime")),
                      name='summary',
                      mem_gb=config.DEFAULT_MEMORY_MIN_GB,
                      run_without_submitting=True)
    summary.inputs.dummy_scans = config.workflow.dummy_scans

    func_derivatives_wf = init_func_derivatives_wf(
        bids_root=layout.root,
        cifti_output=config.workflow.cifti_output,
        freesurfer=freesurfer,
        metadata=metadata,
        output_dir=str(config.execution.output_dir),
        spaces=spaces,
        use_aroma=config.workflow.use_aroma,
    )

    workflow.connect([
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_t1_ref', 'inputnode.bold_t1_ref'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_native', 'inputnode.bold_native'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surf_files'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_metadata', 'inputnode.cifti_metadata'),
            ('cifti_density', 'inputnode.cifti_density'),
            ('confounds_metadata', 'inputnode.confounds_metadata'),
        ]),
    ])

    # Generate a tentative boldref
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)
    bold_reference_wf.inputs.inputnode.dummy_scans = config.workflow.dummy_scans
    if sbref_file is not None:
        workflow.connect([
            (val_sbref, bold_reference_wf, [('out_file',
                                             'inputnode.sbref_file')]),
        ])

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'),
                         name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # calculate BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=config.workflow.use_bbr,
                                   bold2t1w_dof=config.workflow.bold2t1w_dof,
                                   bold2t1w_init=config.workflow.bold2t1w_init,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False)

    # apply BOLD registration to T1w
    bold_t1_trans_wf = init_bold_t1_trans_wf(name='bold_t1_trans_wf',
                                             freesurfer=freesurfer,
                                             use_fieldwarp=bool(fmaps),
                                             multiecho=multiecho,
                                             mem_gb=mem_gb['resampled'],
                                             omp_nthreads=omp_nthreads,
                                             use_compression=False)

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        regressors_all_comps=config.workflow.regressors_all_comps,
        regressors_fd_th=config.workflow.regressors_fd_th,
        regressors_dvars_th=config.workflow.regressors_dvars_th,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not config.execution.low_mem,
        use_fieldwarp=bool(fmaps),
        name='bold_bold_trans_wf')
    bold_bold_trans_wf.inputs.inputnode.name_source = ref_file

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [('outputnode.skip_vols',
                                               'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
        if not multiecho:
            workflow.connect([(bold_reference_wf, bold_stc_wf, [
                ('outputnode.bold_file', 'inputnode.bold_file')
            ])])
        else:  # for meepi, iterate through stc_wf for all workflows
            meepi_echos = boldbuffer.clone(name='meepi_echos')
            meepi_echos.iterables = ('bold_file', bold_file)
            workflow.connect([(meepi_echos, bold_stc_wf,
                               [('bold_file', 'inputnode.bold_file')])])
    elif not multiecho:  # STC is too short or False
        # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([(bold_reference_wf, boldbuffer,
                           [('outputnode.bold_file', 'bold_file')])])
    else:
        # for meepi, iterate over all meepi echos to boldbuffer
        boldbuffer.iterables = ('bold_file', bold_file)

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_estimate_wf(fmaps,
                                       metadata,
                                       omp_nthreads=omp_nthreads,
                                       debug=config.execution.debug)

    # MULTI-ECHO EPI DATA #############################################
    if multiecho:
        from niworkflows.func.util import init_skullstrip_bold_wf
        skullstrip_bold_wf = init_skullstrip_bold_wf(name='skullstrip_bold_wf')

        inputnode.inputs.bold_file = ref_file  # Replace reference with first echo
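        # The JoinNode below gathers the skull-stripped echoes produced by the
        # iterable branch (meepi_echos when STC ran, boldbuffer otherwise) back
        # into a single list for optimal combination in bold_t2s_wf.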

        join_echos = pe.JoinNode(
            niu.IdentityInterface(fields=['bold_files']),
            joinsource=('meepi_echos' if run_stc is True else 'boldbuffer'),
            joinfield=['bold_files'],
            name='join_echos')

        # create optimal combination, adaptive T2* map
        bold_t2s_wf = init_bold_t2s_wf(echo_times=tes,
                                       mem_gb=mem_gb['resampled'],
                                       omp_nthreads=omp_nthreads,
                                       t2s_coreg=config.workflow.t2s_coreg,
                                       name='bold_t2smap_wf')

        workflow.connect([
            (skullstrip_bold_wf, join_echos,
             [('outputnode.skull_stripped_file', 'bold_files')]),
            (join_echos, bold_t2s_wf, [('bold_files', 'inputnode.bold_file')]),
        ])

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        (inputnode, t1w_brain, [('t1w_preproc', 'in_file'),
                                ('t1w_mask', 'in_mask')]),
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf,
         [('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
          ('outputnode.bold_file', 'inputnode.bold_file')]),
        (bold_reference_wf, summary, [('outputnode.algo_dummy_scans',
                                       'algo_dummy_scans')]),
        # EPI-T1 registration workflow
        (
            inputnode,
            bold_reg_wf,
            [
                ('t1w_dseg', 'inputnode.t1w_dseg'),
                # Undefined if --fs-no-reconall, but this is safe
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id'),
                ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')
            ]),
        (t1w_brain, bold_reg_wf, [('out_file', 'inputnode.t1w_brain')]),
        (inputnode, bold_t1_trans_wf, [('bold_file', 'inputnode.name_source'),
                                       ('t1w_mask', 'inputnode.t1w_mask'),
                                       ('t1w_aseg', 'inputnode.t1w_aseg'),
                                       ('t1w_aparc', 'inputnode.t1w_aparc')]),
        (t1w_brain, bold_t1_trans_wf, [('out_file', 'inputnode.t1w_brain')]),
        # unused if multiecho, but this is safe
        (bold_hmc_wf, bold_t1_trans_wf, [('outputnode.xforms',
                                          'inputnode.hmc_xforms')]),
        (bold_reg_wf, bold_t1_trans_wf, [('outputnode.itk_bold_to_t1',
                                          'inputnode.itk_bold_to_t1')]),
        (bold_t1_trans_wf, outputnode,
         [('outputnode.bold_t1', 'bold_t1'),
          ('outputnode.bold_t1_ref', 'bold_t1_ref'),
          ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
          ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (t1w_brain, bold_sdc_wf, [('out_file', 'inputnode.t1w_brain')]),
        (bold_reference_wf, bold_sdc_wf,
         [('outputnode.ref_image', 'inputnode.epi_file'),
          ('outputnode.ref_image_brain', 'inputnode.epi_brain'),
          ('outputnode.bold_mask', 'inputnode.epi_mask')]),
        (bold_sdc_wf, bold_t1_trans_wf, [('outputnode.out_warp',
                                          'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf,
         [('outputnode.out_warp', 'inputnode.fieldwarp'),
          ('outputnode.epi_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction')
                                ]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [('t1w_tpms', 'inputnode.t1w_tpms'),
                                        ('t1w_mask', 'inputnode.t1w_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [('outputnode.movpar_file',
                                           'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [('outputnode.itk_t1_to_bold',
                                           'inputnode.t1_bold_xform')]),
        (bold_reference_wf, bold_confounds_wf, [('outputnode.skip_vols',
                                                 'inputnode.skip_vols')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_metadata', 'confounds_metadata'),
        ]),
        # Connect bold_bold_trans_wf
        (bold_split, bold_bold_trans_wf, [('out_files', 'inputnode.bold_file')]
         ),
        (bold_hmc_wf, bold_bold_trans_wf, [('outputnode.xforms',
                                            'inputnode.hmc_xforms')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])

    if not config.workflow.t2s_coreg:
        workflow.connect([
            (bold_sdc_wf, bold_reg_wf, [('outputnode.epi_brain',
                                         'inputnode.ref_bold_brain')]),
            (bold_sdc_wf, bold_t1_trans_wf,
             [('outputnode.epi_brain', 'inputnode.ref_bold_brain'),
              ('outputnode.epi_mask', 'inputnode.ref_bold_mask')]),
        ])
    else:
        workflow.connect([
            # For t2s_coreg, replace EPI-to-T1w registration inputs
            (bold_t2s_wf, bold_reg_wf, [('outputnode.bold_ref_brain',
                                         'inputnode.ref_bold_brain')]),
            (bold_t2s_wf, bold_t1_trans_wf,
             [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
              ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
        ])

    # for standard EPI data, pass along correct file
    if not multiecho:
        workflow.connect([
            (inputnode, func_derivatives_wf, [('bold_file',
                                               'inputnode.source_file')]),
            (bold_bold_trans_wf, bold_confounds_wf,
             [('outputnode.bold', 'inputnode.bold'),
              ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_split, bold_t1_trans_wf, [('out_files',
                                             'inputnode.bold_split')]),
        ])
    else:  # for meepi, create and use optimal combination
        workflow.connect([
            # update name source for optimal combination
            (inputnode, func_derivatives_wf,
             [(('bold_file', combine_meepi_source), 'inputnode.source_file')]),
            (bold_bold_trans_wf, skullstrip_bold_wf, [('outputnode.bold',
                                                       'inputnode.in_file')]),
            (bold_t2s_wf, bold_confounds_wf,
             [('outputnode.bold', 'inputnode.bold'),
              ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_t2s_wf, bold_t1_trans_wf, [('outputnode.bold',
                                              'inputnode.bold_split')]),
        ])

    if fmaps:
        from sdcflows.workflows.outputs import init_sdc_unwarp_report_wf
        # Report on BOLD correction
        fmap_unwarp_report_wf = init_sdc_unwarp_report_wf()
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [('t1w_dseg',
                                                 'inputnode.in_seg')]),
            (bold_reference_wf, fmap_unwarp_report_wf,
             [('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [('outputnode.itk_t1_to_bold',
                                                   'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [('outputnode.epi_corrected',
                                                   'inputnode.in_post')]),
        ])

        # Overwrite ``out_path_base`` of unwarping DataSinks
        for node in fmap_unwarp_report_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                fmap_unwarp_report_wf.get_node(
                    node).interface.out_path_base = 'fmriprep'

        for node in bold_sdc_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                bold_sdc_wf.get_node(node).interface.out_path_base = 'fmriprep'

        if 'syn' in fmaps:
            sdc_select_std = pe.Node(KeySelect(fields=['std2anat_xfm']),
                                     name='sdc_select_std',
                                     run_without_submitting=True)
            sdc_select_std.inputs.key = 'MNI152NLin2009cAsym'
            workflow.connect([
                (inputnode, sdc_select_std, [('std2anat_xfm', 'std2anat_xfm'),
                                             ('template', 'keys')]),
                (sdc_select_std, bold_sdc_wf, [('std2anat_xfm',
                                                'inputnode.std2anat_xfm')]),
            ])

        if fmaps.get('syn') is True:  # SyN forced
            syn_unwarp_report_wf = init_sdc_unwarp_report_wf(
                name='syn_unwarp_report_wf', forcedsyn=True)
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [('t1w_dseg',
                                                    'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf,
                 [('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf,
                 [('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf, [('outputnode.syn_ref',
                                                      'inputnode.in_post')]),
            ])

            # Overwrite ``out_path_base`` of unwarping DataSinks
            for node in syn_unwarp_report_wf.list_node_names():
                if node.split('.')[-1].startswith('ds_'):
                    syn_unwarp_report_wf.get_node(
                        node).interface.out_path_base = 'fmriprep'

    # Map final BOLD mask into T1w space (if required)
    nonstd_spaces = set(spaces.get_nonstandard())
    if nonstd_spaces.intersection(('T1w', 'anat')):
        from niworkflows.interfaces.fixes import (FixHeaderApplyTransforms as
                                                  ApplyTransforms)

        boldmask_to_t1w = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                  float=True),
                                  name='boldmask_to_t1w',
                                  mem_gb=0.1)
        workflow.connect([
            (bold_reg_wf, boldmask_to_t1w, [('outputnode.itk_bold_to_t1',
                                             'transforms')]),
            (bold_t1_trans_wf, boldmask_to_t1w, [('outputnode.bold_mask_t1',
                                                  'reference_image')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
             boldmask_to_t1w, [('outputnode.bold_mask', 'input_image')]),
            (boldmask_to_t1w, outputnode, [('output_image', 'bold_mask_t1')]),
        ])

    if nonstd_spaces.intersection(('func', 'run', 'bold', 'boldref', 'sbref')):
        workflow.connect([
            (bold_bold_trans_wf, outputnode, [('outputnode.bold',
                                               'bold_native')]),
            (bold_bold_trans_wf, func_derivatives_wf,
             [('outputnode.bold_ref', 'inputnode.bold_native_ref'),
              ('outputnode.bold_mask', 'inputnode.bold_mask_native')]),
        ])

    if spaces.get_spaces(nonstandard=False, dim=(3, )):
        # Apply transforms in 1 shot
        # Only use uncompressed output if AROMA is to be run
        bold_std_trans_wf = init_bold_std_trans_wf(
            freesurfer=freesurfer,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            spaces=spaces,
            name='bold_std_trans_wf',
            use_compression=not config.execution.low_mem,
            use_fieldwarp=bool(fmaps),
        )
        workflow.connect([
            (inputnode, bold_std_trans_wf,
             [('template', 'inputnode.templates'),
              ('anat2std_xfm', 'inputnode.anat2std_xfm'),
              ('bold_file', 'inputnode.name_source'),
              ('t1w_aseg', 'inputnode.bold_aseg'),
              ('t1w_aparc', 'inputnode.bold_aparc')]),
            (bold_hmc_wf, bold_std_trans_wf, [('outputnode.xforms',
                                               'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_std_trans_wf, [('outputnode.itk_bold_to_t1',
                                               'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
             bold_std_trans_wf, [('outputnode.bold_mask',
                                  'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_std_trans_wf, [('outputnode.out_warp',
                                               'inputnode.fieldwarp')]),
            (bold_std_trans_wf, outputnode,
             [('outputnode.bold_std', 'bold_std'),
              ('outputnode.bold_std_ref', 'bold_std_ref'),
              ('outputnode.bold_mask_std', 'bold_mask_std')]),
        ])

        if freesurfer:
            workflow.connect([
                (bold_std_trans_wf, func_derivatives_wf, [
                    ('outputnode.bold_aseg_std', 'inputnode.bold_aseg_std'),
                    ('outputnode.bold_aparc_std', 'inputnode.bold_aparc_std'),
                ]),
                (bold_std_trans_wf, outputnode,
                 [('outputnode.bold_aseg_std', 'bold_aseg_std'),
                  ('outputnode.bold_aparc_std', 'bold_aparc_std')]),
            ])

        if not multiecho:
            workflow.connect([(bold_split, bold_std_trans_wf,
                               [('out_files', 'inputnode.bold_split')])])
        else:
            split_opt_comb = bold_split.clone(name='split_opt_comb')
            workflow.connect([(bold_t2s_wf, split_opt_comb,
                               [('outputnode.bold', 'in_file')]),
                              (split_opt_comb, bold_std_trans_wf,
                               [('out_files', 'inputnode.bold_split')])])

        # func_derivatives_wf internally parametrizes over snapshotted spaces.
        workflow.connect([
            (bold_std_trans_wf, func_derivatives_wf, [
                ('outputnode.template', 'inputnode.template'),
                ('outputnode.spatial_reference',
                 'inputnode.spatial_reference'),
                ('outputnode.bold_std_ref', 'inputnode.bold_std_ref'),
                ('outputnode.bold_std', 'inputnode.bold_std'),
                ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
            ]),
        ])

        if config.workflow.use_aroma:  # ICA-AROMA workflow
            from .confounds import init_ica_aroma_wf
            ica_aroma_wf = init_ica_aroma_wf(
                mem_gb=mem_gb['resampled'],
                metadata=metadata,
                omp_nthreads=omp_nthreads,
                use_fieldwarp=bool(fmaps),
                err_on_aroma_warn=config.workflow.aroma_err_on_warn,
                aroma_melodic_dim=config.workflow.aroma_melodic_dim,
                name='ica_aroma_wf')

            join = pe.Node(niu.Function(output_names=["out_file"],
                                        function=_to_join),
                           name='aroma_confounds')

            mrg_conf_metadata = pe.Node(niu.Merge(2),
                                        name='merge_confound_metadata',
                                        run_without_submitting=True)
            mrg_conf_metadata2 = pe.Node(DictMerge(),
                                         name='merge_confound_metadata2',
                                         run_without_submitting=True)
            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_metadata', 'confounds_metadata'),
                ]),
            ])
            workflow.connect([
                (inputnode, ica_aroma_wf, [('bold_file',
                                            'inputnode.name_source')]),
                (bold_hmc_wf, ica_aroma_wf, [('outputnode.movpar_file',
                                              'inputnode.movpar_file')]),
                (bold_reference_wf, ica_aroma_wf, [('outputnode.skip_vols',
                                                    'inputnode.skip_vols')]),
                (bold_confounds_wf, join, [('outputnode.confounds_file',
                                            'in_file')]),
                (bold_confounds_wf, mrg_conf_metadata,
                 [('outputnode.confounds_metadata', 'in1')]),
                (ica_aroma_wf, join, [('outputnode.aroma_confounds',
                                       'join_file')]),
                (ica_aroma_wf, mrg_conf_metadata,
                 [('outputnode.aroma_metadata', 'in2')]),
                (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
                (ica_aroma_wf, outputnode,
                 [('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                  ('outputnode.melodic_mix', 'melodic_mix'),
                  ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')
                  ]),
                (join, outputnode, [('out_file', 'confounds')]),
                (mrg_conf_metadata2, outputnode, [('out_dict',
                                                   'confounds_metadata')]),
                (bold_std_trans_wf, ica_aroma_wf,
                 [('outputnode.bold_std', 'inputnode.bold_std'),
                  ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
                  ('outputnode.spatial_reference',
                   'inputnode.spatial_reference')]),
            ])

    # SURFACES ##################################################################################
    # Freesurfer
    freesurfer_spaces = spaces.get_fs_spaces()
    if freesurfer and freesurfer_spaces:
        config.loggers.workflow.debug(
            'Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(
            mem_gb=mem_gb['resampled'],
            surface_spaces=freesurfer_spaces,
            medial_surface_nan=config.workflow.medial_surface_nan,
            name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf,
             [('t1w_preproc', 'inputnode.t1w_preproc'),
              ('subjects_dir', 'inputnode.subjects_dir'),
              ('subject_id', 'inputnode.subject_id'),
              ('t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm')]),
            (bold_t1_trans_wf, bold_surf_wf, [('outputnode.bold_t1',
                                               'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
            (bold_surf_wf, func_derivatives_wf, [('outputnode.target',
                                                  'inputnode.surf_refs')]),
        ])

        # CIFTI output
        if config.workflow.cifti_output:
            from .resampling import init_bold_grayords_wf
            bold_grayords_wf = init_bold_grayords_wf(
                grayord_density=config.workflow.cifti_output,
                mem_gb=mem_gb['resampled'],
                repetition_time=metadata['RepetitionTime'])

            workflow.connect([
                (inputnode, bold_grayords_wf, [('subjects_dir',
                                                'inputnode.subjects_dir')]),
                (bold_std_trans_wf, bold_grayords_wf,
                 [('outputnode.bold_std', 'inputnode.bold_std'),
                  ('outputnode.spatial_reference',
                   'inputnode.spatial_reference')]),
                (bold_surf_wf, bold_grayords_wf, [
                    ('outputnode.surfaces', 'inputnode.surf_files'),
                    ('outputnode.target', 'inputnode.surf_refs'),
                ]),
                (bold_grayords_wf, outputnode,
                 [('outputnode.cifti_bold', 'bold_cifti'),
                  ('outputnode.cifti_variant', 'cifti_variant'),
                  ('outputnode.cifti_metadata', 'cifti_metadata'),
                  ('outputnode.cifti_density', 'cifti_density')]),
            ])

    if spaces.get_spaces(nonstandard=False, dim=(3, )):
        carpetplot_wf = init_carpetplot_wf(
            mem_gb=mem_gb['resampled'],
            metadata=metadata,
            cifti_output=config.workflow.cifti_output,
            name='carpetplot_wf')

        if config.workflow.cifti_output:
            workflow.connect(bold_grayords_wf, 'outputnode.cifti_bold',
                             carpetplot_wf, 'inputnode.cifti_bold')
        else:
            # Xform to 'MNI152NLin2009cAsym' is always computed.
            carpetplot_select_std = pe.Node(KeySelect(
                fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'),
                                            name='carpetplot_select_std',
                                            run_without_submitting=True)

            workflow.connect([
                (inputnode, carpetplot_select_std, [('std2anat_xfm',
                                                     'std2anat_xfm'),
                                                    ('template', 'keys')]),
                (carpetplot_select_std, carpetplot_wf,
                 [('std2anat_xfm', 'inputnode.std2anat_xfm')]),
                (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
                 carpetplot_wf, [('outputnode.bold', 'inputnode.bold'),
                                 ('outputnode.bold_mask',
                                  'inputnode.bold_mask')]),
                (bold_reg_wf, carpetplot_wf, [('outputnode.itk_t1_to_bold',
                                               'inputnode.t1_bold_xform')]),
            ])

        workflow.connect([(bold_confounds_wf, carpetplot_wf, [
            ('outputnode.confounds_file', 'inputnode.confounds_file')
        ])])

    # REPORTING ############################################################
    reportlets_dir = str(config.execution.work_dir / 'reportlets')
    ds_report_summary = pe.Node(DerivativesDataSink(desc='summary',
                                                    keep_dtype=True),
                                name='ds_report_summary',
                                run_without_submitting=True,
                                mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, desc='validation', keep_dtype=True),
                                   name='ds_report_validation',
                                   run_without_submitting=True,
                                   mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation,
         [('outputnode.validation_report', 'in_file')]),
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
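
# --- Illustration (not part of the function above) ---
# Hypothetical driver: init_func_preproc_wf assumes an initialized fmriprep
# config (see the mock_config example in the docstring). The path below is a
# placeholder and the plugin arguments are plain nipype, not fmriprep-specific.
bold_file = '/data/bids/sub-01/func/sub-01_task-rest_bold.nii.gz'  # placeholder
func_wf = init_func_preproc_wf(bold_file)
func_wf.base_dir = '/tmp/work'  # placeholder working directory
func_wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})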
Example #7
def init_syn_sdc_wf(omp_nthreads,
                    bold_pe=None,
                    atlas_threshold=3,
                    name='syn_sdc_wf'):
    """
    This workflow takes a skull-stripped T1w image and reference BOLD image and
    estimates a susceptibility distortion correction warp, using ANTs symmetric
    normalization (SyN) and the average fieldmap atlas described in
    [Treiber2016]_.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.

    This technique is a variation on those developed in [Huntenburg2014]_ and
    [Wang2017]_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.syn import init_syn_sdc_wf
        wf = init_syn_sdc_wf(
            bold_pe='j',
            omp_nthreads=8)

    **Inputs**

        bold_ref
            reference image
        bold_ref_brain
            skull-stripped reference image
        template : str
            Name of template targeted by ``template`` output space
        t1_brain
            skull-stripped, bias-corrected structural image
        t1_2_mni_reverse_transform
            inverse registration transform of T1w image to MNI template

    **Outputs**

        out_reference
            the ``bold_ref`` image after unwarping
        out_reference_brain
            the ``bold_ref_brain`` image after unwarping
        out_warp
            the corresponding :abbr:`DFM (displacements field map)` compatible with
            ANTs
        out_mask
            mask of the unwarped input file

    """

    if bold_pe is None or bold_pe[0] not in ['i', 'j']:
        LOGGER.warning(
            'Incorrect phase-encoding direction, assuming PA (posterior-to-anterior).'
        )
        bold_pe = 'j'

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on *fMRIPrep*'s *fieldmap-less* approach.
The deformation field is that resulting from co-registering the BOLD reference
to the same-subject T1w-reference with its intensity inverted [@fieldmapless1;
@fieldmapless2].
Registration is performed with `antsRegistration` (ANTs {ants_ver}), and
the process regularized by constraining deformation to be nonzero only
along the phase-encoding direction, and modulated with an average fieldmap
template [@fieldmapless3].
""".format(ants_ver=Registration().version or '<ver>')
    inputnode = pe.Node(niu.IdentityInterface([
        'bold_ref', 'bold_ref_brain', 'template', 't1_brain',
        't1_2_mni_reverse_transform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['out_reference', 'out_reference_brain', 'out_mask', 'out_warp']),
                         name='outputnode')

    # Collect predefined data
    # Atlas image and registration affine
    atlas_img = pkgr.resource_filename('fmriprep', 'data/fmap_atlas.nii.gz')
    # Registration specifications
    affine_transform = pkgr.resource_filename('fmriprep', 'data/affine.json')
    syn_transform = pkgr.resource_filename('fmriprep',
                                           'data/susceptibility_syn.json')

    invert_t1w = pe.Node(Rescale(invert=True), name='invert_t1w', mem_gb=0.3)

    ref_2_t1 = pe.Node(Registration(from_file=affine_transform),
                       name='ref_2_t1',
                       n_procs=omp_nthreads)
    t1_2_ref = pe.Node(ApplyTransforms(invert_transform_flags=[True]),
                       name='t1_2_ref',
                       n_procs=omp_nthreads)

    # 1) BOLD -> T1; 2) MNI -> T1; 3) ATLAS -> MNI
    transform_list = pe.Node(niu.Merge(3),
                             name='transform_list',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Inverting (1), then applying in reverse order:
    #
    # ATLAS -> MNI -> T1 -> BOLD
    atlas_2_ref = pe.Node(
        ApplyTransforms(invert_transform_flags=[True, False, False]),
        name='atlas_2_ref',
        n_procs=omp_nthreads,
        mem_gb=0.3)
    atlas_2_ref.inputs.input_image = atlas_img

    threshold_atlas = pe.Node(fsl.maths.MathsCommand(
        args='-thr {:.8g} -bin'.format(atlas_threshold),
        output_datatype='char'),
                              name='threshold_atlas',
                              mem_gb=0.3)

    fixed_image_masks = pe.Node(niu.Merge(2),
                                name='fixed_image_masks',
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
    fixed_image_masks.inputs.in1 = 'NULL'

    restrict = [[int(bold_pe[0] == 'i'), int(bold_pe[0] == 'j'), 0]] * 2
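    # The triplets above supply one [x, y, z] weight per registration stage
    # (hence the ``* 2``); a 1 permits and a 0 suppresses deformation along
    # that axis, so the estimated warp is nonzero only along the
    # phase-encoding direction ('i' -> x, 'j' -> y).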
    syn = pe.Node(Registration(from_file=syn_transform,
                               restrict_deformation=restrict),
                  name='syn',
                  n_procs=omp_nthreads)

    unwarp_ref = pe.Node(ApplyTransforms(dimension=3,
                                         float=True,
                                         interpolation='LanczosWindowedSinc'),
                         name='unwarp_ref')

    skullstrip_bold_wf = init_skullstrip_bold_wf()

    workflow.connect([
        (inputnode, invert_t1w, [('t1_brain', 'in_file'),
                                 ('bold_ref', 'ref_file')]),
        (inputnode, ref_2_t1, [('bold_ref_brain', 'moving_image')]),
        (invert_t1w, ref_2_t1, [('out_file', 'fixed_image')]),
        (inputnode, t1_2_ref, [('bold_ref', 'reference_image')]),
        (invert_t1w, t1_2_ref, [('out_file', 'input_image')]),
        (ref_2_t1, t1_2_ref, [('forward_transforms', 'transforms')]),
        (ref_2_t1, transform_list, [('forward_transforms', 'in1')]),
        (inputnode, transform_list, [('t1_2_mni_reverse_transform', 'in2'),
                                     (('template', _prior_path), 'in3')]),
        (inputnode, atlas_2_ref, [('bold_ref', 'reference_image')]),
        (transform_list, atlas_2_ref, [('out', 'transforms')]),
        (atlas_2_ref, threshold_atlas, [('output_image', 'in_file')]),
        (threshold_atlas, fixed_image_masks, [('out_file', 'in2')]),
        (inputnode, syn, [('bold_ref_brain', 'moving_image')]),
        (t1_2_ref, syn, [('output_image', 'fixed_image')]),
        (fixed_image_masks, syn, [('out', 'fixed_image_masks')]),
        (syn, outputnode, [('forward_transforms', 'out_warp')]),
        (syn, unwarp_ref, [('forward_transforms', 'transforms')]),
        (inputnode, unwarp_ref, [('bold_ref', 'reference_image'),
                                 ('bold_ref', 'input_image')]),
        (unwarp_ref, skullstrip_bold_wf, [('output_image', 'inputnode.in_file')
                                          ]),
        (unwarp_ref, outputnode, [('output_image', 'out_reference')]),
        (skullstrip_bold_wf, outputnode,
         [('outputnode.skull_stripped_file', 'out_reference_brain'),
          ('outputnode.mask_file', 'out_mask')]),
    ])

    return workflow
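
# --- Illustration (not part of the workflow above) ---
# Hypothetical driver showing how the inputnode fields map onto files; all
# paths and the template name are placeholders rather than values from the
# source.
syn_wf = init_syn_sdc_wf(omp_nthreads=4, bold_pe='j')
syn_wf.inputs.inputnode.bold_ref = 'sub-01_boldref.nii.gz'
syn_wf.inputs.inputnode.bold_ref_brain = 'sub-01_boldref_brain.nii.gz'
syn_wf.inputs.inputnode.t1_brain = 'sub-01_desc-brain_T1w.nii.gz'
syn_wf.inputs.inputnode.t1_2_mni_reverse_transform = 'mni_to_t1w_xfm.h5'
syn_wf.inputs.inputnode.template = 'MNI152NLin2009cAsym'
# syn_wf.run()  # estimates the SyN-based SDC warp; results appear on outputnode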
Example #8
def init_bold_confs_wf(
    mem_gb,
    metadata,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    freesurfer=False,
    name="bold_confs_wf",
):
    """
    Build a workflow to generate and write out confounding signals.

    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.
    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)


    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.bold.confounds import init_bold_confs_wf
            wf = init_bold_confs_wf(
                mem_gb=1,
                metadata={},
                regressors_all_comps=False,
                regressors_dvars_th=1.5,
                regressors_fd_th=0.5,
            )

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of BOLD file in GB - please note that this size
        should be calculated after resamplings that may extend
        the FoV
    metadata : :obj:`dict`
        BIDS metadata for BOLD file
    name : :obj:`str`
        Name of workflow (default: ``bold_confs_wf``)
    regressors_all_comps : :obj:`bool`
        Indicates whether CompCor decompositions should return all
        components instead of the minimal number of components necessary
        to explain 50 percent of the variance in the decomposition mask.
    regressors_dvars_th : :obj:`float`
        Criterion for flagging DVARS outliers
    regressors_fd_th : :obj:`float`
        Criterion for flagging framewise displacement outliers

    Inputs
    ------
    bold
        BOLD image, after the prescribed corrections (STC, HMC and SDC)
        when available.
    bold_mask
        BOLD series mask
    movpar_file
        SPM-formatted motion parameters file
    rmsd_file
        Framewise displacement as measured by ``fsl_motion_outliers``.
    skip_vols
        number of non steady state volumes
    t1w_mask
        Mask of the skull-stripped template image
    t1w_tpms
        List of tissue probability maps in T1w space
    t1_bold_xform
        Affine matrix that maps the T1w space into alignment with
        the native BOLD space

    Outputs
    -------
    confounds_file
        TSV of all aggregated confounds
    rois_report
        Reportlet visualizing white-matter/CSF mask used for aCompCor,
        the ROI for tCompCor and the BOLD brain mask.
    confounds_metadata
        Confounds metadata dictionary.

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.confounds import ExpandModel, SpikeRegressors
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.images import SignalExtraction
    from niworkflows.interfaces.masks import ROIsPlot
    from niworkflows.interfaces.nibabel import ApplyMask, Binarize
    from niworkflows.interfaces.patches import (
        RobustACompCor as ACompCor,
        RobustTCompCor as TCompCor,
    )
    from niworkflows.interfaces.plotting import (CompCorVariancePlot,
                                                 ConfoundsCorrelationPlot)
    from niworkflows.interfaces.utils import (AddTSVHeader, TSV2JSON,
                                              DictMerge)
    from ...interfaces.confounds import aCompCorMasks

    gm_desc = (
        "dilating a GM mask extracted from the FreeSurfer's *aseg* segmentation"
        if freesurfer else
        "thresholding the corresponding partial volume map at 0.05")

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""\
Several confounding time-series were calculated based on the
*preprocessed BOLD*: framewise displacement (FD), DVARS and
three region-wise global signals.
FD was computed using two formulations following Power (absolute sum of
relative motions, @power_fd_dvars) and Jenkinson (relative root mean square
displacement between affines, @mcflirt).
FD and DVARS are calculated for each functional run, both using their
implementations in *Nipype* [following the definitions by @power_fd_dvars].
The three global signals are extracted within the CSF, the WM, and
the whole-brain masks.
Additionally, a set of physiological regressors were extracted to
allow for component-based noise correction [*CompCor*, @compcor].
Principal components are estimated after high-pass filtering the
*preprocessed BOLD* time-series (using a discrete cosine filter with
128s cut-off) for the two *CompCor* variants: temporal (tCompCor)
and anatomical (aCompCor).
tCompCor components are then calculated from the top 2% variable
voxels within the brain mask.
For aCompCor, three probabilistic masks (CSF, WM and combined CSF+WM)
are generated in anatomical space.
The implementation differs from that of Behzadi et al. in that instead
of eroding the masks by 2 pixels in BOLD space, a mask of voxels that
likely contain a volume fraction of GM is subtracted from the aCompCor masks.
This mask is obtained by {gm_desc}, and it ensures components are not extracted
from voxels containing a minimal fraction of GM.
Finally, these masks are resampled into BOLD space and binarized by
thresholding at 0.99 (as in the original implementation).
Components are also calculated separately within the WM and CSF masks.
For each CompCor decomposition, the *k* components with the largest singular
values are retained, such that the retained components' time series are
sufficient to explain 50 percent of variance across the nuisance mask (CSF,
WM, combined, or temporal). The remaining components are dropped from
consideration.
The head-motion estimates calculated in the correction step were also
placed within the corresponding confounds file.
The confound time series derived from head motion estimates and global
signals were expanded with the inclusion of temporal derivatives and
quadratic terms for each [@confounds_satterthwaite_2013].
Frames that exceeded a threshold of {regressors_fd_th} mm FD or
{regressors_dvars_th} standardised DVARS were annotated as motion outliers.
"""
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'rmsd_file', 'skip_vols',
        't1w_mask', 't1w_tpms', 't1_bold_xform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'confounds_file', 'confounds_metadata', 'acompcor_masks',
        'tcompcor_mask'
    ]),
                         name='outputnode')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True,
                                     save_std=True,
                                     remove_zerovariance=True),
                    name="dvars",
                    mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp",
                    mem_gb=mem_gb)

    # Generate aCompCor probseg maps
    acc_masks = pe.Node(aCompCorMasks(is_aseg=freesurfer), name="acc_masks")

    # Resample probseg maps in BOLD space via T1w-to-BOLD transform
    acc_msk_tfm = pe.MapNode(ApplyTransforms(interpolation='Gaussian',
                                             float=False),
                             iterfield=["input_image"],
                             name='acc_msk_tfm',
                             mem_gb=0.1)
    acc_msk_brain = pe.MapNode(ApplyMask(),
                               name="acc_msk_brain",
                               iterfield=["in_file"])
    acc_msk_bin = pe.MapNode(Binarize(thresh_low=0.99),
                             name='acc_msk_bin',
                             iterfield=["in_file"])
    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                mask_names=['CSF', 'WM', 'combined'],
                                merge_method='none',
                                failure_mode='NaN'),
                       name="acompcor",
                       mem_gb=mem_gb)

    tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv',
                                header_prefix='t_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                percentile_threshold=.02,
                                failure_mode='NaN'),
                       name="tcompcor",
                       mem_gb=mem_gb)

    # Set number of components
    if regressors_all_comps:
        acompcor.inputs.num_components = 'all'
        tcompcor.inputs.num_components = 'all'
    else:
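        # Retain only as many components as needed to explain 50% of the
        # variance within each decomposition mask (cf. the boilerplate above).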
        acompcor.inputs.variance_threshold = 0.5
        tcompcor.inputs.variance_threshold = 0.5

    # Set TR if present
    if 'RepetitionTime' in metadata:
        tcompcor.inputs.repetition_time = metadata['RepetitionTime']
        acompcor.inputs.repetition_time = metadata['RepetitionTime']

    # Global and segment regressors
    signals_class_labels = [
        "global_signal",
        "csf",
        "white_matter",
        "csf_wm",
        "tcompcor",
    ]
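    # NOTE: the label order must match the order of the ``label_files`` merged below:
    # in1 = whole-brain mask (global_signal), in2 = the three aCompCor masks
    # (CSF, WM, combined), in3 = the tCompCor high-variance mask.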
    merge_rois = pe.Node(niu.Merge(3, ravel_inputs=True),
                         name='merge_rois',
                         run_without_submitting=True)
    signals = pe.Node(SignalExtraction(class_labels=signals_class_labels),
                      name="signals",
                      mem_gb=mem_gb)

    # Arrange confounds
    add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]),
                               name="add_dvars_header",
                               mem_gb=0.01,
                               run_without_submitting=True)
    add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]),
                                   name="add_std_dvars_header",
                                   mem_gb=0.01,
                                   run_without_submitting=True)
    add_motion_headers = pe.Node(AddTSVHeader(
        columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
                                 name="add_motion_headers",
                                 mem_gb=0.01,
                                 run_without_submitting=True)
    add_rmsd_header = pe.Node(AddTSVHeader(columns=["rmsd"]),
                              name="add_rmsd_header",
                              mem_gb=0.01,
                              run_without_submitting=True)
    concat = pe.Node(GatherConfounds(),
                     name="concat",
                     mem_gb=0.01,
                     run_without_submitting=True)

    # CompCor metadata
    tcc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        drop_columns=['mask'],
        output=None,
        additional_metadata={'Method': 'tCompCor'},
        enforce_case=True),
                               name='tcc_metadata_fmt')
    acc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        output=None,
        additional_metadata={'Method': 'aCompCor'},
        enforce_case=True),
                               name='acc_metadata_fmt')
    mrg_conf_metadata = pe.Node(niu.Merge(3),
                                name='merge_confound_metadata',
                                run_without_submitting=True)
    mrg_conf_metadata.inputs.in3 = {
        label: {
            'Method': 'Mean'
        }
        for label in signals_class_labels
    }
    mrg_conf_metadata2 = pe.Node(DictMerge(),
                                 name='merge_confound_metadata2',
                                 run_without_submitting=True)

    # Expand model to include derivatives and quadratics
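    # Formula grammar (niworkflows ``ExpandModel``): ``rps`` are the six head-motion
    # parameters, ``wm``/``csf``/``gsr`` the tissue and global signals, ``dd1(...)``
    # adds first temporal derivatives, ``^^2`` adds quadratic terms of every regressor
    # and derivative, and ``others`` passes the remaining confounds through unchanged.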
    model_expand = pe.Node(
        ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'),
        name='model_expansion')

    # Add spike regressors
    spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th,
                                            dvars_thresh=regressors_dvars_th),
                            name='spike_regressors')

    # Generate reportlet (ROIs)
    mrg_compcor = pe.Node(niu.Merge(2, ravel_inputs=True),
                          name='mrg_compcor',
                          run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'],
                                 generate_report=True),
                        name='rois_plot',
                        mem_gb=mem_gb)

    ds_report_bold_rois = pe.Node(DerivativesDataSink(
        desc='rois', datatype="figures", dismiss_entities=("echo", )),
                                  name='ds_report_bold_rois',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (CompCor)
    mrg_cc_metadata = pe.Node(niu.Merge(2),
                              name='merge_compcor_metadata',
                              run_without_submitting=True)
    compcor_plot = pe.Node(CompCorVariancePlot(
        variance_thresholds=(0.5, 0.7, 0.9),
        metadata_sources=['tCompCor', 'aCompCor']),
                           name='compcor_plot')
    ds_report_compcor = pe.Node(DerivativesDataSink(
        desc='compcorvar', datatype="figures", dismiss_entities=("echo", )),
                                name='ds_report_compcor',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (Confound correlation)
    conf_corr_plot = pe.Node(ConfoundsCorrelationPlot(
        reference_column='global_signal', max_dim=20),
                             name='conf_corr_plot')
    ds_report_conf_corr = pe.Node(DerivativesDataSink(
        desc='confoundcorr', datatype="figures", dismiss_entities=("echo", )),
                                  name='ds_report_conf_corr',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    def _last(inlist):
        return inlist[-1]

    def _select_cols(table):
        import pandas as pd
        return [
            col for col in pd.read_table(table, nrows=2).columns
            if not col.startswith(("a_comp_cor_", "t_comp_cor_", "std_dvars"))
        ]

    workflow.connect([
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),

        # aCompCor
        (inputnode, acompcor, [("bold", "realigned_file"),
                               ("skip_vols", "ignore_initial_volumes")]),
        (inputnode, acc_masks, [("t1w_tpms", "in_vfs"),
                                (("bold", _get_zooms), "bold_zooms")]),
        (inputnode, acc_msk_tfm, [("t1_bold_xform", "transforms"),
                                  ("bold_mask", "reference_image")]),
        (inputnode, acc_msk_brain, [("bold_mask", "in_mask")]),
        (acc_masks, acc_msk_tfm, [("out_masks", "input_image")]),
        (acc_msk_tfm, acc_msk_brain, [("output_image", "in_file")]),
        (acc_msk_brain, acc_msk_bin, [("out_file", "in_file")]),
        (acc_msk_bin, acompcor, [("out_file", "mask_files")]),

        # tCompCor
        (inputnode, tcompcor, [("bold", "realigned_file"),
                               ("skip_vols", "ignore_initial_volumes"),
                               ("bold_mask", "mask_files")]),
        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (inputnode, merge_rois, [('bold_mask', 'in1')]),
        (acc_msk_bin, merge_rois, [('out_file', 'in2')]),
        (tcompcor, merge_rois, [('high_variance_masks', 'in3')]),
        (merge_rois, signals, [('out', 'label_files')]),

        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (inputnode, add_rmsd_header, [('rmsd_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_rmsd_header, concat, [('out_file', 'rmsd')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),

        # Confounds metadata
        (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]),
        (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]),
        (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),

        # Expand the model with derivatives, quadratics, and spikes
        (concat, model_expand, [('confounds_file', 'confounds_file')]),
        (model_expand, spike_regress, [('confounds_file', 'confounds_file')]),

        # Set outputs
        (spike_regress, outputnode, [('confounds_file', 'confounds_file')]),
        (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
        (tcompcor, outputnode, [("high_variance_masks", "tcompcor_mask")]),
        (acc_msk_bin, outputnode, [("out_file", "acompcor_masks")]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk_bin, mrg_compcor, [(('out_file', _last), 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
        (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]),
        (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]),
        (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]),
        (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]),
        (concat, conf_corr_plot, [('confounds_file', 'confounds_file'),
                                  (('confounds_file', _select_cols), 'columns')
                                  ]),
        (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]),
    ])

    return workflow
Example #9
def init_carpetplot_wf(mem_gb, metadata, cifti_output, name="bold_carpet_wf"):
    """
    Build a workflow to generate *carpet* plots.

    Resamples the MNI parcellation (ad-hoc parcellation derived from the
    Harvard-Oxford template and others).

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of BOLD file in GB - please note that this size
        should be calculated after resamplings that may extend
        the FoV
    metadata : :obj:`dict`
        BIDS metadata for BOLD file
    cifti_output : :obj:`bool`
        Whether the BOLD series is provided in CIFTI format; if ``True``, the
        carpet plot is built from ``cifti_bold`` and the volumetric
        parcellation is not resampled
    name : :obj:`str`
        Name of workflow (default: ``bold_carpet_wf``)

    Inputs
    ------
    bold
        BOLD image, after the prescribed corrections (STC, HMC and SDC)
        when available.
    bold_mask
        BOLD series mask
    confounds_file
        TSV of all aggregated confounds
    t1_bold_xform
        Affine matrix that maps the T1w space into alignment with
        the native BOLD space
    std2anat_xfm
        ANTs-compatible affine-and-warp transform file
    cifti_bold
        BOLD image in CIFTI format, to be used in place of volumetric BOLD

    Outputs
    -------
    out_carpetplot
        Path of the generated SVG file

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'confounds_file', 't1_bold_xform', 'std2anat_xfm',
        'cifti_bold'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_carpetplot']),
                         name='outputnode')

    # List transforms
    mrg_xfms = pe.Node(niu.Merge(2), name='mrg_xfms')

    # Warp segmentation into EPI space
    resample_parc = pe.Node(ApplyTransforms(
        dimension=3,
        input_image=str(
            get_template('MNI152NLin2009cAsym',
                         resolution=1,
                         desc='carpet',
                         suffix='dseg',
                         extension=['.nii', '.nii.gz'])),
        interpolation='MultiLabel'),
                            name='resample_parc')

    # Carpetplot and confounds plot
    conf_plot = pe.Node(FMRISummary(tr=metadata['RepetitionTime'],
                                    confounds_list=[
                                        ('global_signal', None, 'GS'),
                                        ('csf', None, 'GSCSF'),
                                        ('white_matter', None, 'GSWM'),
                                        ('std_dvars', None, 'DVARS'),
                                        ('framewise_displacement', 'mm', 'FD')
                                    ]),
                        name='conf_plot',
                        mem_gb=mem_gb)
    ds_report_bold_conf = pe.Node(DerivativesDataSink(
        desc='carpetplot',
        datatype="figures",
        extension="svg",
        dismiss_entities=("echo", )),
                                  name='ds_report_bold_conf',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow = Workflow(name=name)
    # no need for segmentations if using CIFTI
    if cifti_output:
        workflow.connect(inputnode, 'cifti_bold', conf_plot, 'in_func')
    else:
        workflow.connect([
            (inputnode, mrg_xfms, [('t1_bold_xform', 'in1'),
                                   ('std2anat_xfm', 'in2')]),
            (inputnode, resample_parc, [('bold_mask', 'reference_image')]),
            (mrg_xfms, resample_parc, [('out', 'transforms')]),
            # Carpetplot
            (inputnode, conf_plot, [('bold', 'in_func'),
                                    ('bold_mask', 'in_mask')]),
            (resample_parc, conf_plot, [('output_image', 'in_segm')])
        ])

    workflow.connect([
        (inputnode, conf_plot, [('confounds_file', 'confounds_file')]),
        (conf_plot, ds_report_bold_conf, [('out_file', 'in_file')]),
        (conf_plot, outputnode, [('out_file', 'out_carpetplot')]),
    ])
    return workflow
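A minimal instantiation sketch for the carpet-plot workflow above, assuming the fMRIPrep
module layout and hypothetical file paths; only ``RepetitionTime`` is read from the
metadata dictionary:

from fmriprep.workflows.bold.confounds import init_carpetplot_wf  # assumed import path

carpet_wf = init_carpetplot_wf(
    mem_gb=1,
    metadata={'RepetitionTime': 2.0},  # TR in seconds, taken from the BOLD sidecar
    cifti_output=False,                # volumetric branch: the parcellation is resampled
)
# Hypothetical inputs; the transforms are also required for the volumetric branch
carpet_wf.get_node('inputnode').inputs.bold = 'sub-01_task-rest_desc-preproc_bold.nii.gz'
carpet_wf.get_node('inputnode').inputs.bold_mask = 'sub-01_task-rest_desc-brain_mask.nii.gz'
carpet_wf.get_node('inputnode').inputs.confounds_file = 'sub-01_task-rest_desc-confounds_timeseries.tsv'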
Example #10
def init_bold_confs_wf(
    mem_gb,
    metadata,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    name="bold_confs_wf",
):
    """
    Build a workflow to generate and write out confounding signals.

    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.
    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering with a 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)


    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.bold.confounds import init_bold_confs_wf
            wf = init_bold_confs_wf(
                mem_gb=1,
                metadata={},
                regressors_all_comps=False,
                regressors_dvars_th=1.5,
                regressors_fd_th=0.5,
            )

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of BOLD file in GB - please note that this size
        should be calculated after resamplings that may extend
        the FoV
    metadata : :obj:`dict`
        BIDS metadata for BOLD file
    name : :obj:`str`
        Name of workflow (default: ``bold_confs_wf``)
    regressors_all_comps : :obj:`bool`
        Indicates whether CompCor decompositions should return all
        components instead of the minimal number of components necessary
        to explain 50 percent of the variance in the decomposition mask.
    regressors_dvars_th : :obj:`float`
        Criterion for flagging DVARS outliers
    regressors_fd_th : :obj:`float`
        Criterion for flagging framewise displacement outliers

    Inputs
    ------
    bold
        BOLD image, after the prescribed corrections (STC, HMC and SDC)
        when available.
    bold_mask
        BOLD series mask
    movpar_file
        SPM-formatted motion parameters file
    rmsd_file
        Framewise displacement as measured by ``fsl_motion_outliers``.
    skip_vols
        Number of non-steady-state volumes at the beginning of the run
    t1w_mask
        Mask of the skull-stripped template image
    t1w_tpms
        List of tissue probability maps in T1w space
    t1_bold_xform
        Affine matrix that maps the T1w space into alignment with
        the native BOLD space

    Outputs
    -------
    confounds_file
        TSV of all aggregated confounds
    rois_report
        Reportlet visualizing white-matter/CSF mask used for aCompCor,
        the ROI for tCompCor and the BOLD brain mask.
    confounds_metadata
        Confounds metadata dictionary.

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.confounds import ExpandModel, SpikeRegressors
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.images import SignalExtraction
    from niworkflows.interfaces.masks import ROIsPlot
    from niworkflows.interfaces.patches import (
        RobustACompCor as ACompCor,
        RobustTCompCor as TCompCor,
    )
    from niworkflows.interfaces.plotting import (CompCorVariancePlot,
                                                 ConfoundsCorrelationPlot)
    from niworkflows.interfaces.utils import (TPM2ROI, AddTPMs, AddTSVHeader,
                                              TSV2JSON, DictMerge)

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
Several confounding time-series were calculated based on the
*preprocessed BOLD*: framewise displacement (FD), DVARS and
three region-wise global signals.
FD was computed using two formulations following Power (absolute sum of
relative motions, @power_fd_dvars) and Jenkinson (relative root mean square
displacement between affines, @mcflirt).
FD and DVARS are calculated for each functional run, both using their
implementations in *Nipype* [following the definitions by @power_fd_dvars].
The three global signals are extracted within the CSF, the WM, and
the whole-brain masks.
Additionally, a set of physiological regressors were extracted to
allow for component-based noise correction [*CompCor*, @compcor].
Principal components are estimated after high-pass filtering the
*preprocessed BOLD* time-series (using a discrete cosine filter with
128s cut-off) for the two *CompCor* variants: temporal (tCompCor)
and anatomical (aCompCor).
tCompCor components are then calculated from the top 5% variable
voxels within a mask covering the subcortical regions.
This subcortical mask is obtained by heavily eroding the brain mask,
which ensures it does not include cortical GM regions.
For aCompCor, components are calculated within the intersection of
the aforementioned mask and the union of CSF and WM masks calculated
in T1w space, after their projection to the native space of each
functional run (using the inverse BOLD-to-T1w transformation). Components
are also calculated separately within the WM and CSF masks.
For each CompCor decomposition, the *k* components with the largest singular
values are retained, such that the retained components' time series are
sufficient to explain 50 percent of variance across the nuisance mask (CSF,
WM, combined, or temporal). The remaining components are dropped from
consideration.
The head-motion estimates calculated in the correction step were also
placed within the corresponding confounds file.
The confound time series derived from head motion estimates and global
signals were expanded with the inclusion of temporal derivatives and
quadratic terms for each [@confounds_satterthwaite_2013].
Frames that exceeded a threshold of {fd} mm FD or {dv} standardised DVARS
were annotated as motion outliers.
""".format(fd=regressors_fd_th, dv=regressors_dvars_th)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'rmsd_file', 'skip_vols',
        't1w_mask', 't1w_tpms', 't1_bold_xform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['confounds_file', 'confounds_metadata']),
        name='outputnode')

    # Get masks ready in T1w space
    acc_tpm = pe.Node(
        AddTPMs(indices=[1, 2]),  # BIDS convention (WM=1, CSF=2)
        name='acc_tpm')  # acc stands for aCompCor
    csf_roi = pe.Node(TPM2ROI(erode_mm=0, mask_erode_mm=30), name='csf_roi')
    wm_roi = pe.Node(
        TPM2ROI(erode_prop=0.6,
                mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='wm_roi')
    acc_roi = pe.Node(
        TPM2ROI(erode_prop=0.6,
                mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='acc_roi')

    # Map ROIs in T1w space into BOLD space
    csf_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='csf_tfm',
                      mem_gb=0.1)
    wm_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                     float=True),
                     name='wm_tfm',
                     mem_gb=0.1)
    acc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='acc_tfm',
                      mem_gb=0.1)
    tcc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='tcc_tfm',
                      mem_gb=0.1)

    # Ensure ROIs don't go off-limits (reduced FoV)
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True,
                                     save_std=True,
                                     remove_zerovariance=True),
                    name="dvars",
                    mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp",
                    mem_gb=mem_gb)

    # a/t-CompCor
    mrg_lbl_cc = pe.Node(niu.Merge(3),
                         name='merge_rois_cc',
                         run_without_submitting=True)

    tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv',
                                header_prefix='t_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                percentile_threshold=.05,
                                failure_mode='NaN'),
                       name="tcompcor",
                       mem_gb=mem_gb)

    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                mask_names=['combined', 'CSF', 'WM'],
                                merge_method='none',
                                failure_mode='NaN'),
                       name="acompcor",
                       mem_gb=mem_gb)

    # Set number of components
    if regressors_all_comps:
        acompcor.inputs.num_components = 'all'
        tcompcor.inputs.num_components = 'all'
    else:
        acompcor.inputs.variance_threshold = 0.5
        tcompcor.inputs.variance_threshold = 0.5

    # Set TR if present
    if 'RepetitionTime' in metadata:
        tcompcor.inputs.repetition_time = metadata['RepetitionTime']
        acompcor.inputs.repetition_time = metadata['RepetitionTime']

    # Global and segment regressors
    signals_class_labels = ["csf", "white_matter", "global_signal"]
    mrg_lbl = pe.Node(niu.Merge(3),
                      name='merge_rois',
                      run_without_submitting=True)
    signals = pe.Node(SignalExtraction(class_labels=signals_class_labels),
                      name="signals",
                      mem_gb=mem_gb)

    # Arrange confounds
    add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]),
                               name="add_dvars_header",
                               mem_gb=0.01,
                               run_without_submitting=True)
    add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]),
                                   name="add_std_dvars_header",
                                   mem_gb=0.01,
                                   run_without_submitting=True)
    add_motion_headers = pe.Node(AddTSVHeader(
        columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
                                 name="add_motion_headers",
                                 mem_gb=0.01,
                                 run_without_submitting=True)
    add_rmsd_header = pe.Node(AddTSVHeader(columns=["rmsd"]),
                              name="add_rmsd_header",
                              mem_gb=0.01,
                              run_without_submitting=True)
    concat = pe.Node(GatherConfounds(),
                     name="concat",
                     mem_gb=0.01,
                     run_without_submitting=True)

    # CompCor metadata
    tcc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        drop_columns=['mask'],
        output=None,
        additional_metadata={'Method': 'tCompCor'},
        enforce_case=True),
                               name='tcc_metadata_fmt')
    acc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        output=None,
        additional_metadata={'Method': 'aCompCor'},
        enforce_case=True),
                               name='acc_metadata_fmt')
    mrg_conf_metadata = pe.Node(niu.Merge(3),
                                name='merge_confound_metadata',
                                run_without_submitting=True)
    mrg_conf_metadata.inputs.in3 = {
        label: {
            'Method': 'Mean'
        }
        for label in signals_class_labels
    }
    mrg_conf_metadata2 = pe.Node(DictMerge(),
                                 name='merge_confound_metadata2',
                                 run_without_submitting=True)

    # Expand model to include derivatives and quadratics
    model_expand = pe.Node(
        ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'),
        name='model_expansion')

    # Add spike regressors
    spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th,
                                            dvars_thresh=regressors_dvars_th),
                            name='spike_regressors')

    # Generate reportlet (ROIs)
    mrg_compcor = pe.Node(niu.Merge(2),
                          name='merge_compcor',
                          run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'],
                                 generate_report=True),
                        name='rois_plot',
                        mem_gb=mem_gb)

    ds_report_bold_rois = pe.Node(DerivativesDataSink(
        desc='rois', datatype="figures", dismiss_entities=("echo", )),
                                  name='ds_report_bold_rois',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (CompCor)
    mrg_cc_metadata = pe.Node(niu.Merge(2),
                              name='merge_compcor_metadata',
                              run_without_submitting=True)
    compcor_plot = pe.Node(CompCorVariancePlot(
        variance_thresholds=(0.5, 0.7, 0.9),
        metadata_sources=['tCompCor', 'aCompCor']),
                           name='compcor_plot')
    ds_report_compcor = pe.Node(DerivativesDataSink(
        desc='compcorvar', datatype="figures", dismiss_entities=("echo", )),
                                name='ds_report_compcor',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (Confound correlation)
    conf_corr_plot = pe.Node(ConfoundsCorrelationPlot(
        reference_column='global_signal', max_dim=70),
                             name='conf_corr_plot')
    ds_report_conf_corr = pe.Node(DerivativesDataSink(
        desc='confoundcorr', datatype="figures", dismiss_entities=("echo", )),
                                  name='ds_report_conf_corr',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    def _pick_csf(files):
        return files[2]  # after smriprep#189, this is BIDS-compliant.

    def _pick_wm(files):
        return files[1]  # after smriprep#189, this is BIDS-compliant.

    workflow.connect([
        # Massage ROIs (in T1w space)
        (inputnode, acc_tpm, [('t1w_tpms', 'in_files')]),
        (inputnode, csf_roi, [(('t1w_tpms', _pick_csf), 'in_tpm'),
                              ('t1w_mask', 'in_mask')]),
        (inputnode, wm_roi, [(('t1w_tpms', _pick_wm), 'in_tpm'),
                             ('t1w_mask', 'in_mask')]),
        (inputnode, acc_roi, [('t1w_mask', 'in_mask')]),
        (acc_tpm, acc_roi, [('out_file', 'in_tpm')]),
        # Map ROIs to BOLD
        (inputnode, csf_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, csf_tfm, [('roi_file', 'input_image')]),
        (inputnode, wm_tfm, [('bold_mask', 'reference_image'),
                             ('t1_bold_xform', 'transforms')]),
        (wm_roi, wm_tfm, [('roi_file', 'input_image')]),
        (inputnode, acc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (acc_roi, acc_tfm, [('roi_file', 'input_image')]),
        (inputnode, tcc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, tcc_tfm, [('eroded_mask', 'input_image')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),

        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (tcc_tfm, tcc_msk, [('output_image', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),

        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (acc_tfm, acc_msk, [('output_image', 'roi_file')]),
        (acc_msk, mrg_lbl_cc, [('out', 'in1')]),
        (csf_msk, mrg_lbl_cc, [('out', 'in2')]),
        (wm_msk, mrg_lbl_cc, [('out', 'in3')]),
        (mrg_lbl_cc, acompcor, [('out', 'mask_files')]),

        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (csf_tfm, csf_msk, [('output_image', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (wm_tfm, wm_msk, [('output_image', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),

        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (inputnode, add_rmsd_header, [('rmsd_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_rmsd_header, concat, [('out_file', 'rmsd')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),

        # Confounds metadata
        (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]),
        (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]),
        (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),

        # Expand the model with derivatives, quadratics, and spikes
        (concat, model_expand, [('confounds_file', 'confounds_file')]),
        (model_expand, spike_regress, [('confounds_file', 'confounds_file')]),

        # Set outputs
        (spike_regress, outputnode, [('confounds_file', 'confounds_file')]),
        (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
        (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]),
        (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]),
        (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]),
        (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]),
        (concat, conf_corr_plot, [('confounds_file', 'confounds_file')]),
        (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]),
    ])

    return workflow
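Downstream, the aggregated confounds file can be loaded with pandas to assemble a
nuisance design matrix; a brief sketch assuming fMRIPrep's column naming for the
expanded motion parameters and spike regressors (the file name is hypothetical):

import pandas as pd

confounds = pd.read_table('sub-01_task-rest_desc-confounds_timeseries.tsv')

# 24-parameter motion model: 6 realignment parameters, their first derivatives,
# and the quadratics of both (columns produced by the model expansion above)
motion_24 = confounds.filter(regex=r'^(trans|rot)_[xyz](_derivative1)?(_power2)?$')

# One spike regressor per motion-outlier frame
spikes = confounds.filter(regex=r'^motion_outlier')

design = pd.concat([motion_24, spikes], axis=1).fillna(0)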
Example #11
def init_bold_t1_trans_wf(freesurfer,
                          mem_gb,
                          omp_nthreads,
                          multiecho=False,
                          use_fieldwarp=False,
                          use_compression=True,
                          name='bold_t1_trans_wf'):
    """
    This workflow registers the reference BOLD image to T1-space, using a
    boundary-based registration (BBR) cost function.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.registration import init_bold_t1_trans_wf
        wf = init_bold_t1_trans_wf(freesurfer=True,
                                   mem_gb=3,
                                   omp_nthreads=1)

    **Parameters**

        freesurfer : bool
            Enable FreeSurfer functional registration (bbregister)
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to T1
        multiecho : bool
            If multiecho data was supplied, HMC already performed
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        name : str
            Name of workflow (default: ``bold_t1_trans_wf``)

    **Inputs**

        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        ref_bold_brain
            Reference image to which BOLD series is aligned
            If ``fieldwarp == True``, ``ref_bold_brain`` should be unwarped
        ref_bold_mask
            Skull-stripping mask of reference image
        t1_brain
            Skull-stripped bias-corrected structural template image
        t1_mask
            Mask of the skull-stripped template image
        t1_aseg
            FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        t1_aparc
            FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_split
            Individual 3D BOLD volumes, not motion corrected
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold_t1
            Motion-corrected BOLD series in T1 space
        bold_t1_ref
            Reference, contrast-enhanced summary of the motion-corrected BOLD series in T1w space
        bold_mask_t1
            BOLD mask in T1 space
        bold_aseg_t1
            FreeSurfer's ``aseg.mgz`` atlas, in T1w-space at the BOLD resolution
            (only if ``recon-all`` was run).
        bold_aparc_t1
            FreeSurfer's ``aparc+aseg.mgz`` atlas, in T1w-space at the BOLD resolution
            (only if ``recon-all`` was run).


    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`
        * :py:func:`~fmriprep.workflows.bold.registration.init_fsl_bbr_wf`

    """
    from .util import init_bold_reference_wf
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'name_source', 'ref_bold_brain', 'ref_bold_mask', 't1_brain',
        't1_mask', 't1_aseg', 't1_aparc', 'bold_split', 'fieldwarp',
        'hmc_xforms', 'itk_bold_to_t1'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1',
        'bold_aparc_t1'
    ]),
                         name='outputnode')

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB

    mask_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_t1w_tfm',
                           mem_gb=0.1)

    workflow.connect([
        (inputnode, gen_ref, [('ref_bold_brain', 'moving_image'),
                              ('t1_brain', 'fixed_image'),
                              ('t1_mask', 'fov_mask')]),
        (inputnode, mask_t1w_tfm, [('ref_bold_mask', 'input_image')]),
        (gen_ref, mask_t1w_tfm, [('out_file', 'reference_image')]),
        (inputnode, mask_t1w_tfm, [('itk_bold_to_t1', 'transforms')]),
        (mask_t1w_tfm, outputnode, [('output_image', 'bold_mask_t1')]),
    ])

    if freesurfer:
        # Resample aseg and aparc in T1w space (no transforms needed)
        aseg_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                               transforms='identity',
                                               float=True),
                               name='aseg_t1w_tfm',
                               mem_gb=0.1)
        aparc_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                transforms='identity',
                                                float=True),
                                name='aparc_t1w_tfm',
                                mem_gb=0.1)

        workflow.connect([
            (inputnode, aseg_t1w_tfm, [('t1_aseg', 'input_image')]),
            (inputnode, aparc_t1w_tfm, [('t1_aparc', 'input_image')]),
            (gen_ref, aseg_t1w_tfm, [('out_file', 'reference_image')]),
            (gen_ref, aparc_t1w_tfm, [('out_file', 'reference_image')]),
            (aseg_t1w_tfm, outputnode, [('output_image', 'bold_aseg_t1')]),
            (aparc_t1w_tfm, outputnode, [('output_image', 'bold_aparc_t1')]),
        ])

    bold_to_t1w_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_t1w_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    # merge 3D volumes into 4D timeseries
    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb)

    # Generate a reference on the target T1w space
    gen_final_ref = init_bold_reference_wf(omp_nthreads, pre_mask=True)

    if not multiecho:
        # Merge transforms placing the head motion correction last
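        # antsApplyTransforms composes the stack so that transforms listed later are
        # applied to the moving image first: per-volume HMC, then the (optional)
        # fieldwarp, and finally the BOLD-to-T1w affine.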
        nforms = 2 + int(use_fieldwarp)
        merge_xforms = pe.Node(niu.Merge(nforms),
                               name='merge_xforms',
                               run_without_submitting=True,
                               mem_gb=DEFAULT_MEMORY_MIN_GB)
        if use_fieldwarp:
            workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in2')])
                              ])

        workflow.connect([
            # merge transforms
            (inputnode, merge_xforms, [('hmc_xforms', 'in%d' % nforms),
                                       ('itk_bold_to_t1', 'in1')]),
            (merge_xforms, bold_to_t1w_transform, [('out', 'transforms')]),
            (inputnode, bold_to_t1w_transform, [('bold_split', 'input_image')
                                                ]),
        ])

    else:
        from nipype.interfaces.fsl import Split as FSLSplit
        bold_split = pe.Node(FSLSplit(dimension='t'),
                             name='bold_split',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, bold_split, [('bold_split', 'in_file')]),
            (bold_split, bold_to_t1w_transform, [('out_files', 'input_image')
                                                 ]),
            (inputnode, bold_to_t1w_transform, [('itk_bold_to_t1',
                                                 'transforms')]),
        ])

    workflow.connect([
        (inputnode, merge, [('name_source', 'header_source')]),
        (gen_ref, bold_to_t1w_transform, [('out_file', 'reference_image')]),
        (bold_to_t1w_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]),
        (mask_t1w_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')
                                       ]),
        (merge, outputnode, [('out_file', 'bold_t1')]),
        (gen_final_ref, outputnode, [('outputnode.ref_image', 'bold_t1_ref')]),
    ])

    return workflow
Example #12
def init_anat_derivatives_wf(
    *,
    bids_root,
    freesurfer,
    num_t1w,
    output_dir,
    spaces,
    name='anat_derivatives_wf',
    tpm_labels=("GM", "WM", "CSF"),
):
    """
    Set up a battery of datasinks to store derivatives in the right location.

    Parameters
    ----------
    bids_root : :obj:`str`
        Root path of BIDS dataset
    freesurfer : :obj:`bool`
        FreeSurfer was enabled
    num_t1w : :obj:`int`
        Number of T1w images
    output_dir : :obj:`str`
        Directory in which to save derivatives
    name : :obj:`str`
        Workflow name (default: anat_derivatives_wf)
    tpm_labels : :obj:`tuple`
        Tissue probability maps in order

    Inputs
    ------
    template
        Template space and specifications
    source_files
        List of input T1w images
    t1w_ref_xfms
        List of affine transforms to realign input T1w images
    t1w_preproc
        The T1w reference map, which is calculated as the average of bias-corrected
        and preprocessed T1w images, defining the anatomical space.
    t1w_mask
        Mask of the ``t1w_preproc``
    t1w_dseg
        Segmentation in T1w space
    t1w_tpms
        Tissue probability maps in T1w space
    anat2std_xfm
        Nonlinear spatial transform to resample imaging data given in anatomical space
        into standard space.
    std2anat_xfm
        Inverse transform of ``anat2std_xfm``
    std_t1w
        T1w reference resampled in one or more standard spaces.
    std_mask
        Mask of skull-stripped template, in standard space
    std_dseg
        Segmentation, resampled into standard space
    std_tpms
        Tissue probability maps in standard space
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to
        FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed
        subject space to T1w
    surfaces
        GIFTI surfaces (gray/white boundary, midthickness, pial, inflated)
    t1w_fs_aseg
        FreeSurfer's aseg segmentation, in native T1w space
    t1w_fs_aparc
        FreeSurfer's aparc+aseg segmentation, in native T1w space
    """
    from niworkflows.interfaces.utility import KeySelect
    from smriprep.interfaces import DerivativesDataSink
    from smriprep.workflows.outputs import (
        _bids_relative, _combine_cohort, _is_native, _drop_path
    )

    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['template', 'source_files', 't1w_ref_xfms',
                    't1w_preproc', 't1w_mask', 't1w_dseg', 't1w_tpms',
                    'anat2std_xfm', 'std2anat_xfm',
                    't1w2fsnative_xfm', 'fsnative2t1w_xfm', 'surfaces',
                    't1w_fs_aseg', 't1w_fs_aparc']),
        name='inputnode')

    raw_sources = pe.Node(niu.Function(function=_bids_relative), name='raw_sources')
    raw_sources.inputs.bids_root = bids_root

    ds_t1w_preproc = pe.Node(
        DerivativesDataSink(base_directory=output_dir, desc='preproc', compress=True),
        name='ds_t1w_preproc', run_without_submitting=True)
    ds_t1w_preproc.inputs.SkullStripped = False

    ds_t1w_mask = pe.Node(
        DerivativesDataSink(base_directory=output_dir, desc='brain', suffix='mask',
                            compress=True),
        name='ds_t1w_mask', run_without_submitting=True)
    ds_t1w_mask.inputs.Type = 'Brain'

    ds_t1w_dseg = pe.Node(
        DerivativesDataSink(base_directory=output_dir, suffix='dseg', compress=True),
        name='ds_t1w_dseg', run_without_submitting=True)

    ds_t1w_tpms = pe.Node(
        DerivativesDataSink(base_directory=output_dir, suffix='probseg', compress=True),
        name='ds_t1w_tpms', run_without_submitting=True)
    ds_t1w_tpms.inputs.label = tpm_labels

    workflow.connect([
        (inputnode, raw_sources, [('source_files', 'in_files')]),
        (inputnode, ds_t1w_preproc, [('t1w_preproc', 'in_file'),
                                     ('source_files', 'source_file')]),
        (inputnode, ds_t1w_mask, [('t1w_mask', 'in_file'),
                                  ('source_files', 'source_file')]),
        (inputnode, ds_t1w_tpms, [('t1w_tpms', 'in_file'),
                                  ('source_files', 'source_file')]),
        (inputnode, ds_t1w_dseg, [('t1w_dseg', 'in_file'),
                                  ('source_files', 'source_file')]),
        (raw_sources, ds_t1w_mask, [('out', 'RawSources')]),
    ])

    # Transforms
    if spaces.get_spaces(nonstandard=False, dim=(3,)):
        ds_std2t1w_xfm = pe.MapNode(
            DerivativesDataSink(base_directory=output_dir, to='T1w', mode='image', suffix='xfm'),
            iterfield=('in_file', 'from'),
            name='ds_std2t1w_xfm', run_without_submitting=True)

        ds_t1w2std_xfm = pe.MapNode(
            DerivativesDataSink(base_directory=output_dir, mode='image', suffix='xfm',
                                **{'from': 'T1w'}),
            iterfield=('in_file', 'to'),
            name='ds_t1w2std_xfm', run_without_submitting=True)

        workflow.connect([
            (inputnode, ds_t1w2std_xfm, [
                ('anat2std_xfm', 'in_file'),
                (('template', _combine_cohort), 'to'),
                ('source_files', 'source_file')]),
            (inputnode, ds_std2t1w_xfm, [
                ('std2anat_xfm', 'in_file'),
                (('template', _combine_cohort), 'from'),
                ('source_files', 'source_file')]),
        ])

    if num_t1w > 1:
        # Please note the dictionary unpacking to provide the from argument.
        # It is necessary because from is a protected keyword (not allowed as argument name).
        ds_t1w_ref_xfms = pe.MapNode(
            DerivativesDataSink(base_directory=output_dir, to='T1w', mode='image', suffix='xfm',
                                extension='txt', **{'from': 'orig'}),
            iterfield=['source_file', 'in_file'],
            name='ds_t1w_ref_xfms', run_without_submitting=True)
        workflow.connect([
            (inputnode, ds_t1w_ref_xfms, [('source_files', 'source_file'),
                                          ('t1w_ref_xfms', 'in_file')]),
        ])

    # Write derivatives in standard spaces specified by --output-spaces
    if getattr(spaces, '_cached') is not None and spaces.cached.references:
        from niworkflows.interfaces.space import SpaceDataSource
        from niworkflows.interfaces.utils import GenerateSamplingReference
        from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms

        spacesource = pe.Node(SpaceDataSource(),
                              name='spacesource', run_without_submitting=True)
        spacesource.iterables = ('in_tuple', [
            (s.fullname, s.spec) for s in spaces.cached.get_standard(dim=(3,))
        ])

        gen_tplid = pe.Node(niu.Function(function=_fmt_cohort),
                            name="gen_tplid", run_without_submitting=True)

        select_xfm = pe.Node(KeySelect(
            fields=['anat2std_xfm']),
            name='select_xfm', run_without_submitting=True)
        select_tpl = pe.Node(TemplateFlowSelect(), name='select_tpl', run_without_submitting=True)

        gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref', mem_gb=0.01)

        # Resample T1w-space inputs
        anat2std_t1w = pe.Node(ApplyTransforms(
            dimension=3, default_value=0, float=True,
            interpolation='LanczosWindowedSinc'), name='anat2std_t1w')

        anat2std_mask = pe.Node(ApplyTransforms(
            interpolation='MultiLabel'), name='anat2std_mask'
        )
        anat2std_dseg = pe.Node(ApplyTransforms(
            interpolation='MultiLabel'), name='anat2std_dseg'
        )
        anat2std_tpms = pe.MapNode(ApplyTransforms(
            dimension=3, default_value=0, float=True, interpolation='Gaussian'),
            iterfield=['input_image'], name='anat2std_tpms'
        )

        ds_std_t1w = pe.Node(
            DerivativesDataSink(base_directory=output_dir, desc='preproc', keep_dtype=True,
                                compress=True),
            name='ds_std_t1w', run_without_submitting=True)
        ds_std_t1w.inputs.SkullStripped = True

        ds_std_mask = pe.Node(
            DerivativesDataSink(base_directory=output_dir, desc='brain', suffix='mask',
                                compress=True),
            name='ds_std_mask', run_without_submitting=True)
        ds_std_mask.inputs.Type = 'Brain'

        ds_std_dseg = pe.Node(
            DerivativesDataSink(base_directory=output_dir, suffix='dseg', compress=True),
            name='ds_std_dseg', run_without_submitting=True)

        ds_std_tpms = pe.Node(
            DerivativesDataSink(base_directory=output_dir, suffix='probseg', compress=True),
            name='ds_std_tpms', run_without_submitting=True)

        # CRITICAL: the sequence of labels here (CSF-GM-WM) is that of the output of FSL-FAST
        #           (intensity mean, per tissue). This order HAS to be matched also by the ``tpms``
        #           output in the data/io_spec.json file.
        ds_std_tpms.inputs.label = tpm_labels
        workflow.connect([
            (inputnode, anat2std_t1w, [('t1w_preproc', 'input_image')]),
            (inputnode, anat2std_mask, [('t1w_mask', 'input_image')]),
            (inputnode, anat2std_dseg, [('t1w_dseg', 'input_image')]),
            (inputnode, anat2std_tpms, [('t1w_tpms', 'input_image')]),
            (inputnode, gen_ref, [('t1w_preproc', 'moving_image')]),
            (inputnode, select_xfm, [
                ('anat2std_xfm', 'anat2std_xfm'),
                ('template', 'keys')]),
            (spacesource, gen_tplid, [('space', 'template'),
                                      ('cohort', 'cohort')]),
            (gen_tplid, select_xfm, [('out', 'key')]),
            (spacesource, select_tpl, [('space', 'template'),
                                       ('cohort', 'cohort'),
                                       (('resolution', _no_native), 'resolution')]),
            (spacesource, gen_ref, [(('resolution', _is_native), 'keep_native')]),
            (select_tpl, gen_ref, [('t2w_file', 'fixed_image')]),
            (anat2std_t1w, ds_std_t1w, [('output_image', 'in_file')]),
            (anat2std_mask, ds_std_mask, [('output_image', 'in_file')]),
            (anat2std_dseg, ds_std_dseg, [('output_image', 'in_file')]),
            (anat2std_tpms, ds_std_tpms, [('output_image', 'in_file')]),
            (select_tpl, ds_std_mask, [(('brain_mask', _drop_path), 'RawSources')]),
        ])

        workflow.connect(
            # Connect apply transforms nodes
            [
                (gen_ref, n, [('out_file', 'reference_image')])
                for n in (anat2std_t1w, anat2std_mask, anat2std_dseg, anat2std_tpms)
            ]
            + [
                (select_xfm, n, [('anat2std_xfm', 'transforms')])
                for n in (anat2std_t1w, anat2std_mask, anat2std_dseg, anat2std_tpms)
            ]
            # Connect the source_file input of these datasinks
            + [
                (inputnode, n, [('source_files', 'source_file')])
                for n in (ds_std_t1w, ds_std_mask, ds_std_dseg, ds_std_tpms)
            ]
            # Connect the space input of these datasinks
            + [
                (spacesource, n, [
                    ('space', 'space'), ('cohort', 'cohort'), ('resolution', 'resolution')
                ])
                for n in (ds_std_t1w, ds_std_mask, ds_std_dseg, ds_std_tpms)
            ]
        )

    return workflow
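A minimal instantiation sketch for the datasink battery above, assuming niworkflows'
``SpatialReferences`` API and hypothetical paths; ``checkpoint()`` freezes the reference
list so the standard-space branch (``spaces.cached``) is taken:

from niworkflows.utils.spaces import Reference, SpatialReferences

spaces = SpatialReferences([Reference('MNI152NLin2009cAsym', {'res': 2})])
spaces.checkpoint()  # cache the references; enables writing standard-space derivatives

anat_derivatives_wf = init_anat_derivatives_wf(
    bids_root='/data/bids',          # hypothetical BIDS dataset root
    freesurfer=False,
    num_t1w=1,
    output_dir='/data/derivatives',  # hypothetical output directory
    spaces=spaces,
)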
Example #13
def init_anat_norm_wf(
    *,
    debug,
    omp_nthreads,
    templates,
    name="anat_norm_wf",
):
    """
    Build an individual spatial normalization workflow using ``antsRegistration``.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from fmriprep_rodents.patch.workflows.anatomical import init_anat_norm_wf
            wf = init_anat_norm_wf(
                debug=False,
                omp_nthreads=1,
                templates=['Fischer344'],
            )

    .. important::
        This workflow defines an iterable input over the input parameter ``templates``,
        so Nipype will produce one copy of the downstream workflows which connect
        ``poutputnode.template`` or ``poutputnode.template_spec`` to their inputs
        (``poutputnode`` stands for *parametric output node*).
        Nipype refers to this expansion of the graph as *parameterized execution*.
        If a joint list of values is required (and thus cutting off parameterization),
        please use the equivalent outputs of ``outputnode`` (which *joins* all the
        parameterized execution paths).

    Parameters
    ----------
    debug : :obj:`bool`
        Apply sloppy arguments to speed up processing. Use with caution,
        registration processes will be very inaccurate.
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use.
    templates : :obj:`list` of :obj:`str`
        List of standard space fullnames (e.g., ``MNI152NLin6Asym``
        or ``MNIPediatricAsym:cohort-4``) which are targets for spatial
        normalization.

    Inputs
    ------
    moving_image
        The input image that will be normalized to standard space.
    moving_mask
        A precise brain mask separating skull/skin/fat from brain
        structures.
    moving_segmentation
        A brain tissue segmentation of the ``moving_image``.
    moving_tpms
        tissue probability maps (TPMs) corresponding to the
        ``moving_segmentation``.
    lesion_mask
        (optional) A mask to exclude regions from the cost-function
        input domain to enable standardization of lesioned brains.
    orig_t1w
        The original T1w image from the BIDS structure.
    template
        Template name and specification

    Outputs
    -------
    standardized
        The T1w after spatial normalization, in template space.
    anat2std_xfm
        The T1w-to-template transform.
    std2anat_xfm
        The template-to-T1w transform.
    std_mask
        The ``moving_mask`` in template space (matches ``standardized`` output).
    std_dseg
        The ``moving_segmentation`` in template space (matches ``standardized``
        output).
    std_tpms
        The ``moving_tpms`` in template space (matches ``standardized`` output).
    template
        Template name extracted from the input parameter ``template``, for further
        use in downstream nodes.
    template_spec
        Template specifications extracted from the input parameter ``template``, for
        further use in downstream nodes.

    """
    from collections import defaultdict
    from nipype.interfaces.ants import ImageMath
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from smriprep.interfaces.templateflow import TemplateDesc
    from ..interfaces import RobustMNINormalization

    ntpls = len(templates)
    workflow = Workflow(name=name)

    if templates:
        workflow.__desc__ = """\
Volume-based spatial normalization to {targets} ({targets_id}) was performed through
nonlinear registration with `antsRegistration` (ANTs {ants_ver}),
using brain-extracted versions of both T1w reference and the T1w template.
The following template{tpls} selected for spatial normalization:
""".format(
            ants_ver=ANTsInfo.version() or '(version unknown)',
            targets='%s standard space%s' % (defaultdict(
                'several'.format, {1: 'one', 2: 'two', 3: 'three', 4: 'four'})[ntpls],
                's' * (ntpls != 1)),
            targets_id=', '.join(templates),
            tpls=(' was', 's were')[ntpls != 1]
        )

        # Append template citations to description
        for template in templates:
            template_meta = get_metadata(template.split(':')[0])
            template_refs = ['@%s' % template.split(':')[0].lower()]

            if template_meta.get('RRID', None):
                template_refs += ['RRID:%s' % template_meta['RRID']]

            workflow.__desc__ += """\
*{template_name}* [{template_refs}; TemplateFlow ID: {template}]""".format(
                template=template,
                template_name=template_meta['Name'],
                template_refs=', '.join(template_refs)
            )
            workflow.__desc__ += (', ', '.')[template == templates[-1]]

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'lesion_mask',
        'moving_image',
        'moving_mask',
        'moving_segmentation',
        'moving_tpms',
        'orig_t1w',
        'template',
    ]), name='inputnode')
    inputnode.iterables = [('template', templates)]

    out_fields = [
        'anat2std_xfm',
        'standardized',
        'std2anat_xfm',
        'std_dseg',
        'std_mask',
        'std_tpms',
        'template',
        'template_spec',
    ]
    poutputnode = pe.Node(niu.IdentityInterface(fields=out_fields), name='poutputnode')

    split_desc = pe.Node(TemplateDesc(), run_without_submitting=True, name='split_desc')

    tf_select = pe.Node(TemplateFlowSelect(),
                        name='tf_select', run_without_submitting=True)

    # With the improvements from nipreps/niworkflows#342 this truncation is now necessary
    trunc_mov = pe.Node(ImageMath(operation='TruncateImageIntensity', op2='0.01 0.999 256'),
                        name='trunc_mov')

    registration = pe.Node(RobustMNINormalization(
        float=True, flavor=['precise', 'testing'][debug],
    ), name='registration', n_procs=omp_nthreads, mem_gb=2)

    # Resample T1w-space inputs
    tpl_moving = pe.Node(ApplyTransforms(
        dimension=3, default_value=0, float=True,
        interpolation='LanczosWindowedSinc'), name='tpl_moving')

    std_mask = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='std_mask')
    std_dseg = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='std_dseg')

    std_tpms = pe.MapNode(ApplyTransforms(dimension=3, default_value=0, float=True,
                                          interpolation='Gaussian'),
                          iterfield=['input_image'], name='std_tpms')

    workflow.connect([
        (inputnode, split_desc, [('template', 'template')]),
        (inputnode, poutputnode, [('template', 'template')]),
        (inputnode, trunc_mov, [('moving_image', 'op1')]),
        (inputnode, registration, [
            ('moving_mask', 'moving_mask'),
            ('lesion_mask', 'lesion_mask')]),
        (inputnode, tpl_moving, [('moving_image', 'input_image')]),
        (inputnode, std_mask, [('moving_mask', 'input_image')]),
        (split_desc, tf_select, [('name', 'template'),
                                 ('spec', 'template_spec')]),
        (split_desc, registration, [('name', 'template'),
                                    (('spec', _no_atlas), 'template_spec')]),
        (tf_select, tpl_moving, [('t2w_file', 'reference_image')]),
        (tf_select, std_mask, [('t2w_file', 'reference_image')]),
        (tf_select, std_dseg, [('t2w_file', 'reference_image')]),
        (tf_select, std_tpms, [('t2w_file', 'reference_image')]),
        (trunc_mov, registration, [
            ('output_image', 'moving_image')]),
        (registration, tpl_moving, [('composite_transform', 'transforms')]),
        (registration, std_mask, [('composite_transform', 'transforms')]),
        (inputnode, std_dseg, [('moving_segmentation', 'input_image')]),
        (registration, std_dseg, [('composite_transform', 'transforms')]),
        (inputnode, std_tpms, [('moving_tpms', 'input_image')]),
        (registration, std_tpms, [('composite_transform', 'transforms')]),
        (registration, poutputnode, [
            ('composite_transform', 'anat2std_xfm'),
            ('inverse_composite_transform', 'std2anat_xfm')]),
        (tpl_moving, poutputnode, [('output_image', 'standardized')]),
        (std_mask, poutputnode, [('output_image', 'std_mask')]),
        (std_dseg, poutputnode, [('output_image', 'std_dseg')]),
        (std_tpms, poutputnode, [('output_image', 'std_tpms')]),
        (split_desc, poutputnode, [('spec', 'template_spec')]),
    ])

    # Provide synchronized output
    outputnode = pe.JoinNode(niu.IdentityInterface(fields=out_fields),
                             name='outputnode', joinsource='inputnode')
    workflow.connect([
        (poutputnode, outputnode, [(f, f) for f in out_fields]),
    ])

    return workflow
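# Editor's note: a minimal sketch (hypothetical node names, not part of the module
# above) of the iterable/JoinNode pattern described in the docstring: ``inputnode``
# iterates over the templates, ``poutputnode`` remains parameterized (one copy per
# template), and a JoinNode with ``joinsource='inputnode'`` collapses the parallel
# branches back into a single list on ``outputnode``.
from nipype.pipeline import engine as pe
from nipype.interfaces import utility as niu


def _demo_parameterized_join(templates=("TplA", "TplB")):
    wf = pe.Workflow(name="param_join_demo")
    inputnode = pe.Node(niu.IdentityInterface(fields=["template"]), name="inputnode")
    inputnode.iterables = [("template", list(templates))]

    # One copy of this node is instantiated per value of 'template'
    poutputnode = pe.Node(niu.IdentityInterface(fields=["template"]), name="poutputnode")

    # The JoinNode gathers the parameterized results into one list
    outputnode = pe.JoinNode(niu.IdentityInterface(fields=["template"]),
                             name="outputnode", joinsource="inputnode")
    wf.connect([
        (inputnode, poutputnode, [("template", "template")]),
        (poutputnode, outputnode, [("template", "template")]),
    ])
    return wf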
Example #14
def init_unwarp_wf(omp_nthreads=1, debug=False, name="unwarp_wf"):
    """
    Set up a workflow that unwarps the input :abbr:`EPI (echo-planar imaging)` dataset.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.apply.correction import init_unwarp_wf
            wf = init_unwarp_wf(omp_nthreads=2)

    Parameters
    ----------
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use.
    name : :obj:`str`
        Unique name of this workflow.
    debug : :obj:`bool`
        Whether to run in *sloppy* mode.

    Inputs
    ------
    distorted
        the target EPI image.
    metadata
        dictionary of metadata corresponding to the target EPI image
    fmap_coeff
        fieldmap coefficients in distorted EPI space.

    Outputs
    -------
    fieldmap
        the displacements field interpolated from the B-Spline coefficients
        and scaled by the appropriate parameters (readout time of the EPI
        target and voxel size along PE).
    corrected
        the target EPI reference image, after applying unwarping.
    corrected_mask
        a fast mask calculated from the corrected EPI reference.

    """
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from ...interfaces.epi import GetReadoutTime
    from ...interfaces.bspline import Coefficients2Warp
    from ..ancillary import init_brainextraction_wf

    workflow = Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=["distorted", "metadata", "fmap_coeff"]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["corrected", "fieldmap", "corrected_mask"]),
        name="outputnode",
    )

    rotime = pe.Node(GetReadoutTime(), name="rotime")
    rotime.interface._always_run = debug
    resample = pe.Node(Coefficients2Warp(low_mem=debug), name="resample")
    unwarp = pe.Node(ApplyTransforms(dimension=3, interpolation="BSpline"),
                     name="unwarp")
    brainextraction_wf = init_brainextraction_wf()

    # fmt:off
    workflow.connect([
        (inputnode, rotime, [("distorted", "in_file"),
                             ("metadata", "metadata")]),
        (inputnode, resample, [("distorted", "in_target"),
                               ("fmap_coeff", "in_coeff")]),
        (rotime, resample, [("readout_time", "ro_time"),
                            ("pe_direction", "pe_dir")]),
        (inputnode, unwarp, [("distorted", "reference_image"),
                             ("distorted", "input_image")]),
        (resample, unwarp, [("out_warp", "transforms")]),
        (resample, outputnode, [("out_field", "fieldmap")]),
        (unwarp, brainextraction_wf, [("output_image", "inputnode.in_file")]),
        (brainextraction_wf, outputnode, [
            ("outputnode.out_file", "corrected"),
            ("outputnode.out_mask", "corrected_mask"),
        ]),
    ])
    # fmt:on
    return workflow
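# Editor's note: hypothetical usage sketch for ``init_unwarp_wf`` above. The file
# paths are placeholders, and the metadata keys are an assumption about what
# ``GetReadoutTime`` consumes (readout time and phase-encoding direction).
def _demo_init_unwarp_wf():
    wf = init_unwarp_wf(omp_nthreads=2, debug=False)
    wf.inputs.inputnode.distorted = "sub-01_task-rest_boldref.nii.gz"  # placeholder
    wf.inputs.inputnode.metadata = {
        "TotalReadoutTime": 0.05,
        "PhaseEncodingDirection": "j-",
    }
    wf.inputs.inputnode.fmap_coeff = ["sub-01_fmapcoeff.nii.gz"]  # placeholder
    return wf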
Example #15
def init_anat_norm_wf(
    *,
    debug,
    omp_nthreads,
    templates,
    name="anat_norm_wf",
):
    """
    Build an individual spatial normalization workflow using ``antsRegistration``.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from smriprep.workflows.norm import init_anat_norm_wf
            wf = init_anat_norm_wf(
                debug=False,
                omp_nthreads=1,
                templates=['MNI152NLin2009cAsym', 'MNI152NLin6Asym'],
            )

    .. important::
        This workflow defines an iterable input over the input parameter ``templates``,
        so Nipype will produce one copy of the downstream workflows which connect
        ``poutputnode.template`` or ``poutputnode.template_spec`` to their inputs
        (``poutputnode`` stands for *parametric output node*).
        Nipype refers to this expansion of the graph as *parameterized execution*.
        If a joint list of values is required (and thus cutting off parameterization),
        please use the equivalent outputs of ``outputnode`` (which *joins* all the
        parameterized execution paths).

    Parameters
    ----------
    debug : :obj:`bool`
        Apply sloppy arguments to speed up processing. Use with caution,
        registration processes will be very inaccurate.
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use.
    templates : :obj:`list` of :obj:`str`
        List of standard space fullnames (e.g., ``MNI152NLin6Asym``
        or ``MNIPediatricAsym:cohort-4``) which are targets for spatial
        normalization.

    Inputs
    ------
    moving_image
        The input image that will be normalized to standard space.
    moving_mask
        A precise brain mask separating skull/skin/fat from brain
        structures.
    moving_segmentation
        A brain tissue segmentation of the ``moving_image``.
    moving_tpms
        tissue probability maps (TPMs) corresponding to the
        ``moving_segmentation``.
    lesion_mask
        (optional) A mask to exclude regions from the cost-function
        input domain to enable standardization of lesioned brains.
    orig_t1w
        The original T1w image from the BIDS structure.
    template
        Template name and specification

    Outputs
    -------
    standardized
        The T1w after spatial normalization, in template space.
    anat2std_xfm
        The T1w-to-template transform.
    std2anat_xfm
        The template-to-T1w transform.
    std_mask
        The ``moving_mask`` in template space (matches ``standardized`` output).
    std_dseg
        The ``moving_segmentation`` in template space (matches ``standardized``
        output).
    std_tpms
        The ``moving_tpms`` in template space (matches ``standardized`` output).
    template
        Template name extracted from the input parameter ``template``, for further
        use in downstream nodes.
    template_spec
        Template specifications extracted from the input parameter ``template``, for
        further use in downstream nodes.

    """
    ntpls = len(templates)
    workflow = Workflow(name=name)

    if templates:
        workflow.__desc__ = """\
Volume-based spatial normalization to {targets} ({targets_id}) was performed through
nonlinear registration with `antsRegistration` (ANTs {ants_ver}),
using brain-extracted versions of both T1w reference and the T1w template.
The following template{tpls} selected for spatial normalization:
""".format(
            ants_ver=ANTsInfo.version() or "(version unknown)",
            targets="%s standard space%s" % (
                defaultdict("several".format, {
                    1: "one",
                    2: "two",
                    3: "three",
                    4: "four"
                })[ntpls],
                "s" * (ntpls != 1),
            ),
            targets_id=", ".join(templates),
            tpls=(" was", "s were")[ntpls != 1],
        )

        # Append template citations to description
        for template in templates:
            template_meta = get_metadata(template.split(":")[0])
            template_refs = ["@%s" % template.split(":")[0].lower()]

            if template_meta.get("RRID", None):
                template_refs += ["RRID:%s" % template_meta["RRID"]]

            workflow.__desc__ += """\
*{template_name}* [{template_refs}; TemplateFlow ID: {template}]""".format(
                template=template,
                template_name=template_meta["Name"],
                template_refs=", ".join(template_refs),
            )
            workflow.__desc__ += ".\n" if template == templates[-1] else ", "

    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "lesion_mask",
            "moving_image",
            "moving_mask",
            "moving_segmentation",
            "moving_tpms",
            "orig_t1w",
            "template",
        ]),
        name="inputnode",
    )
    inputnode.iterables = [("template", templates)]

    out_fields = [
        "anat2std_xfm",
        "standardized",
        "std2anat_xfm",
        "std_dseg",
        "std_mask",
        "std_tpms",
        "template",
        "template_spec",
    ]
    poutputnode = pe.Node(niu.IdentityInterface(fields=out_fields),
                          name="poutputnode")

    split_desc = pe.Node(TemplateDesc(),
                         run_without_submitting=True,
                         name="split_desc")

    tf_select = pe.Node(
        TemplateFlowSelect(resolution=1 + debug),
        name="tf_select",
        run_without_submitting=True,
    )

    # With the improvements from nipreps/niworkflows#342 this truncation is now necessary
    trunc_mov = pe.Node(
        ants.ImageMath(operation="TruncateImageIntensity",
                       op2="0.01 0.999 256"),
        name="trunc_mov",
    )

    registration = pe.Node(
        SpatialNormalization(
            float=True,
            flavor=["precise", "testing"][debug],
        ),
        name="registration",
        n_procs=omp_nthreads,
        mem_gb=2,
    )

    # Resample T1w-space inputs
    tpl_moving = pe.Node(
        ApplyTransforms(
            dimension=3,
            default_value=0,
            float=True,
            interpolation="LanczosWindowedSinc",
        ),
        name="tpl_moving",
    )

    std_mask = pe.Node(ApplyTransforms(interpolation="MultiLabel"),
                       name="std_mask")
    std_dseg = pe.Node(ApplyTransforms(interpolation="MultiLabel"),
                       name="std_dseg")

    std_tpms = pe.MapNode(
        ApplyTransforms(dimension=3,
                        default_value=0,
                        float=True,
                        interpolation="Gaussian"),
        iterfield=["input_image"],
        name="std_tpms",
    )

    # fmt:off
    workflow.connect([
        (inputnode, split_desc, [('template', 'template')]),
        (inputnode, poutputnode, [('template', 'template')]),
        (inputnode, trunc_mov, [('moving_image', 'op1')]),
        (inputnode, registration, [('moving_mask', 'moving_mask'),
                                   ('lesion_mask', 'lesion_mask')]),
        (inputnode, tpl_moving, [('moving_image', 'input_image')]),
        (inputnode, std_mask, [('moving_mask', 'input_image')]),
        (split_desc, tf_select, [('name', 'template'),
                                 ('spec', 'template_spec')]),
        (split_desc, registration, [('name', 'template'),
                                    ('spec', 'template_spec')]),
        (tf_select, tpl_moving, [('t1w_file', 'reference_image')]),
        (tf_select, std_mask, [('t1w_file', 'reference_image')]),
        (tf_select, std_dseg, [('t1w_file', 'reference_image')]),
        (tf_select, std_tpms, [('t1w_file', 'reference_image')]),
        (trunc_mov, registration, [('output_image', 'moving_image')]),
        (registration, tpl_moving, [('composite_transform', 'transforms')]),
        (registration, std_mask, [('composite_transform', 'transforms')]),
        (inputnode, std_dseg, [('moving_segmentation', 'input_image')]),
        (registration, std_dseg, [('composite_transform', 'transforms')]),
        (inputnode, std_tpms, [('moving_tpms', 'input_image')]),
        (registration, std_tpms, [('composite_transform', 'transforms')]),
        (registration, poutputnode, [('composite_transform', 'anat2std_xfm'),
                                     ('inverse_composite_transform',
                                      'std2anat_xfm')]),
        (tpl_moving, poutputnode, [('output_image', 'standardized')]),
        (std_mask, poutputnode, [('output_image', 'std_mask')]),
        (std_dseg, poutputnode, [('output_image', 'std_dseg')]),
        (std_tpms, poutputnode, [('output_image', 'std_tpms')]),
        (split_desc, poutputnode, [('spec', 'template_spec')]),
    ])
    # fmt:on

    # Provide synchronized output
    outputnode = pe.JoinNode(
        niu.IdentityInterface(fields=out_fields),
        name="outputnode",
        joinsource="inputnode",
    )
    # fmt:off
    workflow.connect([
        (poutputnode, outputnode, [(f, f) for f in out_fields]),
    ])
    # fmt:on

    return workflow
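# Editor's note: a standalone sketch (not part of the original module) of the
# boilerplate-formatting trick used in the two ``init_anat_norm_wf`` variants above,
# where ``defaultdict('several'.format, ...)`` maps the template count to a word and
# falls back to "several" for counts above four. ``_describe_targets`` is a
# hypothetical helper introduced for illustration only.
from collections import defaultdict


def _describe_targets(templates):
    ntpls = len(templates)
    word = defaultdict("several".format,
                       {1: "one", 2: "two", 3: "three", 4: "four"})[ntpls]
    return "%s standard space%s (%s)" % (word, "s" * (ntpls != 1), ", ".join(templates))


# _describe_targets(["MNI152NLin2009cAsym"])
#   -> 'one standard space (MNI152NLin2009cAsym)'
# _describe_targets(["A", "B", "C", "D", "E"])
#   -> 'several standard spaces (A, B, C, D, E)'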
Example #16
def init_bold_confs_wf(mem_gb, use_aroma, ignore_aroma_err, metadata,
                       name="bold_confs_wf"):
    """
    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.

    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``CSF``, ``WhiteMatter``, ``GlobalSignal``)
    #. DVARS - standard, nonstandard, and voxel-wise standard variants
       (``stdDVARS``, ``non-stdDVARS``, ``vx-wisestdDVARS``)
    #. Framewise displacement, based on MCFLIRT motion parameters
       (``FramewiseDisplacement``)
    #. Temporal CompCor (``tCompCorXX``)
    #. Anatomical CompCor (``aCompCorXX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``CosineXX``)
    #. Non-steady-state volumes (``NonSteadyStateXX``)
    #. Estimated head-motion parameters, in mm and rad
       (``X``, ``Y``, ``Z``, ``RotX``, ``RotY``, ``RotZ``)
    #. ICA-AROMA-identified noise components, if enabled
       (``AROMAAggrCompXX``)

    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_bold_confs_wf
        wf = init_bold_confs_wf(
            mem_gb=1,
            use_aroma=True,
            ignore_aroma_err=True,
            metadata={})

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB - please note that this size
            should be calculated after resamplings that may extend
            the FoV
        use_aroma : bool
            Perform ICA-AROMA on MNI-resampled functional series
        ignore_aroma_err : bool
            Do not fail on ICA-AROMA errors
        metadata : dict
            BIDS metadata for BOLD file

    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        movpar_file
            SPM-formatted motion parameters file
        t1_mask
            Mask of the skull-stripped template image
        t1_tpms
            List of tissue probability maps in T1w space
        t1_bold_xform
            Affine matrix that maps the T1w space into alignment with
            the native BOLD space
        bold_mni
            BOLD image resampled in MNI space (only if ``use_aroma`` enabled)
        bold_mask_mni
            Brain mask corresponding to the BOLD image resampled in MNI space
            (only if ``use_aroma`` enabled)

    **Outputs**

        confounds_file
            TSV of all aggregated confounds
        confounds_list
            List of calculated confounds for reporting
        acompcor_report
            Reportlet visualizing white-matter/CSF mask used for aCompCor
        tcompcor_report
            Reportlet visualizing ROI identified in tCompCor
        ica_aroma_report
            Reportlet visualizing MELODIC ICs, with ICA-AROMA signal/noise labels
        aroma_noise_ics
            CSV of noise components identified by ICA-AROMA
        melodic_mix
            FSL MELODIC mixing matrix
        nonaggr_denoised_file
            BOLD series with non-aggressive ICA-AROMA denoising applied

    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`

    """

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bold', 'bold_mask', 'movpar_file', 't1_mask', 't1_tpms',
                't1_bold_xform', 'bold_mni', 'bold_mask_mni']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['confounds_file', 'confounds_list', 'rois_report', 'ica_aroma_report',
                'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file']),
        name='outputnode')

    # Get masks ready in T1w space
    acc_tpm = pe.Node(AddTPMs(indices=[0, 2]), name='tpms_add_csf_wm')  # acc stands for aCompCor
    csf_roi = pe.Node(TPM2ROI(erode_mm=0, mask_erode_mm=30), name='csf_roi')
    wm_roi = pe.Node(TPM2ROI(
        erode_prop=0.6, mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='wm_roi')
    acc_roi = pe.Node(TPM2ROI(
        erode_prop=0.6, mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='acc_roi')

    # Map ROIs in T1w space into BOLD space
    csf_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                      name='csf_tfm', mem_gb=0.1)
    wm_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                     name='wm_tfm', mem_gb=0.1)
    acc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                      name='acc_tfm', mem_gb=0.1)
    tcc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                      name='tcc_tfm', mem_gb=0.1)

    # Ensure ROIs don't go off-limits (reduced FoV)
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_all=True, remove_zerovariance=True),
                    name="dvars", mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp", mem_gb=mem_gb)

    # a/t-CompCor
    non_steady_state = pe.Node(nac.NonSteadyStateDetector(), name='non_steady_state')
    tcompcor = pe.Node(nac.TCompCor(
        components_file='tcompcor.tsv', pre_filter='cosine', save_pre_filter=True,
        percentile_threshold=.05), name="tcompcor", mem_gb=mem_gb)

    acompcor = pe.Node(nac.ACompCor(
        components_file='acompcor.tsv', pre_filter='cosine', save_pre_filter=True),
        name="acompcor", mem_gb=mem_gb)

    # Set TR if present
    if 'RepetitionTime' in metadata:
        tcompcor.inputs.repetition_time = metadata['RepetitionTime']
        acompcor.inputs.repetition_time = metadata['RepetitionTime']

    # Global and segment regressors
    mrg_lbl = pe.Node(niu.Merge(3), name='merge_rois', run_without_submitting=True)
    signals = pe.Node(SignalExtraction(
        detrend=True, class_labels=["CSF", "WhiteMatter", "GlobalSignal"]),
        name="signals", mem_gb=mem_gb)

    # Arrange confounds
    add_header = pe.Node(AddTSVHeader(columns=["X", "Y", "Z", "RotX", "RotY", "RotZ"]),
                         name="add_header", mem_gb=0.01, run_without_submitting=True)
    concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01, run_without_submitting=True)

    # Generate reportlet
    mrg_compcor = pe.Node(niu.Merge(2), name='merge_compcor', run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(compress_report=True, colors=['r', 'b', 'magenta'],
                        generate_report=True), name='rois_plot')

    def _pick_csf(files):
        return files[0]

    def _pick_wm(files):
        return files[-1]
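    # As the picker names above suggest, this workflow assumes ``t1_tpms`` lists the
    # CSF probability map first and the WM probability map last; the GM map in
    # between is not picked by either helper.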

    workflow = pe.Workflow(name=name)
    workflow.connect([
        # Massage ROIs (in T1w space)
        (inputnode, acc_tpm, [('t1_tpms', 'in_files')]),
        (inputnode, csf_roi, [(('t1_tpms', _pick_csf), 'in_tpm'),
                              ('t1_mask', 'in_mask')]),
        (inputnode, wm_roi, [(('t1_tpms', _pick_wm), 'in_tpm'),
                             ('t1_mask', 'in_mask')]),
        (inputnode, acc_roi, [('t1_mask', 'in_mask')]),
        (acc_tpm, acc_roi, [('out_file', 'in_tpm')]),
        # Map ROIs to BOLD
        (inputnode, csf_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, csf_tfm, [('roi_file', 'input_image')]),
        (inputnode, wm_tfm, [('bold_mask', 'reference_image'),
                             ('t1_bold_xform', 'transforms')]),
        (wm_roi, wm_tfm, [('roi_file', 'input_image')]),
        (inputnode, acc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (acc_roi, acc_tfm, [('roi_file', 'input_image')]),
        (inputnode, tcc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, tcc_tfm, [('eroded_mask', 'input_image')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'),
                            ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),

        # Calculate nonsteady state
        (inputnode, non_steady_state, [('bold', 'in_file')]),

        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (non_steady_state, tcompcor, [('n_volumes_to_discard', 'ignore_initial_volumes')]),
        (tcc_tfm, tcc_msk, [('output_image', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),

        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (non_steady_state, acompcor, [('n_volumes_to_discard', 'ignore_initial_volumes')]),
        (acc_tfm, acc_msk, [('output_image', 'roi_file')]),
        (acc_msk, acompcor, [('out', 'mask_files')]),

        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (csf_tfm, csf_msk, [('output_image', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (wm_tfm, wm_msk, [('output_image', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),

        # Collate computed confounds together
        (inputnode, add_header, [('movpar_file', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (dvars, concat, [('out_all', 'dvars')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_header, concat, [('out_file', 'motion')]),

        # Set outputs
        (concat, outputnode, [('confounds_file', 'confounds_file'),
                              ('confounds_list', 'confounds_list')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, outputnode, [('out_report', 'rois_report')]),
    ])

    if use_aroma:
        # ICA-AROMA
        ica_aroma_wf = init_ica_aroma_wf(name='ica_aroma_wf',
                                         ignore_aroma_err=ignore_aroma_err)
        workflow.connect([
            (inputnode, ica_aroma_wf, [('bold_mni', 'inputnode.bold_mni'),
                                       ('bold_mask_mni', 'inputnode.bold_mask_mni'),
                                       ('movpar_file', 'inputnode.movpar_file')]),
            (ica_aroma_wf, concat,
                [('outputnode.aroma_confounds', 'aroma')]),
            (ica_aroma_wf, outputnode,
                [('outputnode.out_report', 'ica_aroma_report'),
                 ('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                 ('outputnode.melodic_mix', 'melodic_mix'),
                 ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')])
        ])
    return workflow
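# Editor's note: hypothetical construction sketch for ``init_bold_confs_wf`` above.
# Paths are placeholders; the anatomical inputs (t1_mask, t1_tpms, t1_bold_xform)
# normally arrive through workflow connections and are omitted here.
def _demo_bold_confs_wf():
    wf = init_bold_confs_wf(mem_gb=1.0, use_aroma=False, ignore_aroma_err=False,
                            metadata={"RepetitionTime": 2.0})
    wf.inputs.inputnode.bold = "sub-01_task-rest_desc-preproc_bold.nii.gz"     # placeholder
    wf.inputs.inputnode.bold_mask = "sub-01_task-rest_desc-brain_mask.nii.gz"  # placeholder
    wf.inputs.inputnode.movpar_file = "sub-01_task-rest_mcflirt.par"           # placeholder
    return wf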
Example #17
def init_enhance_and_skullstrip_bold_wf(
        name='enhance_and_skullstrip_bold_wf',
        pre_mask=False,
        omp_nthreads=1):
    """
    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependent)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g. a reference image
    averaging non-steady-state timepoints), and sharpens the histogram
    with the application of the N4 algorithm for removing the
    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
    mask.

    Steps of this workflow are:

      1. Calculate a tentative mask by registering (9-parameters) to *fMRIPrep*'s
         :abbr:`EPI (echo-planar imaging)` -*boldref* template, which
         is in MNI space.
         The tentative mask is obtained by resampling the MNI template's
         brainmask into *boldref*-space.
      2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
      3. Run ANTs' ``N4BiasFieldCorrection`` on the input
         :abbr:`BOLD (blood-oxygen level-dependent)` average, using the
         mask generated in 1) instead of the internal Otsu thresholding.
      4. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology
         dilation of one iteration and a sphere of 6mm as structuring element.
      5. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the latest mask calculated in 4), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 5).
      7. Calculate a final mask as the intersection of 4) and 6).
      8. Apply final mask on the enhanced reference.

    Step 1 can be skipped if the ``pre_mask`` argument is set to ``True`` and
    a tentative mask is passed in to the workflow through the ``pre_mask``
    Nipype input.


    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.util import init_enhance_and_skullstrip_bold_wf
        wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=1)

    **Parameters**
        name : str
            Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
        pre_mask : bool
            Indicates whether the ``pre_mask`` input will be set (and thus, step 1
            should be skipped).
        omp_nthreads : int
            number of threads available to parallel nodes

    **Inputs**

        in_file
            BOLD image (single volume)
        pre_mask : bool
            A tentative brain mask to initialize the workflow (requires the
            ``pre_mask`` parameter to be set to ``True``).


    **Outputs**

        bias_corrected_file
            the ``in_file`` after `N4BiasFieldCorrection`_
        skull_stripped_file
            the ``bias_corrected_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053
    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'mask_file', 'skull_stripped_file', 'bias_corrected_file']), name='outputnode')

    # Dilate pre_mask
    pre_dilate = pe.Node(fsl.DilateImage(
        operation='max', kernel_shape='sphere', kernel_size=3.0,
        internal_datatype='char'), name='pre_mask_dilate')

    # Ensure mask's header matches reference's
    check_hdr = pe.Node(MatchHeader(), name='check_hdr',
                        run_without_submitting=True)

    # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                         name='n4_correct', n_procs=1)

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    bet_dilate = pe.Node(fsl.DilateImage(
        operation='max', kernel_shape='sphere', kernel_size=6.0,
        internal_datatype='char'), name='skullstrip_first_dilate')
    bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(afni.Unifize(
        t2=True, outputtype='NIFTI_GZ',
        # Default -clfrac is 0.1, 0.4 was too conservative
        # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
        args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
        out_file="uni.nii.gz"), name='unifize')
    fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2', mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    if not pre_mask:
        bold_template = get_template('MNI152NLin2009cAsym', 'res-02_desc-fMRIPrep_boldref.nii.gz')
        brain_mask = get_template('MNI152NLin2009cAsym', 'res-02_desc-brain_mask.nii.gz')

        # Initialize transforms with antsAI
        init_aff = pe.Node(AI(
            fixed_image=str(bold_template),
            fixed_image_mask=str(brain_mask),
            metric=('Mattes', 32, 'Regular', 0.2),
            transform=('Affine', 0.1),
            search_factor=(20, 0.12),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            verbose=True),
            name='init_aff',
            n_procs=omp_nthreads)

        # Registration().version may be None
        if parseversion(Registration().version or '0.0.0') > Version('2.2.0'):
            init_aff.inputs.search_grid = (40, (0, 40, 40))

        # Set up spatial normalization
        norm = pe.Node(Registration(
            from_file=pkgr_fn(
                'fmriprep.data',
                'epi_atlasbased_brainmask.json')),
            name='norm',
            n_procs=omp_nthreads)
        norm.inputs.fixed_image = str(bold_template)
        map_brainmask = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True, input_image=str(brain_mask)),
            name='map_brainmask'
        )
        workflow.connect([
            (inputnode, init_aff, [('in_file', 'moving_image')]),
            (inputnode, map_brainmask, [('in_file', 'reference_image')]),
            (inputnode, norm, [('in_file', 'moving_image')]),
            (init_aff, norm, [('output_transform', 'initial_moving_transform')]),
            (norm, map_brainmask, [
                ('reverse_invert_flags', 'invert_transform_flags'),
                ('reverse_transforms', 'transforms')]),
            (map_brainmask, pre_dilate, [('output_image', 'in_file')]),
        ])
    else:
        workflow.connect([
            (inputnode, pre_dilate, [('pre_mask', 'in_file')]),
        ])

    workflow.connect([
        (inputnode, check_hdr, [('in_file', 'reference')]),
        (pre_dilate, check_hdr, [('out_file', 'in_file')]),
        (check_hdr, n4_correct, [('out_file', 'mask_image')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
        (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
        (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
        (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
        (bet_mask, unifize, [('out_file', 'in_file')]),
        (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
        (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [('out_file', 'in_file')]),
        (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])

    return workflow
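# Editor's note: a small nibabel/numpy sketch (not part of the workflow above) of
# what the final "intersection of both masks" step computes; in the workflow itself
# it is performed by FSL's BinaryMaths (operation='mul') on the BET and 3dAutomask
# outputs. Function and path names are hypothetical.
import nibabel as nb
import numpy as np


def _intersect_masks(mask1_path, mask2_path, out_path="combined_mask.nii.gz"):
    img1 = nb.load(mask1_path)
    img2 = nb.load(mask2_path)
    # Voxel-wise AND of two binary masks is their intersection
    data = (np.asanyarray(img1.dataobj) > 0) & (np.asanyarray(img2.dataobj) > 0)
    out = nb.Nifti1Image(data.astype("uint8"), img1.affine, img1.header)
    out.set_data_dtype("uint8")
    out.to_filename(out_path)
    return out_path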
Example #18
def init_func_preproc_wf(
    aroma_melodic_dim,
    bold2t1w_dof,
    bold_file,
    cifti_output,
    debug,
    dummy_scans,
    err_on_aroma_warn,
    fmap_bspline,
    fmap_demean,
    force_syn,
    freesurfer,
    ignore,
    low_mem,
    medial_surface_nan,
    omp_nthreads,
    output_dir,
    output_spaces,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    reportlets_dir,
    t2s_coreg,
    use_aroma,
    use_bbr,
    use_syn,
    layout=None,
    num_bold=1,
):
    """
    This workflow controls the functional preprocessing stages of FMRIPREP.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold import init_func_preproc_wf
        from collections import namedtuple, OrderedDict
        BIDSLayout = namedtuple('BIDSLayout', ['root'])
        wf = init_func_preproc_wf(
            aroma_melodic_dim=-200,
            bold2t1w_dof=9,
            bold_file='/completely/made/up/path/sub-01_task-nback_bold.nii.gz',
            cifti_output=False,
            debug=False,
            dummy_scans=None,
            err_on_aroma_warn=False,
            fmap_bspline=True,
            fmap_demean=True,
            force_syn=True,
            freesurfer=True,
            ignore=[],
            low_mem=False,
            medial_surface_nan=False,
            omp_nthreads=1,
            output_dir='.',
            output_spaces=OrderedDict([
                ('MNI152Lin', {}), ('fsaverage', {'density': '10k'}),
                ('T1w', {}), ('fsnative', {})]),
            regressors_all_comps=False,
            regressors_dvars_th=1.5,
            regressors_fd_th=0.5,
            reportlets_dir='.',
            t2s_coreg=False,
            use_aroma=False,
            use_bbr=True,
            use_syn=True,
            layout=BIDSLayout('.'),
            num_bold=1,
        )

    **Parameters**

        aroma_melodic_dim : int
            Maximum number of components identified by MELODIC within ICA-AROMA
            (default is -200, i.e., no limitation).
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        bold_file : str
            BOLD series NIfTI file
        cifti_output : bool
            Generate bold CIFTI file in output spaces
        debug : bool
            Enable debugging outputs
        dummy_scans : int or None
            Number of volumes to consider as non steady state
        err_on_aroma_warn : bool
            Do not crash on ICA-AROMA errors
        fmap_bspline : bool
            **Experimental**: Fit B-Spline field using least-squares
        fmap_demean : bool
            Demean voxel-shift map during unwarp
        force_syn : bool
            **Temporary**: Always run SyN-based SDC
        freesurfer : bool
            Enable FreeSurfer functional registration (bbregister) and resampling
            BOLD series to FreeSurfer surface meshes.
        ignore : list
            Preprocessing steps to skip (may include "slicetiming", "fieldmaps")
        low_mem : bool
            Write uncompressed .nii files in some cases to reduce memory usage
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files
        omp_nthreads : int
            Maximum number of threads an individual process may use
        output_dir : str
            Directory in which to save derivatives
        output_spaces : OrderedDict
            Ordered dictionary where keys are TemplateFlow ID strings (e.g. ``MNI152Lin``,
            ``MNI152NLin6Asym``, ``MNI152NLin2009cAsym``, or ``fsLR``), strings designating
            nonstandard references (e.g. ``T1w`` or ``anat``, ``sbref``, ``run``, etc.),
            or paths pointing to custom templates organized in a TemplateFlow-like structure.
            Values of the dictionary aggregate modifiers (e.g. the value for the key ``MNI152Lin``
            could be ``{'resolution': 2}`` if one wants the resampling to be done on the 2mm
            resolution version of the selected template).
        regressors_all_comps
            Return all CompCor component time series instead of the top fraction
        regressors_dvars_th
            Criterion for flagging DVARS outliers
        regressors_fd_th
            Criterion for flagging framewise displacement outliers
        reportlets_dir : str
            Absolute path of a directory in which reportlets will be temporarily stored
        t2s_coreg : bool
            For multiecho EPI, use the calculated T2*-map for T2*-driven coregistration
        use_aroma : bool
            Perform ICA-AROMA on MNI-resampled functional series
        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
            When using ``t2s_coreg``, BBR will be enabled by default unless
            explicitly specified otherwise.
        use_syn : bool
            **Experimental**: Enable ANTs SyN-based susceptibility distortion correction (SDC).
            If fieldmaps are present and enabled, this is not run, by default.
        layout : BIDSLayout
            BIDSLayout structure to enable metadata retrieval
        num_bold : int
            Total number of BOLD files that have been set for preprocessing
            (default is 1)

    **Inputs**

        bold_file
            BOLD series NIfTI file
        t1_preproc
            Bias-corrected structural template image
        t1_brain
            Skull-stripped ``t1_preproc``
        t1_mask
            Mask of the skull-stripped template image
        t1_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        t1_tpms
            List of tissue probability maps in T1w space
        anat2std_xfm
            ANTs-compatible affine-and-warp transform file
        std2anat_xfm
            ANTs-compatible affine-and-warp transform file (inverse)
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w


    **Outputs**

        bold_t1
            BOLD series, resampled to T1w space
        bold_mask_t1
            BOLD series mask in T1w space
        bold_std
            BOLD series, resampled to template space
        bold_mask_std
            BOLD series mask in template space
        confounds
            TSV of confounds
        surfaces
            BOLD series, resampled to FreeSurfer surfaces
        aroma_noise_ics
            Noise components identified by ICA-AROMA
        melodic_mix
            FSL MELODIC mixing matrix
        bold_cifti
            BOLD CIFTI image
        cifti_variant
            combination of target spaces for `bold_cifti`


    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.util.init_bold_reference_wf`
        * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
        * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
        * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
        * :py:func:`~fmriprep.workflows.bold.registration.init_bold_t1_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_std_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.pepolar.init_pepolar_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_fmap_estimator_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_sdc_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_nonlinear_sdc_wf`

    """
    from .resampling import NONSTANDARD_REFERENCES
    from ..fieldmap.base import init_sdc_wf  # Avoid circular dependency (#1066)

    # Filter out standard spaces to a separate dict
    std_spaces = OrderedDict([(key, modifiers)
                              for key, modifiers in output_spaces.items()
                              if key not in NONSTANDARD_REFERENCES])
    volume_std_spaces = OrderedDict([(key, modifiers)
                                     for key, modifiers in std_spaces.items()
                                     if not key.startswith('fs')])

    ref_file = bold_file
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
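    # Placeholder memory budget (in GB); if the reference file exists on disk these
    # values are replaced below by estimates derived from it, and they are later used
    # to size the memory requests of individual nodes (e.g. ``bold_split``).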
    bold_tlen = 10
    multiecho = isinstance(bold_file, list)

    if multiecho:
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]

    if os.path.isfile(ref_file):
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    LOGGER.log(
        25, ('Creating bold processing workflow for "%s" (%.2f GB / %d TRs). '
             'Memory resampled/largemem=%.2f/%.2f GB.'), ref_file,
        mem_gb['filesize'], bold_tlen, mem_gb['resampled'], mem_gb['largemem'])

    sbref_file = None
    # For doc building purposes
    if not hasattr(layout, 'parse_file_entities'):
        LOGGER.log(25, 'No valid layout: building empty workflow.')
        metadata = {
            'RepetitionTime': 2.0,
            'SliceTiming': [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
            'PhaseEncodingDirection': 'j',
        }
        fmaps = [{
            'suffix':
            'phasediff',
            'phasediff':
            'sub-03/ses-2/fmap/sub-03_ses-2_run-1_phasediff.nii.gz',
            'magnitude1':
            'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude1.nii.gz',
            'magnitude2':
            'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude2.nii.gz',
        }]
        run_stc = True
        multiecho = False
    else:
        # Find associated sbref, if possible
        entities = layout.parse_file_entities(ref_file)
        entities['suffix'] = 'sbref'
        entities['extension'] = ['nii', 'nii.gz']  # Overwrite extensions
        files = layout.get(return_type='file', **entities)
        refbase = os.path.basename(ref_file)
        if 'sbref' in ignore:
            LOGGER.info("Single-band reference files ignored.")
        elif files and multiecho:
            LOGGER.warning("Single-band reference found, but not supported in "
                           "multi-echo workflows at this time. Ignoring.")
        elif files:
            sbref_file = files[0]
            sbbase = os.path.basename(sbref_file)
            if len(files) > 1:
                LOGGER.warning(
                    "Multiple single-band reference files found for {}; using "
                    "{}".format(refbase, sbbase))
            else:
                LOGGER.log(
                    25, "Using single-band reference file {}".format(sbbase))
        else:
            LOGGER.log(25,
                       "No single-band-reference found for {}".format(refbase))

        metadata = layout.get_metadata(ref_file)

        # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
        fmaps = []
        if 'fieldmaps' not in ignore:
            for fmap in layout.get_fieldmap(ref_file, return_list=True):
                if fmap['suffix'] == 'phase':
                    LOGGER.warning("""\
Found phase1/2 type of fieldmaps, which are not currently supported. \
fMRIPrep will discard them for susceptibility distortion correction. \
Please, follow up on this issue at \
https://github.com/poldracklab/fmriprep/issues/1655.""")
                else:
                    fmap['metadata'] = layout.get_metadata(
                        fmap[fmap['suffix']])
                    fmaps.append(fmap)

        # Run SyN if forced or in the absence of fieldmap correction
        if force_syn or (use_syn and not fmaps):
            fmaps.append({'suffix': 'syn'})

        # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
        run_stc = ("SliceTiming" in metadata and 'slicetiming' not in ignore
                   and (_get_series_len(ref_file) > 4 or "TooShort"))

    # Check if MEEPI for T2* coregistration target
    if t2s_coreg and not multiecho:
        LOGGER.warning(
            "No multiecho BOLD images found for T2* coregistration. "
            "Using standard EPI-T1 coregistration.")
        t2s_coreg = False

    # By default, force-bbr for t2s_coreg unless user specifies otherwise
    if t2s_coreg and use_bbr is None:
        use_bbr = True

    # Build workflow
    workflow = Workflow(name=wf_name)
    workflow.__desc__ = """

Functional data preprocessing

: For each of the {num_bold} BOLD runs found per subject (across all
tasks and sessions), the following preprocessing was performed.
""".format(num_bold=num_bold)

    workflow.__postdesc__ = """\
All resamplings can be performed with *a single interpolation
step* by composing all the pertinent transformations (i.e. head-motion
transform matrices, susceptibility distortion correction when available,
and co-registrations to anatomical and output spaces).
Gridded (volumetric) resamplings were performed using `antsApplyTransforms` (ANTs),
configured with Lanczos interpolation to minimize the smoothing
effects of other kernels [@lanczos].
Non-gridded (surface) resamplings were performed using `mri_vol2surf`
(FreeSurfer).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_file', 'subjects_dir', 'subject_id', 't1_preproc', 't1_brain',
        't1_mask', 't1_seg', 't1_tpms', 't1_aseg', 't1_aparc', 'anat2std_xfm',
        'std2anat_xfm', 'template', 'joint_anat2std_xfm', 'joint_std2anat_xfm',
        'joint_template', 't1_2_fsnative_forward_transform',
        't1_2_fsnative_reverse_transform'
    ]),
                        name='inputnode')
    inputnode.inputs.bold_file = bold_file
    if sbref_file is not None:
        from niworkflows.interfaces.images import ValidateImage
        val_sbref = pe.Node(ValidateImage(in_file=sbref_file),
                            name='val_sbref')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1',
        'bold_aparc_t1', 'bold_std', 'bold_std_ref',
        'bold_mask_std', 'bold_aseg_std', 'bold_aparc_std', 'bold_native',
        'bold_cifti', 'cifti_variant', 'cifti_variant_key', 'surfaces',
        'confounds', 'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file',
        'confounds_metadata'
    ]),
                         name='outputnode')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']),
                         name='boldbuffer')

    summary = pe.Node(FunctionalSummary(
        slice_timing=run_stc,
        registration=('FSL', 'FreeSurfer')[freesurfer],
        registration_dof=bold2t1w_dof,
        pe_direction=metadata.get("PhaseEncodingDirection"),
        tr=metadata.get("RepetitionTime")),
                      name='summary',
                      mem_gb=DEFAULT_MEMORY_MIN_GB,
                      run_without_submitting=True)
    summary.inputs.dummy_scans = dummy_scans

    # CIfTI output: currently, we only support fsaverage{5,6}
    cifti_spaces = set(s for s in output_spaces.keys()
                       if s in ('fsaverage5', 'fsaverage6'))
    fsaverage_den = output_spaces.get('fsaverage', {}).get('den')
    if fsaverage_den:
        cifti_spaces.add(FSAVERAGE_DENSITY[fsaverage_den])
    cifti_output = cifti_output and cifti_spaces
    func_derivatives_wf = init_func_derivatives_wf(
        bids_root=layout.root,
        cifti_output=cifti_output,
        freesurfer=freesurfer,
        metadata=metadata,
        output_dir=output_dir,
        output_spaces=output_spaces,
        standard_spaces=list(std_spaces.keys()),
        use_aroma=use_aroma,
    )

    workflow.connect([
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_t1_ref', 'inputnode.bold_t1_ref'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_native', 'inputnode.bold_native'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surfaces'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_variant_key', 'inputnode.cifti_variant_key'),
            ('confounds_metadata', 'inputnode.confounds_metadata'),
        ]),
    ])

    # Generate a tentative boldref
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)
    bold_reference_wf.inputs.inputnode.dummy_scans = dummy_scans
    if sbref_file is not None:
        workflow.connect([
            (val_sbref, bold_reference_wf, [('out_file',
                                             'inputnode.sbref_file')]),
        ])

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'),
                         name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # calculate BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=use_bbr,
                                   bold2t1w_dof=bold2t1w_dof,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False)

    # apply BOLD registration to T1w
    bold_t1_trans_wf = init_bold_t1_trans_wf(name='bold_t1_trans_wf',
                                             freesurfer=freesurfer,
                                             use_fieldwarp=(fmaps is not None
                                                            or use_syn),
                                             multiecho=multiecho,
                                             mem_gb=mem_gb['resampled'],
                                             omp_nthreads=omp_nthreads,
                                             use_compression=False)

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        regressors_all_comps=regressors_all_comps,
        regressors_fd_th=regressors_fd_th,
        regressors_dvars_th=regressors_dvars_th,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not low_mem,
        use_fieldwarp=(fmaps is not None or use_syn),
        name='bold_bold_trans_wf')
    bold_bold_trans_wf.inputs.inputnode.name_source = ref_file

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [('outputnode.skip_vols',
                                               'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
        if not multiecho:
            workflow.connect([(bold_reference_wf, bold_stc_wf, [
                ('outputnode.bold_file', 'inputnode.bold_file')
            ])])
        else:  # for meepi, iterate through stc_wf for all workflows
            meepi_echos = boldbuffer.clone(name='meepi_echos')
            meepi_echos.iterables = ('bold_file', bold_file)
            workflow.connect([(meepi_echos, bold_stc_wf,
                               [('bold_file', 'inputnode.bold_file')])])
    elif not multiecho:  # run_stc is 'TooShort' or False
        # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([(bold_reference_wf, boldbuffer,
                           [('outputnode.bold_file', 'bold_file')])])
    else:
        # for meepi, iterate over all meepi echos to boldbuffer
        boldbuffer.iterables = ('bold_file', bold_file)
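
    # In short: if STC ran, boldbuffer carries the corrected series (iterating
    # over echoes for multi-echo data); otherwise it carries the raw BOLD, either
    # directly (single-echo) or as an iterable over the echoes (multi-echo).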

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_wf(fmaps,
                              metadata,
                              omp_nthreads=omp_nthreads,
                              debug=debug,
                              fmap_demean=fmap_demean,
                              fmap_bspline=fmap_bspline)
    # If no standard space is given, use the default for SyN-SDC
    if not volume_std_spaces or 'MNI152NLin2009cAsym' in volume_std_spaces:
        bold_sdc_wf.inputs.inputnode.template = 'MNI152NLin2009cAsym'
    else:
        bold_sdc_wf.inputs.inputnode.template = next(iter(volume_std_spaces))

    if not fmaps:
        LOGGER.warning('SDC: no fieldmaps found or they were ignored (%s).',
                       ref_file)
    elif fmaps[0]['suffix'] == 'syn':
        LOGGER.warning(
            'SDC: no fieldmaps found or they were ignored. '
            'Using EXPERIMENTAL "fieldmap-less SyN" correction '
            'for dataset %s.', ref_file)
    else:
        LOGGER.log(
            25, 'SDC: fieldmap estimation of type "%s" intended for %s found.',
            fmaps[0]['suffix'], ref_file)

    # Overwrite ``out_path_base`` of sdcflows' DataSinks
    for node in bold_sdc_wf.list_node_names():
        if node.split('.')[-1].startswith('ds_'):
            bold_sdc_wf.get_node(node).interface.out_path_base = 'fmriprep'

    # MULTI-ECHO EPI DATA #############################################
    if multiecho:
        from .util import init_skullstrip_bold_wf
        skullstrip_bold_wf = init_skullstrip_bold_wf(name='skullstrip_bold_wf')

        inputnode.inputs.bold_file = ref_file  # Replace reference with the first echo

        join_echos = pe.JoinNode(
            niu.IdentityInterface(fields=['bold_files']),
            joinsource=('meepi_echos' if run_stc is True else 'boldbuffer'),
            joinfield=['bold_files'],
            name='join_echos')

        # create optimal combination, adaptive T2* map
        bold_t2s_wf = init_bold_t2s_wf(echo_times=tes,
                                       mem_gb=mem_gb['resampled'],
                                       omp_nthreads=omp_nthreads,
                                       t2s_coreg=t2s_coreg,
                                       name='bold_t2smap_wf')

        workflow.connect([
            (skullstrip_bold_wf, join_echos,
             [('outputnode.skull_stripped_file', 'bold_files')]),
            (join_echos, bold_t2s_wf, [('bold_files', 'inputnode.bold_file')]),
        ])
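        # join_echos collapses the per-echo iterable (meepi_echos or boldbuffer)
        # back into a single list of skull-stripped echoes for the T2* workflow.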

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf,
         [('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
          ('outputnode.bold_file', 'inputnode.bold_file')]),
        (bold_reference_wf, summary, [('outputnode.algo_dummy_scans',
                                       'algo_dummy_scans')]),
        # EPI-T1 registration workflow
        (
            inputnode,
            bold_reg_wf,
            [
                ('t1_brain', 'inputnode.t1_brain'),
                ('t1_seg', 'inputnode.t1_seg'),
                # Undefined if --no-freesurfer, but this is safe
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id'),
                ('t1_2_fsnative_reverse_transform',
                 'inputnode.t1_2_fsnative_reverse_transform')
            ]),
        (inputnode, bold_t1_trans_wf, [('bold_file', 'inputnode.name_source'),
                                       ('t1_brain', 'inputnode.t1_brain'),
                                       ('t1_mask', 'inputnode.t1_mask'),
                                       ('t1_aseg', 'inputnode.t1_aseg'),
                                       ('t1_aparc', 'inputnode.t1_aparc')]),
        # unused if multiecho, but this is safe
        (bold_hmc_wf, bold_t1_trans_wf, [('outputnode.xforms',
                                          'inputnode.hmc_xforms')]),
        (bold_reg_wf, bold_t1_trans_wf, [('outputnode.itk_bold_to_t1',
                                          'inputnode.itk_bold_to_t1')]),
        (bold_t1_trans_wf, outputnode,
         [('outputnode.bold_t1', 'bold_t1'),
          ('outputnode.bold_t1_ref', 'bold_t1_ref'),
          ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
          ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (inputnode, bold_sdc_wf, [('joint_template', 'inputnode.templates'),
                                  ('joint_std2anat_xfm',
                                   'inputnode.std2anat_xfm')]),
        (inputnode, bold_sdc_wf, [('t1_brain', 'inputnode.t1_brain')]),
        (bold_reference_wf, bold_sdc_wf,
         [('outputnode.ref_image', 'inputnode.bold_ref'),
          ('outputnode.ref_image_brain', 'inputnode.bold_ref_brain'),
          ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        # For t2s_coreg, replace EPI-to-T1w registration inputs
        (bold_sdc_wf if not t2s_coreg else bold_t2s_wf, bold_reg_wf,
         [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain')]),
        (bold_sdc_wf if not t2s_coreg else bold_t2s_wf, bold_t1_trans_wf,
         [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
          ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
        (bold_sdc_wf, bold_t1_trans_wf, [('outputnode.out_warp',
                                          'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf,
         [('outputnode.out_warp', 'inputnode.fieldwarp'),
          ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction')
                                ]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [('t1_tpms', 'inputnode.t1_tpms'),
                                        ('t1_mask', 'inputnode.t1_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [('outputnode.movpar_file',
                                           'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [('outputnode.itk_t1_to_bold',
                                           'inputnode.t1_bold_xform')]),
        (bold_reference_wf, bold_confounds_wf, [('outputnode.skip_vols',
                                                 'inputnode.skip_vols')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_metadata', 'confounds_metadata'),
        ]),
        # Connect bold_bold_trans_wf
        (bold_split, bold_bold_trans_wf, [('out_files', 'inputnode.bold_file')]
         ),
        (bold_hmc_wf, bold_bold_trans_wf, [('outputnode.xforms',
                                            'inputnode.hmc_xforms')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])

    # for single-echo EPI data, pass along the correct file
    if not multiecho:
        workflow.connect([
            (inputnode, func_derivatives_wf, [('bold_file',
                                               'inputnode.source_file')]),
            (bold_bold_trans_wf, bold_confounds_wf,
             [('outputnode.bold', 'inputnode.bold'),
              ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_split, bold_t1_trans_wf, [('out_files',
                                             'inputnode.bold_split')]),
        ])
    else:  # for meepi, create and use optimal combination
        workflow.connect([
            # update name source for optimal combination
            (inputnode, func_derivatives_wf,
             [(('bold_file', combine_meepi_source), 'inputnode.source_file')]),
            (bold_bold_trans_wf, skullstrip_bold_wf, [('outputnode.bold',
                                                       'inputnode.in_file')]),
            (bold_t2s_wf, bold_confounds_wf,
             [('outputnode.bold', 'inputnode.bold'),
              ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_t2s_wf, bold_t1_trans_wf, [('outputnode.bold',
                                              'inputnode.bold_split')]),
        ])

    if fmaps:
        from ..fieldmap.unwarp import init_fmap_unwarp_report_wf
        # Report on BOLD correction
        fmap_unwarp_report_wf = init_fmap_unwarp_report_wf()
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [('t1_seg', 'inputnode.in_seg')
                                                ]),
            (bold_reference_wf, fmap_unwarp_report_wf,
             [('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [('outputnode.itk_t1_to_bold',
                                                   'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [('outputnode.bold_ref',
                                                   'inputnode.in_post')]),
        ])

        # Overwrite ``out_path_base`` of unwarping DataSinks
        for node in fmap_unwarp_report_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                fmap_unwarp_report_wf.get_node(
                    node).interface.out_path_base = 'fmriprep'

        if force_syn and fmaps[0]['suffix'] != 'syn':
            syn_unwarp_report_wf = init_fmap_unwarp_report_wf(
                name='syn_unwarp_report_wf', forcedsyn=True)
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [('t1_seg',
                                                    'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf,
                 [('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf,
                 [('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf,
                 [('outputnode.syn_bold_ref', 'inputnode.in_post')]),
            ])

            # Overwrite ``out_path_base`` of unwarping DataSinks
            for node in syn_unwarp_report_wf.list_node_names():
                if node.split('.')[-1].startswith('ds_'):
                    syn_unwarp_report_wf.get_node(
                        node).interface.out_path_base = 'fmriprep'

    # Map final BOLD mask into T1w space (if required)
    if 'T1w' in output_spaces or 'anat' in output_spaces:
        from niworkflows.interfaces.fixes import (FixHeaderApplyTransforms as
                                                  ApplyTransforms)

        boldmask_to_t1w = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                  float=True),
                                  name='boldmask_to_t1w',
                                  mem_gb=0.1)
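        # MultiLabel interpolation behaves like nearest-neighbor for label/mask
        # images, so the resampled mask stays binary in T1w space.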
        workflow.connect([
            (bold_reg_wf, boldmask_to_t1w, [('outputnode.itk_bold_to_t1',
                                             'transforms')]),
            (bold_t1_trans_wf, boldmask_to_t1w, [('outputnode.bold_mask_t1',
                                                  'reference_image')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
             boldmask_to_t1w, [('outputnode.bold_mask', 'input_image')]),
            (boldmask_to_t1w, outputnode, [('output_image', 'bold_mask_t1')]),
        ])

    if set(['func', 'run', 'bold', 'boldref',
            'sbref']).intersection(output_spaces):
        workflow.connect([
            (bold_bold_trans_wf, outputnode, [('outputnode.bold',
                                               'bold_native')]),
            (bold_bold_trans_wf, func_derivatives_wf,
             [('outputnode.bold_ref', 'inputnode.bold_native_ref'),
              ('outputnode.bold_mask', 'inputnode.bold_mask_native')]),
        ])

    if volume_std_spaces:
        # Apply transforms in 1 shot
        # Only use uncompressed output if AROMA is to be run
        bold_std_trans_wf = init_bold_std_trans_wf(
            freesurfer=freesurfer,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            standard_spaces=volume_std_spaces,
            name='bold_std_trans_wf',
            use_compression=not low_mem,
            use_fieldwarp=fmaps is not None,
        )
        workflow.connect([
            (inputnode, bold_std_trans_wf,
             [('joint_template', 'inputnode.templates'),
              ('joint_anat2std_xfm', 'inputnode.anat2std_xfm'),
              ('bold_file', 'inputnode.name_source'),
              ('t1_aseg', 'inputnode.bold_aseg'),
              ('t1_aparc', 'inputnode.bold_aparc')]),
            (bold_hmc_wf, bold_std_trans_wf, [('outputnode.xforms',
                                               'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_std_trans_wf, [('outputnode.itk_bold_to_t1',
                                               'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
             bold_std_trans_wf, [('outputnode.bold_mask',
                                  'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_std_trans_wf, [('outputnode.out_warp',
                                               'inputnode.fieldwarp')]),
            (bold_std_trans_wf, outputnode,
             [('outputnode.bold_std', 'bold_std'),
              ('outputnode.bold_std_ref', 'bold_std_ref'),
              ('outputnode.bold_mask_std', 'bold_mask_std')]),
        ])

        if freesurfer:
            workflow.connect([
                (bold_std_trans_wf, func_derivatives_wf, [
                    ('poutputnode.bold_aseg_std', 'inputnode.bold_aseg_std'),
                    ('poutputnode.bold_aparc_std', 'inputnode.bold_aparc_std'),
                ]),
                (bold_std_trans_wf, outputnode,
                 [('outputnode.bold_aseg_std', 'bold_aseg_std'),
                  ('outputnode.bold_aparc_std', 'bold_aparc_std')]),
            ])

        if 'MNI152NLin2009cAsym' in std_spaces:
            carpetplot_wf = init_carpetplot_wf(standard_spaces=std_spaces,
                                               mem_gb=mem_gb['resampled'],
                                               metadata=metadata,
                                               name='carpetplot_wf')
            workflow.connect([
                (inputnode, carpetplot_wf, [('joint_std2anat_xfm',
                                             'inputnode.std2anat_xfm')]),
                (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
                 carpetplot_wf, [('outputnode.bold', 'inputnode.bold'),
                                 ('outputnode.bold_mask',
                                  'inputnode.bold_mask')]),
                (bold_reg_wf, carpetplot_wf, [('outputnode.itk_t1_to_bold',
                                               'inputnode.t1_bold_xform')]),
                (bold_confounds_wf, carpetplot_wf,
                 [('outputnode.confounds_file', 'inputnode.confounds_file')]),
            ])

        if not multiecho:
            workflow.connect([(bold_split, bold_std_trans_wf,
                               [('out_files', 'inputnode.bold_split')])])
        else:
            split_opt_comb = bold_split.clone(name='split_opt_comb')
            workflow.connect([(bold_t2s_wf, split_opt_comb,
                               [('outputnode.bold', 'in_file')]),
                              (split_opt_comb, bold_std_trans_wf,
                               [('out_files', 'inputnode.bold_split')])])

        # Artifacts resampled in MNI space can only be sent to the datasink if
        # they were actually generated. See #1348.
        # Uses the parameterized outputnode to generate all outputs
        workflow.connect([
            (bold_std_trans_wf, func_derivatives_wf, [
                ('poutputnode.templates', 'inputnode.template'),
                ('poutputnode.bold_std_ref', 'inputnode.bold_std_ref'),
                ('poutputnode.bold_std', 'inputnode.bold_std'),
                ('poutputnode.bold_mask_std', 'inputnode.bold_mask_std'),
            ]),
        ])

        if use_aroma and 'MNI152NLin6Asym' in std_spaces:  # ICA-AROMA workflow
            from .confounds import init_ica_aroma_wf

            ica_aroma_wf = init_ica_aroma_wf(
                metadata=metadata,
                mem_gb=mem_gb['resampled'],
                omp_nthreads=omp_nthreads,
                use_fieldwarp=fmaps is not None,
                err_on_aroma_warn=err_on_aroma_warn,
                aroma_melodic_dim=aroma_melodic_dim,
                name='ica_aroma_wf')

            join = pe.Node(niu.Function(output_names=["out_file"],
                                        function=_to_join),
                           name='aroma_confounds')

            mrg_conf_metadata = pe.Node(niu.Merge(2),
                                        name='merge_confound_metadata',
                                        run_without_submitting=True)
            mrg_conf_metadata2 = pe.Node(DictMerge(),
                                         name='merge_confound_metadata2',
                                         run_without_submitting=True)
            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_metadata', 'confounds_metadata'),
                ]),
            ])
            workflow.connect([
                (bold_std_trans_wf, ica_aroma_wf,
                 [('outputnode.bold_std', 'inputnode.bold_std'),
                  ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
                  ('outputnode.templates', 'inputnode.templates')]),
                (inputnode, ica_aroma_wf, [('bold_file',
                                            'inputnode.name_source')]),
                (bold_hmc_wf, ica_aroma_wf, [('outputnode.movpar_file',
                                              'inputnode.movpar_file')]),
                (bold_reference_wf, ica_aroma_wf, [('outputnode.skip_vols',
                                                    'inputnode.skip_vols')]),
                (bold_confounds_wf, join, [('outputnode.confounds_file',
                                            'in_file')]),
                (bold_confounds_wf, mrg_conf_metadata,
                 [('outputnode.confounds_metadata', 'in1')]),
                (ica_aroma_wf, join, [('outputnode.aroma_confounds',
                                       'join_file')]),
                (ica_aroma_wf, mrg_conf_metadata,
                 [('outputnode.aroma_metadata', 'in2')]),
                (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
                (ica_aroma_wf, outputnode,
                 [('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                  ('outputnode.melodic_mix', 'melodic_mix'),
                  ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')
                  ]),
                (join, outputnode, [('out_file', 'confounds')]),
                (mrg_conf_metadata2, outputnode, [('out_dict',
                                                   'confounds_metadata')]),
            ])

    # SURFACES ##################################################################################
    surface_spaces = [
        space for space in output_spaces.keys() if space.startswith('fs')
    ]
    if freesurfer and surface_spaces:
        LOGGER.log(25, 'Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(mem_gb=mem_gb['resampled'],
                                         output_spaces=surface_spaces,
                                         medial_surface_nan=medial_surface_nan,
                                         name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf,
             [('t1_preproc', 'inputnode.t1_preproc'),
              ('subjects_dir', 'inputnode.subjects_dir'),
              ('subject_id', 'inputnode.subject_id'),
              ('t1_2_fsnative_forward_transform',
               'inputnode.t1_2_fsnative_forward_transform')]),
            (bold_t1_trans_wf, bold_surf_wf, [('outputnode.bold_t1',
                                               'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        ])

        if cifti_output:
            from niworkflows.interfaces.utility import KeySelect
            bold_surf_wf.__desc__ += """\
*Grayordinates* files [@hcppipelines], which combine surface-sampled
data and volume-sampled data, were also generated.
"""
            select_std = pe.Node(KeySelect(fields=['bold_std']),
                                 name='select_std',
                                 run_without_submitting=True)
            select_std.inputs.key = 'MNI152NLin2009cAsym'

            gen_cifti = pe.MapNode(GenerateCifti(),
                                   iterfield=["surface_target", "gifti_files"],
                                   name="gen_cifti")
            gen_cifti.inputs.TR = metadata.get("RepetitionTime")
            gen_cifti.inputs.surface_target = list(cifti_spaces)

            workflow.connect([
                (bold_std_trans_wf, select_std,
                 [('outputnode.templates', 'keys'),
                  ('outputnode.bold_std', 'bold_std')]),
                (bold_surf_wf, gen_cifti, [('outputnode.surfaces',
                                            'gifti_files')]),
                (inputnode, gen_cifti, [('subjects_dir', 'subjects_dir')]),
                (select_std, gen_cifti, [('bold_std', 'bold_file')]),
                (gen_cifti, outputnode, [('out_file', 'bold_cifti'),
                                         ('variant', 'cifti_variant'),
                                         ('variant_key', 'cifti_variant_key')
                                         ]),
            ])

    # REPORTING ############################################################
    ds_report_summary = pe.Node(DerivativesDataSink(desc='summary',
                                                    keep_dtype=True),
                                name='ds_report_summary',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, desc='validation', keep_dtype=True),
                                   name='ds_report_validation',
                                   run_without_submitting=True,
                                   mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation,
         [('outputnode.validation_report', 'in_file')]),
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
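
# The ``out_path_base`` loop above appears three times in this example; a small
# helper capturing that pattern might look like the sketch below (an illustration
# only; the helper name and the 'fmriprep' default are assumptions, not part of
# the original workflow).
def _retarget_datasinks(wf, out_path_base='fmriprep', prefix='ds_'):
    """Overwrite ``out_path_base`` on every DataSink-like node of *wf*."""
    for node in wf.list_node_names():
        if node.split('.')[-1].startswith(prefix):
            wf.get_node(node).interface.out_path_base = out_path_base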
Example #19
0
def init_rodent_brain_extraction_wf(
    ants_affine_init=False,
    factor=20,
    arc=0.12,
    step=4,
    grid=(0, 4, 4),
    debug=False,
    interim_checkpoints=True,
    mem_gb=3.0,
    mri_scheme="T2w",
    name="rodent_brain_extraction_wf",
    omp_nthreads=None,
    output_dir=None,
    template_id="Fischer344",
    template_specs=None,
    use_float=True,
):
    """
    Build an atlas-based brain extraction pipeline for rodent T1w and T2w MRI data.

    Parameters
    ----------
    ants_affine_init : :obj:`bool`, optional
        Set-up a pre-initialization step with ``antsAI`` to account for mis-oriented images.

    """
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_files", "in_mask"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["out_corrected", "out_brain", "out_mask"]),
        name="outputnode",
    )

    template_specs = template_specs or {}
    if template_id == "WHS" and "resolution" not in template_specs:
        template_specs["resolution"] = 2

    # Find a suitable target template in TemplateFlow
    tpl_target_path = get_template(
        template_id,
        suffix=mri_scheme,
        **template_specs,
    )
    if not tpl_target_path:
        raise RuntimeError(
            f"An instance of template <tpl-{template_id}> with MR scheme '{mri_scheme}'"
            " could not be found.")

    tpl_brainmask_path = get_template(
        template_id,
        atlas=None,
        hemi=None,
        desc="brain",
        suffix="probseg",
        **template_specs,
    ) or get_template(
        template_id,
        atlas=None,
        hemi=None,
        desc="brain",
        suffix="mask",
        **template_specs,
    )
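    # Prefer a probabilistic brain mask (probseg); fall back to a binary mask
    # if the template does not ship one.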

    tpl_regmask_path = get_template(
        template_id,
        atlas=None,
        desc="BrainCerebellumExtraction",
        suffix="mask",
        **template_specs,
    )

    denoise = pe.Node(DenoiseImage(dimension=3, copy_header=True),
                      name="denoise",
                      n_procs=omp_nthreads)

    # Resample template to a controlled, isotropic resolution
    res_tmpl = pe.Node(RegridToZooms(zooms=HIRES_ZOOMS, smooth=True),
                       name="res_tmpl")

    # Create Laplacian images
    lap_tmpl = pe.Node(ImageMath(operation="Laplacian", copy_header=True),
                       name="lap_tmpl")
    tmpl_sigma = pe.Node(niu.Function(function=_lap_sigma),
                         name="tmpl_sigma",
                         run_without_submitting=True)
    norm_lap_tmpl = pe.Node(niu.Function(function=_norm_lap),
                            name="norm_lap_tmpl")

    lap_target = pe.Node(ImageMath(operation="Laplacian", copy_header=True),
                         name="lap_target")
    target_sigma = pe.Node(niu.Function(function=_lap_sigma),
                           name="target_sigma",
                           run_without_submitting=True)
    norm_lap_target = pe.Node(niu.Function(function=_norm_lap),
                              name="norm_lap_target")

    # Set up initial spatial normalization
    ants_params = "testing" if debug else "precise"
    norm = pe.Node(
        Registration(from_file=pkgr_fn(
            "nirodents",
            f"data/artsBrainExtraction_{ants_params}_{mri_scheme}.json")),
        name="norm",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm.inputs.float = use_float

    # main workflow
    wf = pe.Workflow(name)

    # truncate target intensity for N4 correction
    clip_target = pe.Node(IntensityClip(p_min=15, p_max=99.9),
                          name="clip_target")

    # truncate template intensity to match target
    clip_tmpl = pe.Node(IntensityClip(p_min=5, p_max=98), name="clip_tmpl")
    clip_tmpl.inputs.in_file = _pop(tpl_target_path)
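    # ``_pop`` (defined elsewhere in this module) presumably returns the first
    # element when ``get_template`` yields a list of paths.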

    # set INU bspline grid based on voxel size
    bspline_grid = pe.Node(niu.Function(function=_bspline_grid),
                           name="bspline_grid")

    # INU correction of the target image
    init_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=False,
            copy_header=True,
            n_iterations=[50] * (4 - debug),
            convergence_threshold=1e-7,
            shrink_factor=4,
            rescale_intensities=True,
        ),
        n_procs=omp_nthreads,
        name="init_n4",
    )
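    # ``4 - debug`` relies on bool-to-int coercion: debug=True drops the number
    # of fitting levels from four to three to speed things up.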
    clip_inu = pe.Node(IntensityClip(p_min=1, p_max=99.8), name="clip_inu")

    # Create a buffer interface as a cache for the actual inputs to registration
    buffernode = pe.Node(niu.IdentityInterface(fields=["hires_target"]),
                         name="buffernode")

    # Merge image nodes
    mrg_target = pe.Node(niu.Merge(2), name="mrg_target")
    mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")

    # fmt: off
    wf.connect([
        # Target image massaging
        (inputnode, denoise, [(("in_files", _pop), "input_image")]),
        (inputnode, bspline_grid, [(("in_files", _pop), "in_file")]),
        (bspline_grid, init_n4, [("out", "args")]),
        (denoise, clip_target, [("output_image", "in_file")]),
        (clip_target, init_n4, [("out_file", "input_image")]),
        (init_n4, clip_inu, [("output_image", "in_file")]),
        (clip_inu, target_sigma, [("out_file", "in_file")]),
        (clip_inu, buffernode, [("out_file", "hires_target")]),
        (buffernode, lap_target, [("hires_target", "op1")]),
        (target_sigma, lap_target, [("out", "op2")]),
        (lap_target, norm_lap_target, [("output_image", "in_file")]),
        (buffernode, mrg_target, [("hires_target", "in1")]),
        (norm_lap_target, mrg_target, [("out", "in2")]),
        # Template massaging
        (clip_tmpl, res_tmpl, [("out_file", "in_file")]),
        (res_tmpl, tmpl_sigma, [("out_file", "in_file")]),
        (res_tmpl, lap_tmpl, [("out_file", "op1")]),
        (tmpl_sigma, lap_tmpl, [("out", "op2")]),
        (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]),
        (res_tmpl, mrg_tmpl, [("out_file", "in1")]),
        (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]),
        # Setup inputs to spatial normalization
        (mrg_target, norm, [("out", "moving_image")]),
        (mrg_tmpl, norm, [("out", "fixed_image")]),
    ])
    # fmt: on
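    # Both the target and the template enter the registration as two-channel
    # images (intensity + normalized Laplacian); the Merge(2) nodes above
    # assemble those channels for ``moving_image`` and ``fixed_image``.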

    # Graft a template registration-mask if present
    if tpl_regmask_path:
        hires_mask = pe.Node(
            ApplyTransforms(
                input_image=_pop(tpl_regmask_path),
                transforms="identity",
                interpolation="Gaussian",
                float=True,
            ),
            name="hires_mask",
            mem_gb=1,
        )

        # fmt: off
        wf.connect([
            (res_tmpl, hires_mask, [("out_file", "reference_image")]),
            (hires_mask, norm, [("output_image", "fixed_image_masks")]),
        ])
        # fmt: on

    # Finally project brain mask and refine INU correction
    map_brainmask = pe.Node(
        ApplyTransforms(interpolation="Gaussian", float=True),
        name="map_brainmask",
        mem_gb=1,
    )
    map_brainmask.inputs.input_image = str(tpl_brainmask_path)

    thr_brainmask = pe.Node(Binarize(thresh_low=0.50), name="thr_brainmask")

    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 4,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    final_mask = pe.Node(ApplyMask(), name="final_mask")

    # fmt: off
    wf.connect([
        (inputnode, map_brainmask, [(("in_files", _pop), "reference_image")]),
        (bspline_grid, final_n4, [("out", "args")]),
        (denoise, final_n4, [("output_image", "input_image")]),
        # Project template's brainmask into subject space
        (norm, map_brainmask, [("reverse_transforms", "transforms"),
                               ("reverse_invert_flags",
                                "invert_transform_flags")]),
        (map_brainmask, thr_brainmask, [("output_image", "in_file")]),
        # take a second pass of N4
        (map_brainmask, final_n4, [("output_image", "mask_image")]),
        (final_n4, final_mask, [("output_image", "in_file")]),
        (thr_brainmask, final_mask, [("out_mask", "in_mask")]),
        (final_n4, outputnode, [("output_image", "out_corrected")]),
        (thr_brainmask, outputnode, [("out_mask", "out_mask")]),
        (final_mask, outputnode, [("out_file", "out_brain")]),
    ])
    # fmt: on

    if interim_checkpoints:
        final_apply = pe.Node(
            ApplyTransforms(interpolation="BSpline", float=True),
            name="final_apply",
            mem_gb=1,
        )
        final_report = pe.Node(
            SimpleBeforeAfter(after_label="target",
                              before_label=f"tpl-{template_id}"),
            name="final_report",
        )
        # fmt: off
        wf.connect([
            (inputnode, final_apply, [(("in_files", _pop), "reference_image")
                                      ]),
            (res_tmpl, final_apply, [("out_file", "input_image")]),
            (norm, final_apply, [("reverse_transforms", "transforms"),
                                 ("reverse_invert_flags",
                                  "invert_transform_flags")]),
            (final_apply, final_report, [("output_image", "before")]),
            (outputnode, final_report, [("out_corrected", "after"),
                                        ("out_mask", "wm_seg")]),
        ])
        # fmt: on

    if ants_affine_init:
        # Initialize transforms with antsAI
        lowres_tmpl = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS, smooth=True),
                              name="lowres_tmpl")
        lowres_trgt = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS, smooth=True),
                              name="lowres_trgt")

        init_aff = pe.Node(
            AI(
                convergence=(100, 1e-6, 10),
                metric=("Mattes", 32, "Random", 0.25),
                principal_axes=False,
                search_factor=(factor, arc),
                search_grid=(step, grid),
                transform=("Affine", 0.1),
                verbose=True,
            ),
            name="init_aff",
            n_procs=omp_nthreads,
        )
        # fmt: off
        wf.connect([
            (clip_inu, lowres_trgt, [("out_file", "in_file")]),
            (lowres_trgt, init_aff, [("out_file", "moving_image")]),
            (clip_tmpl, lowres_tmpl, [("out_file", "in_file")]),
            (lowres_tmpl, init_aff, [("out_file", "fixed_image")]),
            (init_aff, norm, [("output_transform", "initial_moving_transform")
                              ]),
        ])
        # fmt: on

        if tpl_regmask_path:
            lowres_mask = pe.Node(
                ApplyTransforms(
                    input_image=_pop(tpl_regmask_path),
                    transforms="identity",
                    interpolation="MultiLabel",
                ),
                name="lowres_mask",
                mem_gb=1,
            )
            # fmt: off
            wf.connect([
                (lowres_tmpl, lowres_mask, [("out_file", "reference_image")]),
                (lowres_mask, init_aff, [("output_image", "fixed_image_mask")
                                         ]),
            ])
            # fmt: on

        if interim_checkpoints:
            init_apply = pe.Node(
                ApplyTransforms(interpolation="BSpline",
                                invert_transform_flags=[True]),
                name="init_apply",
                mem_gb=1,
            )
            init_mask = pe.Node(
                ApplyTransforms(interpolation="Gaussian",
                                invert_transform_flags=[True]),
                name="init_mask",
                mem_gb=1,
            )
            init_mask.inputs.input_image = str(tpl_brainmask_path)
            init_report = pe.Node(
                SimpleBeforeAfter(
                    out_report="init_report.svg",
                    before_label="target",
                    after_label="template",
                ),
                name="init_report",
            )
            # fmt: off
            wf.connect([
                (lowres_trgt, init_apply, [("out_file", "reference_image")]),
                (lowres_tmpl, init_apply, [("out_file", "input_image")]),
                (init_aff, init_apply, [("output_transform", "transforms")]),
                (lowres_trgt, init_report, [("out_file", "before")]),
                (init_apply, init_report, [("output_image", "after")]),
                (lowres_trgt, init_mask, [("out_file", "reference_image")]),
                (init_aff, init_mask, [("output_transform", "transforms")]),
                (init_mask, init_report, [("output_image", "wm_seg")]),
            ])
            # fmt: on
    else:
        norm.inputs.initial_moving_transform_com = 1

    if output_dir:
        ds_final_inu = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir),
            desc="preproc",
            compress=True,
        ),
                               name="ds_final_inu",
                               run_without_submitting=True)
        ds_final_msk = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir),
            desc="brain",
            suffix="mask",
            compress=True,
        ),
                               name="ds_final_msk",
                               run_without_submitting=True)

        # fmt: off
        wf.connect([
            (inputnode, ds_final_inu, [("in_files", "source_file")]),
            (inputnode, ds_final_msk, [("in_files", "source_file")]),
            (outputnode, ds_final_inu, [("out_corrected", "in_file")]),
            (outputnode, ds_final_msk, [("out_mask", "in_file")]),
        ])
        # fmt: on

        if interim_checkpoints:
            ds_report = pe.Node(DerivativesDataSink(
                base_directory=str(output_dir),
                desc="brain",
                suffix="mask",
                datatype="figures"),
                                name="ds_report",
                                run_without_submitting=True)
            # fmt: off
            wf.connect([
                (inputnode, ds_report, [("in_files", "source_file")]),
                (final_report, ds_report, [("out_report", "in_file")]),
            ])
            # fmt: on

        if ants_affine_init and interim_checkpoints:
            ds_report_init = pe.Node(DerivativesDataSink(
                base_directory=str(output_dir),
                desc="init",
                suffix="mask",
                datatype="figures"),
                                     name="ds_report_init",
                                     run_without_submitting=True)
            # fmt: off
            wf.connect([
                (inputnode, ds_report_init, [("in_files", "source_file")]),
                (init_report, ds_report_init, [("out_report", "in_file")]),
            ])
            # fmt: on

    return wf
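
# A hypothetical usage sketch for the builder above. The file name, thread count,
# and output directory are placeholders, and building the graph assumes the
# tpl-Fischer344 template can be fetched through TemplateFlow.
brainex_wf = init_rodent_brain_extraction_wf(
    ants_affine_init=True,
    omp_nthreads=4,
    output_dir="derivatives",
    template_id="Fischer344",
)
brainex_wf.get_node("inputnode").inputs.in_files = "sub-01_T2w.nii.gz"
# brainex_wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})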
Example #20
0
def init_bold_confs_wf(mem_gb, metadata, name="bold_confs_wf"):
    """
    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.

    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)


    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_bold_confs_wf
        wf = init_bold_confs_wf(
            mem_gb=1,
            metadata={})

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB - please note that this size
            should be calculated after resamplings that may extend
            the FoV
        metadata : dict
            BIDS metadata for BOLD file
        name : str
            Name of workflow (default: ``bold_confs_wf``)

    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        movpar_file
            SPM-formatted motion parameters file
        skip_vols
            Number of non-steady-state volumes
        t1_mask
            Mask of the skull-stripped template image
        t1_tpms
            List of tissue probability maps in T1w space
        t1_bold_xform
            Affine matrix that maps the T1w space into alignment with
            the native BOLD space

    **Outputs**

        confounds_file
            TSV of all aggregated confounds
        rois_report
            Reportlet visualizing white-matter/CSF mask used for aCompCor,
            the ROI for tCompCor and the BOLD brain mask.

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
Several confounding time-series were calculated based on the
*preprocessed BOLD*: framewise displacement (FD), DVARS and
three region-wise global signals.
FD and DVARS are calculated for each functional run, both using their
implementations in *Nipype* [following the definitions by @power_fd_dvars].
The three global signals are extracted within the CSF, the WM, and
the whole-brain masks.
Additionally, a set of physiological regressors were extracted to
allow for component-based noise correction [*CompCor*, @compcor].
Principal components are estimated after high-pass filtering the
*preprocessed BOLD* time-series (using a discrete cosine filter with
128s cut-off) for the two *CompCor* variants: temporal (tCompCor)
and anatomical (aCompCor).
Six tCompCor components are then calculated from the top 5% variable
voxels within a mask covering the subcortical regions.
This subcortical mask is obtained by heavily eroding the brain mask,
which ensures it does not include cortical GM regions.
For aCompCor, six components are calculated within the intersection of
the aforementioned mask and the union of CSF and WM masks calculated
in T1w space, after their projection to the native space of each
functional run (using the inverse BOLD-to-T1w transformation).
The head-motion estimates calculated in the correction step were also
placed within the corresponding confounds file.
"""
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'skip_vols', 't1_mask', 't1_tpms',
        't1_bold_xform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['confounds_file']),
                         name='outputnode')

    # Get masks ready in T1w space
    acc_tpm = pe.Node(AddTPMs(indices=[0, 2]),
                      name='tpms_add_csf_wm')  # acc stands for aCompCor
    csf_roi = pe.Node(TPM2ROI(erode_mm=0, mask_erode_mm=30), name='csf_roi')
    wm_roi = pe.Node(
        TPM2ROI(erode_prop=0.6,
                mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='wm_roi')
    acc_roi = pe.Node(
        TPM2ROI(erode_prop=0.6,
                mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='acc_roi')

    # Map ROIs in T1w space into BOLD space
    csf_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='csf_tfm',
                      mem_gb=0.1)
    wm_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                     float=True),
                     name='wm_tfm',
                     mem_gb=0.1)
    acc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='acc_tfm',
                      mem_gb=0.1)
    tcc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                      float=True),
                      name='tcc_tfm',
                      mem_gb=0.1)

    # Ensure ROIs don't go off-limits (reduced FoV)
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True,
                                     save_std=True,
                                     remove_zerovariance=True),
                    name="dvars",
                    mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp",
                    mem_gb=mem_gb)

    # a/t-CompCor
    tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv',
                                header_prefix='t_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                percentile_threshold=.05),
                       name="tcompcor",
                       mem_gb=mem_gb)

    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True),
                       name="acompcor",
                       mem_gb=mem_gb)
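    # pre_filter='cosine' implements the 128s-cutoff DCT high-pass described in
    # the boilerplate above; save_pre_filter=True exposes the cosine regressors,
    # which are later fed to GatherConfounds as ``cos_basis``.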

    # Set TR if present
    if 'RepetitionTime' in metadata:
        tcompcor.inputs.repetition_time = metadata['RepetitionTime']
        acompcor.inputs.repetition_time = metadata['RepetitionTime']

    # Global and segment regressors
    mrg_lbl = pe.Node(niu.Merge(3),
                      name='merge_rois',
                      run_without_submitting=True)
    signals = pe.Node(SignalExtraction(
        class_labels=["csf", "white_matter", "global_signal"]),
                      name="signals",
                      mem_gb=mem_gb)

    # Arrange confounds
    add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]),
                               name="add_dvars_header",
                               mem_gb=0.01,
                               run_without_submitting=True)
    add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]),
                                   name="add_std_dvars_header",
                                   mem_gb=0.01,
                                   run_without_submitting=True)
    add_motion_headers = pe.Node(AddTSVHeader(
        columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
                                 name="add_motion_headers",
                                 mem_gb=0.01,
                                 run_without_submitting=True)
    concat = pe.Node(GatherConfounds(),
                     name="concat",
                     mem_gb=0.01,
                     run_without_submitting=True)

    # Generate reportlet
    mrg_compcor = pe.Node(niu.Merge(2),
                          name='merge_compcor',
                          run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['r', 'b', 'magenta'],
                                 generate_report=True),
                        name='rois_plot',
                        mem_gb=mem_gb)

    ds_report_bold_rois = pe.Node(DerivativesDataSink(suffix='rois'),
                                  name='ds_report_bold_rois',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    def _pick_csf(files):
        return files[0]

    def _pick_wm(files):
        return files[-1]
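
    # Both helpers assume the t1_tpms list is ordered [CSF, GM, WM], consistent
    # with AddTPMs(indices=[0, 2]) selecting CSF and WM above.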

    workflow.connect([
        # Massage ROIs (in T1w space)
        (inputnode, acc_tpm, [('t1_tpms', 'in_files')]),
        (inputnode, csf_roi, [(('t1_tpms', _pick_csf), 'in_tpm'),
                              ('t1_mask', 'in_mask')]),
        (inputnode, wm_roi, [(('t1_tpms', _pick_wm), 'in_tpm'),
                             ('t1_mask', 'in_mask')]),
        (inputnode, acc_roi, [('t1_mask', 'in_mask')]),
        (acc_tpm, acc_roi, [('out_file', 'in_tpm')]),
        # Map ROIs to BOLD
        (inputnode, csf_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, csf_tfm, [('roi_file', 'input_image')]),
        (inputnode, wm_tfm, [('bold_mask', 'reference_image'),
                             ('t1_bold_xform', 'transforms')]),
        (wm_roi, wm_tfm, [('roi_file', 'input_image')]),
        (inputnode, acc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (acc_roi, acc_tfm, [('roi_file', 'input_image')]),
        (inputnode, tcc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, tcc_tfm, [('eroded_mask', 'input_image')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),

        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (tcc_tfm, tcc_msk, [('output_image', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),

        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (acc_tfm, acc_msk, [('output_image', 'roi_file')]),
        (acc_msk, acompcor, [('out', 'mask_files')]),

        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (csf_tfm, csf_msk, [('output_image', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (wm_tfm, wm_msk, [('output_image', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),

        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),

        # Set outputs
        (concat, outputnode, [('confounds_file', 'confounds_file')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
    ])

    return workflow
Example #21
def init_bold_mni_trans_wf(template,
                           freesurfer,
                           mem_gb,
                           omp_nthreads,
                           name='bold_mni_trans_wf',
                           template_out_grid='2mm',
                           use_compression=True,
                           use_fieldwarp=False):
    """
    This workflow samples functional images to the MNI template in a "single shot"
    from the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_mni_trans_wf
        wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                                    freesurfer=True,
                                    mem_gb=3,
                                    omp_nthreads=1,
                                    template_out_grid='native')

    **Parameters**

        template : str
            Name of template targeted by ``template`` output space
        freesurfer : bool
            Enable sampling of FreeSurfer files
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_mni_trans_wf``)
        template_out_grid : str
            Keyword ('native', '1mm' or '2mm') or path of custom reference
            image for normalization.
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI

    **Inputs**

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        bold_split
            Individual 3D volumes, not motion corrected
        bold_mask
            Skull-stripping mask of reference image
        bold_aseg
            FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_aparc
            FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold_mni
            BOLD series, resampled to template space
        bold_mni_ref
            Reference, contrast-enhanced summary of the BOLD series, resampled to template space
        bold_mask_mni
            BOLD series mask in template space
        bold_aseg_mni
            FreeSurfer's ``aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)
        bold_aparc_mni
            FreeSurfer's ``aparc+aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The BOLD time-series were resampled to {tpl} standard space,
generating a *preprocessed BOLD run in {tpl} space*.
""".format(tpl=template)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'itk_bold_to_t1', 't1_2_mni_forward_transform', 'name_source',
        'bold_split', 'bold_mask', 'bold_aseg', 'bold_aparc', 'hmc_xforms',
        'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_mni', 'bold_mni_ref', 'bold_mask_mni', 'bold_aseg_mni',
        'bold_aparc_mni'
    ]),
                         name='outputnode')

    def _aslist(in_value):
        if isinstance(in_value, list):
            return in_value
        return [in_value]

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB
    # Account for template aliases
    template_name = TEMPLATE_ALIASES.get(template, template)
    # Template path
    template_dir = get_template(template_name)

    gen_ref.inputs.fixed_image = str(
        template_dir / ('tpl-%s_space-MNI_res-01_T1w.nii.gz' % template_name))

    mask_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_mni_tfm',
                           mem_gb=1)

    # Write corrected file in the designated output dir
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, mask_mni_tfm, [('bold_mask', 'input_image')]),
        (inputnode, mask_merge_tfms, [('t1_2_mni_forward_transform', 'in1'),
                                      (('itk_bold_to_t1', _aslist), 'in2')]),
        (mask_merge_tfms, mask_mni_tfm, [('out', 'transforms')]),
        (mask_mni_tfm, outputnode, [('output_image', 'bold_mask_mni')]),
    ])

    nxforms = 4 if use_fieldwarp else 3
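    # merge_xforms slot assignment (per the connections below): in1 = t1_2_mni_forward_transform,
    # in2 = itk_bold_to_t1 (wrapped as a list), in3 = fieldwarp (only when use_fieldwarp=True),
    # and hmc_xforms always occupy the last slot ('in4' with fieldwarp, 'in3' without).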
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms',
                                                  'in%d' % nxforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    bold_to_mni_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_mni_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a reference on the target T1w space
    gen_final_ref = init_bold_reference_wf(omp_nthreads=omp_nthreads,
                                           pre_mask=True)

    workflow.connect([
        (inputnode, merge_xforms, [('t1_2_mni_forward_transform', 'in1'),
                                   (('itk_bold_to_t1', _aslist), 'in2')]),
        (merge_xforms, bold_to_mni_transform, [('out', 'transforms')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, bold_to_mni_transform, [('bold_split', 'input_image')]),
        (bold_to_mni_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]),
        (mask_mni_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')
                                       ]),
        (merge, outputnode, [('out_file', 'bold_mni')]),
        (gen_final_ref, outputnode, [('outputnode.ref_image', 'bold_mni_ref')
                                     ]),
    ])

    if template_out_grid == 'native':
        workflow.connect([
            (gen_ref, mask_mni_tfm, [('out_file', 'reference_image')]),
            (gen_ref, bold_to_mni_transform, [('out_file', 'reference_image')
                                              ]),
        ])
    elif template_out_grid in ['1mm', '2mm']:
        res = int(template_out_grid[0])
        mask_mni_tfm.inputs.reference_image = str(
            template_dir / ('tpl-%s_space-MNI_res-%02d_brainmask.nii.gz' %
                            (template_name, res)))
        bold_to_mni_transform.inputs.reference_image = str(
            template_dir / ('tpl-%s_space-MNI_res-%02d_T1w.nii.gz' %
                            (template_name, res)))
    else:
        mask_mni_tfm.inputs.reference_image = template_out_grid
        bold_to_mni_transform.inputs.reference_image = template_out_grid

    if freesurfer:
        # Sample the parcellation files to functional space
        aseg_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                               float=True),
                               name='aseg_mni_tfm',
                               mem_gb=1)
        aparc_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                float=True),
                                name='aparc_mni_tfm',
                                mem_gb=1)

        workflow.connect([
            (inputnode, aseg_mni_tfm, [('bold_aseg', 'input_image'),
                                       ('t1_2_mni_forward_transform',
                                        'transforms')]),
            (inputnode, aparc_mni_tfm, [('bold_aparc', 'input_image'),
                                        ('t1_2_mni_forward_transform',
                                         'transforms')]),
            (aseg_mni_tfm, outputnode, [('output_image', 'bold_aseg_mni')]),
            (aparc_mni_tfm, outputnode, [('output_image', 'bold_aparc_mni')]),
        ])
        if template_out_grid == 'native':
            workflow.connect([
                (gen_ref, aseg_mni_tfm, [('out_file', 'reference_image')]),
                (gen_ref, aparc_mni_tfm, [('out_file', 'reference_image')]),
            ])
        elif template_out_grid in ['1mm', '2mm']:
            res = int(template_out_grid[0])
            aseg_mni_tfm.inputs.reference_image = str(
                template_dir / ('tpl-%s_space-MNI_res-%02d_brainmask.nii.gz' %
                                (template_name, res)))
            aparc_mni_tfm.inputs.reference_image = str(
                template_dir / ('tpl-%s_space-MNI_res-%02d_T1w.nii.gz' %
                                (template_name, res)))
        else:
            aseg_mni_tfm.inputs.reference_image = template_out_grid
            aparc_mni_tfm.inputs.reference_image = template_out_grid

    return workflow
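
A minimal usage sketch for the workflow above (a hedged illustration that is not part of the original source: parameter values and file paths are placeholders, and the input field names are taken from the Inputs section of the docstring):

from fmriprep.workflows.bold import init_bold_mni_trans_wf

wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                            freesurfer=False,
                            mem_gb=1,
                            omp_nthreads=1,
                            template_out_grid='2mm')
# Wire the documented inputs (placeholder paths) before running.
wf.inputs.inputnode.bold_split = ['vol0000.nii.gz', 'vol0001.nii.gz']
wf.inputs.inputnode.bold_mask = 'bold_mask.nii.gz'
wf.inputs.inputnode.itk_bold_to_t1 = 'bold_to_t1w_affine.txt'
wf.inputs.inputnode.t1_2_mni_forward_transform = 't1w_to_mni_xfm.h5'
wf.inputs.inputnode.hmc_xforms = ['hmc_vol0000.txt', 'hmc_vol0001.txt']
wf.inputs.inputnode.name_source = 'sub-01_task-rest_bold.nii.gz'
wf.base_dir = 'work'  # placeholder working directory
wf.run()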
Example #22
def init_carpetplot_wf(mem_gb, metadata, name="bold_carpet_wf"):
    """

    Resamples the MNI parcellation (ad-hoc parcellation derived from the
    Harvard-Oxford template and others).

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB - please note that this size
            should be calculated after resamplings that may extend
            the FoV
        metadata : dict
            BIDS metadata for BOLD file
        name : str
            Name of workflow (default: ``bold_carpet_wf``)

    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        confounds_file
            TSV of all aggregated confounds
        t1_bold_xform
            Affine matrix that maps the T1w space into alignment with
            the native BOLD space
        t1_2_mni_reverse_transform
            ANTs-compatible affine-and-warp transform file

    **Outputs**

        out_carpetplot
            Path of the generated SVG file

    """
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'confounds_file', 't1_bold_xform',
        't1_2_mni_reverse_transform'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_carpetplot']),
                         name='outputnode')

    # List transforms
    mrg_xfms = pe.Node(niu.Merge(2), name='mrg_xfms')

    # Warp segmentation into EPI space
    resample_parc = pe.Node(ApplyTransforms(
        float=True,
        input_image=str(
            get_template('MNI152NLin2009cAsym') /
            'tpl-MNI152NLin2009cAsym_space-MNI_res-01_label-carpet_atlas.nii.gz'
        ),
        dimension=3,
        default_value=0,
        interpolation='MultiLabel'),
                            name='resample_parc')

    # Carpetplot and confounds plot
    conf_plot = pe.Node(FMRISummary(tr=metadata['RepetitionTime'],
                                    confounds_list=[
                                        ('global_signal', None, 'GS'),
                                        ('csf', None, 'GSCSF'),
                                        ('white_matter', None, 'GSWM'),
                                        ('std_dvars', None, 'DVARS'),
                                        ('framewise_displacement', 'mm', 'FD')
                                    ]),
                        name='conf_plot',
                        mem_gb=mem_gb)
    ds_report_bold_conf = pe.Node(DerivativesDataSink(suffix='carpetplot'),
                                  name='ds_report_bold_conf',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow = Workflow(name=name)
    workflow.connect([
        (inputnode, mrg_xfms, [('t1_bold_xform', 'in1'),
                               ('t1_2_mni_reverse_transform', 'in2')]),
        (inputnode, resample_parc, [('bold_mask', 'reference_image')]),
        (mrg_xfms, resample_parc, [('out', 'transforms')]),
        # Carpetplot
        (inputnode, conf_plot, [('bold', 'in_func'), ('bold_mask', 'in_mask'),
                                ('confounds_file', 'confounds_file')]),
        (resample_parc, conf_plot, [('output_image', 'in_segm')]),
        (conf_plot, ds_report_bold_conf, [('out_file', 'in_file')]),
        (conf_plot, outputnode, [('out_file', 'out_carpetplot')]),
    ])
    return workflow
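
A hypothetical construction sketch (not in the original source; the metadata dict only needs 'RepetitionTime', the sole key read by this workflow, and all paths are placeholders):

wf = init_carpetplot_wf(mem_gb=1, metadata={'RepetitionTime': 2.0})
wf.inputs.inputnode.bold = 'bold_preproc.nii.gz'
wf.inputs.inputnode.bold_mask = 'bold_mask.nii.gz'
wf.inputs.inputnode.confounds_file = 'confounds.tsv'
wf.inputs.inputnode.t1_bold_xform = 't1w_to_bold_affine.txt'
wf.inputs.inputnode.t1_2_mni_reverse_transform = 'mni_to_t1w_xfm.h5'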
Example #23
def anat_qc_workflow(name="anatMRIQC"):
    """
    One-subject-one-session-one-run pipeline to extract the NR-IQMs from
    anatomical images

    .. workflow::

        import os.path as op
        from mriqc.workflows.anatomical import anat_qc_workflow
        from mriqc.testing import mock_config
        with mock_config():
            wf = anat_qc_workflow()

    """

    dataset = config.workflow.inputs.get(
        "T1w", []) + config.workflow.inputs.get("T2w", [])

    message = BUILDING_WORKFLOW.format(
        detail=(f"for {len(dataset)} NIfTI files." if len(dataset) > 2 else
                f"({' and '.join(('<%s>' % v for v in dataset))})."), )
    config.loggers.workflow.info(message)

    # Initialize workflow
    workflow = pe.Workflow(name=name)

    # Define workflow, inputs and outputs
    # 0. Get data
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]),
                        name="inputnode")
    inputnode.iterables = [("in_file", dataset)]

    outputnode = pe.Node(niu.IdentityInterface(fields=["out_json"]),
                         name="outputnode")

    # 1. Reorient anatomical image
    to_ras = pe.Node(ConformImage(check_dtype=False), name="conform")
    # 2. species specific skull-stripping
    if config.workflow.species.lower() == "human":
        skull_stripping = synthstrip_wf(
            omp_nthreads=config.nipype.omp_nthreads)
        ss_bias_field = "outputnode.bias_image"
    else:
        from nirodents.workflows.brainextraction import init_rodent_brain_extraction_wf

        skull_stripping = init_rodent_brain_extraction_wf(
            template_id=config.workflow.template_id)
        ss_bias_field = "final_n4.bias_image"
    # 3. Head mask
    hmsk = headmsk_wf()
    # 4. Spatial Normalization, using ANTs
    norm = spatial_normalization()
    # 5. Air mask (with and without artifacts)
    amw = airmsk_wf()
    # 6. Brain tissue segmentation
    if config.workflow.species.lower() == "human":
        segment = pe.Node(
            fsl.FAST(segments=True, out_basename="segment"),
            name="segmentation",
            mem_gb=5,
        )
        seg_in_file = "in_files"
        dseg_out = "tissue_class_map"
        pve_out = "partial_volume_files"
    else:
        from niworkflows.interfaces.fixes import ApplyTransforms

        tpms = [
            str(tpm) for tpm in get_template(config.workflow.template_id,
                                             label=["CSF", "GM", "WM"],
                                             suffix="probseg")
        ]

        xfm_tpms = pe.MapNode(
            ApplyTransforms(
                dimension=3,
                default_value=0,
                float=True,
                interpolation="Gaussian",
                output_image="prior.nii.gz",
            ),
            iterfield=["input_image"],
            name="xfm_tpms",
        )
        xfm_tpms.inputs.input_image = tpms

        format_tpm_names = pe.Node(
            niu.Function(
                input_names=["in_files"],
                output_names=["file_format"],
                function=_format_tpm_names,
                execution={
                    "keep_inputs": True,
                    "remove_unnecessary_outputs": False
                },
            ),
            name="format_tpm_names",
        )

        segment = pe.Node(
            ants.Atropos(
                initialization="PriorProbabilityImages",
                number_of_tissue_classes=3,
                prior_weighting=0.1,
                mrf_radius=[1, 1, 1],
                mrf_smoothing_factor=0.01,
                save_posteriors=True,
                out_classified_image_name="segment.nii.gz",
                output_posteriors_name_template="segment_%02d.nii.gz",
            ),
            name="segmentation",
            mem_gb=5,
        )
        seg_in_file = "intensity_images"
        dseg_out = "classified_image"
        pve_out = "posteriors"

    # 7. Compute IQMs
    iqmswf = compute_iqms()
    # Reports
    repwf = individual_reports()

    # Connect all nodes
    # fmt: off
    workflow.connect([
        (inputnode, to_ras, [("in_file", "in_file")]),
        (inputnode, iqmswf, [("in_file", "inputnode.in_file")]),
        (inputnode, norm, [(("in_file", _get_mod), "inputnode.modality")]),
        (to_ras, skull_stripping, [("out_file", "inputnode.in_files")]),
        (skull_stripping, segment, [("outputnode.out_brain", seg_in_file)]),
        (skull_stripping, hmsk, [("outputnode.out_corrected",
                                  "inputnode.in_file")]),
        (segment, hmsk, [(dseg_out, "inputnode.in_segm")]),
        (skull_stripping, norm,
         [("outputnode.out_corrected", "inputnode.moving_image"),
          ("outputnode.out_mask", "inputnode.moving_mask")]),
        (norm, amw, [("outputnode.inverse_composite_transform",
                      "inputnode.inverse_composite_transform")]),
        (norm, iqmswf, [("outputnode.inverse_composite_transform",
                         "inputnode.inverse_composite_transform")]),
        (norm, repwf, [("outputnode.out_report", "inputnode.mni_report")]),
        (to_ras, amw, [("out_file", "inputnode.in_file")]),
        (skull_stripping, amw, [("outputnode.out_mask", "inputnode.in_mask")]),
        (hmsk, amw, [("outputnode.out_file", "inputnode.head_mask")]),
        (to_ras, iqmswf, [("out_file", "inputnode.in_ras")]),
        (skull_stripping, iqmswf,
         [("outputnode.out_corrected", "inputnode.inu_corrected"),
          (ss_bias_field, "inputnode.in_inu"),
          ("outputnode.out_mask", "inputnode.brainmask")]),
        (amw, iqmswf, [("outputnode.air_mask", "inputnode.airmask"),
                       ("outputnode.hat_mask", "inputnode.hatmask"),
                       ("outputnode.art_mask", "inputnode.artmask"),
                       ("outputnode.rot_mask", "inputnode.rotmask")]),
        (segment, iqmswf, [(dseg_out, "inputnode.segmentation"),
                           (pve_out, "inputnode.pvms")]),
        (hmsk, iqmswf, [("outputnode.out_file", "inputnode.headmask")]),
        (to_ras, repwf, [("out_file", "inputnode.in_ras")]),
        (skull_stripping, repwf,
         [("outputnode.out_corrected", "inputnode.inu_corrected"),
          ("outputnode.out_mask", "inputnode.brainmask")]),
        (hmsk, repwf, [("outputnode.out_file", "inputnode.headmask")]),
        (amw, repwf, [("outputnode.air_mask", "inputnode.airmask"),
                      ("outputnode.art_mask", "inputnode.artmask"),
                      ("outputnode.rot_mask", "inputnode.rotmask")]),
        (segment, repwf, [(dseg_out, "inputnode.segmentation")]),
        (iqmswf, repwf, [("outputnode.noisefit", "inputnode.noisefit")]),
        (iqmswf, repwf, [("outputnode.out_file", "inputnode.in_iqms")]),
        (iqmswf, outputnode, [("outputnode.out_file", "out_json")]),
    ])

    if config.workflow.species.lower() == 'human':
        workflow.connect([
            (inputnode, segment, [(("in_file", _get_imgtype), "img_type")]),
        ])
    else:
        workflow.connect([
            (skull_stripping, xfm_tpms, [("outputnode.out_brain",
                                          "reference_image")]),
            (norm, xfm_tpms, [("outputnode.inverse_composite_transform",
                               "transforms")]),
            (xfm_tpms, format_tpm_names, [('output_image', 'in_files')]),
            (format_tpm_names, segment, [(('file_format', _pop), 'prior_image')
                                         ]),
            (skull_stripping, segment, [("outputnode.out_mask", "mask_image")
                                        ]),
        ])
    # fmt: on

    # Upload metrics
    if not config.execution.no_sub:
        from ..interfaces.webapi import UploadIQMs

        upldwf = pe.Node(UploadIQMs(), name="UploadMetrics")
        upldwf.inputs.url = config.execution.webapi_url
        upldwf.inputs.strict = config.execution.upload_strict
        if config.execution.webapi_port:
            upldwf.inputs.port = config.execution.webapi_port

        # fmt: off
        workflow.connect([
            (iqmswf, upldwf, [("outputnode.out_file", "in_iqms")]),
            (upldwf, repwf, [("api_id", "inputnode.api_id")]),
        ])
        # fmt: on

    return workflow
Example #24
def init_bold_reg_wf(freesurfer,
                     use_bbr,
                     bold2t1w_dof,
                     mem_gb,
                     omp_nthreads,
                     name='bold_reg_wf',
                     use_compression=True,
                     use_fieldwarp=False,
                     write_report=True):
    """
    This workflow registers the reference BOLD image to T1-space, using a
    boundary-based registration (BBR) cost function.

    If FreeSurfer-based preprocessing is enabled, the ``bbregister`` utility
    is used to align the BOLD images to the reconstructed subject, and the
    resulting transform is adjusted to target the T1 space.
    If FreeSurfer-based preprocessing is disabled, FSL FLIRT is used with the
    BBR cost function to directly target the T1 space.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.registration import init_bold_reg_wf
        wf = init_bold_reg_wf(freesurfer=True,
                              mem_gb=3,
                              omp_nthreads=1,
                              use_bbr=True,
                              bold2t1w_dof=9)

    **Parameters**

        freesurfer : bool
            Enable FreeSurfer functional registration (bbregister)
        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_reg_wf``)
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to T1
        write_report : bool
            Whether a reportlet should be stored

    **Inputs**

        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        ref_bold_brain
            Reference image to which BOLD series is aligned
            If ``fieldwarp == True``, ``ref_bold_brain`` should be unwarped
        ref_bold_mask
            Skull-stripping mask of reference image
        t1_preproc
            Bias-corrected structural template image
        t1_brain
            Skull-stripped ``t1_preproc``
        t1_mask
            Mask of the skull-stripped template image
        t1_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        t1_aseg
            FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        t1_aparc
            FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_split
            Individual 3D BOLD volumes, not motion corrected
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        itk_t1_to_bold
            Affine transform from T1 space to BOLD space (ITK format)
        bold_t1
            Motion-corrected BOLD series in T1 space
        bold_mask_t1
            BOLD mask in T1 space
        bold_aseg_t1
            FreeSurfer's ``aseg.mgz`` atlas, in T1w-space at the BOLD resolution
            (only if ``recon-all`` was run).
        bold_aparc_t1
            FreeSurfer's ``aparc+aseg.mgz`` atlas, in T1w-space at the BOLD resolution
            (only if ``recon-all`` was run).
        fallback
            Boolean indicating whether BBR was rejected (mri_coreg registration returned)


    **Subworkflows**

        * :py:func:`~fmriprep.workflows.util.init_bbreg_wf`
        * :py:func:`~fmriprep.workflows.util.init_fsl_bbr_wf`

    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'name_source', 'ref_bold_brain', 'ref_bold_mask', 't1_preproc',
        't1_brain', 't1_mask', 't1_seg', 't1_aseg', 't1_aparc', 'bold_split',
        'hmc_xforms', 'subjects_dir', 'subject_id',
        't1_2_fsnative_reverse_transform', 'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'itk_bold_to_t1', 'itk_t1_to_bold', 'fallback', 'bold_t1',
        'bold_mask_t1', 'bold_aseg_t1', 'bold_aparc_t1'
    ]),
                         name='outputnode')

    if freesurfer:
        bbr_wf = init_bbreg_wf(use_bbr=use_bbr,
                               bold2t1w_dof=bold2t1w_dof,
                               omp_nthreads=omp_nthreads)
    else:
        bbr_wf = init_fsl_bbr_wf(use_bbr=use_bbr, bold2t1w_dof=bold2t1w_dof)

    workflow.connect([
        (inputnode, bbr_wf, [('ref_bold_brain', 'inputnode.in_file'),
                             ('t1_2_fsnative_reverse_transform',
                              'inputnode.t1_2_fsnative_reverse_transform'),
                             ('subjects_dir', 'inputnode.subjects_dir'),
                             ('subject_id', 'inputnode.subject_id'),
                             ('t1_seg', 'inputnode.t1_seg'),
                             ('t1_brain', 'inputnode.t1_brain')]),
        (bbr_wf, outputnode, [('outputnode.itk_bold_to_t1', 'itk_bold_to_t1'),
                              ('outputnode.itk_t1_to_bold', 'itk_t1_to_bold'),
                              ('outputnode.fallback', 'fallback')]),
    ])

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB

    mask_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_t1w_tfm',
                           mem_gb=0.1)

    workflow.connect([
        (inputnode, gen_ref, [('ref_bold_brain', 'moving_image'),
                              ('t1_brain', 'fixed_image'),
                              ('t1_mask', 'fov_mask')]),
        (inputnode, mask_t1w_tfm, [('ref_bold_mask', 'input_image')]),
        (gen_ref, mask_t1w_tfm, [('out_file', 'reference_image')]),
        (bbr_wf, mask_t1w_tfm, [('outputnode.itk_bold_to_t1', 'transforms')]),
        (mask_t1w_tfm, outputnode, [('output_image', 'bold_mask_t1')]),
    ])

    if freesurfer:
        # Resample aseg and aparc in T1w space (no transforms needed)
        aseg_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                               transforms='identity',
                                               float=True),
                               name='aseg_t1w_tfm',
                               mem_gb=0.1)
        aparc_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                transforms='identity',
                                                float=True),
                                name='aparc_t1w_tfm',
                                mem_gb=0.1)

        workflow.connect([
            (inputnode, aseg_t1w_tfm, [('t1_aseg', 'input_image')]),
            (inputnode, aparc_t1w_tfm, [('t1_aparc', 'input_image')]),
            (gen_ref, aseg_t1w_tfm, [('out_file', 'reference_image')]),
            (gen_ref, aparc_t1w_tfm, [('out_file', 'reference_image')]),
            (aseg_t1w_tfm, outputnode, [('output_image', 'bold_aseg_t1')]),
            (aparc_t1w_tfm, outputnode, [('output_image', 'bold_aparc_t1')]),
        ])

    # Merge transforms placing the head motion correction last
    nforms = 2 + int(use_fieldwarp)
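    # merge_xforms slot assignment (see connections below): in1 = BOLD->T1w affine from
    # bbr_wf, in2 = fieldwarp when use_fieldwarp=True, and hmc_xforms always go last
    # ('in2' when nforms == 2, 'in3' when nforms == 3).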
    merge_xforms = pe.Node(niu.Merge(nforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms',
                                                  'in%d' % nforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in2')])])

    bold_to_t1w_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_t1w_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb)

    workflow.connect([
        (bbr_wf, merge_xforms, [('outputnode.itk_bold_to_t1', 'in1')]),
        (merge_xforms, bold_to_t1w_transform, [('out', 'transforms')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (merge, outputnode, [('out_file', 'bold_t1')]),
        (inputnode, bold_to_t1w_transform, [('bold_split', 'input_image')]),
        (gen_ref, bold_to_t1w_transform, [('out_file', 'reference_image')]),
        (bold_to_t1w_transform, merge, [('out_files', 'in_files')]),
    ])

    if write_report:
        ds_report_reg = pe.Node(DerivativesDataSink(),
                                name='ds_report_reg',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)

        def _bold_reg_suffix(fallback, freesurfer):
            if fallback:
                return 'coreg' if freesurfer else 'flirt'
            return 'bbr' if freesurfer else 'flt_bbr'

        workflow.connect([
            (bbr_wf, ds_report_reg, [('outputnode.out_report', 'in_file'),
                                     (('outputnode.fallback', _bold_reg_suffix,
                                       freesurfer), 'suffix')]),
        ])

    return workflow
Example #25
def init_3dQwarp_wf(omp_nthreads=1, debug=False, name="pepolar_estimate_wf"):
    """
    Create the PEPOLAR field estimation workflow based on AFNI's ``3dQwarp``.

    This workflow takes in two EPI files that MUST have opposed
    :abbr:`PE (phase-encoding)` direction.
    Therefore, EPIs with orthogonal PE directions are not supported.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fit.pepolar import init_3dQwarp_wf
            wf = init_3dQwarp_wf()

    Parameters
    ----------
    debug : :obj:`bool`
        Whether a fast (less accurate) configuration of the workflow should be applied.
    name : :obj:`str`
        Name for this workflow
    omp_nthreads : :obj:`int`
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    in_data : :obj:`list` of :obj:`str`
        A list of two EPI files, the first of which will be taken as reference.

    Outputs
    -------
    fmap : :obj:`str`
        The path of the estimated fieldmap.
    fmap_ref : :obj:`str`
        The path of an unwarped conversion of the first element of ``in_data``.

    """
    from nipype.interfaces import afni
    from niworkflows.interfaces.header import CopyHeader
    from niworkflows.interfaces.fixes import (
        FixHeaderRegistration as Registration,
        FixHeaderApplyTransforms as ApplyTransforms,
    )
    from niworkflows.interfaces.freesurfer import StructuralReference
    from niworkflows.func.util import init_enhance_and_skullstrip_bold_wf
    from ...utils.misc import front as _front, last as _last
    from ...interfaces.utils import Flatten, ConvertWarp

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""{_PEPOLAR_DESC} \
with `3dQwarp` (@afni; AFNI {''.join(['%02d' % v for v in afni.Info().version() or []])}).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=["in_data", "metadata"]),
                        name="inputnode")

    outputnode = pe.Node(niu.IdentityInterface(fields=["fmap", "fmap_ref"]),
                         name="outputnode")

    flatten = pe.Node(Flatten(), name="flatten")
    sort_pe = pe.Node(
        niu.Function(function=_sorted_pe,
                     output_names=["sorted", "qwarp_args"]),
        name="sort_pe",
        run_without_submitting=True,
    )

    merge_pes = pe.MapNode(
        StructuralReference(
            auto_detect_sensitivity=True,
            initial_timepoint=1,
            fixed_timepoint=True,  # Align to first image
            intensity_scaling=True,
            # 7-DOF (rigid + intensity)
            no_iteration=True,
            subsample_threshold=200,
            out_file="template.nii.gz",
        ),
        name="merge_pes",
        iterfield=["in_files"],
    )

    pe0_wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=omp_nthreads,
                                                 name="pe0_wf")
    pe1_wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=omp_nthreads,
                                                 name="pe1_wf")

    align_pes = pe.Node(
        Registration(
            from_file=_pkg_fname("sdcflows", "data/translation_rigid.json"),
            output_warped_image=True,
        ),
        name="align_pes",
        n_procs=omp_nthreads,
    )

    qwarp = pe.Node(
        afni.QwarpPlusMinus(
            blur=[-1, -1],
            environ={"OMP_NUM_THREADS": f"{min(omp_nthreads, 4)}"},
            minpatch=9,
            nopadWARP=True,
            noweight=True,
            pblur=[0.05, 0.05],
        ),
        name="qwarp",
        n_procs=min(omp_nthreads, 4),
    )

    to_ants = pe.Node(ConvertWarp(), name="to_ants", mem_gb=0.01)

    cphdr_warp = pe.Node(CopyHeader(), name="cphdr_warp", mem_gb=0.01)

    unwarp_reference = pe.Node(
        ApplyTransforms(
            dimension=3,
            float=True,
            interpolation="LanczosWindowedSinc",
        ),
        name="unwarp_reference",
    )

    # fmt: off
    workflow.connect([
        (inputnode, flatten, [("in_data", "in_data"),
                              ("metadata", "in_meta")]),
        (flatten, sort_pe, [("out_list", "inlist")]),
        (sort_pe, qwarp, [("qwarp_args", "args")]),
        (sort_pe, merge_pes, [("sorted", "in_files")]),
        (merge_pes, pe0_wf, [(("out_file", _front), "inputnode.in_file")]),
        (merge_pes, pe1_wf, [(("out_file", _last), "inputnode.in_file")]),
        (pe0_wf, align_pes, [("outputnode.skull_stripped_file", "fixed_image")
                             ]),
        (pe1_wf, align_pes, [("outputnode.skull_stripped_file", "moving_image")
                             ]),
        (pe0_wf, qwarp, [("outputnode.skull_stripped_file", "in_file")]),
        (align_pes, qwarp, [("warped_image", "base_file")]),
        (inputnode, cphdr_warp, [(("in_data", _front), "hdr_file")]),
        (qwarp, cphdr_warp, [("source_warp", "in_file")]),
        (cphdr_warp, to_ants, [("out_file", "in_file")]),
        (to_ants, unwarp_reference, [("out_file", "transforms")]),
        (inputnode, unwarp_reference, [("in_reference", "reference_image"),
                                       ("in_reference", "input_image")]),
        (unwarp_reference, outputnode, [("output_image", "fmap_ref")]),
        (to_ants, outputnode, [("out_file", "fmap")]),
    ])
    # fmt: on
    return workflow
Example #26
def init_bold_std_trans_wf(
    freesurfer,
    mem_gb,
    omp_nthreads,
    spaces,
    name='bold_std_trans_wf',
    use_compression=True,
    use_fieldwarp=False,
):
    """
    Sample fMRI into standard space with a single-step resampling of the original BOLD series.

    .. important::
        This workflow provides two outputnodes.
        One output node (with name ``poutputnode``) will be parameterized in a Nipype sense
        (see `Nipype iterables
        <https://miykael.github.io/nipype_tutorial/notebooks/basic_iteration.html>`__), and a
        second node (``outputnode``) will collapse the parameterized outputs into synchronous
        lists of the output fields listed below.

    Workflow Graph
        .. workflow::
            :graph2use: colored
            :simple_form: yes

            from niworkflows.utils.spaces import SpatialReferences
            from fmriprep_rodents.workflows.bold import init_bold_std_trans_wf
            wf = init_bold_std_trans_wf(
                freesurfer=True,
                mem_gb=3,
                omp_nthreads=1,
                spaces=SpatialReferences(
                    spaces=['MNI152Lin',
                            ('MNIPediatricAsym', {'cohort': '6'})],
                    checkpoint=True),
            )

    Parameters
    ----------
    freesurfer : :obj:`bool`
        Whether to generate FreeSurfer's aseg/aparc segmentations on BOLD space.
    mem_gb : :obj:`float`
        Size of BOLD file in GB
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use
    spaces : :py:class:`~niworkflows.utils.spaces.SpatialReferences`
        A container for storing, organizing, and parsing spatial normalizations. Composed of
        :py:class:`~niworkflows.utils.spaces.Reference` objects representing spatial references.
        Each ``Reference`` contains a space, which is a string of either TemplateFlow template IDs
        (e.g., ``MNI152Lin``, ``MNI152NLin6Asym``, ``MNIPediatricAsym``), nonstandard references
        (e.g., ``T1w`` or ``anat``, ``sbref``, ``run``, etc.), or a custom template located in
        the TemplateFlow root directory. Each ``Reference`` may also contain a spec, which is a
        dictionary with template specifications (e.g., a specification of ``{'resolution': 2}``
        would lead to resampling on a 2mm resolution of the space).
    name : :obj:`str`
        Name of workflow (default: ``bold_std_trans_wf``)
    use_compression : :obj:`bool`
        Save registered BOLD series as ``.nii.gz``
    use_fieldwarp : :obj:`bool`
        Include SDC warp in single-shot transform from BOLD to MNI

    Inputs
    ------
    anat2std_xfm
        List of anatomical-to-standard space transforms generated during
        spatial normalization.
    bold_aparc
        FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
        (only if ``recon-all`` was run).
    bold_aseg
        FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
        (only if ``recon-all`` was run).
    bold_mask
        Skull-stripping mask of reference image
    bold_split
        Individual 3D volumes, not motion corrected
    fieldwarp
        a :abbr:`DFM (displacements field map)` in ITK format
    hmc_xforms
        List of affine transforms aligning each volume to ``ref_image`` in ITK format
    itk_bold_to_t1
        Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
    name_source
        BOLD series NIfTI file
        Used to recover original information lost during processing
    templates
        List of templates that were applied as targets during
        spatial normalization.

    Outputs
    -------
    bold_std
        BOLD series, resampled to template space
    bold_std_ref
        Reference, contrast-enhanced summary of the BOLD series, resampled to template space
    bold_mask_std
        BOLD series mask in template space
    bold_aseg_std
        FreeSurfer's ``aseg.mgz`` atlas, in template space at the BOLD resolution
        (only if ``recon-all`` was run)
    bold_aparc_std
        FreeSurfer's ``aparc+aseg.mgz`` atlas, in template space at the BOLD resolution
        (only if ``recon-all`` was run)
    template
        Template identifiers synchronized correspondingly to previously
        described outputs.

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.func.util import init_bold_reference_wf
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.itk import MultiApplyTransforms
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import GenerateSamplingReference
    from niworkflows.interfaces.nilearn import Merge
    from niworkflows.utils.spaces import format_reference

    workflow = Workflow(name=name)
    output_references = spaces.cached.get_spaces(nonstandard=False, dim=(3, ))
    std_vol_references = [(s.fullname, s.spec) for s in spaces.references
                          if s.standard and s.dim == 3]

    if len(output_references) == 1:
        workflow.__desc__ = """\
The BOLD time-series were resampled into standard space,
generating a *preprocessed BOLD run in {tpl} space*.
""".format(tpl=output_references[0])
    elif len(output_references) > 1:
        workflow.__desc__ = """\
The BOLD time-series were resampled into several standard spaces,
correspondingly generating the following *spatially-normalized,
preprocessed BOLD runs*: {tpl}.
""".format(tpl=', '.join(output_references))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'anat2std_xfm',
        'bold_aparc',
        'bold_aseg',
        'bold_mask',
        'bold_split',
        'fieldwarp',
        'hmc_xforms',
        'itk_bold_to_t1',
        'name_source',
        'templates',
    ]),
                        name='inputnode')

    iterablesource = pe.Node(niu.IdentityInterface(fields=['std_target']),
                             name='iterablesource')
    # Generate conversions for every template+spec at the input
    iterablesource.iterables = [('std_target', std_vol_references)]

    split_target = pe.Node(niu.Function(
        function=_split_spec,
        input_names=['in_target'],
        output_names=['space', 'template', 'spec']),
                           run_without_submitting=True,
                           name='split_target')

    select_std = pe.Node(KeySelect(fields=['anat2std_xfm']),
                         name='select_std',
                         run_without_submitting=True)

    select_tpl = pe.Node(niu.Function(function=_select_template),
                         name='select_tpl',
                         run_without_submitting=True)

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB

    mask_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'),
                           name='mask_std_tfm',
                           mem_gb=1)

    # Write corrected file in the designated output dir
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    nxforms = 3 + use_fieldwarp
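    # merge_xforms slot assignment (per the connections below): in1 = anat2std_xfm
    # (selected for the current target space), in2 = itk_bold_to_t1 (as a list),
    # in3 = fieldwarp when use_fieldwarp, and hmc_xforms always occupy the last slot.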
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms',
                                                  'in%d' % nxforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    bold_to_std_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_std_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a reference on the target standard space
    gen_final_ref = init_bold_reference_wf(omp_nthreads=omp_nthreads,
                                           pre_mask=True)

    workflow.connect([
        (iterablesource, split_target, [('std_target', 'in_target')]),
        (iterablesource, select_tpl, [('std_target', 'template')]),
        (inputnode, select_std, [('anat2std_xfm', 'anat2std_xfm'),
                                 ('templates', 'keys')]),
        (inputnode, mask_std_tfm, [('bold_mask', 'input_image')]),
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, merge_xforms, [(('itk_bold_to_t1', _aslist), 'in2')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, mask_merge_tfms, [(('itk_bold_to_t1', _aslist), 'in2')]),
        (inputnode, bold_to_std_transform, [('bold_split', 'input_image')]),
        (split_target, select_std, [('space', 'key')]),
        (select_std, merge_xforms, [('anat2std_xfm', 'in1')]),
        (select_std, mask_merge_tfms, [('anat2std_xfm', 'in1')]),
        (split_target, gen_ref, [(('spec', _is_native), 'keep_native')]),
        (select_tpl, gen_ref, [('out', 'fixed_image')]),
        (merge_xforms, bold_to_std_transform, [('out', 'transforms')]),
        (gen_ref, bold_to_std_transform, [('out_file', 'reference_image')]),
        (gen_ref, mask_std_tfm, [('out_file', 'reference_image')]),
        (mask_merge_tfms, mask_std_tfm, [('out', 'transforms')]),
        (mask_std_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')
                                       ]),
        (bold_to_std_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]),
    ])

    output_names = [
        'bold_mask_std',
        'bold_std',
        'bold_std_ref',
        'spatial_reference',
        'template',
    ] + freesurfer * ['bold_aseg_std', 'bold_aparc_std']

    poutputnode = pe.Node(niu.IdentityInterface(fields=output_names),
                          name='poutputnode')
    workflow.connect([
        # Connecting outputnode
        (iterablesource, poutputnode, [(('std_target', format_reference),
                                        'spatial_reference')]),
        (merge, poutputnode, [('out_file', 'bold_std')]),
        (gen_final_ref, poutputnode, [('outputnode.ref_image', 'bold_std_ref')
                                      ]),
        (mask_std_tfm, poutputnode, [('output_image', 'bold_mask_std')]),
        (select_std, poutputnode, [('key', 'template')]),
    ])

    if freesurfer:
        # Sample the parcellation files to functional space
        aseg_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'),
                               name='aseg_std_tfm',
                               mem_gb=1)
        aparc_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'),
                                name='aparc_std_tfm',
                                mem_gb=1)

        workflow.connect([
            (inputnode, aseg_std_tfm, [('bold_aseg', 'input_image')]),
            (inputnode, aparc_std_tfm, [('bold_aparc', 'input_image')]),
            (select_std, aseg_std_tfm, [('anat2std_xfm', 'transforms')]),
            (select_std, aparc_std_tfm, [('anat2std_xfm', 'transforms')]),
            (gen_ref, aseg_std_tfm, [('out_file', 'reference_image')]),
            (gen_ref, aparc_std_tfm, [('out_file', 'reference_image')]),
            (aseg_std_tfm, poutputnode, [('output_image', 'bold_aseg_std')]),
            (aparc_std_tfm, poutputnode, [('output_image', 'bold_aparc_std')]),
        ])

    # Connect parametric outputs to a Join outputnode
    outputnode = pe.JoinNode(niu.IdentityInterface(fields=output_names),
                             name='outputnode',
                             joinsource='iterablesource')
    workflow.connect([
        (poutputnode, outputnode, [(f, f) for f in output_names]),
    ])
    return workflow
Example #27
def init_syn_sdc_wf(omp_nthreads, epi_pe=None,
                    atlas_threshold=3, name='syn_sdc_wf'):
    """
    Build the *fieldmap-less* susceptibility-distortion estimation workflow.

    This workflow takes a skull-stripped T1w image and reference BOLD image and
    estimates a susceptibility distortion correction warp, using ANTs symmetric
    normalization (SyN) and the average fieldmap atlas described in
    [Treiber2016]_.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.

    This technique is a variation on those developed in [Huntenburg2014]_ and
    [Wang2017]_.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.syn import init_syn_sdc_wf
            wf = init_syn_sdc_wf(
                epi_pe='j',
                omp_nthreads=8)

    Inputs
    ------
    in_reference
        reference image
    in_reference_brain
        skull-stripped reference image
    t1w_brain
        skull-stripped, bias-corrected structural image
    std2anat_xfm
        inverse registration transform of T1w image to MNI template

    Outputs
    -------
    out_reference
        the ``in_reference`` image after unwarping
    out_reference_brain
        the ``in_reference_brain`` image after unwarping
    out_warp
        the corresponding :abbr:`DFM (displacements field map)` compatible with
        ANTs
    out_mask
        mask of the unwarped input file

    References
    ----------
    .. [Treiber2016] Treiber, J. M. et al. (2016) Characterization and Correction
        of Geometric Distortions in 814 Diffusion Weighted Images,
        PLoS ONE 11(3): e0152472. doi:`10.1371/journal.pone.0152472
        <https://doi.org/10.1371/journal.pone.0152472>`_.
    .. [Wang2017] Wang S, et al. (2017) Evaluation of Field Map and Nonlinear
        Registration Methods for Correction of Susceptibility Artifacts
        in Diffusion MRI. Front. Neuroinform. 11:17.
        doi:`10.3389/fninf.2017.00017
        <https://doi.org/10.3389/fninf.2017.00017>`_.
    .. [Huntenburg2014] Huntenburg, J. M. (2014) Evaluating Nonlinear
        Coregistration of BOLD EPI and T1w Images. Berlin: Master
        Thesis, Freie Universität. `PDF
        <http://pubman.mpdl.mpg.de/pubman/item/escidoc:2327525:5/component/escidoc:2327523/master_thesis_huntenburg_4686947.pdf>`_.

    """
    if epi_pe is None or epi_pe[0] not in ['i', 'j']:
        LOGGER.warning('Incorrect phase-encoding direction, assuming PA (posterior-to-anterior).')
        epi_pe = 'j'

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on *fMRIPrep*'s *fieldmap-less* approach.
The deformation field is that resulting from co-registering the BOLD reference
to the same-subject T1w-reference with its intensity inverted [@fieldmapless1;
@fieldmapless2].
Registration is performed with `antsRegistration` (ANTs {ants_ver}), and
the process regularized by constraining deformation to be nonzero only
along the phase-encoding direction, and modulated with an average fieldmap
template [@fieldmapless3].
""".format(ants_ver=Registration().version or '<ver>')
    inputnode = pe.Node(
        niu.IdentityInterface(['in_reference', 'in_reference_brain',
                               't1w_brain', 'std2anat_xfm']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(['out_reference', 'out_reference_brain',
                               'out_mask', 'out_warp']),
        name='outputnode')

    # Collect predefined data
    # Atlas image and registration affine
    atlas_img = resource_filename('sdcflows', 'data/fmap_atlas.nii.gz')
    # Registration specifications
    affine_transform = resource_filename('sdcflows', 'data/affine.json')
    syn_transform = resource_filename('sdcflows', 'data/susceptibility_syn.json')

    invert_t1w = pe.Node(Rescale(invert=True), name='invert_t1w',
                         mem_gb=0.3)

    ref_2_t1 = pe.Node(Registration(from_file=affine_transform),
                       name='ref_2_t1', n_procs=omp_nthreads)
    t1_2_ref = pe.Node(ApplyTransforms(invert_transform_flags=[True]),
                       name='t1_2_ref', n_procs=omp_nthreads)

    # 1) BOLD -> T1; 2) MNI -> T1; 3) ATLAS -> MNI
    transform_list = pe.Node(niu.Merge(3), name='transform_list',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
    transform_list.inputs.in3 = resource_filename(
        'sdcflows', 'data/fmap_atlas_2_MNI152NLin2009cAsym_affine.mat')

    # Inverting (1), then applying in reverse order:
    #
    # ATLAS -> MNI -> T1 -> BOLD
    atlas_2_ref = pe.Node(
        ApplyTransforms(invert_transform_flags=[True, False, False]),
        name='atlas_2_ref', n_procs=omp_nthreads,
        mem_gb=0.3)
    atlas_2_ref.inputs.input_image = atlas_img

    threshold_atlas = pe.Node(
        fsl.maths.MathsCommand(args='-thr {:.8g} -bin'.format(atlas_threshold),
                               output_datatype='char'),
        name='threshold_atlas', mem_gb=0.3)

    fixed_image_masks = pe.Node(niu.Merge(2), name='fixed_image_masks',
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
    fixed_image_masks.inputs.in1 = 'NULL'

    restrict = [[int(epi_pe[0] == 'i'), int(epi_pe[0] == 'j'), 0]] * 2
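    # For example, epi_pe='j' gives restrict = [[0, 1, 0], [0, 1, 0]], i.e. nonzero
    # deformation is allowed only along the second (phase-encoding) axis, with one
    # entry per registration stage of the SyN specification.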
    syn = pe.Node(
        Registration(from_file=syn_transform, restrict_deformation=restrict),
        name='syn', n_procs=omp_nthreads)

    unwarp_ref = pe.Node(ApplyTransforms(
        dimension=3, float=True, interpolation='LanczosWindowedSinc'),
        name='unwarp_ref')

    skullstrip_bold_wf = init_skullstrip_bold_wf()

    workflow.connect([
        (inputnode, invert_t1w, [('t1w_brain', 'in_file'),
                                 ('in_reference', 'ref_file')]),
        (inputnode, ref_2_t1, [('in_reference_brain', 'moving_image')]),
        (invert_t1w, ref_2_t1, [('out_file', 'fixed_image')]),
        (inputnode, t1_2_ref, [('in_reference', 'reference_image')]),
        (invert_t1w, t1_2_ref, [('out_file', 'input_image')]),
        (ref_2_t1, t1_2_ref, [('forward_transforms', 'transforms')]),
        (ref_2_t1, transform_list, [('forward_transforms', 'in1')]),
        (inputnode, transform_list, [
            ('std2anat_xfm', 'in2')]),
        (inputnode, atlas_2_ref, [('in_reference', 'reference_image')]),
        (transform_list, atlas_2_ref, [('out', 'transforms')]),
        (atlas_2_ref, threshold_atlas, [('output_image', 'in_file')]),
        (threshold_atlas, fixed_image_masks, [('out_file', 'in2')]),
        (inputnode, syn, [('in_reference_brain', 'moving_image')]),
        (t1_2_ref, syn, [('output_image', 'fixed_image')]),
        (fixed_image_masks, syn, [('out', 'fixed_image_masks')]),
        (syn, outputnode, [('forward_transforms', 'out_warp')]),
        (syn, unwarp_ref, [('forward_transforms', 'transforms')]),
        (inputnode, unwarp_ref, [('in_reference', 'reference_image'),
                                 ('in_reference', 'input_image')]),
        (unwarp_ref, skullstrip_bold_wf, [
            ('output_image', 'inputnode.in_file')]),
        (unwarp_ref, outputnode, [('output_image', 'out_reference')]),
        (skullstrip_bold_wf, outputnode, [
            ('outputnode.skull_stripped_file', 'out_reference_brain'),
            ('outputnode.mask_file', 'out_mask')]),
    ])

    return workflow
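
# A minimal usage sketch for the SyN-SDC workflow above. The factory name and
# import path (init_syn_sdc_wf from sdcflows.workflows.syn) are assumptions,
# as are the input file names; all of them are placeholders, not real data.
from sdcflows.workflows.syn import init_syn_sdc_wf  # assumed location

syn_wf = init_syn_sdc_wf(omp_nthreads=4, epi_pe='j', atlas_threshold=3)
syn_wf.inputs.inputnode.in_reference = 'bold_ref.nii.gz'
syn_wf.inputs.inputnode.in_reference_brain = 'bold_ref_brain.nii.gz'
syn_wf.inputs.inputnode.t1w_brain = 'sub-01_desc-brain_T1w.nii.gz'
syn_wf.inputs.inputnode.std2anat_xfm = 'std2anat_xfm.h5'
syn_wf.run()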
Example #28
def init_fmap_unwarp_report_wf(reportlets_dir, name='fmap_unwarp_report_wf'):
    """
    This workflow generates and saves a reportlet showing the effect of fieldmap
    unwarping on a BOLD image.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.base import init_fmap_unwarp_report_wf
        wf = init_fmap_unwarp_report_wf(reportlets_dir='.')

    **Parameters**

        reportlets_dir : str
            Directory in which to save reportlets
        name : str, optional
            Workflow name (default: fmap_unwarp_report_wf)

    **Inputs**

        in_pre
            Reference image, before unwarping
        in_post
            Reference image, after unwarping
        in_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        in_xfm
            Affine transform from T1 space to BOLD space (ITK format)

    """

    from niworkflows.nipype.pipeline import engine as pe
    from niworkflows.nipype.interfaces import utility as niu
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms

    from niworkflows.interfaces import SimpleBeforeAfter
    from ...interfaces.images import extract_wm
    from ...interfaces import DerivativesDataSink

    DEFAULT_MEMORY_MIN_GB = 0.01

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_pre', 'in_post', 'in_seg', 'in_xfm', 'name_source']),
                        name='inputnode')

    map_seg = pe.Node(ApplyTransforms(dimension=3,
                                      float=True,
                                      interpolation='NearestNeighbor'),
                      name='map_seg',
                      mem_gb=0.3)

    sel_wm = pe.Node(niu.Function(function=extract_wm),
                     name='sel_wm',
                     mem_gb=DEFAULT_MEMORY_MIN_GB)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name='bold_rpt', mem_gb=0.1)
    bold_rpt_ds = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                              suffix='variant-hmcsdc_preproc'),
                          name='bold_rpt_ds',
                          mem_gb=DEFAULT_MEMORY_MIN_GB,
                          run_without_submitting=True)
    workflow.connect([
        (inputnode, bold_rpt, [('in_post', 'after'), ('in_pre', 'before')]),
        (inputnode, bold_rpt_ds, [('name_source', 'source_file')]),
        (bold_rpt, bold_rpt_ds, [('out_report', 'in_file')]),
        (inputnode, map_seg, [('in_post', 'reference_image'),
                              ('in_seg', 'input_image'),
                              ('in_xfm', 'transforms')]),
        (map_seg, sel_wm, [('output_image', 'in_seg')]),
        (sel_wm, bold_rpt, [('out', 'wm_seg')]),
    ])

    return workflow
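
# A minimal usage sketch for init_fmap_unwarp_report_wf as defined above; the
# input file names are hypothetical placeholders.
from fmriprep.workflows.fieldmap.base import init_fmap_unwarp_report_wf

report_wf = init_fmap_unwarp_report_wf(reportlets_dir='.')
report_wf.inputs.inputnode.in_pre = 'bold_ref_distorted.nii.gz'
report_wf.inputs.inputnode.in_post = 'bold_ref_unwarped.nii.gz'
report_wf.inputs.inputnode.in_seg = 'sub-01_dseg.nii.gz'
report_wf.inputs.inputnode.in_xfm = 't1w_to_bold_xfm.txt'
report_wf.inputs.inputnode.name_source = 'sub-01_task-rest_bold.nii.gz'
report_wf.run()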
Example #29
def init_bold_mni_trans_wf(template,
                           mem_gb,
                           omp_nthreads,
                           name='bold_mni_trans_wf',
                           template_out_grid='2mm',
                           use_compression=True,
                           use_fieldwarp=False):
    """
    This workflow samples functional images to the MNI template in a "single shot"
    from the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_mni_trans_wf
        wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                                    mem_gb=3,
                                    omp_nthreads=1,
                                    template_out_grid='native')

    **Parameters**

        template : str
            Name of template targeted by ``template`` output space
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_mni_trans_wf``)
        template_out_grid : str
            Keyword ('native', '1mm' or '2mm') or path of custom reference
            image for normalization.
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI

    **Inputs**

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        bold_split
            Individual 3D volumes, not motion corrected
        bold_mask
            Skull-stripping mask of reference image
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold_mni
            BOLD series, resampled to template space
        bold_mask_mni
            BOLD series mask in template space

    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'itk_bold_to_t1', 't1_2_mni_forward_transform', 'name_source',
        'bold_split', 'bold_mask', 'hmc_xforms', 'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['bold_mni', 'bold_mask_mni']),
        name='outputnode')

    def _aslist(in_value):
        if isinstance(in_value, list):
            return in_value
        return [in_value]

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 bit / 8 ~ 150 MB
    template_str = nid.TEMPLATE_MAP[template]
    gen_ref.inputs.fixed_image = op.join(nid.get_dataset(template_str),
                                         '1mm_T1.nii.gz')

    mask_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_mni_tfm',
                           mem_gb=1)

    # Merge the T1w-to-MNI and BOLD-to-T1w transforms used to map the brain mask
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    nxforms = 4 if use_fieldwarp else 3
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
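    # antsApplyTransforms treats the merged list as a stack: the last transform
    # (the per-volume HMC affines) is applied to the moving image first, followed
    # by the optional SDC fieldwarp, then the BOLD->T1w affine (in2), and finally
    # the T1w->MNI transform (in1).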
    workflow.connect([
        (inputnode, merge_xforms, [('hmc_xforms', 'in%d' % nxforms)]),
    ])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    workflow.connect([
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, mask_merge_tfms, [('t1_2_mni_forward_transform', 'in1'),
                                      (('itk_bold_to_t1', _aslist), 'in2')]),
        (mask_merge_tfms, mask_mni_tfm, [('out', 'transforms')]),
        (mask_mni_tfm, outputnode, [('output_image', 'bold_mask_mni')]),
        (inputnode, mask_mni_tfm, [('bold_mask', 'input_image')])
    ])

    bold_to_mni_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_mni_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    workflow.connect([
        (inputnode, merge_xforms, [('t1_2_mni_forward_transform', 'in1'),
                                   (('itk_bold_to_t1', _aslist), 'in2')]),
        (merge_xforms, bold_to_mni_transform, [('out', 'transforms')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, bold_to_mni_transform, [('bold_split', 'input_image')]),
        (bold_to_mni_transform, merge, [('out_files', 'in_files')]),
        (merge, outputnode, [('out_file', 'bold_mni')]),
    ])

    if template_out_grid == 'native':
        workflow.connect([
            (gen_ref, mask_mni_tfm, [('out_file', 'reference_image')]),
            (gen_ref, bold_to_mni_transform, [('out_file', 'reference_image')]),
        ])
    elif template_out_grid == '1mm' or template_out_grid == '2mm':
        mask_mni_tfm.inputs.reference_image = op.join(
            nid.get_dataset(template_str),
            '%s_brainmask.nii.gz' % template_out_grid)
        bold_to_mni_transform.inputs.reference_image = op.join(
            nid.get_dataset(template_str), '%s_T1.nii.gz' % template_out_grid)
    else:
        mask_mni_tfm.inputs.reference_image = template_out_grid
        bold_to_mni_transform.inputs.reference_image = template_out_grid
    return workflow
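
# A minimal usage sketch for init_bold_mni_trans_wf; the transforms and file
# names below are hypothetical placeholders.
from fmriprep.workflows.bold import init_bold_mni_trans_wf

mni_wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                                mem_gb=3,
                                omp_nthreads=4,
                                template_out_grid='2mm')
mni_wf.inputs.inputnode.itk_bold_to_t1 = 'bold_to_t1_affine.txt'
mni_wf.inputs.inputnode.t1_2_mni_forward_transform = 't1_to_mni_composite.h5'
mni_wf.inputs.inputnode.bold_split = ['vol0000.nii.gz', 'vol0001.nii.gz']
mni_wf.inputs.inputnode.bold_mask = 'bold_mask.nii.gz'
mni_wf.inputs.inputnode.hmc_xforms = ['hmc_vol0000.txt', 'hmc_vol0001.txt']
mni_wf.inputs.inputnode.name_source = 'sub-01_task-rest_bold.nii.gz'
mni_wf.run()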
Example #30
def init_fmap_unwarp_report_wf(name='fmap_unwarp_report_wf', forcedsyn=False):
    """
    Save a reportlet showing how SDC unwarping performed.

    This workflow generates and saves a reportlet showing the effect of fieldmap
    unwarping on a BOLD image.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from sdcflows.workflows.unwarp import init_fmap_unwarp_report_wf
        wf = init_fmap_unwarp_report_wf()

    **Parameters**

        name : str, optional
            Workflow name (default: fmap_unwarp_report_wf)
        forcedsyn : bool, optional
            Whether SyN-SDC was forced.

    **Inputs**

        in_pre
            Reference image, before unwarping
        in_post
            Reference image, after unwarping
        in_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        in_xfm
            Affine transform from T1 space to BOLD space (ITK format)

    """
    from niworkflows.interfaces import SimpleBeforeAfter
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from niworkflows.interfaces.images import extract_wm
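    # Workflow, pe, niu and DerivativesDataSink are assumed to come from
    # module-level imports in the original source; only the interfaces above
    # are imported locally.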

    DEFAULT_MEMORY_MIN_GB = 0.01

    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_pre', 'in_post', 'in_seg', 'in_xfm']),
                        name='inputnode')

    map_seg = pe.Node(ApplyTransforms(dimension=3,
                                      float=True,
                                      interpolation='MultiLabel'),
                      name='map_seg',
                      mem_gb=0.3)

    sel_wm = pe.Node(niu.Function(function=extract_wm),
                     name='sel_wm',
                     mem_gb=DEFAULT_MEMORY_MIN_GB)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name='bold_rpt', mem_gb=0.1)
    ds_report_sdc = pe.Node(DerivativesDataSink(
        desc='sdc' if not forcedsyn else 'forcedsyn', suffix='bold'),
                            name='ds_report_sdc',
                            mem_gb=DEFAULT_MEMORY_MIN_GB,
                            run_without_submitting=True)

    workflow.connect([
        (inputnode, bold_rpt, [('in_post', 'after'), ('in_pre', 'before')]),
        (bold_rpt, ds_report_sdc, [('out_report', 'in_file')]),
        (inputnode, map_seg, [('in_post', 'reference_image'),
                              ('in_seg', 'input_image'),
                              ('in_xfm', 'transforms')]),
        (map_seg, sel_wm, [('output_image', 'in_seg')]),
        (sel_wm, bold_rpt, [('out', 'wm_seg')]),
    ])

    return workflow
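
# A minimal usage sketch for the sdcflows variant above. File names are
# hypothetical placeholders; ds_report_sdc is created without a base_directory
# or source_file, so the caller is expected to provide them (done here directly
# for illustration).
from sdcflows.workflows.unwarp import init_fmap_unwarp_report_wf

rpt_wf = init_fmap_unwarp_report_wf(forcedsyn=True)
rpt_wf.get_node('ds_report_sdc').inputs.base_directory = '/tmp/reportlets'
rpt_wf.get_node('ds_report_sdc').inputs.source_file = 'sub-01_task-rest_bold.nii.gz'
rpt_wf.inputs.inputnode.in_pre = 'bold_ref_distorted.nii.gz'
rpt_wf.inputs.inputnode.in_post = 'bold_ref_unwarped.nii.gz'
rpt_wf.inputs.inputnode.in_seg = 'sub-01_dseg.nii.gz'
rpt_wf.inputs.inputnode.in_xfm = 't1w_to_bold_xfm.txt'
rpt_wf.run()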