Example #1
preprocessing.connect(recon_all, 'subjects_dir', surfregister, 'subjects_dir')

isotropic_surface_smooth = pe.MapNode(interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)),
                                      iterfield=['in_file'],
                                      name="isotropic_surface_smooth")
preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth,
                      'reg_file')
preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth,
                      "in_file")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth,
                      "surface_fwhm")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth, "vol_fwhm")
preprocessing.connect(recon_all, 'subjects_dir', isotropic_surface_smooth,
                      'subjects_dir')

merge_smoothed_files = pe.Node(interface=util.Merge(3),
                               name='merge_smoothed_files')
preprocessing.connect(isotropic_voxel_smooth, 'smoothed_files',
                      merge_smoothed_files, 'in1')
preprocessing.connect(anisotropic_voxel_smooth, 'outputnode.smoothed_files',
                      merge_smoothed_files, 'in2')
preprocessing.connect(isotropic_surface_smooth, 'smoothed_file',
                      merge_smoothed_files, 'in3')

select_smoothed_files = pe.Node(interface=util.Select(),
                                name="select_smoothed_files")
preprocessing.connect(merge_smoothed_files, 'out', select_smoothed_files,
                      'inlist')


def chooseindex(roi):
    # (Body truncated in the source.) A plausible completion: map the chosen
    # smoothing strategy onto the corresponding slice of the merged list,
    # one block of entries per strategy (block size = number of runs).
    nruns = 4  # hypothetical; depends on the dataset
    return {'isotropic_voxel': list(range(0, nruns)),
            'anisotropic_voxel': list(range(nruns, 2 * nruns)),
            'isotropic_surface': list(range(2 * nruns, 3 * nruns))}[roi]
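
# A hedged sketch of how the Select index would be driven (assuming an
# upstream identity node, here called `infosource`, that iterates over the
# smoothing strategy):
#
# preprocessing.connect(infosource, ('smoothing_type', chooseindex),
#                       select_smoothed_files, 'index')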
Example #2
preproc.connect(motion_correct, 'out_file', maskfunc2, 'in_file')
preproc.connect(dilatemask, 'out_file', maskfunc2, 'in_file2')
"""
Determine the mean image from each functional run
"""

meanfunc2 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                suffix='_mean'),
                       iterfield=['in_file'],
                       name='meanfunc2')
preproc.connect(maskfunc2, 'out_file', meanfunc2, 'in_file')
"""
Merge the median values with the mean functional images into a coupled list
"""

mergenode = pe.Node(interface=util.Merge(2, axis='hstack'), name='merge')
preproc.connect(meanfunc2, 'out_file', mergenode, 'in1')
preproc.connect(medianval, 'out_stat', mergenode, 'in2')
"""
Smooth each run using SUSAN with the brightness threshold set to 75% of the
median value for each run and a mask constituting the mean functional
"""

smooth = pe.MapNode(interface=fsl.SUSAN(),
                    iterfield=['in_file', 'brightness_threshold', 'usans'],
                    name='smooth')
"""
Define a function to get the brightness threshold for SUSAN
"""

Example #3
def init_func_preproc_wf(bold_file):
    """
    This workflow controls the functional preprocessing stages of *fMRIPrep*.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.tests import mock_config
            from fmriprep import config
            from fmriprep.workflows.bold.base import init_func_preproc_wf
            with mock_config():
                bold_file = config.execution.bids_dir / 'sub-01' / 'func' \
                    / 'sub-01_task-mixedgamblestask_run-01_bold.nii.gz'
                wf = init_func_preproc_wf(str(bold_file))

    Parameters
    ----------
    bold_file
        BOLD series NIfTI file

    Inputs
    ------
    bold_file
        BOLD series NIfTI file
    t1w_preproc
        Bias-corrected structural template image
    t1w_mask
        Mask of the skull-stripped template image
    t1w_dseg
        Segmentation of preprocessed structural image, including
        gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
    t1w_aseg
        Segmentation of structural image, done with FreeSurfer.
    t1w_aparc
        Parcellation of structural image, done with FreeSurfer.
    t1w_tpms
        List of tissue probability maps in T1w space
    template
        List of templates to target
    anat2std_xfm
        List of transform files, collated with templates
    std2anat_xfm
        List of inverse transform files, collated with templates
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w

    Outputs
    -------
    bold_t1
        BOLD series, resampled to T1w space
    bold_mask_t1
        BOLD series mask in T1w space
    bold_std
        BOLD series, resampled to template space
    bold_mask_std
        BOLD series mask in template space
    confounds
        TSV of confounds
    surfaces
        BOLD series, resampled to FreeSurfer surfaces
    aroma_noise_ics
        Noise components identified by ICA-AROMA
    melodic_mix
        FSL MELODIC mixing matrix
    bold_cifti
        BOLD CIFTI image
    cifti_variant
        combination of target spaces for `bold_cifti`

    See Also
    --------

    * :py:func:`~niworkflows.func.util.init_bold_reference_wf`
    * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
    * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
    * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_t1_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_std_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
    * :py:func:`~sdcflows.workflows.fmap.init_fmap_wf`
    * :py:func:`~sdcflows.workflows.pepolar.init_pepolar_unwarp_wf`
    * :py:func:`~sdcflows.workflows.phdiff.init_phdiff_wf`
    * :py:func:`~sdcflows.workflows.syn.init_syn_sdc_wf`
    * :py:func:`~sdcflows.workflows.unwarp.init_sdc_unwarp_wf`

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.func.util import init_bold_reference_wf
    from niworkflows.interfaces.nibabel import ApplyMask
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import DictMerge
    from sdcflows.workflows.base import init_sdc_estimate_wf, fieldmap_wrangler

    ref_file = bold_file
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
    bold_tlen = 10
    multiecho = isinstance(bold_file, list)

    # Have some options handy
    layout = config.execution.layout
    omp_nthreads = config.nipype.omp_nthreads
    freesurfer = config.workflow.run_reconall
    spaces = config.workflow.spaces

    if multiecho:
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]
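        # e.g. tes = [0.012, 0.028, 0.044] selects the shortest-TE echo,
        # which suffers the least T2* decay, as the reference file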

    if os.path.isfile(ref_file):
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    config.loggers.workflow.debug(
        'Creating bold processing workflow for "%s" (%.2f GB / %d TRs). '
        'Memory resampled/largemem=%.2f/%.2f GB.', ref_file,
        mem_gb['filesize'], bold_tlen, mem_gb['resampled'], mem_gb['largemem'])

    sbref_file = None
    # Find associated sbref, if possible
    entities = layout.parse_file_entities(ref_file)
    entities['suffix'] = 'sbref'
    entities['extension'] = ['nii', 'nii.gz']  # Overwrite extensions
    files = layout.get(return_type='file', **entities)
    refbase = os.path.basename(ref_file)
    if 'sbref' in config.workflow.ignore:
        config.loggers.workflow.info("Single-band reference files ignored.")
    elif files and multiecho:
        config.loggers.workflow.warning(
            "Single-band reference found, but not supported in "
            "multi-echo workflows at this time. Ignoring.")
    elif files:
        sbref_file = files[0]
        sbbase = os.path.basename(sbref_file)
        if len(files) > 1:
            config.loggers.workflow.warning(
                "Multiple single-band reference files found for {}; using "
                "{}".format(refbase, sbbase))
        else:
            config.loggers.workflow.info(
                "Using single-band reference file %s.", sbbase)
    else:
        config.loggers.workflow.info("No single-band-reference found for %s.",
                                     refbase)

    metadata = layout.get_metadata(ref_file)

    # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
    fmaps = None
    if 'fieldmaps' not in config.workflow.ignore:
        fmaps = fieldmap_wrangler(layout,
                                  ref_file,
                                  use_syn=config.workflow.use_syn,
                                  force_syn=config.workflow.force_syn)
    elif config.workflow.use_syn or config.workflow.force_syn:
        # If fieldmaps are not enabled, activate SyN-SDC in unforced (False) mode
        fmaps = {'syn': False}
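
    # `fmaps` ends up as None (no SDC), a dict of discovered fieldmaps keyed
    # by type (phase1|phase2|phasediff|epi|fieldmap), and/or a 'syn' entry
    # whose boolean value records whether SyN-SDC is forced (True) or merely
    # enabled as a fallback (False).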

    # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
    run_stc = (bool(metadata.get("SliceTiming"))
               and 'slicetiming' not in config.workflow.ignore
               and (_get_series_len(ref_file) > 4 or "TooShort"))
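    # `run_stc` is therefore tri-state: True (run STC), False (no SliceTiming
    # metadata or STC disabled), or the truthy sentinel "TooShort" (metadata
    # present, but the series has too few volumes); the STC section below
    # checks `run_stc is True` explicitly for this reason.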

    # Check if MEEPI for T2* coregistration target
    if config.workflow.t2s_coreg and not multiecho:
        config.loggers.workflow.warning(
            "No multiecho BOLD images found for T2* coregistration. "
            "Using standard EPI-T1 coregistration.")
        config.workflow.t2s_coreg = False

    # By default, force-bbr for t2s_coreg unless user specifies otherwise
    if config.workflow.t2s_coreg and config.workflow.use_bbr is None:
        config.workflow.use_bbr = True

    # Build workflow
    workflow = Workflow(name=wf_name)
    workflow.__postdesc__ = """\
All resamplings can be performed with *a single interpolation
step* by composing all the pertinent transformations (i.e. head-motion
transform matrices, susceptibility distortion correction when available,
and co-registrations to anatomical and output spaces).
Gridded (volumetric) resamplings were performed using `antsApplyTransforms` (ANTs),
configured with Lanczos interpolation to minimize the smoothing
effects of other kernels [@lanczos].
Non-gridded (surface) resamplings were performed using `mri_vol2surf`
(FreeSurfer).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_file', 'subjects_dir', 'subject_id', 't1w_preproc', 't1w_mask',
        't1w_dseg', 't1w_tpms', 't1w_aseg', 't1w_aparc', 'anat2std_xfm',
        'std2anat_xfm', 'template', 't1w2fsnative_xfm', 'fsnative2t1w_xfm'
    ]),
                        name='inputnode')
    inputnode.inputs.bold_file = bold_file
    if sbref_file is not None:
        from niworkflows.interfaces.images import ValidateImage
        val_sbref = pe.Node(ValidateImage(in_file=sbref_file),
                            name='val_sbref')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1',
        'bold_aparc_t1', 'bold_std', 'bold_std_ref', 'bold_mask_std',
        'bold_aseg_std', 'bold_aparc_std', 'bold_native', 'bold_cifti',
        'cifti_variant', 'cifti_metadata', 'cifti_density', 'surfaces',
        'confounds', 'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file',
        'confounds_metadata'
    ]),
                         name='outputnode')

    # Generate a brain-masked conversion of the t1w
    t1w_brain = pe.Node(ApplyMask(), name='t1w_brain')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']),
                         name='boldbuffer')

    summary = pe.Node(FunctionalSummary(
        slice_timing=run_stc,
        registration=('FSL', 'FreeSurfer')[freesurfer],
        registration_dof=config.workflow.bold2t1w_dof,
        registration_init=config.workflow.bold2t1w_init,
        pe_direction=metadata.get("PhaseEncodingDirection"),
        tr=metadata.get("RepetitionTime")),
                      name='summary',
                      mem_gb=config.DEFAULT_MEMORY_MIN_GB,
                      run_without_submitting=True)
    summary.inputs.dummy_scans = config.workflow.dummy_scans

    func_derivatives_wf = init_func_derivatives_wf(
        bids_root=layout.root,
        cifti_output=config.workflow.cifti_output,
        freesurfer=freesurfer,
        metadata=metadata,
        output_dir=str(config.execution.output_dir),
        spaces=spaces,
        use_aroma=config.workflow.use_aroma,
    )

    workflow.connect([
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_t1_ref', 'inputnode.bold_t1_ref'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_native', 'inputnode.bold_native'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surf_files'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_metadata', 'inputnode.cifti_metadata'),
            ('cifti_density', 'inputnode.cifti_density'),
            ('confounds_metadata', 'inputnode.confounds_metadata'),
        ]),
    ])

    # Generate a tentative boldref
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)
    bold_reference_wf.inputs.inputnode.dummy_scans = config.workflow.dummy_scans
    if sbref_file is not None:
        workflow.connect([
            (val_sbref, bold_reference_wf, [('out_file',
                                             'inputnode.sbref_file')]),
        ])

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'),
                         name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # calculate BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=config.workflow.use_bbr,
                                   bold2t1w_dof=config.workflow.bold2t1w_dof,
                                   bold2t1w_init=config.workflow.bold2t1w_init,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False)

    # apply BOLD registration to T1w
    bold_t1_trans_wf = init_bold_t1_trans_wf(name='bold_t1_trans_wf',
                                             freesurfer=freesurfer,
                                             use_fieldwarp=bool(fmaps),
                                             multiecho=multiecho,
                                             mem_gb=mem_gb['resampled'],
                                             omp_nthreads=omp_nthreads,
                                             use_compression=False)

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        regressors_all_comps=config.workflow.regressors_all_comps,
        regressors_fd_th=config.workflow.regressors_fd_th,
        regressors_dvars_th=config.workflow.regressors_dvars_th,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not config.execution.low_mem,
        use_fieldwarp=bool(fmaps),
        name='bold_bold_trans_wf')
    bold_bold_trans_wf.inputs.inputnode.name_source = ref_file

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [('outputnode.skip_vols',
                                               'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
        if not multiecho:
            workflow.connect([(bold_reference_wf, bold_stc_wf, [
                ('outputnode.bold_file', 'inputnode.bold_file')
            ])])
        else:  # for meepi, iterate through stc_wf for all workflows
            meepi_echos = boldbuffer.clone(name='meepi_echos')
            meepi_echos.iterables = ('bold_file', bold_file)
            workflow.connect([(meepi_echos, bold_stc_wf,
                               [('bold_file', 'inputnode.bold_file')])])
    elif not multiecho:  # STC is too short or False
        # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([(bold_reference_wf, boldbuffer,
                           [('outputnode.bold_file', 'bold_file')])])
    else:
        # for meepi, iterate over all meepi echos to boldbuffer
        boldbuffer.iterables = ('bold_file', bold_file)
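
    # Net effect: boldbuffer carries the STC output (single-echo, STC run),
    # the original BOLD series (single-echo, STC bypassed), or iterates over
    # the individual echoes (multi-echo), with meepi_echos doing the
    # iterating when STC runs on multi-echo data.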

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_estimate_wf(fmaps,
                                       metadata,
                                       omp_nthreads=omp_nthreads,
                                       debug=config.execution.debug)

    # MULTI-ECHO EPI DATA #############################################
    if multiecho:
        from niworkflows.func.util import init_skullstrip_bold_wf
        skullstrip_bold_wf = init_skullstrip_bold_wf(name='skullstrip_bold_wf')

        inputnode.inputs.bold_file = ref_file  # Replace reference w first echo

        join_echos = pe.JoinNode(
            niu.IdentityInterface(fields=['bold_files']),
            joinsource=('meepi_echos' if run_stc is True else 'boldbuffer'),
            joinfield=['bold_files'],
            name='join_echos')
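        # join_echos re-collects the per-echo branches spawned by the
        # iterables above (meepi_echos or boldbuffer), so bold_t2s_wf
        # receives all echoes of the run together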

        # create optimal combination, adaptive T2* map
        bold_t2s_wf = init_bold_t2s_wf(echo_times=tes,
                                       mem_gb=mem_gb['resampled'],
                                       omp_nthreads=omp_nthreads,
                                       t2s_coreg=config.workflow.t2s_coreg,
                                       name='bold_t2smap_wf')

        workflow.connect([
            (skullstrip_bold_wf, join_echos,
             [('outputnode.skull_stripped_file', 'bold_files')]),
            (join_echos, bold_t2s_wf, [('bold_files', 'inputnode.bold_file')]),
        ])

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        (inputnode, t1w_brain, [('t1w_preproc', 'in_file'),
                                ('t1w_mask', 'in_mask')]),
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf,
         [('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
          ('outputnode.bold_file', 'inputnode.bold_file')]),
        (bold_reference_wf, summary, [('outputnode.algo_dummy_scans',
                                       'algo_dummy_scans')]),
        # EPI-T1 registration workflow
        (inputnode, bold_reg_wf, [
            ('t1w_dseg', 'inputnode.t1w_dseg'),
            # Undefined if --fs-no-reconall, but this is safe
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id'),
            ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]),
        (t1w_brain, bold_reg_wf, [('out_file', 'inputnode.t1w_brain')]),
        (inputnode, bold_t1_trans_wf, [('bold_file', 'inputnode.name_source'),
                                       ('t1w_mask', 'inputnode.t1w_mask'),
                                       ('t1w_aseg', 'inputnode.t1w_aseg'),
                                       ('t1w_aparc', 'inputnode.t1w_aparc')]),
        (t1w_brain, bold_t1_trans_wf, [('out_file', 'inputnode.t1w_brain')]),
        # unused if multiecho, but this is safe
        (bold_hmc_wf, bold_t1_trans_wf, [('outputnode.xforms',
                                          'inputnode.hmc_xforms')]),
        (bold_reg_wf, bold_t1_trans_wf, [('outputnode.itk_bold_to_t1',
                                          'inputnode.itk_bold_to_t1')]),
        (bold_t1_trans_wf, outputnode,
         [('outputnode.bold_t1', 'bold_t1'),
          ('outputnode.bold_t1_ref', 'bold_t1_ref'),
          ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
          ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (t1w_brain, bold_sdc_wf, [('out_file', 'inputnode.t1w_brain')]),
        (bold_reference_wf, bold_sdc_wf,
         [('outputnode.ref_image', 'inputnode.epi_file'),
          ('outputnode.ref_image_brain', 'inputnode.epi_brain'),
          ('outputnode.bold_mask', 'inputnode.epi_mask')]),
        (bold_sdc_wf, bold_t1_trans_wf, [('outputnode.out_warp',
                                          'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf,
         [('outputnode.out_warp', 'inputnode.fieldwarp'),
          ('outputnode.epi_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method',
                                 'distortion_correction')]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [('t1w_tpms', 'inputnode.t1w_tpms'),
                                        ('t1w_mask', 'inputnode.t1w_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [('outputnode.movpar_file',
                                           'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [('outputnode.itk_t1_to_bold',
                                           'inputnode.t1_bold_xform')]),
        (bold_reference_wf, bold_confounds_wf, [('outputnode.skip_vols',
                                                 'inputnode.skip_vols')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_metadata', 'confounds_metadata'),
        ]),
        # Connect bold_bold_trans_wf
        (bold_split, bold_bold_trans_wf, [('out_files',
                                           'inputnode.bold_file')]),
        (bold_hmc_wf, bold_bold_trans_wf, [('outputnode.xforms',
                                            'inputnode.hmc_xforms')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])

    if not config.workflow.t2s_coreg:
        workflow.connect([
            (bold_sdc_wf, bold_reg_wf, [('outputnode.epi_brain',
                                         'inputnode.ref_bold_brain')]),
            (bold_sdc_wf, bold_t1_trans_wf,
             [('outputnode.epi_brain', 'inputnode.ref_bold_brain'),
              ('outputnode.epi_mask', 'inputnode.ref_bold_mask')]),
        ])
    else:
        workflow.connect([
            # For t2s_coreg, replace EPI-to-T1w registration inputs
            (bold_t2s_wf, bold_reg_wf, [('outputnode.bold_ref_brain',
                                         'inputnode.ref_bold_brain')]),
            (bold_t2s_wf, bold_t1_trans_wf,
             [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
              ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
        ])

    # for standard EPI data, pass along correct file
    if not multiecho:
        workflow.connect([
            (inputnode, func_derivatives_wf, [('bold_file',
                                               'inputnode.source_file')]),
            (bold_bold_trans_wf, bold_confounds_wf,
             [('outputnode.bold', 'inputnode.bold'),
              ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_split, bold_t1_trans_wf, [('out_files',
                                             'inputnode.bold_split')]),
        ])
    else:  # for meepi, create and use optimal combination
        workflow.connect([
            # update name source for optimal combination
            (inputnode, func_derivatives_wf,
             [(('bold_file', combine_meepi_source), 'inputnode.source_file')]),
            (bold_bold_trans_wf, skullstrip_bold_wf, [('outputnode.bold',
                                                       'inputnode.in_file')]),
            (bold_t2s_wf, bold_confounds_wf,
             [('outputnode.bold', 'inputnode.bold'),
              ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_t2s_wf, bold_t1_trans_wf, [('outputnode.bold',
                                              'inputnode.bold_split')]),
        ])

    if fmaps:
        from sdcflows.workflows.outputs import init_sdc_unwarp_report_wf
        # Report on BOLD correction
        fmap_unwarp_report_wf = init_sdc_unwarp_report_wf()
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [('t1w_dseg',
                                                 'inputnode.in_seg')]),
            (bold_reference_wf, fmap_unwarp_report_wf,
             [('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [('outputnode.itk_t1_to_bold',
                                                   'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [('outputnode.epi_corrected',
                                                   'inputnode.in_post')]),
        ])

        # Overwrite ``out_path_base`` of unwarping DataSinks
        for node in fmap_unwarp_report_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                fmap_unwarp_report_wf.get_node(
                    node).interface.out_path_base = 'fmriprep'

        for node in bold_sdc_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                bold_sdc_wf.get_node(node).interface.out_path_base = 'fmriprep'

        if 'syn' in fmaps:
            sdc_select_std = pe.Node(KeySelect(fields=['std2anat_xfm']),
                                     name='sdc_select_std',
                                     run_without_submitting=True)
            sdc_select_std.inputs.key = 'MNI152NLin2009cAsym'
            workflow.connect([
                (inputnode, sdc_select_std, [('std2anat_xfm', 'std2anat_xfm'),
                                             ('template', 'keys')]),
                (sdc_select_std, bold_sdc_wf, [('std2anat_xfm',
                                                'inputnode.std2anat_xfm')]),
            ])

        if fmaps.get('syn') is True:  # SyN forced
            syn_unwarp_report_wf = init_sdc_unwarp_report_wf(
                name='syn_unwarp_report_wf', forcedsyn=True)
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [('t1w_dseg',
                                                    'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf,
                 [('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf,
                 [('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf, [('outputnode.syn_ref',
                                                      'inputnode.in_post')]),
            ])

            # Overwrite ``out_path_base`` of unwarping DataSinks
            for node in syn_unwarp_report_wf.list_node_names():
                if node.split('.')[-1].startswith('ds_'):
                    syn_unwarp_report_wf.get_node(
                        node).interface.out_path_base = 'fmriprep'

    # Map final BOLD mask into T1w space (if required)
    nonstd_spaces = set(spaces.get_nonstandard())
    if nonstd_spaces.intersection(('T1w', 'anat')):
        from niworkflows.interfaces.fixes import (FixHeaderApplyTransforms as
                                                  ApplyTransforms)

        boldmask_to_t1w = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                  float=True),
                                  name='boldmask_to_t1w',
                                  mem_gb=0.1)
        workflow.connect([
            (bold_reg_wf, boldmask_to_t1w, [('outputnode.itk_bold_to_t1',
                                             'transforms')]),
            (bold_t1_trans_wf, boldmask_to_t1w, [('outputnode.bold_mask_t1',
                                                  'reference_image')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
             boldmask_to_t1w, [('outputnode.bold_mask', 'input_image')]),
            (boldmask_to_t1w, outputnode, [('output_image', 'bold_mask_t1')]),
        ])

    if nonstd_spaces.intersection(('func', 'run', 'bold', 'boldref', 'sbref')):
        workflow.connect([
            (bold_bold_trans_wf, outputnode, [('outputnode.bold',
                                               'bold_native')]),
            (bold_bold_trans_wf, func_derivatives_wf,
             [('outputnode.bold_ref', 'inputnode.bold_native_ref'),
              ('outputnode.bold_mask', 'inputnode.bold_mask_native')]),
        ])

    if spaces.get_spaces(nonstandard=False, dim=(3, )):
        # Apply transforms in 1 shot
        # Only use uncompressed output if AROMA is to be run
        bold_std_trans_wf = init_bold_std_trans_wf(
            freesurfer=freesurfer,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            spaces=spaces,
            name='bold_std_trans_wf',
            use_compression=not config.execution.low_mem,
            use_fieldwarp=bool(fmaps),
        )
        workflow.connect([
            (inputnode, bold_std_trans_wf,
             [('template', 'inputnode.templates'),
              ('anat2std_xfm', 'inputnode.anat2std_xfm'),
              ('bold_file', 'inputnode.name_source'),
              ('t1w_aseg', 'inputnode.bold_aseg'),
              ('t1w_aparc', 'inputnode.bold_aparc')]),
            (bold_hmc_wf, bold_std_trans_wf, [('outputnode.xforms',
                                               'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_std_trans_wf, [('outputnode.itk_bold_to_t1',
                                               'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
             bold_std_trans_wf, [('outputnode.bold_mask',
                                  'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_std_trans_wf, [('outputnode.out_warp',
                                               'inputnode.fieldwarp')]),
            (bold_std_trans_wf, outputnode,
             [('outputnode.bold_std', 'bold_std'),
              ('outputnode.bold_std_ref', 'bold_std_ref'),
              ('outputnode.bold_mask_std', 'bold_mask_std')]),
        ])

        if freesurfer:
            workflow.connect([
                (bold_std_trans_wf, func_derivatives_wf, [
                    ('outputnode.bold_aseg_std', 'inputnode.bold_aseg_std'),
                    ('outputnode.bold_aparc_std', 'inputnode.bold_aparc_std'),
                ]),
                (bold_std_trans_wf, outputnode,
                 [('outputnode.bold_aseg_std', 'bold_aseg_std'),
                  ('outputnode.bold_aparc_std', 'bold_aparc_std')]),
            ])

        if not multiecho:
            workflow.connect([(bold_split, bold_std_trans_wf,
                               [('out_files', 'inputnode.bold_split')])])
        else:
            split_opt_comb = bold_split.clone(name='split_opt_comb')
            workflow.connect([(bold_t2s_wf, split_opt_comb,
                               [('outputnode.bold', 'in_file')]),
                              (split_opt_comb, bold_std_trans_wf,
                               [('out_files', 'inputnode.bold_split')])])

        # func_derivatives_wf internally parametrizes over snapshotted spaces.
        workflow.connect([
            (bold_std_trans_wf, func_derivatives_wf, [
                ('outputnode.template', 'inputnode.template'),
                ('outputnode.spatial_reference',
                 'inputnode.spatial_reference'),
                ('outputnode.bold_std_ref', 'inputnode.bold_std_ref'),
                ('outputnode.bold_std', 'inputnode.bold_std'),
                ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
            ]),
        ])

        if config.workflow.use_aroma:  # ICA-AROMA workflow
            from .confounds import init_ica_aroma_wf
            ica_aroma_wf = init_ica_aroma_wf(
                mem_gb=mem_gb['resampled'],
                metadata=metadata,
                omp_nthreads=omp_nthreads,
                use_fieldwarp=bool(fmaps),
                err_on_aroma_warn=config.workflow.aroma_err_on_warn,
                aroma_melodic_dim=config.workflow.aroma_melodic_dim,
                name='ica_aroma_wf')

            join = pe.Node(niu.Function(output_names=["out_file"],
                                        function=_to_join),
                           name='aroma_confounds')

            mrg_conf_metadata = pe.Node(niu.Merge(2),
                                        name='merge_confound_metadata',
                                        run_without_submitting=True)
            mrg_conf_metadata2 = pe.Node(DictMerge(),
                                         name='merge_confound_metadata2',
                                         run_without_submitting=True)
            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_metadata', 'confounds_metadata'),
                ]),
            ])
            workflow.connect([
                (inputnode, ica_aroma_wf, [('bold_file',
                                            'inputnode.name_source')]),
                (bold_hmc_wf, ica_aroma_wf, [('outputnode.movpar_file',
                                              'inputnode.movpar_file')]),
                (bold_reference_wf, ica_aroma_wf, [('outputnode.skip_vols',
                                                    'inputnode.skip_vols')]),
                (bold_confounds_wf, join, [('outputnode.confounds_file',
                                            'in_file')]),
                (bold_confounds_wf, mrg_conf_metadata,
                 [('outputnode.confounds_metadata', 'in1')]),
                (ica_aroma_wf, join, [('outputnode.aroma_confounds',
                                       'join_file')]),
                (ica_aroma_wf, mrg_conf_metadata,
                 [('outputnode.aroma_metadata', 'in2')]),
                (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
                (ica_aroma_wf, outputnode,
                 [('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                  ('outputnode.melodic_mix', 'melodic_mix'),
                  ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')
                  ]),
                (join, outputnode, [('out_file', 'confounds')]),
                (mrg_conf_metadata2, outputnode, [('out_dict',
                                                   'confounds_metadata')]),
                (bold_std_trans_wf, ica_aroma_wf,
                 [('outputnode.bold_std', 'inputnode.bold_std'),
                  ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
                  ('outputnode.spatial_reference',
                   'inputnode.spatial_reference')]),
            ])

    # SURFACES ##################################################################################
    # Freesurfer
    freesurfer_spaces = spaces.get_fs_spaces()
    if freesurfer and freesurfer_spaces:
        config.loggers.workflow.debug(
            'Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(
            mem_gb=mem_gb['resampled'],
            surface_spaces=freesurfer_spaces,
            medial_surface_nan=config.workflow.medial_surface_nan,
            name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf,
             [('t1w_preproc', 'inputnode.t1w_preproc'),
              ('subjects_dir', 'inputnode.subjects_dir'),
              ('subject_id', 'inputnode.subject_id'),
              ('t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm')]),
            (bold_t1_trans_wf, bold_surf_wf, [('outputnode.bold_t1',
                                               'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
            (bold_surf_wf, func_derivatives_wf, [('outputnode.target',
                                                  'inputnode.surf_refs')]),
        ])

        # CIFTI output
        if config.workflow.cifti_output:
            from .resampling import init_bold_grayords_wf
            bold_grayords_wf = init_bold_grayords_wf(
                grayord_density=config.workflow.cifti_output,
                mem_gb=mem_gb['resampled'],
                repetition_time=metadata['RepetitionTime'])

            workflow.connect([
                (inputnode, bold_grayords_wf, [('subjects_dir',
                                                'inputnode.subjects_dir')]),
                (bold_std_trans_wf, bold_grayords_wf,
                 [('outputnode.bold_std', 'inputnode.bold_std'),
                  ('outputnode.spatial_reference',
                   'inputnode.spatial_reference')]),
                (bold_surf_wf, bold_grayords_wf, [
                    ('outputnode.surfaces', 'inputnode.surf_files'),
                    ('outputnode.target', 'inputnode.surf_refs'),
                ]),
                (bold_grayords_wf, outputnode,
                 [('outputnode.cifti_bold', 'bold_cifti'),
                  ('outputnode.cifti_variant', 'cifti_variant'),
                  ('outputnode.cifti_metadata', 'cifti_metadata'),
                  ('outputnode.cifti_density', 'cifti_density')]),
            ])

    if spaces.get_spaces(nonstandard=False, dim=(3, )):
        carpetplot_wf = init_carpetplot_wf(
            mem_gb=mem_gb['resampled'],
            metadata=metadata,
            cifti_output=config.workflow.cifti_output,
            name='carpetplot_wf')

        if config.workflow.cifti_output:
            workflow.connect(bold_grayords_wf, 'outputnode.cifti_bold',
                             carpetplot_wf, 'inputnode.cifti_bold')
        else:
            # Xform to 'MNI152NLin2009cAsym' is always computed.
            carpetplot_select_std = pe.Node(KeySelect(
                fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'),
                                            name='carpetplot_select_std',
                                            run_without_submitting=True)

            workflow.connect([
                (inputnode, carpetplot_select_std, [('std2anat_xfm',
                                                     'std2anat_xfm'),
                                                    ('template', 'keys')]),
                (carpetplot_select_std, carpetplot_wf,
                 [('std2anat_xfm', 'inputnode.std2anat_xfm')]),
                (bold_bold_trans_wf if not multiecho else bold_t2s_wf,
                 carpetplot_wf, [('outputnode.bold', 'inputnode.bold'),
                                 ('outputnode.bold_mask',
                                  'inputnode.bold_mask')]),
                (bold_reg_wf, carpetplot_wf, [('outputnode.itk_t1_to_bold',
                                               'inputnode.t1_bold_xform')]),
            ])

        workflow.connect([(bold_confounds_wf, carpetplot_wf, [
            ('outputnode.confounds_file', 'inputnode.confounds_file')
        ])])

    # REPORTING ############################################################
    reportlets_dir = str(config.execution.work_dir / 'reportlets')
    ds_report_summary = pe.Node(DerivativesDataSink(desc='summary',
                                                    keep_dtype=True),
                                name='ds_report_summary',
                                run_without_submitting=True,
                                mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, desc='validation', keep_dtype=True),
                                   name='ds_report_validation',
                                   run_without_submitting=True,
                                   mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation,
         [('outputnode.validation_report', 'in_file')]),
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
Example #4
# Smooth each run using SUSAN with the brightness threshold set to 75%
# of the median value for each run and a mask constituting the mean functional
smooth_median = pe.MapNode(fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file'],
                           name='smooth_median')
psb6351_wf.connect(maskfunc, 'out_file', smooth_median, 'in_file')
psb6351_wf.connect(fs_voltransform, 'transformed_file', smooth_median, 'mask_file')

# Calculate the mean functional
smooth_meanfunc = pe.MapNode(fsl.ImageMaths(op_string='-Tmean',
                                            suffix='_mean'),
                             iterfield=['in_file'],
                             name='smooth_meanfunc')
psb6351_wf.connect(maskfunc, 'out_file', smooth_meanfunc, 'in_file')

smooth_merge = pe.Node(util.Merge(2, axis='hstack'),
                       name='smooth_merge')
psb6351_wf.connect(smooth_meanfunc, 'out_file', smooth_merge, 'in1')
psb6351_wf.connect(smooth_median, 'out_stat', smooth_merge, 'in2')

# Below is the code for smoothing using the susan algorithm from FSL that
# limits smoothing based on different tissue classes
smooth = pe.MapNode(fsl.SUSAN(),
                    iterfield=['in_file', 'brightness_threshold', 'usans', 'fwhm'],
                    name='smooth')
smooth.inputs.fwhm = [2.0, 4.0, 6.0, 8.0, 10.0, 12.0]
psb6351_wf.connect(maskfunc, 'out_file', smooth, 'in_file')
psb6351_wf.connect(smooth_median, ('out_stat', getbtthresh), smooth, 'brightness_threshold')
psb6351_wf.connect(smooth_merge, ('out', getusans), smooth, 'usans')

# Below is the node that collects all the data and saves it out
Example #5
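# `pickfirst` is used below but defined outside this excerpt; a minimal
# sketch of the conventional nipype helper:
def pickfirst(files):
    # Return the first item when the upstream output is a list
    if isinstance(files, list):
        return files[0]
    return files
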
# Smooth each run using SUSAN with the brightness threshold set to 75%
# of the median value for each run and a mask constituting the mean
# functional
smooth_median = pe.MapNode(fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file'],
                           name='susan_smooth_median')
preproc_wf.connect(maskfunc, 'out_file', smooth_median, 'in_file')
preproc_wf.connect(fs_threshold2, ('binary_file', pickfirst), smooth_median,
                   'mask_file')

smooth_meanfunc = pe.MapNode(fsl.ImageMaths(op_string='-Tmean',
                                            suffix='_mean'),
                             iterfield=['in_file'],
                             name='susan_smooth_meanfunc')
preproc_wf.connect(maskfunc, 'out_file', smooth_meanfunc, 'in_file')

smooth_merge = pe.Node(util.Merge(2, axis='hstack'), name='susan_smooth_merge')
preproc_wf.connect(smooth_meanfunc, 'out_file', smooth_merge, 'in1')
preproc_wf.connect(smooth_median, 'out_stat', smooth_merge, 'in2')

susan_smooth = pe.MapNode(
    fsl.SUSAN(),
    iterfield=['in_file', 'brightness_threshold', 'usans'],
    name='susan_smooth')
susan_smooth.inputs.fwhm = 6.
preproc_wf.connect(maskfunc, 'out_file', susan_smooth, 'in_file')
preproc_wf.connect(smooth_median, ('out_stat', getbtthresh), susan_smooth,
                   'brightness_threshold')
preproc_wf.connect(smooth_merge, ('out', getusans), susan_smooth, 'usans')

# Mask the smoothed data with the dilated mask
maskfunc2 = pe.MapNode(fsl.ImageMaths(suffix='_mask', op_string='-mas'),
                       # (truncated in the source; completed with the usual
                       # iterfields for masking a list of smoothed runs)
                       iterfield=['in_file', 'in_file2'],
                       name='maskfunc2')
Example #6
def init_bold_std_trans_wf(
    freesurfer,
    mem_gb,
    omp_nthreads,
    standard_spaces,
    name='bold_std_trans_wf',
    use_compression=True,
    use_fieldwarp=False
):
    """
    This workflow samples functional images into standard space with a single
    resampling of the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from collections import OrderedDict
        from fmriprep.workflows.bold import init_bold_std_trans_wf
        wf = init_bold_std_trans_wf(
            freesurfer=True,
            mem_gb=3,
            omp_nthreads=1,
            standard_spaces=OrderedDict([('MNI152Lin', {}),
                                         ('fsaverage', {'density': '10k'})]),
        )

    **Parameters**

        freesurfer : bool
            Whether to generate FreeSurfer's aseg/aparc segmentations on BOLD space.
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        standard_spaces : OrderedDict
            Ordered dictionary where keys are TemplateFlow ID strings (e.g.,
            ``MNI152Lin``, ``MNI152NLin6Asym``, ``MNI152NLin2009cAsym``, or ``fsLR``),
            or paths pointing to custom templates organized in a TemplateFlow-like structure.
            Values of the dictionary aggregate modifiers (e.g., the value for the key ``MNI152Lin``
            could be ``{'resolution': 2}`` if one wants the resampling to be done on the 2mm
            resolution version of the selected template).
        name : str
            Name of workflow (default: ``bold_std_trans_wf``)
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI

    **Inputs**

        anat2std_xfm
            List of anatomical-to-standard space transforms generated during
            spatial normalization.
        bold_aparc
            FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_aseg
            FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_mask
            Skull-stripping mask of reference image
        bold_split
            Individual 3D volumes, not motion corrected
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        templates
            List of templates that were applied as targets during
            spatial normalization.

    **Outputs** - Two outputnodes are available. One output node (with name ``poutputnode``)
    will be parameterized in a Nipype sense (see `Nipype iterables
    <https://miykael.github.io/nipype_tutorial/notebooks/basic_iteration.html>`__), and a
    second node (``outputnode``) will collapse the parameterized outputs into synchronous
    lists of the following fields:

        bold_std
            BOLD series, resampled to template space
        bold_std_ref
            Reference, contrast-enhanced summary of the BOLD series, resampled to template space
        bold_mask_std
            BOLD series mask in template space
        bold_aseg_std
            FreeSurfer's ``aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)
        bold_aparc_std
            FreeSurfer's ``aparc+aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)
        templates
            Template identifiers synchronized correspondingly to previously
            described outputs.

    """

    # Filter ``standard_spaces``
    vol_std_spaces = [k for k in standard_spaces.keys() if not k.startswith('fs')]

    workflow = Workflow(name=name)

    if len(vol_std_spaces) == 1:
        workflow.__desc__ = """\
The BOLD time-series were resampled into standard space,
generating a *preprocessed BOLD run in {tpl} space*.
""".format(tpl=vol_std_spaces)
    else:
        workflow.__desc__ = """\
The BOLD time-series were resampled into several standard spaces,
correspondingly generating the following *spatially-normalized,
preprocessed BOLD runs*: {tpl}.
""".format(tpl=', '.join(vol_std_spaces))

    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            'anat2std_xfm',
            'bold_aparc',
            'bold_aseg',
            'bold_mask',
            'bold_split',
            'fieldwarp',
            'hmc_xforms',
            'itk_bold_to_t1',
            'name_source',
            'templates',
        ]),
        name='inputnode'
    )

    select_std = pe.Node(KeySelect(
        fields=['resolution', 'anat2std_xfm']),
        name='select_std', run_without_submitting=True)

    select_std.inputs.resolution = [v.get('resolution') or v.get('res') or 'native'
                                    for k, v in list(standard_spaces.items())
                                    if k in vol_std_spaces]
    select_std.iterables = ('key', vol_std_spaces)
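    # `iterables` fans the downstream graph out into one branch per entry of
    # vol_std_spaces; the JoinNode at the bottom of this workflow
    # (``outputnode``) collapses those branches back into synchronized lists.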

    select_tpl = pe.Node(niu.Function(function=_select_template),
                         name='select_tpl', run_without_submitting=True)
    select_tpl.inputs.template_specs = standard_spaces

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB

    mask_std_tfm = pe.Node(
        ApplyTransforms(interpolation='MultiLabel', float=True),
        name='mask_std_tfm',
        mem_gb=1
    )

    # Write corrected file in the designated output dir
    mask_merge_tfms = pe.Node(niu.Merge(2), name='mask_merge_tfms', run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, select_std, [('templates', 'keys'),
                                 ('anat2std_xfm', 'anat2std_xfm')]),
        (inputnode, mask_std_tfm, [('bold_mask', 'input_image')]),
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, mask_merge_tfms, [(('itk_bold_to_t1', _aslist), 'in2')]),
        (select_std, select_tpl, [('key', 'template')]),
        (select_std, mask_merge_tfms, [('anat2std_xfm', 'in1')]),
        (select_std, gen_ref, [(('resolution', _is_native), 'keep_native')]),
        (select_tpl, gen_ref, [('out', 'fixed_image')]),
        (mask_merge_tfms, mask_std_tfm, [('out', 'transforms')]),
        (gen_ref, mask_std_tfm, [('out_file', 'reference_image')]),
    ])

    nxforms = 4 if use_fieldwarp else 3
    merge_xforms = pe.Node(niu.Merge(nxforms), name='merge_xforms',
                           run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms', 'in%d' % nxforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])
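
    # Full stack handed to MultiApplyTransforms: in1 = anat2std_xfm,
    # in2 = itk_bold_to_t1, in3 = fieldwarp (when enabled), and hmc_xforms in
    # the last slot; ANTs composes the list so head motion is corrected first
    # and the standard-space mapping is applied last.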

    bold_to_std_transform = pe.Node(
        MultiApplyTransforms(interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
        name='bold_to_std_transform', mem_gb=mem_gb * 3 * omp_nthreads, n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression), name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a reference on the target T1w space
    gen_final_ref = init_bold_reference_wf(
        omp_nthreads=omp_nthreads, pre_mask=True)

    workflow.connect([
        (inputnode, merge_xforms, [
            (('itk_bold_to_t1', _aslist), 'in2')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, bold_to_std_transform, [('bold_split', 'input_image')]),
        (select_std, merge_xforms, [('anat2std_xfm', 'in1')]),
        (merge_xforms, bold_to_std_transform, [('out', 'transforms')]),
        (gen_ref, bold_to_std_transform, [('out_file', 'reference_image')]),
        (bold_to_std_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]),
        (mask_std_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')]),
    ])

    # Connect output nodes
    output_names = ['bold_std', 'bold_std_ref', 'bold_mask_std', 'templates']
    if freesurfer:
        output_names += ['bold_aseg_std', 'bold_aparc_std']

    # poutputnode - parametric output node
    poutputnode = pe.Node(niu.IdentityInterface(fields=output_names),
                          name='poutputnode')

    workflow.connect([
        (gen_final_ref, poutputnode, [('outputnode.ref_image', 'bold_std_ref')]),
        (merge, poutputnode, [('out_file', 'bold_std')]),
        (mask_std_tfm, poutputnode, [('output_image', 'bold_mask_std')]),
        (select_std, poutputnode, [('key', 'templates')]),
    ])

    if freesurfer:
        # Sample the parcellation files to functional space
        aseg_std_tfm = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True),
            name='aseg_std_tfm', mem_gb=1)
        aparc_std_tfm = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True),
            name='aparc_std_tfm', mem_gb=1)

        workflow.connect([
            (inputnode, aseg_std_tfm, [('bold_aseg', 'input_image')]),
            (inputnode, aparc_std_tfm, [('bold_aparc', 'input_image')]),
            (select_std, aseg_std_tfm, [('anat2std_xfm', 'transforms')]),
            (select_std, aparc_std_tfm, [('anat2std_xfm', 'transforms')]),
            (gen_ref, aseg_std_tfm, [('out_file', 'reference_image')]),
            (gen_ref, aparc_std_tfm, [('out_file', 'reference_image')]),
            (aseg_std_tfm, poutputnode, [('output_image', 'bold_aseg_std')]),
            (aparc_std_tfm, poutputnode, [('output_image', 'bold_aparc_std')]),
        ])

    # Collapse the parameterized outputs into the joined outputnode
    outputnode = pe.JoinNode(niu.IdentityInterface(fields=output_names),
                             name='outputnode', joinsource='select_std')
    workflow.connect([
        (poutputnode, outputnode, [(f, f) for f in output_names])
    ])

    return workflow
Example #7
def init_bold_preproc_trans_wf(mem_gb, omp_nthreads,
                               name='bold_preproc_trans_wf',
                               use_compression=True,
                               use_fieldwarp=False,
                               split_file=False,
                               interpolation='LanczosWindowedSinc'):
    """
    This workflow resamples the input fMRI in its native (original)
    space in a "single shot" from the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_preproc_trans_wf
        wf = init_bold_preproc_trans_wf(mem_gb=3, omp_nthreads=1)

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_preproc_trans_wf``)
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI
        split_file : bool
            Whether the input file is a single 4D file that should be split
            into 3D volumes, rather than a list of 3D files (default
            ``False``, do not split)
        interpolation : str
            Interpolation type to be used by ANTs' ``applyTransforms``
            (default ``'LanczosWindowedSinc'``)

    **Inputs**

        bold_file
            Individual 3D volumes, not motion corrected
        bold_mask
            Skull-stripping mask of reference image
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold
            BOLD series, resampled in native space, including all preprocessing
        bold_mask
            BOLD series mask calculated with the new time-series
        bold_ref
            BOLD reference image: an average-like 3D image of the time-series
        bold_ref_brain
            Same as ``bold_ref``, but once the brain mask has been applied

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The BOLD time-series (including slice-timing correction when applied)
were resampled onto their original, native space by applying
{transforms}.
These resampled BOLD time-series will be referred to as *preprocessed
BOLD in original space*, or just *preprocessed BOLD*.
""".format(transforms="""\
a single, composite transform to correct for head-motion and
susceptibility distortions""" if use_fieldwarp else """\
the transforms to correct for head-motion""")

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'name_source', 'bold_file', 'bold_mask', 'hmc_xforms', 'fieldwarp']),
        name='inputnode'
    )

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['bold', 'bold_mask', 'bold_ref', 'bold_ref_brain']),
        name='outputnode')

    bold_transform = pe.Node(
        MultiApplyTransforms(interpolation=interpolation, float=True, copy_dtype=True),
        name='bold_transform', mem_gb=mem_gb * 3 * omp_nthreads, n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression), name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a new BOLD reference
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)
    bold_reference_wf.__desc__ = None  # Unset description to avoid second appearance

    workflow.connect([
        (inputnode, merge, [('name_source', 'header_source')]),
        (bold_transform, merge, [('out_files', 'in_files')]),
        (merge, bold_reference_wf, [('out_file', 'inputnode.bold_file')]),
        (merge, outputnode, [('out_file', 'bold')]),
        (bold_reference_wf, outputnode, [
            ('outputnode.ref_image', 'bold_ref'),
            ('outputnode.ref_image_brain', 'bold_ref_brain'),
            ('outputnode.bold_mask', 'bold_mask')]),
    ])

    # Split the input file if it is a single 4D file
    if split_file:
        bold_split = pe.Node(FSLSplit(dimension='t'), name='bold_split',
                             mem_gb=mem_gb * 3)
        workflow.connect([
            (inputnode, bold_split, [('bold_file', 'in_file')]),
            (bold_split, bold_transform, [
                ('out_files', 'input_image'),
                (('out_files', _first), 'reference_image'),
            ])
        ])
    else:
        workflow.connect([
            (inputnode, bold_transform, [('bold_file', 'input_image'),
                                         (('bold_file', _first), 'reference_image')]),
        ])

    if use_fieldwarp:
        merge_xforms = pe.Node(niu.Merge(2), name='merge_xforms',
                               run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([
            (inputnode, merge_xforms, [('fieldwarp', 'in1'),
                                       ('hmc_xforms', 'in2')]),
            (merge_xforms, bold_transform, [('out', 'transforms')]),
        ])
    else:
        def _aslist(val):
            return [val]
        workflow.connect([
            (inputnode, bold_transform, [(('hmc_xforms', _aslist), 'transforms')]),
        ])

    # Code ready to generate a pre/post processing report
    # bold_bold_report_wf = init_bold_preproc_report_wf(
    #     mem_gb=mem_gb['resampled'],
    #     reportlets_dir=reportlets_dir
    # )
    # workflow.connect([
    #     (inputnode, bold_bold_report_wf, [
    #         ('bold_file', 'inputnode.name_source'),
    #         ('bold_file', 'inputnode.in_pre')]),  # This should be after STC
    #     (bold_bold_trans_wf, bold_bold_report_wf, [
    #         ('outputnode.bold', 'inputnode.in_post')]),
    # ])

    return workflow
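A hedged usage sketch for the workflow above, setting inputs via ``get_node`` as this document does elsewhere; the file paths are hypothetical placeholders, and ``hmc_xforms`` would normally be wired in from the head-motion correction workflow:

from fmriprep.workflows.bold import init_bold_preproc_trans_wf

wf = init_bold_preproc_trans_wf(mem_gb=3, omp_nthreads=4, split_file=True)
inputnode = wf.get_node('inputnode')
inputnode.inputs.bold_file = '/data/sub-01_task-rest_bold.nii.gz'    # hypothetical
inputnode.inputs.name_source = '/data/sub-01_task-rest_bold.nii.gz'  # hypothetical
inputnode.inputs.hmc_xforms = ['/work/hmc/vol0000.txt']              # hypothetical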
Example #8
def init_bold_mni_trans_wf(template,
                           freesurfer,
                           mem_gb,
                           omp_nthreads,
                           name='bold_mni_trans_wf',
                           template_out_grid='2mm',
                           use_compression=True,
                           use_fieldwarp=False):
    """
    This workflow samples functional images to the MNI template in a "single shot"
    from the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_mni_trans_wf
        wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                                    freesurfer=True,
                                    mem_gb=3,
                                    omp_nthreads=1,
                                    template_out_grid='native')

    **Parameters**

        template : str
            Name of template targeted by ``template`` output space
        freesurfer : bool
            Enable sampling of FreeSurfer files
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_mni_trans_wf``)
        template_out_grid : str
            Keyword ('native', '1mm' or '2mm') or path of custom reference
            image for normalization.
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI

    **Inputs**

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        bold_split
            Individual 3D volumes, not motion corrected
        bold_mask
            Skull-stripping mask of reference image
        bold_aseg
            FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        bold_aparc
            FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
            (only if ``recon-all`` was run).
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold_mni
            BOLD series, resampled to template space
        bold_mni_ref
            Reference, contrast-enhanced summary of the BOLD series, resampled to template space
        bold_mask_mni
            BOLD series mask in template space
        bold_aseg_mni
            FreeSurfer's ``aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)
        bold_aparc_mni
            FreeSurfer's ``aparc+aseg.mgz`` atlas, in template space at the BOLD resolution
            (only if ``recon-all`` was run)

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The BOLD time-series were resampled to {tpl} standard space,
generating a *preprocessed BOLD run in {tpl} space*.
""".format(tpl=template)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'itk_bold_to_t1', 't1_2_mni_forward_transform', 'name_source',
        'bold_split', 'bold_mask', 'bold_aseg', 'bold_aparc', 'hmc_xforms',
        'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_mni', 'bold_mni_ref', 'bold_mask_mni', 'bold_aseg_mni',
        'bold_aparc_mni'
    ]),
                         name='outputnode')

    def _aslist(in_value):
        if isinstance(in_value, list):
            return in_value
        return [in_value]

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150 MB
    # Account for template aliases
    template_name = TEMPLATE_ALIASES.get(template, template)
    # Template path
    template_dir = get_template(template_name)

    gen_ref.inputs.fixed_image = str(
        template_dir / ('tpl-%s_space-MNI_res-01_T1w.nii.gz' % template_name))

    mask_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_mni_tfm',
                           mem_gb=1)

    # Collect the mask transforms into a single ordered list
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, mask_mni_tfm, [('bold_mask', 'input_image')]),
        (inputnode, mask_merge_tfms, [('t1_2_mni_forward_transform', 'in1'),
                                      (('itk_bold_to_t1', _aslist), 'in2')]),
        (mask_merge_tfms, mask_mni_tfm, [('out', 'transforms')]),
        (mask_mni_tfm, outputnode, [('output_image', 'bold_mask_mni')]),
    ])

    nxforms = 4 if use_fieldwarp else 3
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([
        (inputnode, merge_xforms, [('hmc_xforms', 'in%d' % nxforms)]),
    ])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    bold_to_mni_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_mni_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a reference on the target T1w space
    gen_final_ref = init_bold_reference_wf(omp_nthreads=omp_nthreads,
                                           pre_mask=True)

    workflow.connect([
        (inputnode, merge_xforms, [('t1_2_mni_forward_transform', 'in1'),
                                   (('itk_bold_to_t1', _aslist), 'in2')]),
        (merge_xforms, bold_to_mni_transform, [('out', 'transforms')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, bold_to_mni_transform, [('bold_split', 'input_image')]),
        (bold_to_mni_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.bold_file')]),
        (mask_mni_tfm, gen_final_ref, [('output_image', 'inputnode.bold_mask')]),
        (merge, outputnode, [('out_file', 'bold_mni')]),
        (gen_final_ref, outputnode, [('outputnode.ref_image', 'bold_mni_ref')]),
    ])

    if template_out_grid == 'native':
        workflow.connect([
            (gen_ref, mask_mni_tfm, [('out_file', 'reference_image')]),
            (gen_ref, bold_to_mni_transform, [('out_file', 'reference_image')]),
        ])
    elif template_out_grid in ['1mm', '2mm']:
        res = int(template_out_grid[0])
        mask_mni_tfm.inputs.reference_image = str(
            template_dir / ('tpl-%s_space-MNI_res-%02d_brainmask.nii.gz' %
                            (template_name, res)))
        bold_to_mni_transform.inputs.reference_image = str(
            template_dir / ('tpl-%s_space-MNI_res-%02d_T1w.nii.gz' %
                            (template_name, res)))
    else:
        mask_mni_tfm.inputs.reference_image = template_out_grid
        bold_to_mni_transform.inputs.reference_image = template_out_grid

    if freesurfer:
        # Sample the parcellation files to functional space
        aseg_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                               float=True),
                               name='aseg_mni_tfm',
                               mem_gb=1)
        aparc_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                float=True),
                                name='aparc_mni_tfm',
                                mem_gb=1)

        workflow.connect([
            (inputnode, aseg_mni_tfm, [('bold_aseg', 'input_image'),
                                       ('t1_2_mni_forward_transform',
                                        'transforms')]),
            (inputnode, aparc_mni_tfm, [('bold_aparc', 'input_image'),
                                        ('t1_2_mni_forward_transform',
                                         'transforms')]),
            (aseg_mni_tfm, outputnode, [('output_image', 'bold_aseg_mni')]),
            (aparc_mni_tfm, outputnode, [('output_image', 'bold_aparc_mni')]),
        ])
        if template_out_grid == 'native':
            workflow.connect([
                (gen_ref, aseg_mni_tfm, [('out_file', 'reference_image')]),
                (gen_ref, aparc_mni_tfm, [('out_file', 'reference_image')]),
            ])
        elif template_out_grid in ['1mm', '2mm']:
            res = int(template_out_grid[0])
            aseg_mni_tfm.inputs.reference_image = str(
                template_dir / ('tpl-%s_space-MNI_res-%02d_brainmask.nii.gz' %
                                (template_name, res)))
            aparc_mni_tfm.inputs.reference_image = str(
                template_dir / ('tpl-%s_space-MNI_res-%02d_T1w.nii.gz' %
                                (template_name, res)))
        else:
            aseg_mni_tfm.inputs.reference_image = template_out_grid
            aparc_mni_tfm.inputs.reference_image = template_out_grid

    return workflow
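A hedged instantiation sketch for this workflow, using only arguments documented above; the transform and file inputs (hypothetical here) would normally be wired in from upstream registration workflows:

from fmriprep.workflows.bold import init_bold_mni_trans_wf

wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                            freesurfer=False,
                            mem_gb=3,
                            omp_nthreads=4,
                            template_out_grid='2mm')
inputnode = wf.get_node('inputnode')
inputnode.inputs.name_source = '/data/sub-01_task-rest_bold.nii.gz'  # hypothetical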
Example #9
def generic(
    bids_base,
    template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    functional_registration_method="composite",
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask="",
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='generic',
    params={},
    phase_dictionary=GENERIC_PHASES,
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
):
    '''
	Generic preprocessing and registration workflow for small animal data in BIDS format.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	autorotate : bool, optional
		Whether to use a multi-rotation-state transformation start.
		This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	exclude : dict
		A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
		If this is specified, matching entries will be excluded from the analysis.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if the parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if the dictionary is empty, no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	functional_registration_method : {'composite','functional','structural'}, optional
		How to register the functional scan to the template.
		The values mean the following: 'composite' registers it to the structural scan, which is in turn registered to the template; 'functional' registers it to the template directly; 'structural' registers it exactly as the structural scan is registered.
	keep_work : bool, optional
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified, a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overridden by `n_jobs`).
	out_base : str, optional
		Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL SliceTimer) is the default, since it works reliably. Use the others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty, there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if the dictionary is empty, no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty, there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
    )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    find_physio = pe.Node(
        name='find_physio',
        interface=util.Function(
            function=corresponding_physiofile,
            input_names=inspect.getargspec(corresponding_physiofile)[0],
            output_names=['physiofile', 'meta_physiofile']))

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice', 'ind_type'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (get_f_scan, find_physio, [('nii_path', 'nii_path')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (find_physio, datasink, [('physiofile', 'func.@physio')]),
        (find_physio, datasink, [('meta_physiofile', 'func.@meta_physio')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    # Add selectable nodes and extend the workflow as appropriate:
    s_biascorrect, f_biascorrect = real_size_nodes()

    if structural_scan_types.any():
        s_data_selection = deepcopy(data_selection)
        for match in structural_match.keys():
            s_data_selection = s_data_selection.loc[
                s_data_selection[match].isin(structural_match[match])]

        get_s_scan = pe.Node(
            name='get_s_scan',
            interface=util.Function(
                function=get_bids_scan,
                input_names=inspect.getargspec(get_bids_scan)[0],
                output_names=[
                    'scan_path', 'scan_type', 'task', 'nii_path', 'nii_name',
                    'events_name', 'subject_session', 'metadata_filename',
                    'dict_slice', 'ind_type'
                ]))
        get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = s_data_selection
        get_s_scan.inputs.bids_base = bids_base

        s_register, s_warp, f_register, f_warp = generic_registration(
            template,
            structural_mask=registration_mask,
            phase_dictionary=phase_dictionary,
        )
        #TODO: incl. in func registration
        if autorotate:
            s_rotated = autorotate(template)
            workflow_connections.extend([
                (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                (s_rotated, s_register, [('out_file', 'moving_image')]),
            ])
        else:
            workflow_connections.extend([
                (s_biascorrect, s_register, [('output_image', 'moving_image')]),
            ])
        # Connections required regardless of the autorotate branch
        workflow_connections.extend([
            (s_register, s_warp, [('composite_transform', 'transforms')]),
            (get_s_scan, s_warp, [('nii_path', 'input_image')]),
            (s_warp, datasink, [('output_image', 'anat')]),
        ])

        workflow_connections.extend([
            (get_f_scan, get_s_scan, [('subject_session', 'selector')]),
            (get_s_scan, s_warp, [('nii_name', 'output_image')]),
            (get_s_scan, s_biascorrect, [('nii_path', 'input_image')]),
        ])

    if functional_registration_method == "structural":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="structural"` requires there to be a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="composite"` requires there to be a structural scan type.'
            )
        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (s_register, merge, [('composite_transform', 'in1')]),
            (f_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:
        workflow_connections.extend([
            (get_f_scan, f_warp, [('nii_name', 'output_image')]),
            (f_warp, datasink, [('output_image', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    # This gives the name of the workdir; the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'We could not write the DOT file for visualization (`dot` command from the graphviz package). This is non-critical to the processing, but you should get this fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top-level workdir (`{}`), as it is a symlink; deleting only its contents instead.'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise
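A minimal invocation sketch for the function above, assuming SAMRI and its dependencies are installed; the BIDS root and template paths are hypothetical placeholders:

generic(
    '/data/my_bids_dataset',                 # hypothetical BIDS root
    '/usr/share/templates/template.nii.gz',  # hypothetical template path
    functional_match={'task': ['rest']},     # whitelist resting-state runs
    realign='time',                          # slice-timing correction only
    workflow_name='generic_rest',
)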
Example #10
def individual_reports(name='ReportsWorkflow'):
    """
    Write out individual reportlets.

    .. workflow::

        from mriqc.workflows.functional import individual_reports
        from mriqc.testing import mock_config
        with mock_config():
            wf = individual_reports()

    """
    from niworkflows.interfaces.plotting import FMRISummary
    from ..interfaces import PlotMosaic, Spikes, PlotSpikes
    from ..interfaces.reports import IndividualReport

    verbose = config.execution.verbose_reports
    mem_gb = config.workflow.biggest_file_gb

    pages = 5
    extra_pages = int(verbose) * 4

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_iqms', 'in_ras', 'hmc_epi', 'epi_mean', 'brainmask', 'hmc_fd',
        'fd_thres', 'epi_parc', 'in_dvars', 'in_stddev', 'outliers',
        'in_spikes', 'in_fft', 'mni_report', 'ica_report'
    ]),
                        name='inputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = config.workflow.fd_thres

    spmask = pe.Node(niu.Function(input_names=['in_file', 'in_mask'],
                                  output_names=['out_file', 'out_plot'],
                                  function=spikes_mask),
                     name='SpikesMask',
                     mem_gb=mem_gb * 3.5)

    spikes_bg = pe.Node(Spikes(no_zscore=True, detrend=False),
                        name='SpikesFinderBgMask',
                        mem_gb=mem_gb * 2.5)

    bigplot = pe.Node(FMRISummary(), name='BigPlot', mem_gb=mem_gb * 3.5)
    workflow.connect([
        (inputnode, spikes_bg, [('in_ras', 'in_file')]),
        (inputnode, spmask, [('in_ras', 'in_file')]),
        (inputnode, bigplot, [('hmc_epi', 'in_func'), ('brainmask', 'in_mask'),
                              ('hmc_fd', 'fd'), ('fd_thres', 'fd_thres'),
                              ('in_dvars', 'dvars'), ('epi_parc', 'in_segm'),
                              ('outliers', 'outliers')]),
        (spikes_bg, bigplot, [('out_tsz', 'in_spikes_bg')]),
        (spmask, spikes_bg, [('out_file', 'in_mask')]),
    ])

    mosaic_mean = pe.Node(PlotMosaic(out_file='plot_func_mean_mosaic1.svg',
                                     cmap='Greys_r'),
                          name='PlotMosaicMean')

    mosaic_stddev = pe.Node(PlotMosaic(
        out_file='plot_func_stddev_mosaic2_stddev.svg', cmap='viridis'),
                            name='PlotMosaicSD')

    mplots = pe.Node(niu.Merge(pages + extra_pages +
                               int(config.workflow.fft_spikes_detector) +
                               int(config.workflow.ica)),
                     name='MergePlots')
    rnode = pe.Node(IndividualReport(), name='GenerateReport')

    # Link images that should be reported
    dsplots = pe.Node(nio.DataSink(base_directory=str(
        config.execution.output_dir),
                                   parameterization=False),
                      name='dsplots',
                      run_without_submitting=True)

    workflow.connect([
        (inputnode, rnode, [('in_iqms', 'in_iqms')]),
        (inputnode, mosaic_mean, [('epi_mean', 'in_file')]),
        (inputnode, mosaic_stddev, [('in_stddev', 'in_file')]),
        (mosaic_mean, mplots, [('out_file', 'in1')]),
        (mosaic_stddev, mplots, [('out_file', 'in2')]),
        (bigplot, mplots, [('out_file', 'in3')]),
        (mplots, rnode, [('out', 'in_plots')]),
        (rnode, dsplots, [('out_file', '@html_report')]),
    ])

    if config.workflow.fft_spikes_detector:
        mosaic_spikes = pe.Node(PlotSpikes(out_file='plot_spikes.svg',
                                           cmap='viridis',
                                           title='High-Frequency spikes'),
                                name='PlotSpikes')

        workflow.connect([
            (inputnode, mosaic_spikes, [('in_ras', 'in_file'),
                                        ('in_spikes', 'in_spikes'),
                                        ('in_fft', 'in_fft')]),
            (mosaic_spikes, mplots, [('out_file', 'in4')]),
        ])

    if config.workflow.ica:
        page_number = 4 + config.workflow.fft_spikes_detector
        workflow.connect([
            (inputnode, mplots, [('ica_report', 'in%d' % page_number)]),
        ])

    if not verbose:
        return workflow

    mosaic_zoom = pe.Node(PlotMosaic(out_file='plot_anat_mosaic1_zoomed.svg',
                                     cmap='Greys_r'),
                          name='PlotMosaicZoomed')

    mosaic_noise = pe.Node(PlotMosaic(out_file='plot_anat_mosaic2_noise.svg',
                                      only_noise=True,
                                      cmap='viridis_r'),
                           name='PlotMosaicNoise')

    # Verbose-reporting goes here
    from ..interfaces.viz import PlotContours

    plot_bmask = pe.Node(PlotContours(display_mode='z',
                                      levels=[.5],
                                      colors=['r'],
                                      cut_coords=10,
                                      out_file='bmask'),
                         name='PlotBrainmask')

    workflow.connect([
        (inputnode, plot_bmask, [('epi_mean', 'in_file'),
                                 ('brainmask', 'in_contours')]),
        (inputnode, mosaic_zoom, [('epi_mean', 'in_file'),
                                  ('brainmask', 'bbox_mask_file')]),
        (inputnode, mosaic_noise, [('epi_mean', 'in_file')]),
        (mosaic_zoom, mplots, [('out_file', 'in%d' % (pages + 1))]),
        (mosaic_noise, mplots, [('out_file', 'in%d' % (pages + 2))]),
        (plot_bmask, mplots, [('out_file', 'in%d' % (pages + 3))]),
        (inputnode, mplots, [('mni_report', 'in%d' % (pages + 4))]),
    ])
    return workflow
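For reference, the port layout of the ``mplots`` Merge node assembled above: ``in1``-``in3`` carry the mean mosaic, standard-deviation mosaic, and "big" summary plot; ``in4`` carries the FFT-spikes mosaic when ``fft_spikes_detector`` is enabled; the ICA report lands on port ``4 + int(fft_spikes_detector)``; and the verbose-report plots occupy ports ``pages + 1`` through ``pages + 4``. A sketch of the sizing arithmetic with both detectors enabled:

pages, extra_pages = 5, 4        # verbose reports enabled: 4 extra pages
n_inputs = pages + extra_pages + int(True) + int(True)  # fft + ica enabled
assert n_inputs == 11            # niu.Merge(11) exposes ports in1..in11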
Example #11
def create_wf_collect_transforms(map_node,
                                 name='create_wf_collect_transforms'):
    """
    DOCSTRINGS

    Parameters
    ----------
    name : string, optional
        Name of the workflow.

    Returns
    -------
    collect_transforms_wf : nipype.pipeline.engine.Workflow

    Notes
    -----
    
    Workflow Inputs::
    
        inputspec.transform_file : string (nifti file)
            Output matrix of FSL-based functional to anatomical registration
        inputspec.reference_file : string (nifti file)
            File of skull-stripped anatomical brain to be used in affine
            conversion
        inputspec.source_file : string (nifti file)
            Should match the input of the apply warp (in_file) unless you are
            applying the warp to a 4-d file, in which case this file should
            be a mean_functional file

    Workflow Outputs::
    
        outputspec.itk_transform : string (nifti file)
            Converted affine transform in ITK format usable with ANTS
    
    """

    collect_transforms_wf = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'warp_file', 'linear_affine', 'linear_rigid', 'fsl_to_itk_affine'
    ]),
                        name='inputspec')

    # Collect the four transforms (warp, affine, rigid, and the FSL-to-ITK
    # converted Func->Anat affine) into a single ordered list

    if map_node == 0:
        collect_transforms = pe.Node(util.Merge(4), name='collect_transforms')

    elif map_node == 1:
        collect_transforms = pe.MapNode(util.Merge(4),
                                        name='collect_transforms_mapnode',
                                        iterfield=['in4'])

    outputspec = pe.Node(
        util.IdentityInterface(fields=['transformation_series']),
        name='outputspec')

    # Field file from anatomical nonlinear registration
    collect_transforms_wf.connect(inputspec, 'warp_file', collect_transforms,
                                  'in1')

    # affine transformation from anatomical registration
    collect_transforms_wf.connect(inputspec, 'linear_affine',
                                  collect_transforms, 'in2')

    # rigid transformation from anatomical registration
    collect_transforms_wf.connect(inputspec, 'linear_rigid',
                                  collect_transforms, 'in3')

    # Premat from Func->Anat linear reg and bbreg (if bbreg is enabled)
    collect_transforms_wf.connect(inputspec, 'fsl_to_itk_affine',
                                  collect_transforms, 'in4')

    collect_transforms_wf.connect(collect_transforms, 'out', outputspec,
                                  'transformation_series')

    return collect_transforms_wf
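A hedged usage sketch for the workflow above, with hypothetical transform paths; passing ``map_node=1`` instead would iterate the fourth input over a list of per-volume affines:

wf = create_wf_collect_transforms(0, name='collect_transforms_demo')
inputspec = wf.get_node('inputspec')
inputspec.inputs.warp_file = '/out/anat2std_warp.nii.gz'        # hypothetical
inputspec.inputs.linear_affine = '/out/anat2std_affine.txt'     # hypothetical
inputspec.inputs.linear_rigid = '/out/anat2std_rigid.txt'       # hypothetical
inputspec.inputs.fsl_to_itk_affine = '/out/func2anat_itk.txt'   # hypothetical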
Example #12
def init_workflow(
    workdir, freesurfer=False, no_compose_transforms=False, skull_strip_algorithm="ants"
):
    """
    initialize nipype workflow

    :param spec
    """

    logger = logging.getLogger("pipeline")

    spec = loadspec(workdir=workdir)
    database = Database(files=spec.files)
    uuid = uuid5(spec.uuid, database.sha1())

    workflow = uncacheobj(workdir, "workflow", uuid)
    if workflow is not None:
        return workflow

    # create workflow
    workflow = pe.Workflow(name="nipype", base_dir=workdir)
    workflow.uuid = uuid
    uuidstr = str(uuid)[:8]
    logger.info(f"New workflow: {uuidstr}")
    workflow.config["execution"].update(
        {
            "crashdump_dir": workflow.base_dir,
            "poll_sleep_duration": 0.1,
            "use_relative_paths": False,
            "check_version": False,
        }
    )

    # helpers
    memcalc = memcalc_from_database(database)
    cache = Cache()

    subjectlevelworkflow = pe.Workflow(name="subjectlevel")
    workflow.add_nodes([subjectlevelworkflow])

    firstlevel_analyses = [analysis for analysis in spec.analyses if analysis.level == "first"]
    firstlevel_analysis_tagdicts = [
        analysis.tags.get_tagdict(study_entities) for analysis in firstlevel_analyses
    ]

    subjectlevel_analyses = [
        analysis
        for analysis in spec.analyses
        if analysis.level == "higher" and analysis.across != "subject"
    ]

    grouplevel_analyses = [
        analysis
        for analysis in spec.analyses
        if analysis.level == "higher" and analysis.across == "subject"
    ]

    analysisendpoints = {analysis.name: [] for analysis in spec.analyses}

    subjects = database.get_tagval_set("subject")
    for subject in subjects:
        subjectmetadata = {"subject": subject}

        subjectfiles = database.get(subject=subject)
        boldfiles = database.filter(subjectfiles, datatype="func", suffix="bold")

        nboldfiles = len(boldfiles)
        if nboldfiles == 0:
            logger.warning(f'Found {nboldfiles} BOLD files for subject "{subject}", skipping')
            continue

        subjectworkflow = pe.Workflow(name=f"subject_{subject}")
        subjectlevelworkflow.add_nodes([subjectworkflow])

        t1wfiles = database.filter(subjectfiles, datatype="anat", suffix="T1w")
        nt1wfiles = len(t1wfiles)
        if nt1wfiles == 0:
            logger.warning(f'Found {nt1wfiles} T1w files for subject "{subject}", skipping')
            continue

        t1wfile = t1wfiles.pop()
        if nt1wfiles > 1:
            logger.warning(f'Found {nt1wfiles} T1w files for subject "{subject}", using "{t1wfile}"')
        anat_preproc_wf = cache.get(
            init_anat_preproc_wf,
            argtuples=[
                ("workdir", workdir),
                ("no_compose_transforms", no_compose_transforms),
                ("freesurfer", freesurfer),
                ("skull_strip_algorithm", skull_strip_algorithm),
            ],
        )
        anat_preproc_wf.get_node("inputnode").inputs.t1w = t1wfile
        anat_preproc_wf.get_node("inputnode").inputs.metadata = subjectmetadata
        subjectworkflow.add_nodes([anat_preproc_wf])

        anat_report_wf = cache.get(
            init_anat_report_wf, argtuples=[("workdir", workdir), ("memcalc", memcalc)]
        )
        anat_report_wf.get_node("inputnode").inputs.metadata = subjectmetadata
        connect_anat_report_wf_attrs_from_anat_preproc_wf(
            subjectworkflow, anat_preproc_wf, anat_report_wf,
        )

        if len(firstlevel_analyses) == 0:
            continue

        subjectanalysisendpoints = {analysis.name: [] for analysis in spec.analyses}
        for boldfile in boldfiles:
            # make name
            boldfilemetadata = subjectmetadata.copy()
            has_direction = True
            if database.get_tagval(boldfile, "direction") is not None:
                tmplstr = database.get_tmplstr(boldfile)
                entities_in_path = get_entities_in_path(tmplstr)
                has_direction = "direction" in entities_in_path
            name = "bold"
            for entity in study_entities:
                value = database.get_tagval(boldfile, entity)
                if value is not None and (entity != "direction" or has_direction):
                    name += "_"
                    name += f"{entity}_{value}"
                    boldfilemetadata[entity] = value
            # workflow
            boldfileworkflow = pe.Workflow(name=name)
            fmap_type, fmaps, fmapmetadata = get_fmaps(boldfile, database)
            boldfilemetadata.update(fmapmetadata)

            repetition_time = database.get_tagval(boldfile, "repetition_time")
            if repetition_time is None:
                repetition_time = get_repetition_time(boldfile)
            assert (
                repetition_time > 0.01
            ), f'Repetition time value "{repetition_time}" is too low for file "{boldfile}"'
            boldfilemetadata["RepetitionTime"] = repetition_time

            func_preproc_wf = cache.get(
                init_func_preproc_wf,
                argtuples=[("workdir", workdir), ("fmap_type", fmap_type), ("memcalc", memcalc)],
            )
            boldfileworkflow.add_nodes([func_preproc_wf])
            func_preproc_inputnode = func_preproc_wf.get_node("inputnode")
            func_preproc_inputnode.inputs.bold_file = boldfile
            func_preproc_inputnode.inputs.fmaps = fmaps
            func_preproc_inputnode.inputs.metadata = boldfilemetadata
            connect_func_wf_attrs_from_anat_preproc_wf(
                subjectworkflow,
                anat_preproc_wf,
                boldfileworkflow,
                in_nodename=f"{func_preproc_wf.name}.inputnode",
            )
            func_report_wf = None
            for analysis, tagdict in zip(firstlevel_analyses, firstlevel_analysis_tagdicts):
                if not database.matches(boldfile, **tagdict):
                    continue
                # get analysis workflow
                analysisworkflow, boldfilevariants = cache.get(
                    init_firstlevel_analysis_wf,
                    argtuples=[("analysis", analysis), ("memcalc", memcalc)],
                )
                # workflow input variants
                bold_filt_wf = None
                for attrnames, variant in boldfilevariants:
                    name = make_variant_bold_filt_wf_name(variant)
                    variant_bold_filt_wf = boldfileworkflow.get_node(name)
                    if variant_bold_filt_wf is None:
                        variant_bold_filt_wf = cache.get(
                            init_bold_filt_wf,
                            argtuples=[("variant", variant), ("memcalc", memcalc)],
                        )
                        boldfileworkflow.add_nodes([variant_bold_filt_wf])
                        variant_bold_filt_wf.get_node(
                            "inputnode"
                        ).inputs.metadata = boldfilemetadata
                        connect_filt_wf_attrs_from_anat_preproc_wf(
                            subjectworkflow,
                            anat_preproc_wf,
                            boldfileworkflow,
                            in_nodename=f"{variant_bold_filt_wf.name}.inputnode",
                        )
                        connect_filt_wf_attrs_from_func_preproc_wf(
                            boldfileworkflow, func_preproc_wf, variant_bold_filt_wf
                        )
                    if bold_filt_wf is None:  # use first variant bold_filt_wf
                        bold_filt_wf = variant_bold_filt_wf
                    for i, attrname in enumerate(attrnames):
                        boldfileworkflow.connect(
                            variant_bold_filt_wf,
                            f"outputnode.out{i+1}",
                            analysisworkflow,
                            f"inputnode.{attrname}",
                        )
                boldfileworkflow.connect(
                    bold_filt_wf, "outputnode.mask_file", analysisworkflow, "inputnode.mask_file",
                )
                connect_firstlevel_analysis_extra_args(
                    analysisworkflow, analysis, database, boldfile
                )
                # use first variant to create func_report_wf
                if func_report_wf is None:
                    func_report_wf = cache.get(
                        init_func_report_wf, argtuples=[("workdir", workdir), ("memcalc", memcalc)]
                    )
                    func_report_wf.get_node("inputnode").inputs.metadata = boldfilemetadata
                    connect_func_report_wf_attrs_from_filt_wf(
                        boldfileworkflow, bold_filt_wf, func_report_wf
                    )
                    connect_func_report_wf_attrs_from_func_preproc_wf(
                        boldfileworkflow, func_preproc_wf, func_report_wf
                    )
                    connect_func_report_wf_attrs_from_anat_preproc_wf(
                        subjectworkflow,
                        anat_preproc_wf,
                        boldfileworkflow,
                        in_nodename=f"{func_report_wf.name}.inputnode",
                    )
                boldfileworkflow.connect(
                    func_report_wf, "outputnode.metadata", analysisworkflow, "inputnode.metadata",
                )
                # sink outputs
                endpoint = (boldfileworkflow, f"{analysisworkflow.name}.{analysisoutattr}")
                make_resultdict_datasink(
                    boldfileworkflow,
                    workdir,
                    (analysisworkflow, analysisoutattr),
                    name=f"{analysisworkflow.name}_resultdictdatasink",
                )
                if analysis.type == "atlas_based_connectivity" or analysis.type == "image_output":
                    pass
                else:  # FIXME don't fail with zero copes
                    subjectanalysisendpoints[analysis.name].append(endpoint)
        # subjectlevel aggregate
        for analysis in subjectlevel_analyses:
            endpoints = []
            for inputanalysisname in analysis.input:
                endpoints.extend(subjectanalysisendpoints[inputanalysisname])
            collectinputs = pe.Node(
                niu.Merge(numinputs=len(endpoints)), name=f"collectinputs_{analysis.name}",
            )
            for i, endpoint in enumerate(endpoints):
                subjectworkflow.connect(*endpoint, collectinputs, f"in{i+1}")
            analysisworkflow = cache.get(
                init_higherlevel_analysis_wf,
                argtuples=[("analysis", analysis), ("memcalc", memcalc)],
            )
            subjectworkflow.connect(collectinputs, "out", analysisworkflow, "inputnode.indicts")
            endpoint = (subjectworkflow, f"{analysisworkflow.name}.{analysisoutattr}")
            subjectanalysisendpoints[analysis.name].append(endpoint)
            make_resultdict_datasink(
                subjectworkflow,
                workdir,
                (analysisworkflow, analysisoutattr),
                name=f"{analysisworkflow.name}_resultdictdatasink",
            )
        for analysisname, endpoints in subjectanalysisendpoints.items():
            for endpoint in endpoints:
                node, attr = endpoint
                attr = f"{node.name}.{attr}"
                if node is not subjectworkflow:
                    attr = f"{subjectworkflow.name}.{attr}"
                analysisendpoints[analysisname].append((subjectlevelworkflow, attr))

    grouplevelworkflow = pe.Workflow(name="grouplevel")

    for analysis in grouplevel_analyses:
        endpoints = []
        for inputanalysisname in analysis.input:
            endpoints.extend(analysisendpoints[inputanalysisname])
        if len(endpoints) == 0:
            continue
        collectinputs = pe.Node(
            niu.Merge(numinputs=len(endpoints)), name=f"collectinputs_{analysis.name}",
        )
        grouplevelworkflow.add_nodes([collectinputs])
        for i, endpoint in enumerate(endpoints):
            workflow.connect(*endpoint, grouplevelworkflow, f"{collectinputs.name}.in{i+1}")
        analysisworkflow = cache.get(
            init_higherlevel_analysis_wf, argtuples=[("analysis", analysis), ("memcalc", memcalc)],
        )
        grouplevelworkflow.connect(collectinputs, "out", analysisworkflow, "inputnode.indicts")
        endpoint = (grouplevelworkflow, f"{analysisworkflow.name}.{analysisoutattr}")
        analysisendpoints[analysis.name].append(endpoint)
        make_resultdict_datasink(
            grouplevelworkflow,
            workdir,
            (analysisworkflow, analysisoutattr),
            name=f"{analysisworkflow.name}_resultdictdatasink",
        )

    cacheobj(workdir, "workflow", workflow)

    boldfiledicts = []
    for boldfile in database.get(datatype="func", suffix="bold"):
        tags_obj = database.get_tags(boldfile)
        boldfiledict = tags_obj.get_tagdict(bold_entities)
        if "direction" in boldfiledict:
            tmplstr = database.get_tmplstr(boldfile)
            entities_in_path = get_entities_in_path(tmplstr)
            if "direction" not in entities_in_path:
                del boldfiledict["direction"]
        boldfiledicts.append(boldfiledict)

    PreprocessedImgCopyOutResultHook(workdir).init_dictlistfile(boldfiledicts)

    return workflow
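The ``collectinputs`` idiom used twice above (fanning a variable number of upstream endpoints into one ordered list via ``niu.Merge(numinputs=...)``), in a minimal, runnable form; all names are illustrative:

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

wf = pe.Workflow(name='merge_demo')
sources = [pe.Node(niu.IdentityInterface(fields=['out']), name='src%d' % i)
           for i in range(3)]
for i, src in enumerate(sources):
    src.inputs.out = 'result_%d' % i
collect = pe.Node(niu.Merge(numinputs=len(sources)), name='collectinputs')
for i, src in enumerate(sources):
    # Merge exposes one numbered port per input: in1, in2, ...
    wf.connect(src, 'out', collect, 'in%d' % (i + 1))
# At run time, collect's 'out' holds ['result_0', 'result_1', 'result_2']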
Example #13
def init_gifti_surface_wf(*, name="gifti_surface_wf"):
    r"""
    Prepare GIFTI surfaces from a FreeSurfer subjects directory.

    If midthickness (or graymid) surfaces do not exist, they are generated and
    saved to the subject directory as ``lh/rh.midthickness``.
    These, along with the gray/white matter boundary (``lh/rh.smoothwm``), pial
    surfaces (``lh/rh.pial``) and inflated surfaces (``lh/rh.inflated``) are
    converted to GIFTI files.
    Additionally, the vertex coordinates are :py:class:`recentered
    <smriprep.interfaces.NormalizeSurf>` to align with native T1w space.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from smriprep.workflows.surfaces import init_gifti_surface_wf
            wf = init_gifti_surface_wf()

    Inputs
    ------
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    fsnative2t1w_xfm
        LTA formatted affine transform file (inverse)

    Outputs
    -------
    surfaces
        GIFTI surfaces for gray/white matter boundary, pial surface,
        midthickness (or graymid) surface, and inflated surfaces

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(["subjects_dir", "subject_id", "fsnative2t1w_xfm"]),
        name="inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(["surfaces"]), name="outputnode")

    get_surfaces = pe.Node(nio.FreeSurferSource(), name="get_surfaces")

    midthickness = pe.MapNode(
        MakeMidthickness(thickness=True, distance=0.5, out_name="midthickness"),
        iterfield="in_file",
        name="midthickness",
    )

    save_midthickness = pe.Node(
        nio.DataSink(parameterization=False), name="save_midthickness"
    )

    surface_list = pe.Node(
        niu.Merge(4, ravel_inputs=True),
        name="surface_list",
        run_without_submitting=True,
    )
    fs2gii = pe.MapNode(
        fs.MRIsConvert(out_datatype="gii"), iterfield="in_file", name="fs2gii"
    )
    fix_surfs = pe.MapNode(NormalizeSurf(), iterfield="in_file", name="fix_surfs")

    # fmt:off
    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'),
                                      ('pial', 'in2'),
                                      ('inflated', 'in3')]),
        (save_midthickness, surface_list, [('out_file', 'in4')]),
        (surface_list, fs2gii, [('out', 'in_file')]),
        (fs2gii, fix_surfs, [('converted', 'in_file')]),
        (inputnode, fix_surfs, [('fsnative2t1w_xfm', 'transform_file')]),
        (fix_surfs, outputnode, [('out_file', 'surfaces')]),
    ])
    # fmt:on
    return workflow
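A minimal usage sketch for this workflow (the import path follows the docstring
above; all paths are hypothetical):

from smriprep.workflows.surfaces import init_gifti_surface_wf

gifti_wf = init_gifti_surface_wf()
gifti_wf.base_dir = "/tmp/work"                              # hypothetical work dir
gifti_wf.inputs.inputnode.subjects_dir = "/data/freesurfer"  # hypothetical SUBJECTS_DIR
gifti_wf.inputs.inputnode.subject_id = "sub-01"              # hypothetical subject
gifti_wf.inputs.inputnode.fsnative2t1w_xfm = "/data/sub-01_from-fsnative_to-T1w.lta"  # hypothetical
# gifti_wf.run()  # requires FreeSurfer outputs on disk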
Example #14
def init_bold_mni_trans_wf(template,
                           mem_gb,
                           omp_nthreads,
                           name='bold_mni_trans_wf',
                           template_out_grid='2mm',
                           use_compression=True,
                           use_fieldwarp=False):
    """
    This workflow samples functional images to the MNI template in a "single shot"
    from the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_mni_trans_wf
        wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                                    mem_gb=3,
                                    omp_nthreads=1,
                                    template_out_grid='native')

    **Parameters**

        template : str
            Name of template targeted by ``template`` output space
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_mni_trans_wf``)
        template_out_grid : str
            Keyword ('native', '1mm' or '2mm') or path of custom reference
            image for normalization.
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI

    **Inputs**

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        bold_split
            Individual 3D volumes, not motion corrected
        bold_mask
            Skull-stripping mask of reference image
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold_mni
            BOLD series, resampled to template space
        bold_mask_mni
            BOLD series mask in template space

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The BOLD time-series were resampled to {tpl} standard space,
generating a *preprocessed BOLD run in {tpl} space*.
""".format(tpl=template)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'itk_bold_to_t1', 't1_2_mni_forward_transform', 'name_source',
        'bold_split', 'bold_mask', 'hmc_xforms', 'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['bold_mni', 'bold_mask_mni']),
        name='outputnode')

    def _aslist(in_value):
        if isinstance(in_value, list):
            return in_value
        return [in_value]

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB
    template_str = TEMPLATE_MAP[template]
    gen_ref.inputs.fixed_image = op.join(nid.get_dataset(template_str),
                                         '1mm_T1.nii.gz')

    mask_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_mni_tfm',
                           mem_gb=1)

    # Write corrected file in the designated output dir
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    nxforms = 4 if use_fieldwarp else 3
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms',
                                                  'in%d' % nxforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    workflow.connect([
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, mask_mni_tfm, [('bold_mask', 'input_image')]),
        (inputnode, mask_merge_tfms, [('t1_2_mni_forward_transform', 'in1'),
                                      (('itk_bold_to_t1', _aslist), 'in2')]),
        (mask_merge_tfms, mask_mni_tfm, [('out', 'transforms')]),
        (mask_mni_tfm, outputnode, [('output_image', 'bold_mask_mni')]),
    ])

    bold_to_mni_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_mni_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    workflow.connect([
        (inputnode, merge_xforms, [('t1_2_mni_forward_transform', 'in1'),
                                   (('itk_bold_to_t1', _aslist), 'in2')]),
        (merge_xforms, bold_to_mni_transform, [('out', 'transforms')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, bold_to_mni_transform, [('bold_split', 'input_image')]),
        (bold_to_mni_transform, merge, [('out_files', 'in_files')]),
        (merge, outputnode, [('out_file', 'bold_mni')]),
    ])

    if template_out_grid == 'native':
        workflow.connect([
            (gen_ref, mask_mni_tfm, [('out_file', 'reference_image')]),
            (gen_ref, bold_to_mni_transform, [('out_file', 'reference_image')
                                              ]),
        ])
    elif template_out_grid == '1mm' or template_out_grid == '2mm':
        mask_mni_tfm.inputs.reference_image = op.join(
            nid.get_dataset(template_str),
            '%s_brainmask.nii.gz' % template_out_grid)
        bold_to_mni_transform.inputs.reference_image = op.join(
            nid.get_dataset(template_str), '%s_T1.nii.gz' % template_out_grid)
    else:
        mask_mni_tfm.inputs.reference_image = template_out_grid
        bold_to_mni_transform.inputs.reference_image = template_out_grid
    return workflow
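A minimal usage sketch (constructor arguments mirror the docstring example;
input paths are hypothetical, and execution requires the template data):

from fmriprep.workflows.bold import init_bold_mni_trans_wf

mni_wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                                mem_gb=3,
                                omp_nthreads=1,
                                template_out_grid='2mm')
mni_wf.base_dir = '/tmp/work'                                  # hypothetical work dir
mni_wf.inputs.inputnode.itk_bold_to_t1 = '/data/bold2t1w.txt'  # hypothetical ITK affine
# the remaining inputs (t1_2_mni_forward_transform, bold_split, bold_mask,
# hmc_xforms, ...) are normally wired in from upstream workflows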
Example #15
def init_asl_preproc_trans_wf(mem_gb,
                              omp_nthreads,
                              name='asl_preproc_trans_wf',
                              use_compression=True,
                              use_fieldwarp=False,
                              split_file=False,
                              interpolation='LanczosWindowedSinc'):
    """
    Resample in native (original) space.

    This workflow resamples the input fMRI in its native (original)
    space in a "single shot" from the original asl series.

    Workflow Graph
        .. workflow::
            :graph2use: colored
            :simple_form: yes

            from aslprep.workflows.asl import init_asl_preproc_trans_wf
            wf = init_asl_preproc_trans_wf(mem_gb=3, omp_nthreads=1)

    Parameters
    ----------
    mem_gb : :obj:`float`
        Size of asl file in GB
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use
    name : :obj:`str`
        Name of workflow (default: ``asl_std_trans_wf``)
    use_compression : :obj:`bool`
        Save registered asl series as ``.nii.gz``
    use_fieldwarp : :obj:`bool`
        Include SDC warp in single-shot transform from asl to MNI
    split_file : :obj:`bool`
        Whether the input file should be split (i.e., it is a 4D file)
        or is already a list of 3D files (default ``False``, do not split)
    interpolation : :obj:`str`
        Interpolation type to be used by ANTs' ``applyTransforms``
        (default ``'LanczosWindowedSinc'``)

    Inputs
    ------
    asl_file
        Individual 3D volumes, not motion corrected
    asl_mask
        Skull-stripping mask of reference image
    name_source
        asl series NIfTI file
        Used to recover original information lost during processing
    hmc_xforms
        List of affine transforms aligning each volume to ``ref_image`` in ITK format
    fieldwarp
        a :abbr:`DFM (displacements field map)` in ITK format

    Outputs
    -------
    asl
        asl series, resampled in native space, including all preprocessing
    asl_mask
        asl series mask calculated with the new time-series
    asl_ref
        asl reference image: an average-like 3D image of the time-series
    asl_ref_brain
        Same as ``asl_ref``, but once the brain mask has been applied

    """
    from ...niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from ...niworkflows.func.util import init_asl_reference_wf
    from ...niworkflows.interfaces.itk import MultiApplyTransforms
    from ...niworkflows.interfaces.nilearn import Merge

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The asl time-series (including slice-timing correction when applied)
were resampled onto their original, native space by applying
{transforms}.
These resampled asl time-series will be referred to as *preprocessed
asl in original space*, or just *preprocessed asl*.
""".format(transforms="""\
a single, composite transform to correct for head-motion and
susceptibility distortions""" if use_fieldwarp else """\
the transforms to correct for head-motion""")

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'name_source', 'asl_file', 'asl_mask', 'hmc_xforms', 'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['asl', 'asl_mask', 'asl_ref', 'asl_ref_brain']),
                         name='outputnode')

    asl_transform = pe.Node(MultiApplyTransforms(interpolation=interpolation,
                                                 float=True,
                                                 copy_dtype=True),
                            name='asl_transform',
                            mem_gb=mem_gb * 3 * omp_nthreads,
                            n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a new asl reference
    asl_reference_wf = init_asl_reference_wf(omp_nthreads=omp_nthreads)
    asl_reference_wf.__desc__ = None  # Unset description to avoid second appearance

    workflow.connect([
        (inputnode, merge, [('name_source', 'header_source')]),
        (asl_transform, merge, [('out_files', 'in_files')]),
        (merge, asl_reference_wf, [('out_file', 'inputnode.asl_file')]),
        (merge, outputnode, [('out_file', 'asl')]),
        (asl_reference_wf, outputnode, [('outputnode.ref_image', 'asl_ref'),
                                        ('outputnode.ref_image_brain',
                                         'asl_ref_brain'),
                                        ('outputnode.asl_mask', 'asl_mask')]),
    ])

    # Split the 4D input file when requested; otherwise it is already a list of 3D files
    if split_file:
        asl_split = pe.Node(FSLSplit(dimension='t'),
                            name='asl_split',
                            mem_gb=mem_gb * 3)
        workflow.connect([(inputnode, asl_split, [('asl_file', 'in_file')]),
                          (asl_split, asl_transform, [
                              ('out_files', 'input_image'),
                              (('out_files', _first), 'reference_image'),
                          ])])
    else:
        workflow.connect([
            (inputnode, asl_transform, [('asl_file', 'input_image'),
                                        (('asl_file', _first),
                                         'reference_image')]),
        ])

    if use_fieldwarp:
        merge_xforms = pe.Node(niu.Merge(2),
                               name='merge_xforms',
                               run_without_submitting=True,
                               mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([
            (inputnode, merge_xforms, [('fieldwarp', 'in1'),
                                       ('hmc_xforms', 'in2')]),
            (merge_xforms, asl_transform, [('out', 'transforms')]),
        ])
    else:

        def _aslist(val):
            return [val]

        workflow.connect([
            (inputnode, asl_transform, [(('hmc_xforms', _aslist), 'transforms')
                                        ]),
        ])
    return workflow
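A minimal usage sketch (keyword values are illustrative; the import path follows
the docstring above):

from aslprep.workflows.asl import init_asl_preproc_trans_wf

asl_native_wf = init_asl_preproc_trans_wf(mem_gb=3,
                                          omp_nthreads=1,
                                          split_file=True)  # input is a single 4D file
asl_native_wf.base_dir = '/tmp/work'                                 # hypothetical
asl_native_wf.inputs.inputnode.asl_file = '/data/sub-01_asl.nii.gz'  # hypothetical
# hmc_xforms (and fieldwarp, when use_fieldwarp=True) are normally wired in
# from upstream motion-correction and distortion-correction workflows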
Example #16
def legacy(
    bids_base,
    template,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	exclude : dict
		A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
		If this is specified, matching entries will be excluded from the analysis.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, str
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overridden by `n_jobs`).
	out_base : str, optional
		Output base directory - inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is the default, since it works safely. Use the others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    try:
        import nipype.interfaces.ants.legacy as antslegacy
    except ModuleNotFoundError:
        print('''
			The `nipype.interfaces.ants.legacy` was not found on this system.
			You may want to downgrade nipype to e.g. 1.1.1, as this module has been removed in more recent versions:
			https://github.com/nipy/nipype/issues/3197
		''')

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
    )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getfullargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice', 'ind_type'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getfullargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getfullargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    # this gives the name of the workdir; the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
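Since ``legacy`` builds and runs its workflow internally, invoking it is a
single call. A hedged sketch (hypothetical paths; assumes the function above is
in scope or importable):

legacy(
    '/data/bids',                       # hypothetical BIDS root
    '/data/templates/template.nii.gz',  # hypothetical registration template
    realign='time',                     # safest slice-timing option per the docstring
    functional_match={'task': ['rest']},
    keep_work=False,
)  # executes the full pipeline; requires a real BIDS tree and template on disk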
Example #17
def individual_reports(name='ReportsWorkflow'):
    """
    Generate the components of the individual report.

    .. workflow::

        from mriqc.workflows.anatomical import individual_reports
        from mriqc.testing import mock_config
        with mock_config():
            wf = individual_reports()

    """
    from ..interfaces import PlotMosaic
    from ..interfaces.reports import IndividualReport

    verbose = True
    pages = 2
    extra_pages = int(verbose) * 7

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_ras', 'brainmask', 'headmask', 'airmask', 'artmask', 'rotmask',
        'segmentation', 'inu_corrected', 'noisefit', 'in_iqms', 'in_inu',
        'mni_report', 'api_id'
    ]),
                        name='inputnode')

    mosaic_zoom = pe.Node(PlotMosaic(out_file='plot_anat_mosaic1_zoomed.svg',
                                     cmap='Greys_r'),
                          name='PlotMosaicZoomed')

    mosaic_noise = pe.Node(PlotMosaic(out_file='plot_anat_mosaic2_noise.svg',
                                      only_noise=True,
                                      cmap='viridis_r'),
                           name='PlotMosaicNoise')

    mplots = pe.Node(niu.Merge(pages + extra_pages), name='MergePlots')
    rnode = pe.Node(IndividualReport(), name='GenerateReport')

    # Link images that should be reported
    dsplots = pe.Node(nio.DataSink(base_directory=str(
        config.execution.output_dir),
                                   parameterization=False),
                      name='dsplots',
                      run_without_submitting=True)

    workflow.connect([
        (inputnode, rnode, [('in_iqms', 'in_iqms')]),
        (inputnode, mosaic_zoom, [('in_ras', 'in_file'),
                                  ('brainmask', 'bbox_mask_file')]),
        (inputnode, mosaic_noise, [('in_ras', 'in_file')]),
        (mosaic_zoom, mplots, [('out_file', "in1")]),
        (mosaic_noise, mplots, [('out_file', "in2")]),
        (mplots, rnode, [('out', 'in_plots')]),
        (rnode, dsplots, [('out_file', "@html_report")]),
    ])

    if not verbose:
        return workflow

    from ..interfaces.viz import PlotContours

    plot_segm = pe.Node(PlotContours(display_mode='z',
                                     levels=[.5, 1.5, 2.5],
                                     cut_coords=10,
                                     colors=['r', 'g', 'b']),
                        name='PlotSegmentation')

    plot_bmask = pe.Node(PlotContours(display_mode='z',
                                      levels=[.5],
                                      colors=['r'],
                                      cut_coords=10,
                                      out_file='bmask'),
                         name='PlotBrainmask')
    plot_airmask = pe.Node(PlotContours(display_mode='x',
                                        levels=[.5],
                                        colors=['r'],
                                        cut_coords=6,
                                        out_file='airmask'),
                           name='PlotAirmask')
    plot_headmask = pe.Node(PlotContours(display_mode='x',
                                         levels=[.5],
                                         colors=['r'],
                                         cut_coords=6,
                                         out_file='headmask'),
                            name='PlotHeadmask')
    plot_artmask = pe.Node(PlotContours(display_mode='z',
                                        levels=[.5],
                                        colors=['r'],
                                        cut_coords=10,
                                        out_file='artmask',
                                        saturate=True),
                           name='PlotArtmask')

    workflow.connect([
        (inputnode, plot_segm, [('in_ras', 'in_file'),
                                ('segmentation', 'in_contours')]),
        (inputnode, plot_bmask, [('in_ras', 'in_file'),
                                 ('brainmask', 'in_contours')]),
        (inputnode, plot_headmask, [('in_ras', 'in_file'),
                                    ('headmask', 'in_contours')]),
        (inputnode, plot_airmask, [('in_ras', 'in_file'),
                                   ('airmask', 'in_contours')]),
        (inputnode, plot_artmask, [('in_ras', 'in_file'),
                                   ('artmask', 'in_contours')]),
        (inputnode, mplots, [('mni_report', f"in{pages + 1}")]),
        (plot_bmask, mplots, [('out_file', f'in{pages + 2}')]),
        (plot_segm, mplots, [('out_file', f'in{pages + 3}')]),
        (plot_artmask, mplots, [('out_file', f'in{pages + 4}')]),
        (plot_headmask, mplots, [('out_file', f'in{pages + 5}')]),
        (plot_airmask, mplots, [('out_file', f'in{pages + 6}')]),
        (inputnode, mplots, [('noisefit', f'in{pages + 7}')]),
    ])
    return workflow
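A minimal usage sketch, mirroring the docstring example:

from mriqc.workflows.anatomical import individual_reports
from mriqc.testing import mock_config

with mock_config():
    reports_wf = individual_reports()
# the inputnode fields (in_ras, brainmask, in_iqms, ...) are normally connected
# from the anatomical QC workflow rather than set by hand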
Example #18
def create_nuisance_modelfit_workflow(name='modelfit', f_contrasts=False):
    """
    Create an FSL model-fitting workflow that also returns
    residual4d and sigmasquareds.

    Example
    -------

#    >>> modelfit = create_nuisance_modelfit_workflow()
#    >>> modelfit.base_dir = '.'
#    >>> info = dict()
#    >>> modelfit.inputs.inputspec.session_info = info
#    >>> modelfit.inputs.inputspec.interscan_interval = 3.
#    >>> modelfit.inputs.inputspec.film_threshold = 1000
#    >>> modelfit.run() #doctest: +SKIP

    Inputs::

         inputspec.session_info : info generated by modelgen.SpecifyModel
         inputspec.interscan_interval : interscan interval
         inputspec.contrasts : list of contrasts
         inputspec.film_threshold : image threshold for FILM estimation
         inputspec.model_serial_correlations
         inputspec.bases

    Outputs::

         outputspec.copes
         outputspec.varcopes
         outputspec.dof_file
         outputspec.pfiles
         outputspec.zfiles
         outputspec.parameter_estimates
         outputspec.residual4d
         outputspec.sigmasquareds

    """

    version = 0
    if fsl.Info.version() and \
                    LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    modelfit = pe.Workflow(name=name)
    """
    Create the nodes
    """

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]),
                        name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    if version < 507:
        modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                         mask_size=5),
                                   name='modelestimate',
                                   iterfield=['design_file', 'in_file'])
    else:
        if f_contrasts:
            iterfield = ['design_file', 'in_file', 'tcon_file', 'fcon_file']
        else:
            iterfield = ['design_file', 'in_file', 'tcon_file']
        modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                         mask_size=5),
                                   name='modelestimate',
                                   iterfield=iterfield)

    if version < 507:
        if f_contrasts:
            iterfield = [
                'tcon_file', 'fcon_file', 'param_estimates', 'sigmasquareds',
                'corrections', 'dof_file'
            ]
        else:
            iterfield = [
                'tcon_file', 'param_estimates', 'sigmasquareds', 'corrections',
                'dof_file'
            ]
        conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                                 name='conestimate',
                                 # reuse the iterfield computed above, which
                                 # omits 'fcon_file' when f_contrasts is False
                                 iterfield=iterfield)

    if f_contrasts:
        iterfield = ['in1', 'in2']
    else:
        iterfield = ['in1']
    merge_contrasts = pe.MapNode(interface=util.Merge(2),
                                 name='merge_contrasts',
                                 iterfield=iterfield)

    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         nested=True,
                         name='ztop',
                         iterfield=['in_file'])

    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'zfiles',
        'parameter_estimates', 'residual4d', 'sigmasquareds'
    ]),
                         name='outputspec')
    """
    Setup the connections
    """

    modelfit.connect([
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),

        # connect also residual4d and sigmasquared
        (modelestimate, outputspec, [('param_estimates',
                                      'parameter_estimates'),
                                     ('dof_file', 'dof_file'),
                                     ('residual4d', 'residual4d'),
                                     ('sigmasquareds', 'sigmasquareds')]),
    ])
    if version < 507:
        modelfit.connect([
            (modelgen, conestimate, [('con_file', 'tcon_file'),
                                     ('fcon_file', 'fcon_file')]),
            (modelestimate, conestimate, [('param_estimates',
                                           'param_estimates'),
                                          ('sigmasquareds', 'sigmasquareds'),
                                          ('corrections', 'corrections'),
                                          ('dof_file', 'dof_file')]),
            (conestimate, outputspec, [('copes', 'copes'),
                                       ('varcopes', 'varcopes')]),
        ])
    else:
        modelfit.connect([
            (modelgen, modelestimate, [('con_file', 'tcon_file'),
                                       ('fcon_file', 'fcon_file')]),
            (modelestimate, outputspec, [('copes', 'copes'),
                                         ('varcopes', 'varcopes')]),
        ])
    return modelfit
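A hedged usage sketch, adapted from the commented example above (the ``bases``
value is illustrative; ``session_info`` would come from ``modelgen.SpecifyModel``):

modelfit = create_nuisance_modelfit_workflow(name='modelfit')
modelfit.base_dir = '.'
modelfit.inputs.inputspec.interscan_interval = 3.
modelfit.inputs.inputspec.film_threshold = 1000
modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': False}}
modelfit.inputs.inputspec.model_serial_correlations = True
# modelfit.run()  # requires functional_data and session_info to be set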
Example #19
def init_bold_surf_wf(mem_gb, output_spaces, medial_surface_nan, name='bold_surf_wf'):
    """
    This workflow samples functional images to FreeSurfer surfaces

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.

    Outputs are in GIFTI format.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_surf_wf
        wf = init_bold_surf_wf(mem_gb=0.1,
                               output_spaces=['T1w', 'fsnative',
                                             'template', 'fsaverage5'],
                               medial_surface_nan=False)

    **Parameters**

        output_spaces : list
            List of output spaces functional images are to be resampled to
            Target spaces beginning with ``fs`` will be selected for resampling,
            such as ``fsaverage`` or related template spaces
            If the list contains ``fsnative``, images will be resampled to the
            individual subject's native surface
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files

    **Inputs**

        source_file
            Motion-corrected BOLD series in T1 space
        t1_preproc
            Bias-corrected structural template image
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    **Outputs**

        surfaces
            BOLD series, resampled to FreeSurfer surfaces

    """
    # Ensure volumetric spaces do not sneak into this workflow
    spaces = [space for space in output_spaces if space.startswith('fs')]

    workflow = Workflow(name=name)

    if spaces:
        workflow.__desc__ = """\
The BOLD time-series were resampled to surfaces on the following
spaces: {out_spaces}.
""".format(out_spaces=', '.join(['*%s*' % s for s in spaces]))
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['source_file', 't1_preproc', 'subject_id', 'subjects_dir',
                                      't1_2_fsnative_forward_transform']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['surfaces']), name='outputnode')

    def select_target(subject_id, space):
        """ Given a source subject ID and a target space, get the target subject ID """
        return subject_id if space == 'fsnative' else space

    targets = pe.MapNode(niu.Function(function=select_target),
                         iterfield=['space'], name='targets',
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    targets.inputs.space = spaces

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.MapNode(niu.Rename(format_string='%(subject)s', keep_ext=True),
                            iterfield='subject', name='rename_src', run_without_submitting=True,
                            mem_gb=DEFAULT_MEMORY_MIN_GB)
    rename_src.inputs.subject = spaces

    resampling_xfm = pe.Node(LTAConvert(in_lta='identity.nofile', out_lta=True),
                             name='resampling_xfm')
    set_xfm_source = pe.Node(ConcatenateLTA(out_type='RAS2RAS'), name='set_xfm_source')

    sampler = pe.MapNode(
        fs.SampleToSurface(sampling_method='average', sampling_range=(0, 1, 0.2),
                           sampling_units='frac', interp_method='trilinear', cortex_mask=True,
                           override_reg_subj=True, out_type='gii'),
        iterfield=['source_file', 'target_subject'],
        iterables=('hemi', ['lh', 'rh']),
        name='sampler', mem_gb=mem_gb * 3)

    medial_nans = pe.MapNode(MedialNaNs(), iterfield=['in_file', 'target_subject'],
                             name='medial_nans', mem_gb=DEFAULT_MEMORY_MIN_GB)

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True), name='merger',
                         joinsource='sampler', joinfield=['in1'], run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)

    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(), iterfield='in_file',
                                 name='update_metadata', mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, resampling_xfm, [('source_file', 'source_file'),
                                     ('t1_preproc', 'target_file')]),
        (inputnode, set_xfm_source, [('t1_2_fsnative_forward_transform', 'in_lta2')]),
        (resampling_xfm, set_xfm_source, [('out_lta', 'in_lta1')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (set_xfm_source, sampler, [('out_file', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (merger, update_metadata, [('out', 'in_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
    ])

    if medial_surface_nan:
        workflow.connect([
            (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
            (sampler, medial_nans, [('out_file', 'in_file')]),
            (targets, medial_nans, [('out', 'target_subject')]),
            (medial_nans, merger, [('out_file', 'in1')]),
        ])
    else:
        workflow.connect(sampler, 'out_file', merger, 'in1')

    return workflow
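A minimal usage sketch (arguments follow the docstring example; non-``fs``
spaces would be filtered out, so only surface spaces are passed here):

from fmriprep.workflows.bold import init_bold_surf_wf

surf_wf = init_bold_surf_wf(mem_gb=0.1,
                            output_spaces=['fsnative', 'fsaverage5'],
                            medial_surface_nan=False)
surf_wf.inputs.inputnode.subjects_dir = '/data/freesurfer'  # hypothetical
surf_wf.inputs.inputnode.subject_id = 'sub-01'              # hypothetical
# source_file, t1_preproc, and t1_2_fsnative_forward_transform are normally
# wired in from upstream workflows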
Example #20
def init_gifti_surface_wf(name='gifti_surface_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(['subjects_dir', 'subject_id']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['surfaces']),
                         name='outputnode')

    get_surfaces = pe.Node(nio.FreeSurferSource(), name='get_surfaces')

    midthickness = pe.MapNode(MakeMidthickness(thickness=True,
                                               distance=0.5,
                                               out_name='midthickness'),
                              iterfield='in_file',
                              name='midthickness')

    save_midthickness = pe.Node(nio.DataSink(parameterization=False),
                                name='save_midthickness')

    surface_list = pe.Node(niu.Merge(4, ravel_inputs=True),
                           name='surface_list',
                           run_without_submitting=True)
    fs_2_gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                          iterfield='in_file',
                          name='fs_2_gii')

    def normalize_surfs(in_file):
        """ Re-center GIFTI coordinates to fit align to native T1 space

        For midthickness surfaces, add MidThickness metadata

        Coordinate update based on:
        https://github.com/Washington-University/workbench/blob/1b79e56/src/Algorithms/AlgorithmSurfaceApplyAffine.cxx#L73-L91
        and
        https://github.com/Washington-University/Pipelines/blob/ae69b9a/PostFreeSurfer/scripts/FreeSurfer2CaretConvertAndRegisterNonlinear.sh#L147
        """
        import os
        import numpy as np
        import nibabel as nib
        img = nib.load(in_file)
        pointset = img.get_arrays_from_intent('NIFTI_INTENT_POINTSET')[0]
        coords = pointset.data
        c_ras_keys = ('VolGeomC_R', 'VolGeomC_A', 'VolGeomC_S')
        ras = np.array([float(pointset.metadata[key]) for key in c_ras_keys])
        # Apply C_RAS translation to coordinates
        pointset.data = (coords + ras).astype(coords.dtype)

        secondary = nib.gifti.GiftiNVPairs('AnatomicalStructureSecondary',
                                           'MidThickness')
        geom_type = nib.gifti.GiftiNVPairs('GeometricType', 'Anatomical')
        has_ass = has_geo = False
        for nvpair in pointset.meta.data:
            # Remove C_RAS translation from metadata to avoid double-dipping in FreeSurfer
            if nvpair.name in c_ras_keys:
                nvpair.value = '0.000000'
            # Check for missing metadata
            elif nvpair.name == secondary.name:
                has_ass = True
            elif nvpair.name == geom_type.name:
                has_geo = True
        fname = os.path.basename(in_file)
        # Update metadata for MidThickness/graymid surfaces
        if 'midthickness' in fname.lower() or 'graymid' in fname.lower():
            if not has_ass:
                pointset.meta.data.insert(1, secondary)
            if not has_geo:
                pointset.meta.data.insert(2, geom_type)
        img.to_filename(fname)
        return os.path.abspath(fname)

    fix_surfs = pe.MapNode(niu.Function(function=normalize_surfs),
                           iterfield='in_file',
                           name='fix_surfs')

    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'), ('pial', 'in2'),
                                      ('inflated', 'in3')]),
        (save_midthickness, surface_list, [('out_file', 'in4')]),
        (surface_list, fs_2_gii, [('out', 'in_file')]),
        (fs_2_gii, fix_surfs, [('converted', 'in_file')]),
        (fix_surfs, outputnode, [('out', 'surfaces')]),
    ])

    return workflow
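The coordinate fix in ``normalize_surfs`` is a plain translation by the
volume-geometry center (C_RAS). A standalone sketch of that arithmetic,
assuming only numpy:

import numpy as np

coords = np.array([[10.0, -5.0, 3.0]], dtype=np.float32)  # hypothetical vertex
c_ras = np.array([1.5, -2.0, 0.5])  # values of the VolGeomC_R/A/S metadata keys
recentered = (coords + c_ras).astype(coords.dtype)  # same update as pointset.data
print(recentered)  # [[11.5 -7.   3.5]]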
Example #21
def init_brain_extraction_wf(name='brain_extraction_wf',
                             in_template='OASIS',
                             use_float=True,
                             normalization_quality='precise',
                             omp_nthreads=None,
                             mem_gb=3.0,
                             modality='T1',
                             atropos_refine=True,
                             atropos_use_random_seed=True,
                             atropos_model=None):
    """
    A Nipype implementation of the official ANTs' ``antsBrainExtraction.sh``
    workflow (only for 3D images).

    The official workflow is built as follows (and this implementation
    follows the same organization):

      1. Step 1 performs several clerical tasks (adding padding, calculating
         the Laplacian of inputs, affine initialization) and the core
         spatial normalization.
      2. Maps the brain mask into target space using the normalization
         calculated in 1.
      3. Superstep 1b: smart binarization of the brain mask
      4. Superstep 6: apply ATROPOS and massage its outputs
      5. Superstep 7: use results from 4 to refine the brain mask


    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from niworkflows.anat import init_brain_extraction_wf
        wf = init_brain_extraction_wf()


    **Parameters**

        in_template : str
            Name of the skull-stripping template ('OASIS', 'NKI', or
            path).
            The brain template from which regions will be projected
            Anatomical template created using e.g. LPBA40 data set with
            ``buildtemplateparallel.sh`` in ANTs.
            The workflow will automatically search for a brain probability
            mask created using e.g. LPBA40 data set which have brain masks
            defined, and warped to anatomical template and
            averaged resulting in a probability image.
        use_float : bool
            Whether single precision should be used
        normalization_quality : str
            Use more precise or faster registration parameters
            (default: ``precise``, other possible values: ``testing``)
        omp_nthreads : int
            Maximum number of threads an individual process may use
        mem_gb : float
            Estimated peak memory consumption of the most hungry nodes
            in the workflow
        modality : str
            Sequence type of the first input image ('T1', 'T2', or 'FLAIR')
        atropos_refine : bool
            Enables or disables the whole ATROPOS sub-workflow
        atropos_use_random_seed : bool
            Whether ATROPOS should generate a random seed based on the
            system's clock
        atropos_model : tuple or None
            Allows specifying a particular segmentation model, overriding
            the defaults based on ``modality``
        name : str, optional
            Workflow name (default: ``brain_extraction_wf``)


    **Inputs**

        in_files
            List of input anatomical images to be brain-extracted,
            typically T1-weighted.
            If a list of anatomical images is provided, subsequently
            specified images are used during the segmentation process.
            However, only the first image is used in the registration
            of priors.
            Our suggestion would be to specify the T1w as the first image.
        in_mask
            (optional) Mask used for registration to limit the metric
            computation to a specific region.


    **Outputs**

        bias_corrected
            The ``in_files`` input images, after :abbr:`INU (intensity non-uniformity)`
            correction.
        out_mask
            Calculated brain mask
        bias_image
            The :abbr:`INU (intensity non-uniformity)` field estimated for each
            input in ``in_files``
        out_segm
            Output segmentation by ATROPOS
        out_tpms
            Output :abbr:`TPMs (tissue probability maps)` by ATROPOS


    """
    wf = pe.Workflow(name)

    template_path = None
    if in_template == 'OASIS':
        template_path = pkgr_fn('niflow.ants.brainextraction',
                                'data/tpl-OASIS30ANTs')
    else:
        template_path = in_template

    mod = ('%sw' % modality[:2].upper()
           if modality.upper().startswith('T') else modality.upper())

    # Append template modality
    potential_targets = list(Path(template_path).glob('*_%s.nii.gz' % mod))
    if not potential_targets:
        raise ValueError(
            'No %s template was found under "%s".' % (mod, template_path))

    tpl_target_path = str(potential_targets[0])
    target_basename = '_'.join(tpl_target_path.split('_')[:-1])

    # Get probabilistic brain mask if available
    tpl_mask_path = '%s_class-brainmask_probtissue.nii.gz' % target_basename
    # Fall-back to a binary mask just in case
    if not os.path.exists(tpl_mask_path):
        tpl_mask_path = '%s_brainmask.nii.gz' % target_basename

    if not os.path.exists(tpl_mask_path):
        raise ValueError(
            'Probability map for the brain mask associated to this template '
            '"%s" not found.' % tpl_mask_path)

    if omp_nthreads is None or omp_nthreads < 1:
        omp_nthreads = cpu_count()

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_files', 'in_mask']),
                        name='inputnode')

    # Try to find a registration mask, set if available
    tpl_regmask_path = '%s_label-BrainCerebellumRegistration_roi.nii.gz' % target_basename
    if os.path.exists(tpl_regmask_path):
        inputnode.inputs.in_mask = tpl_regmask_path

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bias_corrected', 'out_mask', 'bias_image', 'out_segm']),
        name='outputnode')

    trunc = pe.MapNode(ImageMath(operation='TruncateImageIntensity', op2='0.01 0.999 256'),
                       name='truncate_images', iterfield=['op1'])
    inu_n4 = pe.MapNode(
        N4BiasFieldCorrection(
            dimension=3, save_bias=True, copy_header=True,
            n_iterations=[50] * 4, convergence_threshold=1e-7, shrink_factor=4,
            bspline_fitting_distance=200),
        n_procs=omp_nthreads, name='inu_n4', iterfield=['input_image'])

    res_tmpl = pe.Node(ResampleImageBySpacing(out_spacing=(4, 4, 4),
                       apply_smoothing=True), name='res_tmpl')
    res_tmpl.inputs.input_image = tpl_target_path
    res_target = pe.Node(ResampleImageBySpacing(out_spacing=(4, 4, 4),
                         apply_smoothing=True), name='res_target')

    lap_tmpl = pe.Node(ImageMath(operation='Laplacian', op2='1.5 1'),
                       name='lap_tmpl')
    lap_tmpl.inputs.op1 = tpl_target_path
    lap_target = pe.Node(ImageMath(operation='Laplacian', op2='1.5 1'),
                         name='lap_target')
    mrg_tmpl = pe.Node(niu.Merge(2), name='mrg_tmpl')
    mrg_tmpl.inputs.in1 = tpl_target_path
    mrg_target = pe.Node(niu.Merge(2), name='mrg_target')

    # Initialize transforms with antsAI
    init_aff = pe.Node(AI(
        metric=('Mattes', 32, 'Regular', 0.2),
        transform=('Affine', 0.1),
        search_factor=(20, 0.12),
        principal_axes=False,
        convergence=(10, 1e-6, 10),
        verbose=True),
        name='init_aff',
        n_procs=omp_nthreads)

    if parseversion(Registration().version) > Version('2.2.0'):
        init_aff.inputs.search_grid = (40, (0, 40, 40))

    # Set up spatial normalization
    norm = pe.Node(Registration(
        from_file=pkgr_fn(
            'niflow.ants.brainextraction',
            'data/antsBrainExtraction_%s.json' % normalization_quality)),
        name='norm',
        n_procs=omp_nthreads,
        mem_gb=mem_gb)
    norm.inputs.float = use_float
    fixed_mask_trait = 'fixed_image_mask'
    if parseversion(Registration().version) >= Version('2.2.0'):
        fixed_mask_trait += 's'

    map_brainmask = pe.Node(
        ApplyTransforms(interpolation='Gaussian', float=True),
        name='map_brainmask',
        mem_gb=1
    )
    map_brainmask.inputs.input_image = tpl_mask_path

    thr_brainmask = pe.Node(ThresholdImage(
        dimension=3, th_low=0.5, th_high=1.0, inside_value=1,
        outside_value=0), name='thr_brainmask')

    # Morphological dilation, radius=2
    dil_brainmask = pe.Node(ImageMath(operation='MD', op2='2'),
                            name='dil_brainmask')
    # Get largest connected component
    get_brainmask = pe.Node(ImageMath(operation='GetLargestComponent'),
                            name='get_brainmask')

    # Apply mask
    apply_mask = pe.MapNode(ApplyMask(), iterfield=['in_file'], name='apply_mask')

    wf.connect([
        (inputnode, trunc, [('in_files', 'op1')]),
        (inputnode, init_aff, [('in_mask', 'fixed_image_mask')]),
        (inputnode, norm, [('in_mask', fixed_mask_trait)]),
        (inputnode, map_brainmask, [(('in_files', _pop), 'reference_image')]),
        (trunc, inu_n4, [('output_image', 'input_image')]),
        (inu_n4, res_target, [
            (('output_image', _pop), 'input_image')]),
        (inu_n4, lap_target, [
            (('output_image', _pop), 'op1')]),
        (res_tmpl, init_aff, [('output_image', 'fixed_image')]),
        (res_target, init_aff, [('output_image', 'moving_image')]),
        (inu_n4, mrg_target, [('output_image', 'in1')]),
        (lap_tmpl, mrg_tmpl, [('output_image', 'in2')]),
        (lap_target, mrg_target, [('output_image', 'in2')]),

        (init_aff, norm, [('output_transform', 'initial_moving_transform')]),
        (mrg_tmpl, norm, [('out', 'fixed_image')]),
        (mrg_target, norm, [('out', 'moving_image')]),
        (norm, map_brainmask, [
            ('reverse_invert_flags', 'invert_transform_flags'),
            ('reverse_transforms', 'transforms')]),
        (map_brainmask, thr_brainmask, [('output_image', 'input_image')]),
        (thr_brainmask, dil_brainmask, [('output_image', 'op1')]),
        (dil_brainmask, get_brainmask, [('output_image', 'op1')]),
        (inu_n4, apply_mask, [('output_image', 'in_file')]),
        (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
        (get_brainmask, outputnode, [('output_image', 'out_mask')]),
        (apply_mask, outputnode, [('out_file', 'bias_corrected')]),
        (inu_n4, outputnode, [('bias_image', 'bias_image')]),
    ])

    if atropos_refine:
        atropos_wf = init_atropos_wf(
            use_random_seed=atropos_use_random_seed,
            omp_nthreads=omp_nthreads,
            mem_gb=mem_gb,
            in_segmentation_model=atropos_model or list(ATROPOS_MODELS[modality].values())
        )

        wf.disconnect([
            (get_brainmask, outputnode, [('output_image', 'out_mask')]),
            (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
        ])
        wf.connect([
            (inu_n4, atropos_wf, [
                ('output_image', 'inputnode.in_files')]),
            (get_brainmask, atropos_wf, [
                ('output_image', 'inputnode.in_mask')]),
            (atropos_wf, outputnode, [
                ('outputnode.out_mask', 'out_mask')]),
            (atropos_wf, apply_mask, [
                ('outputnode.out_mask', 'mask_file')]),
            (atropos_wf, outputnode, [
                ('outputnode.out_segm', 'out_segm'),
                ('outputnode.out_tpms', 'out_tpms')])
        ])
    return wf
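
# Usage sketch for the factory above (hedged: the factory name
# init_brain_extraction_wf, the default keyword values, and the file paths are
# illustrative assumptions, not a verbatim API):
bex_wf = init_brain_extraction_wf(in_template='OASIS', modality='T1',
                                  omp_nthreads=8, name='brain_extraction_wf')
bex_wf.inputs.inputnode.in_files = ['sub-01_T1w.nii.gz']  # hypothetical input
bex_wf.base_dir = '/tmp/work'                             # hypothetical path
bex_wf.run(plugin='MultiProc', plugin_args={'n_procs': 8})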
Example #22
def init_fmap_preproc_wf(
    *,
    estimators,
    omp_nthreads,
    output_dir,
    subject,
    sloppy=False,
    debug=False,
    name="fmap_preproc_wf",
):
    """
    Create and combine estimator workflows.

    Parameters
    ----------
    estimators : :obj:`list` of :py:class:`~sdcflows.fieldmaps.FieldmapEstimator`
        A list of estimators.
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use
    output_dir : :obj:`str`
        Directory in which to save derivatives
    subject : :obj:`str`
        Participant label for this single-subject workflow.
    debug : :obj:`bool`
        Enable debugging outputs
    sloppy : :obj:`bool`
        Enable faster but less precise calculations
    name : :obj:`str`, optional
        Workflow name (default: ``"fmap_preproc_wf"``)

    Inputs
    ------
    in_<B0FieldIdentifier>.<field> :
        The workflow generates inputs depending on the estimation strategy.

    Outputs
    -------
    out_<B0FieldIdentifier>.fmap :
        The preprocessed fieldmap.
    out_<B0FieldIdentifier>.fmap_ref :
        The preprocessed fieldmap reference.
    out_<B0FieldIdentifier>.fmap_coeff :
        The preprocessed fieldmap coefficients.

    """
    from .fit.pepolar import INPUT_FIELDS as _pepolar_fields
    from .fit.syn import INPUT_FIELDS as _syn_fields
    from .outputs import init_fmap_derivatives_wf, init_fmap_reports_wf
    from ..fieldmaps import EstimatorType

    INPUT_FIELDS = {
        EstimatorType.ANAT: _syn_fields,
        EstimatorType.PEPOLAR: _pepolar_fields,
    }

    workflow = Workflow(name=name)

    out_fields = ("fmap", "fmap_ref", "fmap_coeff", "fmap_mask", "fmap_id", "method")
    out_merge = {
        f: pe.Node(niu.Merge(len(estimators)), name=f"out_merge_{f}")
        for f in out_fields
    }
    outputnode = pe.Node(niu.IdentityInterface(fields=out_fields), name="outputnode")

    workflow.connect(
        [
            (mergenode, outputnode, [("out", field)])
            for field, mergenode in out_merge.items()
        ]
    )

    for n, estimator in enumerate(estimators, 1):
        est_wf = estimator.get_workflow(
            omp_nthreads=omp_nthreads,
            debug=debug,
            sloppy=sloppy,
        )
        source_files = [
            str(f.path) for f in estimator.sources if f.suffix not in ("T1w", "T2w")
        ]

        out_map = pe.Node(
            niu.IdentityInterface(fields=out_fields), name=f"out_{estimator.bids_id}"
        )
        out_map.inputs.fmap_id = estimator.bids_id

        fmap_derivatives_wf = init_fmap_derivatives_wf(
            output_dir=str(output_dir),
            write_coeff=True,
            bids_fmap_id=estimator.bids_id,
            name=f"fmap_derivatives_wf_{estimator.bids_id}",
        )
        fmap_derivatives_wf.inputs.inputnode.source_files = source_files
        fmap_derivatives_wf.inputs.inputnode.fmap_meta = [
            f.metadata for f in estimator.sources
        ]

        fmap_reports_wf = init_fmap_reports_wf(
            output_dir=str(output_dir),
            fmap_type=str(estimator.method).rpartition(".")[-1].lower(),
            bids_fmap_id=estimator.bids_id,
            name=f"fmap_reports_wf_{estimator.bids_id}",
        )
        fmap_reports_wf.inputs.inputnode.source_files = source_files

        if estimator.method not in (EstimatorType.MAPPED, EstimatorType.PHASEDIFF):
            fields = INPUT_FIELDS[estimator.method]
            inputnode = pe.Node(
                niu.IdentityInterface(fields=fields),
                name=f"in_{estimator.bids_id}",
            )
            # fmt:off
            workflow.connect([
                (inputnode, est_wf, [(f, f"inputnode.{f}") for f in fields])
            ])
            # fmt:on

        # fmt:off
        workflow.connect([
            (est_wf, fmap_derivatives_wf, [
                ("outputnode.fmap", "inputnode.fieldmap"),
                ("outputnode.fmap_ref", "inputnode.fmap_ref"),
                ("outputnode.fmap_coeff", "inputnode.fmap_coeff"),
            ]),
            (est_wf, fmap_reports_wf, [
                ("outputnode.fmap", "inputnode.fieldmap"),
                ("outputnode.fmap_ref", "inputnode.fmap_ref"),
                ("outputnode.fmap_mask", "inputnode.fmap_mask"),
            ]),
            (est_wf, out_map, [
                ("outputnode.fmap", "fmap"),
                ("outputnode.fmap_ref", "fmap_ref"),
                ("outputnode.fmap_coeff", "fmap_coeff"),
                ("outputnode.fmap_mask", "fmap_mask"),
                ("outputnode.method", "method")
            ]),
        ])
        # fmt:on

        for field, mergenode in out_merge.items():
            workflow.connect(out_map, field, mergenode, f"in{n}")

    return workflow
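
# Usage sketch for init_fmap_preproc_wf (hedged: the estimator object, its
# bids_id 'auto_00000', and the input field names are illustrative
# assumptions). With one PEPOLAR estimator, the workflow exposes dynamically
# named nodes in_auto_00000/out_auto_00000, addressed like this:
fmap_wf = init_fmap_preproc_wf(
    estimators=[pepolar_estimator],   # assumed sdcflows estimator object
    omp_nthreads=4,
    output_dir='/tmp/derivatives',    # hypothetical path
    subject='01',
)
fmap_wf.inputs.in_auto_00000.in_data = epi_files        # field names depend on
fmap_wf.inputs.in_auto_00000.metadata = epi_metadata    # the strategy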
Example #23
        sampling_strategy=['Regular', 'Regular', None],
        sampling_percentage=[0.3, 0.3, None],
        number_of_iterations=[[10000, 11110, 11110], [10000, 11110, 11110],
                              [100, 30, 20]],
        convergence_threshold=[1.e-8, 1.e-8, -0.01],
        convergence_window_size=[20, 20, 5],
        use_estimate_learning_rate_once=[True, True, True],
        shrink_factors=[[3, 2, 1], [3, 2, 1], [4, 2, 1]],
        smoothing_sigmas=[[4, 2, 1], [4, 2, 1], [1, 0.5, 0]],
        sigma_units=['vox', 'vox', 'vox'],
        write_composite_transform=True,
        num_threads=ANTS_num_threads),
    name='anat2std')

# Merge registration (epi2anat) and normalisation (anat2std)
merge = pe.Node(util.Merge(3), infields=['in1', 'in2', 'in3'], name='merge')

# Split the functional image by volume before applying transformations
funcSplit = pe.Node(fsl.Split(dimension='t', output_type='NIFTI_GZ'),
                    name='funcSplit')

# Resample the final reference image to the resolution of the functional
resampleRef = pe.Node(ants.ApplyTransforms(args='--float',
                                           dimension=3,
                                           interpolation='BSpline',
                                           transforms='identity'),
                      name='resampleRef')

# Threshold and mask the resampled reference image
threshRef = pe.Node(fsl.Threshold(thresh=10, output_type='NIFTI_GZ'),
                    name='threshRef')
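
# Hypothetical continuation of the snippet above: apply the merged transform
# stack to every 3D volume produced by funcSplit, resampled onto the
# thresholded reference grid. The names applyTransFunc and normflow are
# illustrative assumptions, not part of the original example.
applyTransFunc = pe.MapNode(ants.ApplyTransforms(args='--float',
                                                 interpolation='BSpline'),
                            iterfield=['input_image'],
                            name='applyTransFunc')
normflow = pe.Workflow(name='normflow')
normflow.connect([
    (funcSplit, applyTransFunc, [('out_files', 'input_image')]),
    (merge, applyTransFunc, [('out', 'transforms')]),
    (threshRef, applyTransFunc, [('out_file', 'reference_image')]),
])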
Example #24
MRconvert.inputs.extract_at_axis = 3
MRconvert.inputs.extract_at_coordinate = [0]
threshold_b0 = pe.Node(interface=mrtrix.Threshold(),name='threshold_b0')
median3d = pe.Node(interface=mrtrix.MedianFilter3D(),name='median3d')

"""
The brain mask is also used to help identify single-fiber voxels.
This is done by passing the brain mask through two erosion steps,
multiplying the remaining mask with the fractional anisotropy map, and
thresholding the result to obtain some highly anisotropic within-brain voxels.
"""

erode_mask_firstpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_firstpass')
erode_mask_secondpass = pe.Node(interface=mrtrix.Erode(),name='erode_mask_secondpass')
MRmultiply = pe.Node(interface=mrtrix.MRMultiply(),name='MRmultiply')
MRmult_merge = pe.Node(interface=util.Merge(2), name="MRmultiply_merge")
threshold_FA = pe.Node(interface=mrtrix.Threshold(),name='threshold_FA')
threshold_FA.inputs.absolute_threshold_value = 0.7

"""
For whole-brain tracking we also require a broad white-matter seed mask.
This is created by generating a white matter mask, given a brainmask, and
thresholding it at a reasonably high level.
"""

bet = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0')
gen_WM_mask = pe.Node(interface=mrtrix.GenerateWhiteMatterMask(),name='gen_WM_mask')
threshold_wmmask = pe.Node(interface=mrtrix.Threshold(),name='threshold_wmmask')
threshold_wmmask.inputs.absolute_threshold_value = 0.4
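
# Hypothetical wiring for the single-fiber mask described above. The workflow
# object (sfm) and the FA source node (tensor2fa, assumed to be a
# mrtrix.Tensor2FractionalAnisotropy node) are illustrative; the remaining
# nodes are the ones declared in this snippet.
sfm = pe.Workflow(name='single_fiber_mask')
sfm.connect([
    (threshold_b0, median3d, [('out_file', 'in_file')]),
    (median3d, erode_mask_firstpass, [('out_file', 'in_file')]),
    (erode_mask_firstpass, erode_mask_secondpass, [('out_file', 'in_file')]),
    (tensor2fa, MRmult_merge, [('FA', 'in1')]),          # hypothetical node
    (erode_mask_secondpass, MRmult_merge, [('out_file', 'in2')]),
    (MRmult_merge, MRmultiply, [('out', 'in_files')]),
    (MRmultiply, threshold_FA, [('out_file', 'in_file')]),
])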

"""
Example #25
def sdc_peb(name='peb_correction',
            epi_params=dict(echospacing=0.77e-3,
                            acc_factor=3,
                            enc_dir='y-',
                            epi_factor=1),
            altepi_params=dict(echospacing=0.77e-3,
                               acc_factor=3,
                               enc_dir='y',
                               epi_factor=1)):
    """
    SDC stands for susceptibility distortion correction. PEB stands for
    phase-encoding-based.

    The phase-encoding-based (PEB) method implements SDC by acquiring
    diffusion images with two different encoding directions [Andersson2003]_.
    The most typical case is acquiring with opposed phase-gradient blips
    (e.g. *A>>>P* and *P>>>A*, or equivalently, *-y* and *y*)
    as in [Chiou2000]_, but it is also possible to use orthogonal
    configurations [Cordes2000]_ (e.g. *A>>>P* and *L>>>R*,
    or equivalently *-y* and *x*).
    This workflow uses the implementation of FSL
    (`TOPUP <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/TOPUP>`_).

    Example
    -------

    >>> from nipype.workflows.dmri.fsl.artifacts import sdc_peb
    >>> peb = sdc_peb()
    >>> peb.inputs.inputnode.in_file = 'epi.nii'
    >>> peb.inputs.inputnode.alt_file = 'epi_rev.nii'
    >>> peb.inputs.inputnode.in_bval = 'diffusion.bval'
    >>> peb.inputs.inputnode.in_mask = 'mask.nii'
    >>> peb.run() # doctest: +SKIP

    .. admonition:: References

      .. [Andersson2003] Andersson JL et al., `How to correct susceptibility
        distortions in spin-echo echo-planar images: application to diffusion
        tensor imaging <https://doi.org/10.1016/S1053-8119(03)00336-7>`_.
        Neuroimage. 2003 Oct;20(2):870-88. doi: 10.1016/S1053-8119(03)00336-7

      .. [Cordes2000] Cordes D et al., Geometric distortion correction in EPI
        using two images with orthogonal phase-encoding directions, in Proc.
        ISMRM (8), p.1712, Denver, US, 2000.

      .. [Chiou2000] Chiou JY, and Nalcioglu O, A simple method to correct
        off-resonance related distortion in echo planar imaging, in Proc.
        ISMRM (8), p.1712, Denver, US, 2000.

    """

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'in_bval', 'in_mask', 'alt_file', 'ref_num']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'out_vsm', 'out_warp']),
        name='outputnode')

    b0_ref = pe.Node(fsl.ExtractROI(t_size=1), name='b0_ref')
    b0_alt = pe.Node(fsl.ExtractROI(t_size=1), name='b0_alt')
    b0_comb = pe.Node(niu.Merge(2), name='b0_list')
    b0_merge = pe.Node(fsl.Merge(dimension='t'), name='b0_merged')

    topup = pe.Node(fsl.TOPUP(), name='topup')
    topup.inputs.encoding_direction = [
        epi_params['enc_dir'], altepi_params['enc_dir']
    ]

    readout = compute_readout(epi_params)
    topup.inputs.readout_times = [readout, compute_readout(altepi_params)]

    unwarp = pe.Node(fsl.ApplyTOPUP(in_index=[1], method='jac'), name='unwarp')

    # scaling = pe.Node(niu.Function(input_names=['in_file', 'enc_dir'],
    #                   output_names=['factor'], function=_get_zoom),
    #                   name='GetZoom')
    # scaling.inputs.enc_dir = epi_params['enc_dir']
    vsm2dfm = vsm2warp()
    vsm2dfm.inputs.inputnode.enc_dir = epi_params['enc_dir']
    vsm2dfm.inputs.inputnode.scaling = readout

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, b0_ref, [('in_file', 'in_file'),
                             (('ref_num', _checkrnum), 't_min')]),
        (inputnode, b0_alt, [('alt_file', 'in_file'),
                             (('ref_num', _checkrnum), 't_min')]),
        (b0_ref, b0_comb, [('roi_file', 'in1')]),
        (b0_alt, b0_comb, [('roi_file', 'in2')]),
        (b0_comb, b0_merge, [('out', 'in_files')]),
        (b0_merge, topup, [('merged_file', 'in_file')]),
        (topup, unwarp, [('out_fieldcoef', 'in_topup_fieldcoef'),
                         ('out_movpar', 'in_topup_movpar'),
                         ('out_enc_file', 'encoding_file')]),
        (inputnode, unwarp, [('in_file', 'in_files')]),
        (unwarp, outputnode, [('out_corrected', 'out_file')]),
        # (b0_ref,      scaling,    [('roi_file', 'in_file')]),
        # (scaling,     vsm2dfm,    [('factor', 'inputnode.scaling')]),
        (b0_ref, vsm2dfm, [('roi_file', 'inputnode.in_ref')]),
        (topup, vsm2dfm, [('out_field', 'inputnode.in_vsm')]),
        (topup, outputnode, [('out_field', 'out_vsm')]),
        (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')])
    ])
    return wf
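
# compute_readout is imported from elsewhere in this module. A hedged sketch
# of the total-readout-time formula it encodes (an illustration, not the
# verbatim nipype helper): the effective echo spacing is the nominal echo
# spacing divided by the in-plane acceleration factor, and the readout spans
# epi_factor - 1 inter-echo intervals.
def compute_readout_sketch(params):
    """Approximate total readout time in seconds from EPI parameters."""
    echospacing = params['echospacing']
    acc = float(params.get('acc_factor', 1)) or 1.0
    lines = float(params.get('epi_factor', 1)) - 1.0
    return echospacing / acc * (lines if lines > 0 else 1.0)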
Example #26
isotropic_surface_smooth = pe.MapNode(
    interface=fs.Smooth(proj_frac_avg=(0, 1, 0.1)),
    iterfield=['in_file'],
    name="isotropic_surface_smooth")
preprocessing.connect(surfregister, 'out_reg_file', isotropic_surface_smooth,
                      'reg_file')
preprocessing.connect(realign, "realigned_files", isotropic_surface_smooth,
                      "in_file")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth,
                      "surface_fwhm")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_surface_smooth, "vol_fwhm")
preprocessing.connect(recon_all, 'subjects_dir', isotropic_surface_smooth,
                      'subjects_dir')

merge_smoothed_files = pe.Node(
    interface=util.Merge(3), name='merge_smoothed_files')
preprocessing.connect(isotropic_voxel_smooth, 'smoothed_files',
                      merge_smoothed_files, 'in1')
preprocessing.connect(anisotropic_voxel_smooth, 'outputnode.smoothed_files',
                      merge_smoothed_files, 'in2')
preprocessing.connect(isotropic_surface_smooth, 'smoothed_file',
                      merge_smoothed_files, 'in3')

select_smoothed_files = pe.Node(
    interface=util.Select(), name="select_smoothed_files")
preprocessing.connect(merge_smoothed_files, 'out', select_smoothed_files,
                      'inlist')


def chooseindex(roi):
    return {
Example #27
def init_dwi_pre_hmc_wf(scan_groups,
                        b0_threshold,
                        preprocess_rpe_series,
                        dwi_denoise_window,
                        denoise_method,
                        unringing_method,
                        dwi_no_biascorr,
                        no_b0_harmonization,
                        denoise_before_combining,
                        orientation,
                        omp_nthreads,
                        source_file,
                        low_mem,
                        calculate_qc=True,
                        name="pre_hmc_wf"):
    """
    This workflow merges and denoises dwi scans. The outputs from this workflow are
    a single dwi file (optionally denoised) and the corresponding bvals and bvecs.

    In the general case, a single warped group will be sent to this workflow. However,
    since eddy expects a single 4D input file, two warped groups can be processed
    separately and merged into a 4D file. This happens when ``preprocess_rpe_series`` is
    ``True``. FSL's eddy also requires data in LAS+ orientation.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from qsiprep.workflows.dwi.pre_hmc import init_dwi_pre_hmc_wf
        wf = init_dwi_pre_hmc_wf(['/completely/made/up/path/sub-01_dwi.nii.gz'],
                                  b0_threshold=100,
                                  preprocess_rpe_series=False,
                                  dwi_denoise_window=7,
                                  denoise_method='dwidenoise',
                                  unringing_method='mrdegibbs',
                                  dwi_no_biascorr=False,
                                  no_b0_harmonization=False,
                                  denoise_before_combining=True,
                                  omp_nthreads=1,
                                  low_mem=False)

    **Parameters**

        dwi_denoise_window : int
            window size in voxels for ``dwidenoise``. Must be odd. If 0,
            ``dwidenoise`` will not be run.
        unringing_method : str
            algorithm to use for removing Gibbs ringing. Options: none, mrdegibbs
        dwi_no_biascorr : bool
            skip spatial bias correction (N4) on the dwi series
        no_b0_harmonization : bool
            skip rescaling dwi scans to have matching b=0 intensities across scans
        denoise_before_combining : bool
            run ``dwidenoise`` before combining dwis. Requires ``combine_all_dwis``.
        omp_nthreads : int
            Maximum number of threads an individual process may use
        orientation : str
            'LPS' or 'LAS'
        low_mem : bool
            Write uncompressed .nii files in some cases to reduce memory usage

    **Outputs**
        dwi_file
            a (potentially-denoised) dwi file
        bvec_file
            a bvec file
        bval_file
            a bval file
        b0_indices
            list of the positions of the b0 images in the dwi series
        b0_images
            list of paths to single-volume b0 images
        original_files
            list of paths to the original files that the single volumes came from
        original_grouping
            list of warped space group ids
        raw_concatenated
            4d image of the raw inputs concatenated (for QC and visualization)
    """
    workflow = Workflow(name=name)
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'dwi_file', 'bval_file', 'bvec_file', 'original_files',
        'denoising_confounds', 'noise_images', 'bias_images', 'qc_file',
        'raw_concatenated', 'validation_reports'
    ]),
                         name='outputnode')
    dwi_series_pedir = scan_groups['dwi_series_pedir']
    dwi_series = scan_groups['dwi_series']

    # Configure the denoising window
    if denoise_method == 'dwidenoise' and dwi_denoise_window == 'auto':
        dwi_denoise_window = 5
        LOGGER.info("Automatically using 5, 5, 5 window for dwidenoise")
    if dwi_denoise_window != 'auto':
        try:
            dwi_denoise_window = int(dwi_denoise_window)
        except ValueError:
            raise Exception("dwi denoise window must be an integer or 'auto'")
    workflow.__postdesc__ = gen_denoising_boilerplate(
        denoise_method, dwi_denoise_window, unringing_method, dwi_no_biascorr,
        no_b0_harmonization, b0_threshold)

    # Special case: Two reverse PE DWI series are going to get combined for eddy
    if preprocess_rpe_series:
        workflow.__desc__ = "Images were grouped into two phase encoding polarity groups. "
        rpe_series = scan_groups['fieldmap_info']['rpe_series']
        # Merge, denoise, split, hmc on the plus series
        plus_files, minus_files = (rpe_series, dwi_series) if dwi_series_pedir.endswith("-") \
            else (dwi_series, rpe_series)
        pe_axis = dwi_series_pedir[0]
        plus_source_file = get_source_file(plus_files, suffix='_PEplus')
        merge_plus = init_merge_and_denoise_wf(
            raw_dwi_files=plus_files,
            b0_threshold=b0_threshold,
            dwi_denoise_window=dwi_denoise_window,
            unringing_method=unringing_method,
            dwi_no_biascorr=dwi_no_biascorr,
            denoise_method=denoise_method,
            no_b0_harmonization=no_b0_harmonization,
            denoise_before_combining=denoise_before_combining,
            orientation=orientation,
            omp_nthreads=omp_nthreads,
            source_file=plus_source_file,
            phase_id=pe_axis + "+ phase-encoding direction",
            calculate_qc=False,
            name="merge_plus")

        # Merge, denoise, split, hmc on the minus series
        minus_source_file = get_source_file(minus_files, suffix='_PEminus')
        merge_minus = init_merge_and_denoise_wf(
            raw_dwi_files=minus_files,
            b0_threshold=b0_threshold,
            dwi_denoise_window=dwi_denoise_window,
            denoise_method=denoise_method,
            unringing_method=unringing_method,
            dwi_no_biascorr=dwi_no_biascorr,
            no_b0_harmonization=no_b0_harmonization,
            denoise_before_combining=denoise_before_combining,
            orientation=orientation,
            omp_nthreads=omp_nthreads,
            source_file=minus_source_file,
            phase_id=pe_axis + "- phase-encoding direction",
            calculate_qc=False,
            name="merge_minus")

        # Combine the original images from the splits into one 4D series + bvals/bvecs
        pm_validation = pe.Node(niu.Merge(2), name='pm_validation')
        pm_dwis = pe.Node(niu.Merge(2), name='pm_dwis')
        pm_bids_dwis = pe.Node(niu.Merge(2), name='pm_bids_dwis')
        pm_bvals = pe.Node(niu.Merge(2), name='pm_bvals')
        pm_bvecs = pe.Node(niu.Merge(2), name='pm_bvecs')
        pm_bias = pe.Node(niu.Merge(2), name='pm_bias')
        pm_noise_images = pe.Node(niu.Merge(2), name='pm_noise')
        pm_denoising_confounds = pe.Node(niu.Merge(2),
                                         name='pm_denoising_confounds')
        pm_raw_images = pe.Node(niu.Merge(2), name='pm_raw_images')
        rpe_concat = pe.Node(MergeDWIs(
            harmonize_b0_intensities=not no_b0_harmonization,
            b0_threshold=b0_threshold),
                             name='rpe_concat')
        raw_rpe_concat = pe.Node(Merge(is_dwi=True), name='raw_rpe_concat')
        qc_wf = init_modelfree_qc_wf(dwi_files=plus_files + minus_files)

        workflow.connect([
            # combine PE+
            (merge_plus, pm_dwis, [('outputnode.merged_image', 'in1')]),
            (merge_plus, pm_bids_dwis, [('outputnode.original_files', 'in1')]),
            (merge_plus, pm_bvals, [('outputnode.merged_bval', 'in1')]),
            (merge_plus, pm_bvecs, [('outputnode.merged_bvec', 'in1')]),
            (merge_plus, pm_bias, [('outputnode.bias_images', 'in1')]),
            (merge_plus, pm_noise_images, [('outputnode.noise_images', 'in1')
                                           ]),
            (merge_plus, pm_raw_images, [('outputnode.merged_raw_image', 'in1')
                                         ]),
            (merge_plus, pm_denoising_confounds,
             [('outputnode.denoising_confounds', 'in1')]),
            (merge_plus, pm_validation, [('outputnode.validation_reports',
                                          'in1')]),

            # combine PE-
            (merge_minus, pm_dwis, [('outputnode.merged_image', 'in2')]),
            (merge_minus, pm_bids_dwis, [('outputnode.original_files', 'in2')
                                         ]),
            (merge_minus, pm_bvals, [('outputnode.merged_bval', 'in2')]),
            (merge_minus, pm_bvecs, [('outputnode.merged_bvec', 'in2')]),
            (merge_minus, pm_bias, [('outputnode.bias_images', 'in2')]),
            (merge_minus, pm_noise_images, [('outputnode.noise_images', 'in2')
                                            ]),
            (merge_minus, pm_raw_images, [('outputnode.merged_raw_image',
                                           'in2')]),
            (merge_minus, pm_denoising_confounds,
             [('outputnode.denoising_confounds', 'in2')]),
            (merge_minus, pm_validation, [('outputnode.validation_reports',
                                           'in2')]),
            (pm_dwis, rpe_concat, [('out', 'dwi_files')]),
            (pm_bids_dwis, rpe_concat, [('out', 'bids_dwi_files')]),
            (pm_bvals, rpe_concat, [('out', 'bval_files')]),
            (pm_bvecs, rpe_concat, [('out', 'bvec_files')]),
            (pm_denoising_confounds, rpe_concat, [('out',
                                                   'denoising_confounds')]),

            # Connect to the outputnode
            (rpe_concat, outputnode, [('out_dwi', 'dwi_file'),
                                      ('out_bval', 'bval_file'),
                                      ('out_bvec', 'bvec_file'),
                                      ('original_images', 'original_files'),
                                      ('merged_denoising_confounds',
                                       'denoising_confounds')]),
            (pm_validation, outputnode, [('out', 'validation_reports')]),
            (pm_noise_images, outputnode, [('out', 'noise_images')]),
            (pm_bias, outputnode, [('out', 'bias_images')]),
            (pm_raw_images, raw_rpe_concat, [('out', 'in_files')]),
            (raw_rpe_concat, outputnode, [('out_file', 'raw_concatenated')]),

            # Connect to the QC calculator
            (raw_rpe_concat, qc_wf, [('out_file', 'inputnode.dwi_file')]),
            (rpe_concat, qc_wf, [('out_bval', 'inputnode.bval_file'),
                                 ('out_bvec', 'inputnode.bvec_file')]),
            (qc_wf, outputnode, [('outputnode.qc_summary', 'qc_file')])
        ])

        workflow.__postdesc__ += "Both distortion groups were then merged into a " \
                                 "single file, as required for the FSL workflows.\n\n"
        return workflow
    workflow.__postdesc__ += "\n\n"
    merge_dwis = init_merge_and_denoise_wf(
        raw_dwi_files=dwi_series,
        b0_threshold=b0_threshold,
        dwi_denoise_window=dwi_denoise_window,
        denoise_method=denoise_method,
        unringing_method=unringing_method,
        dwi_no_biascorr=dwi_no_biascorr,
        no_b0_harmonization=no_b0_harmonization,
        denoise_before_combining=denoise_before_combining,
        orientation=orientation,
        calculate_qc=True,
        phase_id=dwi_series_pedir,
        source_file=source_file)

    workflow.connect([(merge_dwis, outputnode, [
        ('outputnode.merged_image', 'dwi_file'),
        ('outputnode.merged_bval', 'bval_file'),
        ('outputnode.merged_bvec', 'bvec_file'),
        ('outputnode.bias_images', 'bias_images'),
        ('outputnode.noise_images', 'noise_images'),
        ('outputnode.validation_reports', 'validation_reports'),
        ('outputnode.denoising_confounds', 'denoising_confounds'),
        ('outputnode.original_files', 'original_files'),
        ('outputnode.merged_raw_image', 'raw_concatenated')
    ])])

    if calculate_qc:
        qc_wf = init_modelfree_qc_wf(dwi_files=dwi_series)
        workflow.connect([(merge_dwis, qc_wf, [
            ('outputnode.merged_raw_image', 'inputnode.dwi_file'),
            ('outputnode.merged_bval', 'inputnode.bval_file'),
            ('outputnode.merged_bvec', 'inputnode.bvec_file')
        ]), (qc_wf, outputnode, [('outputnode.qc_summary', 'qc_file')])])

    return workflow
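
# Illustration of the plus/minus grouping rule used above (file names are
# hypothetical): the series whose phase-encoding code ends in '-' becomes the
# "minus" polarity group, and pe_axis is the leading axis letter.
dwi_series_pedir = 'j-'                       # e.g. A>>P along the j (y) axis
dwi_series = ['sub-01_dir-AP_dwi.nii.gz']     # hypothetical
rpe_series = ['sub-01_dir-PA_dwi.nii.gz']     # hypothetical
plus_files, minus_files = ((rpe_series, dwi_series)
                           if dwi_series_pedir.endswith('-')
                           else (dwi_series, rpe_series))
pe_axis = dwi_series_pedir[0]                 # 'j'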
Example #28
def init_asl_std_trans_wf(
    freesurfer,
    mem_gb,
    omp_nthreads,
    spaces,
    name='asl_std_trans_wf',
    use_compression=True,
    use_fieldwarp=False,
):
    """
    Sample the ASL series into standard space with a single-step resampling of the original data.

    .. important::
        This workflow provides two outputnodes.
        One output node (with name ``poutputnode``) will be parameterized in a Nipype sense
        (see `Nipype iterables
        <https://miykael.github.io/nipype_tutorial/notebooks/basic_iteration.html>`__), and a
        second node (``outputnode``) will collapse the parameterized outputs into synchronous
        lists of the output fields listed below.

    Workflow Graph
        .. workflow::
            :graph2use: colored
            :simple_form: yes

            from niworkflows.utils.spaces import SpatialReferences
            from aslprep.workflows.asl import init_asl_std_trans_wf
            wf = init_asl_std_trans_wf(
                freesurfer=True,
                mem_gb=3,
                omp_nthreads=1,
                spaces=SpatialReferences(
                    spaces=['MNI152Lin',
                            ('MNIPediatricAsym', {'cohort': '6'})],
                    checkpoint=True),
            )

    Parameters
    ----------
    freesurfer : :obj:`bool`
        Whether to generate FreeSurfer's aseg/aparc segmentations in ASL space.
    mem_gb : :obj:`float`
        Size of ASL file in GB
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use
    spaces : :py:class:`~niworkflows.utils.spaces.SpatialReferences`
        A container for storing, organizing, and parsing spatial normalizations. Composed of
        :py:class:`~niworkflows.utils.spaces.Reference` objects representing spatial references.
        Each ``Reference`` contains a space, which is a string of either a
        TemplateFlow template ID or a nonstandard reference.
    name : :obj:`str`
        Name of workflow (default: ``asl_std_trans_wf``)
    use_compression : :obj:`bool`
        Save registered ASL series as ``.nii.gz``
    use_fieldwarp : :obj:`bool`
        Include SDC warp in single-shot transform from ASL to MNI

    Inputs
    ------
    anat2std_xfm
        List of anatomical-to-standard space transforms generated during
        spatial normalization.
    asl_aparc
        FreeSurfer's ``aparc+aseg.mgz`` atlas projected into the T1w reference
        (only if ``recon-all`` was run).
    asl_aseg
        FreeSurfer's ``aseg.mgz`` atlas projected into the T1w reference
        (only if ``recon-all`` was run).
    asl_mask
        Skull-stripping mask of reference image
    asl_split
        Individual 3D volumes, not motion corrected
    fieldwarp
        a :abbr:`DFM (displacements field map)` in ITK format
    hmc_xforms
        List of affine transforms aligning each volume to ``ref_image`` in ITK format
    itk_asl_to_t1
        Affine transform from ``ref_asl_brain`` to T1 space (ITK format)
    name_source
        ASL series NIfTI file
        Used to recover original information lost during processing
    templates
        List of templates that were applied as targets during
        spatial normalization.

    Outputs
    -------
    asl_std
        ASL series, resampled to template space
    cbf_std
        CBF series (and related CBF derivatives), resampled to template space
    asl_std_ref
        Reference, contrast-enhanced summary of the ASL series, resampled to template space
    asl_mask_std
        ASL series mask in template space
    asl_aseg_std
        FreeSurfer's ``aseg.mgz`` atlas, in template space at the ASL resolution
        (only if ``recon-all`` was run)
    asl_aparc_std
        FreeSurfer's ``aparc+aseg.mgz`` atlas, in template space at the ASL resolution
        (only if ``recon-all`` was run)
    template
        Template identifiers synchronized correspondingly to previously
        described outputs.

    """
    from ...niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from ...niworkflows.func.util import init_asl_reference_wf
    from ...niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
    from ...niworkflows.interfaces.itk import MultiApplyTransforms
    from ...niworkflows.interfaces.utility import KeySelect
    from ...niworkflows.interfaces.utils import GenerateSamplingReference
    from ...niworkflows.interfaces.nilearn import Merge
    from ...niworkflows.utils.spaces import format_reference

    workflow = Workflow(name=name)
    output_references = spaces.cached.get_spaces(nonstandard=False, dim=(3, ))
    std_vol_references = [(s.fullname, s.spec) for s in spaces.references
                          if s.standard and s.dim == 3]

    if len(output_references) == 1:
        workflow.__desc__ = """\
The ASL and CBF derivatives were resampled into standard space,
generating a *preprocessed ASL run and computed CBF maps in {tpl} space*.
""".format(tpl=output_references[0])
    elif len(output_references) > 1:
        workflow.__desc__ = """\
The ASL and CBF derivatives were resampled into several standard spaces,
correspondingly generating the following *spatially-normalized,
preprocessed ASL runs*: {tpl}.
""".format(tpl=', '.join(output_references))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'anat2std_xfm',
        'cbf',
        'meancbf',
        'att',
        'score',
        'avgscore',
        'scrub',
        'basil',
        'pv',
        'attb',
        'asl_aparc',
        'asl_aseg',
        'asl_mask',
        'asl_split',
        'fieldwarp',
        'hmc_xforms',
        'itk_asl_to_t1',
        'name_source',
        'templates',
    ]),
                        name='inputnode')

    iterablesource = pe.Node(niu.IdentityInterface(fields=['std_target']),
                             name='iterablesource')
    # Generate conversions for every template+spec at the input
    iterablesource.iterables = [('std_target', std_vol_references)]

    split_target = pe.Node(niu.Function(
        function=_split_spec,
        input_names=['in_target'],
        output_names=['space', 'template', 'spec']),
                           run_without_submitting=True,
                           name='split_target')

    select_std = pe.Node(KeySelect(fields=['anat2std_xfm']),
                         name='select_std',
                         run_without_submitting=True)

    select_tpl = pe.Node(niu.Function(function=_select_template),
                         name='select_tpl',
                         run_without_submitting=True)

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256**3 voxels * 8 bytes ~ 150 MB

    mask_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'),
                           name='mask_std_tfm',
                           mem_gb=1)

    # Write corrected file in the designated output dir
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    nxforms = 3 + use_fieldwarp
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms',
                                                  'in%d' % nxforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    asl_to_std_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                   name='asl_to_std_transform',
                                   mem_gb=mem_gb * 3 * omp_nthreads,
                                   n_procs=omp_nthreads)
    cbf_to_std_transform = pe.Node(ApplyTransforms(
        interpolation="LanczosWindowedSinc",
        float=True,
        input_image_type=3,
        dimension=3),
                                   name='cbf_to_std_transform',
                                   mem_gb=mem_gb * 3 * omp_nthreads,
                                   n_procs=omp_nthreads)

    score_to_std_transform = pe.Node(ApplyTransforms(
        interpolation="LanczosWindowedSinc",
        float=True,
        input_image_type=3,
        dimension=3),
                                     name='score_to_std_transform',
                                     mem_gb=mem_gb * 3 * omp_nthreads,
                                     n_procs=omp_nthreads)

    meancbf_to_std_transform = pe.Node(ApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True),
                                       name='meancbf_to_std_transform',
                                       mem_gb=mem_gb * 3 * omp_nthreads,
                                       n_procs=omp_nthreads)

    avgscore_to_std_transform = pe.Node(ApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True),
                                        name='avgscore_to_std_transform',
                                        mem_gb=mem_gb * 3 * omp_nthreads,
                                        n_procs=omp_nthreads)

    scrub_to_std_transform = pe.Node(ApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True),
                                     name='scrub_to_std_transform',
                                     mem_gb=mem_gb * 3 * omp_nthreads,
                                     n_procs=omp_nthreads)

    basil_to_std_transform = pe.Node(ApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True),
                                     name='basil_to_std_transform',
                                     mem_gb=mem_gb * 3 * omp_nthreads,
                                     n_procs=omp_nthreads)

    pv_to_std_transform = pe.Node(ApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True),
                                  name='pv_to_std_transform',
                                  mem_gb=mem_gb * 3 * omp_nthreads,
                                  n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a reference on the target standard space
    gen_final_ref = init_asl_reference_wf(omp_nthreads=omp_nthreads,
                                          pre_mask=True)

    workflow.connect([
        (iterablesource, split_target, [('std_target', 'in_target')]),
        (iterablesource, select_tpl, [('std_target', 'template')]),
        (inputnode, select_std, [('anat2std_xfm', 'anat2std_xfm'),
                                 ('templates', 'keys')]),
        (inputnode, mask_std_tfm, [('asl_mask', 'input_image')]),
        (inputnode, gen_ref, [(('asl_split', _first), 'moving_image')]),
        (inputnode, merge_xforms, [(('itk_asl_to_t1', _aslist), 'in2')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, mask_merge_tfms, [(('itk_asl_to_t1', _aslist), 'in2')]),
        (inputnode, asl_to_std_transform, [('asl_split', 'input_image')]),
        (split_target, select_std, [('space', 'key')]),
        (select_std, merge_xforms, [('anat2std_xfm', 'in1')]),
        (select_std, mask_merge_tfms, [('anat2std_xfm', 'in1')]),
        (split_target, gen_ref, [(('spec', _is_native), 'keep_native')]),
        (select_tpl, gen_ref, [('out', 'fixed_image')]),
        (merge_xforms, asl_to_std_transform, [('out', 'transforms')]),
        (gen_ref, asl_to_std_transform, [('out_file', 'reference_image')]),
        (gen_ref, mask_std_tfm, [('out_file', 'reference_image')]),
        (mask_merge_tfms, mask_std_tfm, [('out', 'transforms')]),
        (mask_std_tfm, gen_final_ref, [('output_image', 'inputnode.asl_mask')
                                       ]),
        (asl_to_std_transform, merge, [('out_files', 'in_files')]),
        (merge, gen_final_ref, [('out_file', 'inputnode.asl_file')]),
    ])

    output_names = [
        'asl_mask_std',
        'asl_std',
        'asl_std_ref',
        'spatial_reference',
        'template',
        'cbf_std',
        'meancbf_std',
        'score_std',
        'avgscore_std',
        'scrub_std',
        'basil_std',
        'pv_std',
    ] + freesurfer * ['asl_aseg_std', 'asl_aparc_std']

    poutputnode = pe.Node(niu.IdentityInterface(fields=output_names),
                          name='poutputnode')
    workflow.connect([
        # Connecting outputnode
        (iterablesource, poutputnode, [(('std_target', format_reference),
                                        'spatial_reference')]),
        (merge, poutputnode, [('out_file', 'asl_std')]),
        (gen_final_ref, poutputnode, [('outputnode.ref_image', 'asl_std_ref')
                                      ]),
        (mask_std_tfm, poutputnode, [('output_image', 'asl_mask_std')]),
        (select_std, poutputnode, [('key', 'template')]),
        (mask_merge_tfms, cbf_to_std_transform, [('out', 'transforms')]),
        (gen_ref, cbf_to_std_transform, [('out_file', 'reference_image')]),
        (inputnode, cbf_to_std_transform, [('cbf', 'input_image')]),
        (cbf_to_std_transform, poutputnode, [('output_image', 'cbf_std')]),
        (mask_merge_tfms, score_to_std_transform, [('out', 'transforms')]),
        (gen_ref, score_to_std_transform, [('out_file', 'reference_image')]),
        (inputnode, score_to_std_transform, [('score', 'input_image')]),
        (score_to_std_transform, poutputnode, [('output_image', 'score_std')]),
        (mask_merge_tfms, meancbf_to_std_transform, [('out', 'transforms')]),
        (gen_ref, meancbf_to_std_transform, [('out_file', 'reference_image')]),
        (inputnode, meancbf_to_std_transform, [('meancbf', 'input_image')]),
        (meancbf_to_std_transform, poutputnode, [('output_image',
                                                  'meancbf_std')]),
        (mask_merge_tfms, avgscore_to_std_transform, [('out', 'transforms')]),
        (gen_ref, avgscore_to_std_transform, [('out_file', 'reference_image')
                                              ]),
        (inputnode, avgscore_to_std_transform, [('avgscore', 'input_image')]),
        (avgscore_to_std_transform, poutputnode, [('output_image',
                                                   'avgscore_std')]),
        (mask_merge_tfms, scrub_to_std_transform, [('out', 'transforms')]),
        (gen_ref, scrub_to_std_transform, [('out_file', 'reference_image')]),
        (inputnode, scrub_to_std_transform, [('scrub', 'input_image')]),
        (scrub_to_std_transform, poutputnode, [('output_image', 'scrub_std')]),
        (mask_merge_tfms, basil_to_std_transform, [('out', 'transforms')]),
        (gen_ref, basil_to_std_transform, [('out_file', 'reference_image')]),
        (inputnode, basil_to_std_transform, [('basil', 'input_image')]),
        (basil_to_std_transform, poutputnode, [('output_image', 'basil_std')]),
        (mask_merge_tfms, pv_to_std_transform, [('out', 'transforms')]),
        (gen_ref, pv_to_std_transform, [('out_file', 'reference_image')]),
        (inputnode, pv_to_std_transform, [('pv', 'input_image')]),
        (pv_to_std_transform, poutputnode, [('output_image', 'pv_std')]),
    ])

    if freesurfer:
        # Sample the parcellation files to functional space
        aseg_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'),
                               name='aseg_std_tfm',
                               mem_gb=1)
        aparc_std_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'),
                                name='aparc_std_tfm',
                                mem_gb=1)

        workflow.connect([
            (inputnode, aseg_std_tfm, [('asl_aseg', 'input_image')]),
            (inputnode, aparc_std_tfm, [('asl_aparc', 'input_image')]),
            (select_std, aseg_std_tfm, [('anat2std_xfm', 'transforms')]),
            (select_std, aparc_std_tfm, [('anat2std_xfm', 'transforms')]),
            (gen_ref, aseg_std_tfm, [('out_file', 'reference_image')]),
            (gen_ref, aparc_std_tfm, [('out_file', 'reference_image')]),
            (aseg_std_tfm, poutputnode, [('output_image', 'asl_aseg_std')]),
            (aparc_std_tfm, poutputnode, [('output_image', 'asl_aparc_std')]),
        ])

    # Connect parametric outputs to a Join outputnode
    outputnode = pe.JoinNode(niu.IdentityInterface(fields=output_names),
                             name='outputnode',
                             joinsource='iterablesource')
    workflow.connect([
        (poutputnode, outputnode, [(f, f) for f in output_names]),
    ])
    return workflow
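
# Self-contained demo of the poutputnode/outputnode pattern used above:
# iterables fan a node out into parameterized copies, and a JoinNode keyed on
# the iterable source collapses them back into one synchronized list.
import nipype.interfaces.utility as niu
import nipype.pipeline.engine as pe

src = pe.Node(niu.IdentityInterface(fields=['val']), name='src')
src.iterables = [('val', [1, 2, 3])]
par = pe.Node(niu.IdentityInterface(fields=['val']), name='par')
join = pe.JoinNode(niu.IdentityInterface(fields=['val']),
                   joinsource='src', joinfield=['val'], name='join')
demo = pe.Workflow(name='join_demo')
demo.connect([(src, par, [('val', 'val')]),
              (par, join, [('val', 'val')])])
# After demo.run(), the join node's 'val' input is the list [1, 2, 3].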
Example #29
def epi_pipeline(name="susceptibility_distortion_correction_using_t1"):
    """
    This workflow allows to correct for echo-planareinduced susceptibility artifacts without fieldmap
    (e.g. ADNI Database) by elastically register DWIs to their respective baseline T1-weighted
    structural scans using an inverse consistent registration algorithm with a mutual information cost
    function (SyN algorithm). This workflow allows also a coregistration of DWIs with their respective
    baseline T1-weighted structural scans in order to latter combine tracks and cortex parcelation.
    ..  warning:: This workflow rotates the `b`-vectors'
    .. References
      .. Nir et al. (Neurobiology of Aging 2015)- Connectivity network measures predict volumetric atrophy in mild cognitive impairment

        Leow et al. (IEEE Trans Med Imaging 2007)- Statistical Properties of Jacobian Maps and the Realization of Unbiased Large Deformation Nonlinear Image Registration
    Example
    -------
    >>> epi = epi_pipeline()
    >>> epi.inputs.inputnode.DWI = 'DWI.nii'
    >>> epi.inputs.inputnode.bvec = 'bvec.txt'
    >>> epi.inputs.inputnode.T1 = 'T1.nii'
    >>> epi.run() # doctest: +SKIP
    """

    import nipype.interfaces.c3 as c3
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    from clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils import (
        ants_combin_transform,
        ants_registration_syn_quick,
        ants_warp_image_multi_transform,
        change_itk_transform_type,
        create_jacobian_determinant_image,
        expend_matrix_list,
        rotate_bvecs,
    )

    inputnode = pe.Node(niu.IdentityInterface(fields=["T1", "DWI", "bvec"]),
                        name="inputnode")

    split = pe.Node(fsl.Split(dimension="t"), name="SplitDWIs")
    pick_ref = pe.Node(niu.Select(), name="Pick_b0")
    pick_ref.inputs.index = [0]

    flirt_b0_2_T1 = pe.Node(interface=fsl.FLIRT(dof=6), name="flirt_B0_2_T1")
    flirt_b0_2_T1.inputs.interp = "spline"
    flirt_b0_2_T1.inputs.cost = "normmi"
    flirt_b0_2_T1.inputs.cost_func = "normmi"

    apply_xfm = pe.Node(interface=fsl.preprocess.ApplyXFM(), name="apply_xfm")
    apply_xfm.inputs.apply_xfm = True

    expend_matrix = pe.Node(
        interface=niu.Function(
            input_names=["in_matrix", "in_bvec"],
            output_names=["out_matrix_list"],
            function=expend_matrix_list,
        ),
        name="expend_matrix",
    )

    rot_bvec = pe.Node(
        niu.Function(
            input_names=["in_matrix", "in_bvec"],
            output_names=["out_file"],
            function=rotate_bvecs,
        ),
        name="Rotate_Bvec",
    )

    antsRegistrationSyNQuick = pe.Node(
        interface=niu.Function(
            input_names=["fix_image", "moving_image"],
            output_names=[
                "image_warped",
                "affine_matrix",
                "warp",
                "inverse_warped",
                "inverse_warp",
            ],
            function=ants_registration_syn_quick,
        ),
        name="antsRegistrationSyNQuick",
    )

    c3d_flirt2ants = pe.Node(c3.C3dAffineTool(), name="fsl_reg_2_itk")
    c3d_flirt2ants.inputs.itk_transform = True
    c3d_flirt2ants.inputs.fsl2ras = True

    change_transform = pe.Node(
        niu.Function(
            input_names=["input_affine_file"],
            output_names=["updated_affine_file"],
            function=change_itk_transform_type,
        ),
        name="change_transform_type",
    )

    merge_transform = pe.Node(niu.Merge(3), name="MergeTransforms")

    apply_transform = pe.MapNode(
        interface=niu.Function(
            input_names=["fix_image", "moving_image", "ants_warp_affine"],
            output_names=["out_warp_field", "out_warped"],
            function=ants_combin_transform,
        ),
        iterfield=["moving_image"],
        name="warp_filed",
    )

    jacobian = pe.MapNode(
        interface=niu.Function(
            input_names=["imageDimension", "deformationField", "outputImage"],
            output_names=["outputImage"],
            function=create_jacobian_determinant_image,
        ),
        iterfield=["deformationField"],
        name="jacobian",
    )

    jacobian.inputs.imageDimension = 3
    jacobian.inputs.outputImage = "Jacobian_image.nii.gz"

    jacmult = pe.MapNode(
        fsl.MultiImageMaths(op_string="-mul %s"),
        iterfield=["in_file", "operand_files"],
        name="ModulateDWIs",
    )

    thres = pe.MapNode(fsl.Threshold(thresh=0.0),
                       iterfield=["in_file"],
                       name="RemoveNegative")

    merge = pe.Node(fsl.Merge(dimension="t"), name="MergeDWIs")

    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "DWI_2_T1_Coregistration_matrix",
            "epi_correction_deformation_field",
            "epi_correction_affine_transform",
            "epi_correction_image_warped",
            "DWIs_epicorrected",
            "warp_epi",
            "out_bvec",
        ]),
        name="outputnode",
    )

    wf = pe.Workflow(name=name)

    wf.connect([(inputnode, split, [("DWI", "in_file")])])
    wf.connect([(split, pick_ref, [("out_files", "inlist")])])
    wf.connect([(pick_ref, flirt_b0_2_T1, [("out", "in_file")])])
    wf.connect([(inputnode, flirt_b0_2_T1, [("T1", "reference")])])
    wf.connect([(inputnode, rot_bvec, [("bvec", "in_bvec")])])
    wf.connect([(flirt_b0_2_T1, expend_matrix, [("out_matrix_file",
                                                 "in_matrix")])])
    wf.connect([(inputnode, expend_matrix, [("bvec", "in_bvec")])])
    wf.connect([(expend_matrix, rot_bvec, [("out_matrix_list", "in_matrix")])])
    wf.connect([(inputnode, antsRegistrationSyNQuick, [("T1", "fix_image")])])
    wf.connect([(flirt_b0_2_T1, antsRegistrationSyNQuick, [("out_file",
                                                            "moving_image")])])

    wf.connect([(inputnode, c3d_flirt2ants, [("T1", "reference_file")])])
    wf.connect([(pick_ref, c3d_flirt2ants, [("out", "source_file")])])
    wf.connect([(flirt_b0_2_T1, c3d_flirt2ants, [("out_matrix_file",
                                                  "transform_file")])])
    wf.connect([(c3d_flirt2ants, change_transform, [("itk_transform",
                                                     "input_affine_file")])])

    wf.connect([(antsRegistrationSyNQuick, merge_transform, [("warp", "in1")])
                ])
    wf.connect([(antsRegistrationSyNQuick, merge_transform, [("affine_matrix",
                                                              "in2")])])
    wf.connect([(change_transform, merge_transform, [("updated_affine_file",
                                                      "in3")])])
    wf.connect([(inputnode, apply_transform, [("T1", "fix_image")])])
    wf.connect([(split, apply_transform, [("out_files", "moving_image")])])

    wf.connect([(merge_transform, apply_transform, [("out", "ants_warp_affine")
                                                    ])])
    wf.connect([(apply_transform, jacobian, [("out_warp_field",
                                              "deformationField")])])
    wf.connect([(apply_transform, jacmult, [("out_warped", "operand_files")])])
    wf.connect([(jacobian, jacmult, [("outputImage", "in_file")])])
    wf.connect([(jacmult, thres, [("out_file", "in_file")])])
    wf.connect([(thres, merge, [("out_file", "in_files")])])

    wf.connect([(merge, outputnode, [("merged_file", "DWIs_epicorrected")])])
    wf.connect([(
        flirt_b0_2_T1,
        outputnode,
        [("out_matrix_file", "DWI_2_T1_Coregistration_matrix")],
    )])
    wf.connect([(
        antsRegistrationSyNQuick,
        outputnode,
        [
            ("warp", "epi_correction_deformation_field"),
            ("affine_matrix", "epi_correction_affine_transform"),
            ("image_warped", "epi_correction_image_warped"),
        ],
    )])
    wf.connect([(merge_transform, outputnode, [("out", "warp_epi")])])
    wf.connect([(rot_bvec, outputnode, [("out_file", "out_bvec")])])

    return wf
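
# Hedged sketch of what a helper like rotate_bvecs typically does: apply only
# the rotational component of each per-volume affine to each gradient vector
# (an illustration, not the clinica implementation imported above).
import numpy as np

def rotate_one_bvec(bvec, affine):
    """Rotate a unit gradient vector by the rotation part of a 4x4 affine."""
    # A polar decomposition via SVD strips scaling/shear from the upper-left
    # 3x3 block, leaving a pure rotation.
    u, _, vt = np.linalg.svd(affine[:3, :3])
    rotated = (u @ vt) @ np.asarray(bvec, dtype=float)
    norm = np.linalg.norm(rotated)
    return rotated / norm if norm > 0 else rotated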
Example #30
def init_dwi_model_hmc_wf(modelname,
                          transform,
                          mem_gb,
                          omp_nthreads,
                          num_iters=2,
                          name='dwi_model_hmc_wf',
                          metric="Mattes"):
    """Create a model-based hmc workflow.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from qsiprep.workflows.dwi.hmc import init_dwi_model_hmc_wf
        wf = init_dwi_model_hmc_wf(modelname='3dSHORE',
                                   transform='Affine',
                                   num_iters=2,
                                   mem_gb=3,
                                   omp_nthreads=1)

    **Parameters**

        modelname : str
            one of the models for reconstructing an EAP and producing
            signal estimates used for motion correction
        transform : str
            either "Rigid" or "Affine". Choosing "Affine" may help correct
            eddy-current warping
        mem_gb : float
            estimated memory (in GB) available to the workflow's nodes
        omp_nthreads : int
            maximum number of threads per process
        num_iters : int
            the number of times the model will be updated with transformed data
        name : str
            name of the workflow
        metric : str
            similarity metric for the registration (default "Mattes")
    **Inputs**

        dwi_files
            list of 3d dwi files
        b0_indices
            list of which indices in `dwi_files` are b0 images
        initial_transforms
            list of b0-based transforms from dwis to the b0 template
        warped_b0_images
            list of aligned b0 images
        warped_b0_mask
            brain mask in the aligned-b0 space
        bvec_files
            list of bvec files corresponding to `dwi_files`
        bval_files
            list of bval files corresponding to `dwi_files`

    **Outputs**

        hmc_transforms
            list of transforms, one per file in `dwi_files`
        aligned_dwis: list
            motion-corrected DWI images, aligned to the b0 template
        model_predicted_images: list
            Model-predicted images reverse-transformed into alignment with ``dwi_files``
        cnr_image: str
            If hmc_model is 'none' this is the tSNR of the b=0 images. Otherwise it is the
            model fit divided by the model error in each voxel.
        optimization_data: str
            CSV file tracking the motion estimates across SHORELine iterations


    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'dwi_files', 'b0_indices', 'initial_transforms', 'bvec_files',
        'bval_files', 'warped_b0_images', 'warped_b0_mask'
    ]),
                        name='inputnode')
    # 'aligned_dwis' is connected at the bottom of this workflow, so it must
    # be declared as an outputnode field.
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'hmc_transforms', 'aligned_dwis', 'model_predicted_images',
        'cnr_image', 'optimization_data'
    ]),
                         name='outputnode')
    workflow.__desc__ = "The the SHORELine method was used to estimate head motion in b>0 " \
                        "images. This entails leaving out each b>0 image and reconstructing " \
                        "the others using 3dSHORE [@merlet3dshore]. The signal for the left-" \
                        "out image serves as the registration target. A total of {num_iters} " \
                        "iterations were run using a {transform} transform. ".format(
                            transform=transform, num_iters=num_iters)
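
    # Conceptually, each iteration performs leave-one-out registration. The
    # real logic lives in init_hmc_model_iteration_wf; this outline (with
    # made-up helper names) is only illustrative:
    #
    #   for i in non_b0_indices:
    #       model = fit_model(dwis_except(i), bvals, bvecs)   # e.g. 3dSHORE
    #       target = model.predict(bvals[i], bvecs[i])        # synthetic image
    #       hmc_transforms[i] = register(dwis[i], target)     # Rigid/Affine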

    # Merge b0s into a single volume, put the non-b0 dwis into a list
    extract_dwis = pe.Node(ExtractDWIsForModel(), name="extract_dwis")

    # Initialize with the transforms provided
    b0_based_image_transforms = pe.MapNode(
        ants.ApplyTransforms(interpolation="BSpline"),
        iterfield=['input_image', 'transforms'],
        name="b0_based_image_transforms")
    # Rotate the original bvecs as well
    b0_based_bvec_transforms = pe.Node(GradientRotation(),
                                       name="b0_based_bvec_transforms")

    # Create a mask and an average from the aligned b0 images
    b0_mean = pe.Node(B0Mean(), name='b0_mean')

    # Start building and connecting the model iterations
    initial_model_iteration = init_hmc_model_iteration_wf(
        modelname,
        transform,
        precision="coarse",
        name="initial_model_iteration")

    # Collect motion estimates across iterations
    collect_motion_params = pe.Node(niu.Merge(num_iters),
                                    name="collect_motion_params")

    workflow.connect([
        (inputnode, extract_dwis, [('dwi_files', 'dwi_files'),
                                   ('bval_files', 'bval_files'),
                                   ('bvec_files', 'bvec_files'),
                                   ('initial_transforms', 'transforms'),
                                   ('b0_indices', 'b0_indices')]),
        (inputnode, b0_mean, [('warped_b0_images', 'b0_images')]),
        (extract_dwis, b0_based_bvec_transforms,
         [('model_bvecs', 'bvec_files'), ('model_bvals', 'bval_files'),
          ('transforms', 'affine_transforms')]),
        (extract_dwis, b0_based_image_transforms,
         [('model_dwi_files', 'input_image'), ('transforms', 'transforms')]),
        (b0_mean, b0_based_image_transforms, [('average_image',
                                               'reference_image')]),

        # Connect the first iteration
        (extract_dwis, initial_model_iteration,
         [('model_dwi_files', 'inputnode.original_dwi_files'),
          ('model_bvecs', 'inputnode.original_bvecs'),
          ('model_bvals', 'inputnode.bvals')]),
        (b0_based_image_transforms, initial_model_iteration,
         [('output_image', 'inputnode.approx_aligned_dwi_files')]),
        (b0_based_bvec_transforms, initial_model_iteration,
         [('bvecs', 'inputnode.approx_aligned_bvecs')]),
        (b0_mean, initial_model_iteration, [('average_image',
                                             'inputnode.b0_mean')]),
        (inputnode, initial_model_iteration, [('warped_b0_mask',
                                               'inputnode.b0_mask')]),
        (initial_model_iteration, collect_motion_params,
         [('outputnode.motion_params', 'in1')])
    ])

    model_iterations = [initial_model_iteration]
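    # Chain the remaining iterations: each re-fits the model on the previous
    # iteration's aligned data and sends its motion estimates to the next
    # Merge slot (in2, in3, ...) of collect_motion_params.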
    for iteration_num in range(num_iters - 1):
        iteration_name = 'shoreline_iteration%03d' % (iteration_num + 1)
        motion_key = 'in%d' % (iteration_num + 2)
        model_iterations.append(
            init_hmc_model_iteration_wf(modelname=modelname,
                                        transform=transform,
                                        precision="precise",
                                        name=iteration_name))
        workflow.connect([
            (model_iterations[-2], model_iterations[-1],
             [('outputnode.aligned_dwis', 'inputnode.approx_aligned_dwi_files'),
              ('outputnode.aligned_bvecs', 'inputnode.approx_aligned_bvecs')]),
            (extract_dwis, model_iterations[-1],
             [('model_dwi_files', 'inputnode.original_dwi_files'),
              ('model_bvals', 'inputnode.bvals'),
              ('model_bvecs', 'inputnode.original_bvecs')]),
            (b0_mean, model_iterations[-1],
             [('average_image', 'inputnode.b0_mean')]),
            (inputnode, model_iterations[-1],
             [('warped_b0_mask', 'inputnode.b0_mask')]),
            (model_iterations[-1], collect_motion_params,
             [('outputnode.motion_params', motion_key)]),
        ])

    # Return to the original, b0-interspersed ordering
    reorder_dwi_xforms = pe.Node(ReorderOutputs(), name='reorder_dwi_xforms')

    # Create a report:
    shoreline_report = pe.Node(SHORELineReport(), name='shoreline_report')
    ds_report_shoreline_gif = pe.Node(
        DerivativesDataSink(suffix="shoreline_animation"),
        name='ds_report_shoreline_gif',
        mem_gb=1,
        run_without_submitting=True)

    calculate_cnr = pe.Node(CalculateCNR(), name='calculate_cnr')

    if num_iters > 1:
        summarize_iterations = pe.Node(IterationSummary(),
                                       name='summarize_iterations')
        ds_report_iteration_plot = pe.Node(
            DerivativesDataSink(suffix="shoreline_iterdata"),
            name='ds_report_iteration_plot',
            mem_gb=0.1,
            run_without_submitting=True)
        workflow.connect([
            (collect_motion_params, summarize_iterations,
             [('out', 'collected_motion_files')]),
            (summarize_iterations, ds_report_iteration_plot,
             [('plot_file', 'in_file')]),
            (summarize_iterations, outputnode,
             [('iteration_summary_file', 'optimization_data')]),
            (summarize_iterations, shoreline_report,
             [('iteration_summary_file', 'iteration_summary')]),
        ])

    workflow.connect([
        (model_iterations[-1], reorder_dwi_xforms,
         [('outputnode.hmc_transforms', 'model_based_transforms'),
          ('outputnode.predicted_dwis', 'model_predicted_images'),
          ('outputnode.aligned_dwis', 'warped_dwi_images')]),
        (b0_mean, reorder_dwi_xforms, [('average_image', 'b0_mean')]),
        (inputnode, reorder_dwi_xforms,
         [('warped_b0_images', 'warped_b0_images'),
          ('b0_indices', 'b0_indices'),
          ('initial_transforms', 'initial_transforms')]),
        (reorder_dwi_xforms, outputnode,
         [('hmc_warped_images', 'aligned_dwis'),
          ('full_transforms', 'hmc_transforms'),
          ('full_predicted_dwi_series', 'model_predicted_images')]),
        (inputnode, shoreline_report, [('dwi_files', 'original_images')]),
        (reorder_dwi_xforms, calculate_cnr,
         [('hmc_warped_images', 'hmc_warped_images'),
          ('full_predicted_dwi_series', 'predicted_images')]),
        (inputnode, calculate_cnr, [('warped_b0_mask', 'mask_image')]),
        (calculate_cnr, outputnode, [('cnr_image', 'cnr_image')]),
        (reorder_dwi_xforms, shoreline_report,
         [('full_predicted_dwi_series', 'model_predicted_images'),
          ('hmc_warped_images', 'registered_images')]),
        (shoreline_report, ds_report_shoreline_gif,
         [('plot_file', 'in_file')]),
    ])

    return workflow
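
A minimal usage sketch (not part of the source): construct the workflow and feed its inputnode directly. The placeholder file names below are hypothetical; in practice these inputs are wired in by a parent workflow.

from qsiprep.workflows.dwi.hmc import init_dwi_model_hmc_wf

hmc_wf = init_dwi_model_hmc_wf(modelname='3dSHORE', transform='Rigid',
                               mem_gb=3, omp_nthreads=4, num_iters=2)

# Hypothetical pre-split single volumes and b0-based initialization.
hmc_wf.inputs.inputnode.dwi_files = ['vol0000.nii.gz', 'vol0001.nii.gz']
hmc_wf.inputs.inputnode.bval_files = ['vol0000.bval', 'vol0001.bval']
hmc_wf.inputs.inputnode.bvec_files = ['vol0000.bvec', 'vol0001.bvec']
hmc_wf.inputs.inputnode.b0_indices = [0]
hmc_wf.inputs.inputnode.initial_transforms = ['vol0000_xfm.mat', 'vol0001_xfm.mat']
hmc_wf.inputs.inputnode.warped_b0_images = ['b0_warped.nii.gz']
hmc_wf.inputs.inputnode.warped_b0_mask = 'b0_brain_mask.nii.gz'

hmc_wf.run()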