Example #1
def headmsk_wf(name='HeadMaskWorkflow', use_bet=True):
    """
    Computes a head mask as in [Mortamet2009]_.

    .. workflow::

        from mriqc.workflows.anatomical import headmsk_wf
        wf = headmsk_wf()

    """

    has_dipy = False
    try:
        from dipy.denoise import nlmeans
        has_dipy = True
    except ImportError:
        pass

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'in_segm']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']), name='outputnode')

    if use_bet or not has_dipy:
        # Use FSL BET when explicitly requested or when dipy is not available
        bet = pe.Node(fsl.BET(surfaces=True), name='fsl_bet')
        workflow.connect([
            (inputnode, bet, [('in_file', 'in_file')]),
            (bet, outputnode, [('outskin_mask_file', 'out_file')])
        ])

    else:
        from niworkflows.nipype.interfaces.dipy import Denoise
        enhance = pe.Node(niu.Function(
            input_names=['in_file'], output_names=['out_file'], function=_enhance), name='Enhance')
        estsnr = pe.Node(niu.Function(
            input_names=['in_file', 'seg_file'], output_names=['out_snr'],
            function=_estimate_snr), name='EstimateSNR')
        denoise = pe.Node(Denoise(), name='Denoise')
        gradient = pe.Node(niu.Function(
            input_names=['in_file', 'snr'], output_names=['out_file'], function=image_gradient), name='Grad')
        thresh = pe.Node(niu.Function(
            input_names=['in_file', 'in_segm'], output_names=['out_file'], function=gradient_threshold),
                         name='GradientThreshold')

        workflow.connect([
            (inputnode, estsnr, [('in_file', 'in_file'),
                                 ('in_segm', 'seg_file')]),
            (estsnr, denoise, [('out_snr', 'snr')]),
            (inputnode, enhance, [('in_file', 'in_file')]),
            (enhance, denoise, [('out_file', 'in_file')]),
            (estsnr, gradient, [('out_snr', 'snr')]),
            (denoise, gradient, [('out_file', 'in_file')]),
            (inputnode, thresh, [('in_segm', 'in_segm')]),
            (gradient, thresh, [('out_file', 'in_file')]),
            (thresh, outputnode, [('out_file', 'out_file')])
        ])

    return workflow
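
A minimal usage sketch for the factory above, assuming mriqc (and FSL, for the BET branch) are installed; the working directory and input paths are hypothetical placeholders.

from mriqc.workflows.anatomical import headmsk_wf

wf = headmsk_wf(use_bet=True)
wf.base_dir = '/tmp/headmsk_work'                       # hypothetical working directory
wf.inputs.inputnode.in_file = 'sub-01_T1w.nii.gz'       # hypothetical anatomical image
wf.inputs.inputnode.in_segm = 'sub-01_dtissue.nii.gz'   # hypothetical tissue segmentation
wf.run()
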
Example #2
def init_fsl_bbr_wf(bold2t1w_dof, report, name='fsl_bbr_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            'fs_2_t1_transform',
            'subjects_dir',
            'subject_id',  # BBRegister
            't1_seg',
            't1_brain'
        ]),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['out_matrix_file', 'out_reg_file', 'out_report', 'final_cost']),
                         name='outputnode')

    wm_mask = pe.Node(niu.Function(function=extract_wm), name='wm_mask')
    _FLIRT = FLIRTRPT if report else fsl.FLIRT
    flt_bbr_init = pe.Node(fsl.FLIRT(dof=6), name='flt_bbr_init')
    flt_bbr = pe.Node(_FLIRT(cost_func='bbr', dof=bold2t1w_dof, save_log=True),
                      name='flt_bbr')
    flt_bbr.inputs.schedule = op.join(os.getenv('FSLDIR'),
                                      'etc/flirtsch/bbr.sch')

    def get_final_cost(in_file):
        from niworkflows.nipype import logging
        with open(in_file, 'r') as fobj:
            for line in fobj:
                if line.startswith('>> print U:1'):
                    costs = next(fobj).split()
                    return float(costs[0])
        logger = logging.getLogger('interface')
        logger.error('No cost report found in log file. Please report this '
                     'issue, with contents of {}'.format(in_file))

    get_cost = pe.Node(niu.Function(function=get_final_cost),
                       name='get_cost',
                       run_without_submitting=True)

    workflow.connect([
        (inputnode, wm_mask, [('t1_seg', 'in_seg')]),
        (inputnode, flt_bbr_init, [('in_file', 'in_file'),
                                   ('t1_brain', 'reference')]),
        (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (inputnode, flt_bbr, [('in_file', 'in_file'),
                              ('t1_brain', 'reference')]),
        (wm_mask, flt_bbr, [('out', 'wm_seg')]),
        (flt_bbr, outputnode, [('out_matrix_file', 'out_matrix_file')]),
        (flt_bbr, get_cost, [('out_log', 'in_file')]),
        (get_cost, outputnode, [('out', 'final_cost')]),
    ])

    if report:
        flt_bbr.inputs.generate_report = True
        workflow.connect([(flt_bbr, outputnode, [('out_report', 'out_report')])
                          ])

    return workflow
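
A small standalone check replicating the log-parsing logic of ``get_final_cost`` above, run on a hypothetical FLIRT log snippet (the cost values are made up).

import io

log_text = "flirt output ...\n>> print U:1\n0.4231 0.0117 0.0\n"
fobj = io.StringIO(log_text)
final_cost = None
for line in fobj:
    if line.startswith('>> print U:1'):
        # the cost vector is printed on the following line; keep its first entry
        final_cost = float(next(fobj).split()[0])
        break
print(final_cost)  # 0.4231
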
Example #3
def airmsk_wf(name='AirMaskWorkflow'):
    """
    Implements Step 1 of [Mortamet2009]_.

    .. workflow::

        from mriqc.workflows.anatomical import airmsk_wf
        wf = airmsk_wf()

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_mask', 'head_mask', 'inverse_composite_transform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'artifact_msk', 'rot_mask']),
        name='outputnode')

    rotmsk = pe.Node(RotationMask(), name='RotationMask')

    invt = pe.Node(ants.ApplyTransforms(dimension=3,
                                        default_value=0,
                                        interpolation='Linear',
                                        float=True),
                   name='invert_xfm')
    invt.inputs.input_image = op.join(get_mni_icbm152_nlin_asym_09c(),
                                      '1mm_headmask.nii.gz')

    binarize = pe.Node(niu.Function(function=_binarize), name='Binarize')

    qi1 = pe.Node(ArtifactMask(), name='ArtifactMask')

    workflow.connect([(inputnode, rotmsk, [('in_file', 'in_file')]),
                      (inputnode, qi1, [('in_file', 'in_file'),
                                        ('head_mask', 'head_mask')]),
                      (rotmsk, qi1, [('out_file', 'rot_mask')]),
                      (inputnode, invt, [('in_mask', 'reference_image'),
                                         ('inverse_composite_transform',
                                          'transforms')]),
                      (invt, binarize, [('output_image', 'in_file')]),
                      (binarize, qi1, [('out', 'nasion_post_mask')]),
                      (qi1, outputnode, [('out_air_msk', 'out_file'),
                                         ('out_art_msk', 'artifact_msk')]),
                      (rotmsk, outputnode, [('out_file', 'rot_mask')])])
    return workflow
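
A minimal usage sketch, assuming mriqc and ANTs are available (including the bundled MNI template); all input paths, including the inverse composite transform from a prior spatial normalization, are hypothetical placeholders.

from mriqc.workflows.anatomical import airmsk_wf

wf = airmsk_wf()
wf.inputs.inputnode.in_file = 'sub-01_T1w.nii.gz'
wf.inputs.inputnode.in_mask = 'sub-01_brainmask.nii.gz'
wf.inputs.inputnode.head_mask = 'sub-01_headmask.nii.gz'
wf.inputs.inputnode.inverse_composite_transform = 'ants_inv_composite.h5'
wf.run()
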
Example #4
def init_ica_aroma_wf(name='ica_aroma_wf', ignore_aroma_err=False):
    '''
    From: https://github.com/rhr-pruim/ICA-AROMA
    Description:
    ICA-AROMA (i.e. ‘ICA-based Automatic Removal Of Motion Artifacts’) concerns
    a data-driven method to identify and remove motion-related independent
    components from fMRI data.

    Preconditions/Assumptions:
    The input fmri bold file is in standard space
    (for ease of interfacing with the original ICA-AROMA code)

    Steps:
    1) smooth data using SUSAN
    2) run melodic outside of ICA_AROMA to generate the report
    3) run ICA_AROMA
    4) print identified motion components (aggressive) to tsv
    5) pass classified_motion_ICs and melodic_mix for user to complete nonaggr denoising
    '''
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(utility.IdentityInterface(
        fields=['epi_mni', 'movpar_file', 'epi_mask_mni']),
                        name='inputnode')

    outputnode = pe.Node(utility.IdentityInterface(fields=[
        'aroma_confounds', 'out_report', 'aroma_noise_ics', 'melodic_mix'
    ]),
                         name='outputnode')

    # helper functions to configure the smoothing node (SUSAN)
    def getbtthresh(medianval):
        return 0.75 * medianval

    def getusans_func(image, thresh):
        return [tuple([image, thresh])]

    calc_median_val = pe.Node(fsl.ImageStats(op_string='-k %s -p 50'),
                              name='calc_median_val')

    calc_epi_mean = pe.Node(fsl.MeanImage(), name='calc_epi_mean')

    brightness_threshold = pe.Node(utility.Function(function=getbtthresh,
                                                    input_names=['medianval'],
                                                    output_names=['thresh']),
                                   name='brightness_threshold')

    getusans = pe.Node(utility.Function(function=getusans_func,
                                        input_names=['image', 'thresh'],
                                        output_names=['usans']),
                       name='getusans')

    smooth = pe.Node(fsl.SUSAN(fwhm=6.0), name='smooth')

    # melodic node
    melodic = pe.Node(nws.MELODICRPT(no_bet=True,
                                     no_mm=True,
                                     generate_report=True),
                      name="melodic")

    # ica_aroma node
    ica_aroma = pe.Node(aroma.ICA_AROMA(denoise_type='no'), name='ica_aroma')

    # extract the confound ICs from the results
    ica_aroma_confound_extraction = pe.Node(
        utility.Function(
            function=get_ica_confounds,
            input_names=['ica_out_dir', 'ignore_aroma_err'],
            output_names=['aroma_confounds', 'aroma_noise_ics',
                          'melodic_mix']),
        name='ica_aroma_confound_extraction')
    ica_aroma_confound_extraction.inputs.ignore_aroma_err = ignore_aroma_err

    # connect the nodes
    workflow.connect([
        # Connect input nodes to complete smoothing
        (inputnode, calc_median_val, [('epi_mni', 'in_file'),
                                      ('epi_mask_mni', 'mask_file')]),
        (calc_median_val, brightness_threshold, [('out_stat', 'medianval')]),
        (inputnode, calc_epi_mean, [('epi_mni', 'in_file')]),
        (calc_epi_mean, getusans, [('out_file', 'image')]),
        (calc_median_val, getusans, [('out_stat', 'thresh')]),
        (inputnode, smooth, [('epi_mni', 'in_file')]),
        (getusans, smooth, [('usans', 'usans')]),
        (brightness_threshold, smooth, [('thresh', 'brightness_threshold')]),
        # connect smooth to melodic
        (smooth, melodic, [('smoothed_file', 'in_files')]),
        (inputnode, melodic, [('epi_mask_mni', 'report_mask'),
                              ('epi_mask_mni', 'mask')]),
        # connect nodes to ICA-AROMA
        (smooth, ica_aroma, [('smoothed_file', 'in_file')]),
        (inputnode, ica_aroma, [('movpar_file', 'motion_parameters')]),
        (melodic, ica_aroma, [('out_dir', 'melodic_dir')]),
        # generate TSVs from ICA_AROMA
        (ica_aroma, ica_aroma_confound_extraction, [('out_dir', 'ica_out_dir')]
         ),
        # output for processing and reporting
        (ica_aroma_confound_extraction,
         outputnode, [('aroma_confounds', 'aroma_confounds'),
                      ('aroma_noise_ics', 'aroma_noise_ics'),
                      ('melodic_mix', 'melodic_mix')]),
        # TODO change melodic report to reflect noise and non-noise components
        (melodic, outputnode, [('out_report', 'out_report')]),
    ])

    return workflow
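
A quick illustration of the SUSAN helper logic defined above: the brightness threshold is 75% of the in-mask median intensity, and ``usans`` is a list with a single (mean image, median) tuple. The numbers and filename are hypothetical.

median_val = 800.0                            # e.g. output of fsl.ImageStats '-k mask -p 50'
brightness_threshold = 0.75 * median_val      # getbtthresh -> 600.0
usans = [('mean_epi.nii.gz', median_val)]     # getusans_func(image, thresh)
print(brightness_threshold, usans)
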
Example #5
def init_anat_derivatives_wf(output_dir,
                             output_spaces,
                             template,
                             freesurfer,
                             name='anat_derivatives_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1_preproc', 't1_mask', 't1_seg', 't1_tpms',
        't1_2_mni_forward_transform', 't1_2_mni', 'mni_mask', 'mni_seg',
        'mni_tpms', 'surfaces'
    ]),
                        name='inputnode')

    ds_t1_preproc = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                suffix='preproc'),
                            name='ds_t1_preproc',
                            run_without_submitting=True)

    ds_t1_mask = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix='brainmask'),
                         name='ds_t1_mask',
                         run_without_submitting=True)

    ds_t1_seg = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            suffix='dtissue'),
                        name='ds_t1_seg',
                        run_without_submitting=True)

    ds_t1_tpms = pe.Node(DerivativesDataSink(
        base_directory=output_dir, suffix='class-{extra_value}_probtissue'),
                         name='ds_t1_tpms',
                         run_without_submitting=True)
    ds_t1_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    suffix_fmt = 'space-{}_{}'.format
    ds_t1_mni = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            suffix=suffix_fmt(
                                                template, 'preproc')),
                        name='ds_t1_mni',
                        run_without_submitting=True)

    ds_mni_mask = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              suffix=suffix_fmt(
                                                  template, 'brainmask')),
                          name='ds_mni_mask',
                          run_without_submitting=True)

    ds_mni_seg = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix=suffix_fmt(
                                                 template, 'dtissue')),
                         name='ds_mni_seg',
                         run_without_submitting=True)

    ds_mni_tpms = pe.Node(DerivativesDataSink(
        base_directory=output_dir,
        suffix=suffix_fmt(template, 'class-{extra_value}_probtissue')),
                          name='ds_mni_tpms',
                          run_without_submitting=True)
    ds_mni_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    ds_t1_mni_warp = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                 suffix=suffix_fmt(
                                                     template, 'warp')),
                             name='ds_t1_mni_warp',
                             run_without_submitting=True)

    def get_gifti_name(in_file):
        import os
        import re
        in_format = re.compile(r'(?P<LR>[lr])h.(?P<surf>.+)_converted.gii')
        name = os.path.basename(in_file)
        info = in_format.match(name).groupdict()
        info['LR'] = info['LR'].upper()
        return '{surf}.{LR}.surf'.format(**info)

    name_surfs = pe.MapNode(niu.Function(function=get_gifti_name),
                            iterfield='in_file',
                            name='name_surfs')

    ds_surfs = pe.MapNode(DerivativesDataSink(base_directory=output_dir),
                          iterfield=['in_file', 'suffix'],
                          name='ds_surfs',
                          run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_t1_preproc, [('source_file', 'source_file'),
                                    ('t1_preproc', 'in_file')]),
        (inputnode, ds_t1_mask, [('source_file', 'source_file'),
                                 ('t1_mask', 'in_file')]),
        (inputnode, ds_t1_seg, [('source_file', 'source_file'),
                                ('t1_seg', 'in_file')]),
        (inputnode, ds_t1_tpms, [('source_file', 'source_file'),
                                 ('t1_tpms', 'in_file')]),
    ])

    if freesurfer:
        workflow.connect([
            (inputnode, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_surfs, [('source_file', 'source_file'),
                                   ('surfaces', 'in_file')]),
            (name_surfs, ds_surfs, [('out', 'suffix')]),
        ])
    if 'template' in output_spaces:
        workflow.connect([
            (inputnode, ds_t1_mni_warp, [('source_file', 'source_file'),
                                         ('t1_2_mni_forward_transform',
                                          'in_file')]),
            (inputnode, ds_t1_mni, [('source_file', 'source_file'),
                                    ('t1_2_mni', 'in_file')]),
            (inputnode, ds_mni_mask, [('source_file', 'source_file'),
                                      ('mni_mask', 'in_file')]),
            (inputnode, ds_mni_seg, [('source_file', 'source_file'),
                                     ('mni_seg', 'in_file')]),
            (inputnode, ds_mni_tpms, [('source_file', 'source_file'),
                                      ('mni_tpms', 'in_file')]),
        ])

    return workflow
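
A self-contained check of the filename parsing done by ``get_gifti_name`` above; the surface paths are hypothetical examples of FreeSurfer surfaces converted to GIFTI.

import os
import re

def _gifti_name(in_file):
    # same regex and formatting as get_gifti_name above
    in_format = re.compile(r'(?P<LR>[lr])h.(?P<surf>.+)_converted.gii')
    info = in_format.match(os.path.basename(in_file)).groupdict()
    info['LR'] = info['LR'].upper()
    return '{surf}.{LR}.surf'.format(**info)

print(_gifti_name('/tmp/lh.pial_converted.gii'))      # pial.L.surf
print(_gifti_name('/tmp/rh.inflated_converted.gii'))  # inflated.R.surf
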
Example #6
def init_surface_recon_wf(omp_nthreads, hires, name='surface_recon_wf'):

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        't1w', 't2w', 'skullstripped_t1', 'subjects_dir', 'subject_id'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'subjects_dir', 'subject_id', 'fs_2_t1_transform', 'surfaces',
        'out_report'
    ]),
                         name='outputnode')

    def detect_inputs(t1w_list, t2w_list=[], hires_enabled=True):
        from niworkflows.nipype.interfaces.base import isdefined
        from niworkflows.nipype.utils.filemanip import filename_to_list
        from niworkflows.nipype.interfaces.traits_extension import Undefined
        import nibabel as nib
        t1w_list = filename_to_list(t1w_list)
        t2w_list = filename_to_list(t2w_list) if isdefined(t2w_list) else []
        t1w_ref = nib.load(t1w_list[0])
        # Use high resolution preprocessing if voxel size < 1.0mm
        # The 0.05mm tolerance requires that the voxel size rounds down to 0.9mm or lower
        hires = hires_enabled and max(t1w_ref.header.get_zooms()) < 1 - 0.05

        t2w = Undefined
        if t2w_list and max(nib.load(t2w_list[0]).header.get_zooms()) < 1.2:
            t2w = t2w_list[0]

        # https://surfer.nmr.mgh.harvard.edu/fswiki/SubmillimeterRecon
        mris_inflate = '-n 50' if hires else Undefined
        return (t2w, isdefined(t2w), hires, mris_inflate)

    recon_config = pe.Node(niu.Function(
        function=detect_inputs,
        output_names=['t2w', 'use_T2', 'hires', 'mris_inflate']),
                           name='recon_config')
    recon_config.inputs.hires_enabled = hires

    autorecon1 = pe.Node(fs.ReconAll(directive='autorecon1',
                                     flags='-noskullstrip',
                                     openmp=omp_nthreads),
                         name='autorecon1')
    autorecon1.interface._can_resume = False
    autorecon1.interface.num_threads = omp_nthreads

    def inject_skullstripped(subjects_dir, subject_id, skullstripped):
        import os
        import nibabel as nib
        from nilearn.image import resample_to_img, new_img_like
        from niworkflows.nipype.utils.filemanip import copyfile
        mridir = os.path.join(subjects_dir, subject_id, 'mri')
        t1 = os.path.join(mridir, 'T1.mgz')
        bm_auto = os.path.join(mridir, 'brainmask.auto.mgz')
        bm = os.path.join(mridir, 'brainmask.mgz')

        if not os.path.exists(bm_auto):
            img = nib.load(t1)
            mask = nib.load(skullstripped)
            bmask = new_img_like(mask, mask.get_data() > 0)
            resampled_mask = resample_to_img(bmask, img, 'nearest')
            masked_image = new_img_like(
                img,
                img.get_data() * resampled_mask.get_data())
            masked_image.to_filename(bm_auto)

        if not os.path.exists(bm):
            copyfile(bm_auto, bm, copy=True, use_hardlink=True)

        return subjects_dir, subject_id

    skull_strip_extern = pe.Node(niu.Function(
        function=inject_skullstripped,
        output_names=['subjects_dir', 'subject_id']),
                                 name='skull_strip_extern')

    fs_transform = pe.Node(fs.Tkregister2(fsl_out='freesurfer2subT1.mat',
                                          reg_header=True),
                           name='fs_transform')

    autorecon_resume_wf = init_autorecon_resume_wf(omp_nthreads=omp_nthreads)
    gifti_surface_wf = init_gifti_surface_wf()

    workflow.connect([
        # Configuration
        (inputnode, recon_config, [('t1w', 't1w_list'), ('t2w', 't2w_list')]),
        # Passing subjects_dir / subject_id enforces serial order
        (inputnode, autorecon1, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id')]),
        (autorecon1, skull_strip_extern, [('subjects_dir', 'subjects_dir'),
                                          ('subject_id', 'subject_id')]),
        (skull_strip_extern, autorecon_resume_wf,
         [('subjects_dir', 'inputnode.subjects_dir'),
          ('subject_id', 'inputnode.subject_id')]),
        (autorecon_resume_wf, gifti_surface_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        # Reconstruction phases
        (inputnode, autorecon1, [('t1w', 'T1_files')]),
        (
            recon_config,
            autorecon1,
            [
                ('t2w', 'T2_file'),
                ('hires', 'hires'),
                # First run only (recon-all saves expert options)
                ('mris_inflate', 'mris_inflate')
            ]),
        (inputnode, skull_strip_extern, [('skullstripped_t1', 'skullstripped')
                                         ]),
        (recon_config, autorecon_resume_wf, [('use_T2', 'inputnode.use_T2')]),
        # Construct transform from FreeSurfer conformed image to FMRIPREP
        # reoriented image
        (inputnode, fs_transform, [('t1w', 'target_image')]),
        (autorecon1, fs_transform, [('T1', 'moving_image')]),
        # Output
        (autorecon_resume_wf, outputnode,
         [('outputnode.subjects_dir', 'subjects_dir'),
          ('outputnode.subject_id', 'subject_id'),
          ('outputnode.out_report', 'out_report')]),
        (gifti_surface_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        (fs_transform, outputnode, [('fsl_file', 'fs_2_t1_transform')]),
    ])

    return workflow
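
A quick illustration of the high-resolution decision made in ``detect_inputs`` above: with the 0.05 mm tolerance, submillimeter reconstruction is only enabled when the largest voxel dimension is below 0.95 mm. The zoom values are hypothetical.

for zooms in [(0.7, 0.7, 0.7), (0.9, 0.9, 0.9), (1.0, 1.0, 1.0)]:
    hires = max(zooms) < 1 - 0.05
    print(zooms, '-> hires' if hires else '-> standard recon-all')
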
Example #7
def init_sdc_unwarp_wf(reportlets_dir,
                       omp_nthreads,
                       fmap_bspline,
                       fmap_demean,
                       debug,
                       name='sdc_unwarp_wf'):
    """
    This workflow takes in a displacements fieldmap and calculates the corresponding
    displacements field (in other words, an ANTs-compatible warp file).

    It also calculates a new mask for the input dataset that takes into account the distortions.
    The mask is restricted to the field of view of the fieldmap, since corrections
    cannot be performed outside of it.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.unwarp import init_sdc_unwarp_wf
        wf = init_sdc_unwarp_wf(reportlets_dir='.', omp_nthreads=8,
                                fmap_bspline=False, fmap_demean=True,
                                debug=False)


    Inputs

        in_reference
            the reference image
        in_mask
            a brain mask corresponding to ``in_reference``
        name_source
            path to the original _bold file being unwarped
        fmap
            the fieldmap in Hz
        fmap_ref
            the reference (anatomical) image corresponding to ``fmap``
        fmap_mask
            a brain mask corresponding to ``fmap``


    Outputs

        out_reference
            the ``in_reference`` after unwarping
        out_reference_brain
            the ``in_reference`` after unwarping and skullstripping
        out_warp
            the corresponding :abbr:`DFM (displacements field map)` compatible with
            ANTs
        out_jacobian
            the jacobian of the field (for drop-out alleviation)
        out_mask
            mask of the unwarped input file
        out_mask_report
            reportlet for the skullstripping

    """

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_reference', 'in_reference_brain', 'in_mask', 'name_source',
        'fmap_ref', 'fmap_mask', 'fmap'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'out_reference', 'out_reference_brain', 'out_warp', 'out_mask',
        'out_jacobian', 'out_mask_report'
    ]),
                         name='outputnode')

    meta = pe.Node(ReadSidecarJSON(), name='meta')

    # Register the reference of the fieldmap to the reference
    # of the target image (the one that shall be corrected)
    ants_settings = pkgr.resource_filename('fmriprep',
                                           'data/fmap-any_registration.json')
    if debug:
        ants_settings = pkgr.resource_filename(
            'fmriprep', 'data/fmap-any_registration_testing.json')
    fmap2ref_reg = pe.Node(ANTSRegistrationRPT(
        generate_report=True,
        from_file=ants_settings,
        output_inverse_warped_image=True,
        output_warped_image=True,
        num_threads=omp_nthreads),
                           name='fmap2ref_reg')
    fmap2ref_reg.interface.num_threads = omp_nthreads

    ds_reg = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                         suffix='fmap_reg'),
                     name='ds_reg')

    # Map the VSM into the EPI space
    fmap2ref_apply = pe.Node(ANTSApplyTransformsRPT(generate_report=True,
                                                    dimension=3,
                                                    interpolation='BSpline',
                                                    float=True),
                             name='fmap2ref_apply')

    fmap_mask2ref_apply = pe.Node(ANTSApplyTransformsRPT(
        generate_report=False,
        dimension=3,
        interpolation='NearestNeighbor',
        float=True),
                                  name='fmap_mask2ref_apply')

    ds_reg_vsm = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                             suffix='fmap_reg_vsm'),
                         name='ds_reg_vsm')

    # Fieldmap to rads and then to voxels (VSM - voxel shift map)
    torads = pe.Node(niu.Function(function=_hz2rads), name='torads')

    gen_vsm = pe.Node(fsl.FUGUE(save_unmasked_shift=True), name='gen_vsm')
    # Convert the VSM into a DFM (displacements field map)
    # or: FUGUE shift to ANTS warping.
    vsm2dfm = pe.Node(itk.FUGUEvsm2ANTSwarp(), name='vsm2dfm')
    jac_dfm = pe.Node(ants.CreateJacobianDeterminantImage(
        imageDimension=3, outputImage='jacobian.nii.gz'),
                      name='jac_dfm')

    unwarp_reference = pe.Node(ANTSApplyTransformsRPT(
        dimension=3,
        generate_report=False,
        float=True,
        interpolation='LanczosWindowedSinc'),
                               name='unwarp_reference')

    fieldmap_fov_mask = pe.Node(niu.Function(function=_fill_with_ones),
                                name='fieldmap_fov_mask')

    fmap_fov2ref_apply = pe.Node(ANTSApplyTransformsRPT(
        generate_report=False,
        dimension=3,
        interpolation='NearestNeighbor',
        float=True),
                                 name='fmap_fov2ref_apply')

    apply_fov_mask = pe.Node(fsl.ApplyMask(), name="apply_fov_mask")

    enhance_and_skullstrip_epi_wf = init_enhance_and_skullstrip_epi_wf()

    workflow.connect([
        (inputnode, meta, [('name_source', 'in_file')]),
        (inputnode, fmap2ref_reg, [('fmap_ref', 'moving_image')]),
        (inputnode, fmap2ref_apply, [('in_reference', 'reference_image')]),
        (fmap2ref_reg, fmap2ref_apply, [('composite_transform', 'transforms')
                                        ]),
        (inputnode, fmap_mask2ref_apply, [('in_reference', 'reference_image')
                                          ]),
        (fmap2ref_reg, fmap_mask2ref_apply, [('composite_transform',
                                              'transforms')]),
        (inputnode, ds_reg_vsm, [('name_source', 'source_file')]),
        (fmap2ref_apply, ds_reg_vsm, [('out_report', 'in_file')]),
        (inputnode, fmap2ref_reg, [('in_reference_brain', 'fixed_image')]),
        (inputnode, ds_reg, [('name_source', 'source_file')]),
        (fmap2ref_reg, ds_reg, [('out_report', 'in_file')]),
        (inputnode, fmap2ref_apply, [('fmap', 'input_image')]),
        (inputnode, fmap_mask2ref_apply, [('fmap_mask', 'input_image')]),
        (fmap2ref_apply, torads, [('output_image', 'in_file')]),
        (meta, gen_vsm, [(('out_dict', _get_ec), 'dwell_time'),
                         (('out_dict', _get_pedir_fugue), 'unwarp_direction')
                         ]),
        (meta, vsm2dfm, [(('out_dict', _get_pedir_bids), 'pe_dir')]),
        (torads, gen_vsm, [('out', 'fmap_in_file')]),
        (vsm2dfm, unwarp_reference, [('out_file', 'transforms')]),
        (inputnode, unwarp_reference, [('in_reference', 'reference_image')]),
        (inputnode, unwarp_reference, [('in_reference', 'input_image')]),
        (vsm2dfm, outputnode, [('out_file', 'out_warp')]),
        (vsm2dfm, jac_dfm, [('out_file', 'deformationField')]),
        (inputnode, fieldmap_fov_mask, [('fmap_ref', 'in_file')]),
        (fieldmap_fov_mask, fmap_fov2ref_apply, [('out', 'input_image')]),
        (inputnode, fmap_fov2ref_apply, [('in_reference', 'reference_image')]),
        (fmap2ref_reg, fmap_fov2ref_apply, [('composite_transform',
                                             'transforms')]),
        (fmap_fov2ref_apply, apply_fov_mask, [('output_image', 'mask_file')]),
        (unwarp_reference, apply_fov_mask, [('output_image', 'in_file')]),
        (apply_fov_mask, enhance_and_skullstrip_epi_wf,
         [('out_file', 'inputnode.in_file')]),
        (apply_fov_mask, outputnode, [('out_file', 'out_reference')]),
        (enhance_and_skullstrip_epi_wf, outputnode,
         [('outputnode.mask_file', 'out_mask'),
          ('outputnode.out_report', 'out_mask_report'),
          ('outputnode.skull_stripped_file', 'out_reference_brain')]),
        (jac_dfm, outputnode, [('jacobian_image', 'out_jacobian')]),
    ])

    if not fmap_bspline:
        workflow.connect([(fmap_mask2ref_apply, gen_vsm, [('output_image',
                                                           'mask_file')])])

    if fmap_demean:
        # Demean within mask
        demean = pe.Node(niu.Function(function=_demean), name='demean')

        workflow.connect([
            (gen_vsm, demean, [('shift_out_file', 'in_file')]),
            (fmap_mask2ref_apply, demean, [('output_image', 'in_mask')]),
            (demean, vsm2dfm, [('out', 'in_file')]),
        ])

    else:
        workflow.connect([
            (gen_vsm, vsm2dfm, [('shift_out_file', 'in_file')]),
        ])

    return workflow
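
A minimal usage sketch extending the docstring example above; the field names match ``inputnode``, but every file path is a hypothetical placeholder and running the workflow requires ANTs and FSL.

from fmriprep.workflows.fieldmap.unwarp import init_sdc_unwarp_wf

wf = init_sdc_unwarp_wf(reportlets_dir='.', omp_nthreads=8,
                        fmap_bspline=False, fmap_demean=True, debug=False)
wf.inputs.inputnode.in_reference = 'bold_ref.nii.gz'
wf.inputs.inputnode.in_reference_brain = 'bold_ref_brain.nii.gz'
wf.inputs.inputnode.in_mask = 'bold_ref_mask.nii.gz'
wf.inputs.inputnode.name_source = 'sub-01_task-rest_bold.nii.gz'
wf.inputs.inputnode.fmap = 'fieldmap_hz.nii.gz'
wf.inputs.inputnode.fmap_ref = 'fieldmap_magnitude.nii.gz'
wf.inputs.inputnode.fmap_mask = 'fieldmap_mask.nii.gz'
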
Example #8
def init_bold_stc_wf(metadata, name='bold_stc_wf'):
    """
    This workflow performs :abbr:`STC (slice-timing correction)` over the input
    :abbr:`BOLD (blood-oxygen-level dependent)` image.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_stc_wf
        wf = init_bold_stc_wf(
            metadata={"RepetitionTime": 2.0,
                      "SliceTiming": [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]},
            )

    **Parameters**

        metadata : dict
            BIDS metadata for BOLD file
        name : str
            Name of workflow (default: ``bold_stc_wf``)

    **Inputs**

        bold_file
            BOLD series NIfTI file
        skip_vols
            Number of non-steady-state volumes detected at beginning of ``bold_file``

    **Outputs**

        stc_file
            Slice-timing corrected BOLD series NIfTI file

    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['bold_file', 'skip_vols']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['stc_file']),
                         name='outputnode')

    LOGGER.log(25, 'Slice-timing correction will be included.')

    def create_custom_slice_timing_file_func(metadata):
        import os
        slice_timings_sec = ["%f" % t for t in metadata["SliceTiming"]]
        out_file = os.path.abspath("timings.1D")
        with open(out_file, "w") as fp:
            fp.write("\t".join(slice_timings_sec))
        return out_file

    create_custom_slice_timing_file = pe.Node(
        niu.Function(function=create_custom_slice_timing_file_func),
        name="create_custom_slice_timing_file",
        mem_gb=DEFAULT_MEMORY_MIN_GB)
    create_custom_slice_timing_file.inputs.metadata = metadata

    # It would be good to fingerprint memory use of afni.TShift
    slice_timing_correction = pe.Node(afni.TShift(
        outputtype='NIFTI_GZ', tr='{}s'.format(metadata["RepetitionTime"])),
                                      name='slice_timing_correction')

    copy_xform = pe.Node(CopyXForm(), name='copy_xform', mem_gb=0.1)

    def _prefix_at(x):
        return "@%s" % x

    workflow.connect([
        (inputnode, slice_timing_correction, [('bold_file', 'in_file'),
                                              ('skip_vols', 'ignore')]),
        (create_custom_slice_timing_file, slice_timing_correction,
         [(('out', _prefix_at), 'tpattern')]),
        (slice_timing_correction, copy_xform, [('out_file', 'in_file')]),
        (inputnode, copy_xform, [('bold_file', 'hdr_file')]),
        (copy_xform, outputnode, [('out_file', 'stc_file')]),
    ])

    return workflow
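
A self-contained sketch of how the AFNI ``tpattern`` input is assembled above: slice onsets from the BIDS metadata are written to a 1D file, and 3dTshift receives that path prefixed with '@'. The metadata values mirror the hypothetical ones in the docstring.

import os

def _write_timings(metadata):
    # same logic as create_custom_slice_timing_file_func above
    out_file = os.path.abspath('timings.1D')
    with open(out_file, 'w') as fp:
        fp.write('\t'.join('%f' % t for t in metadata['SliceTiming']))
    return out_file

metadata = {"RepetitionTime": 2.0, "SliceTiming": [0.0, 0.1, 0.2, 0.3, 0.4]}
timings = _write_timings(metadata)
print('@%s' % timings)   # what slice_timing_correction receives as tpattern
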
Example #9
def init_fmap_unwarp_report_wf(reportlets_dir, name='fmap_unwarp_report_wf'):
    """
    This workflow generates and saves a reportlet showing the effect of fieldmap
    unwarping a BOLD image.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.base import init_fmap_unwarp_report_wf
        wf = init_fmap_unwarp_report_wf(reportlets_dir='.')

    **Parameters**

        reportlets_dir : str
            Directory in which to save reportlets
        name : str, optional
            Workflow name (default: fmap_unwarp_report_wf)

    **Inputs**

        in_pre
            Reference image, before unwarping
        in_post
            Reference image, after unwarping
        in_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        in_xfm
            Affine transform from T1 space to BOLD space (ITK format)

    """

    from niworkflows.nipype.pipeline import engine as pe
    from niworkflows.nipype.interfaces import utility as niu
    from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms

    from niworkflows.interfaces import SimpleBeforeAfter
    from ...interfaces.images import extract_wm
    from ...interfaces import DerivativesDataSink

    DEFAULT_MEMORY_MIN_GB = 0.01

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_pre', 'in_post', 'in_seg', 'in_xfm', 'name_source']),
                        name='inputnode')

    map_seg = pe.Node(ApplyTransforms(dimension=3,
                                      float=True,
                                      interpolation='NearestNeighbor'),
                      name='map_seg',
                      mem_gb=0.3)

    sel_wm = pe.Node(niu.Function(function=extract_wm),
                     name='sel_wm',
                     mem_gb=DEFAULT_MEMORY_MIN_GB)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name='bold_rpt', mem_gb=0.1)
    bold_rpt_ds = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                              suffix='variant-hmcsdc_preproc'),
                          name='bold_rpt_ds',
                          mem_gb=DEFAULT_MEMORY_MIN_GB,
                          run_without_submitting=True)
    workflow.connect([
        (inputnode, bold_rpt, [('in_post', 'after'), ('in_pre', 'before')]),
        (inputnode, bold_rpt_ds, [('name_source', 'source_file')]),
        (bold_rpt, bold_rpt_ds, [('out_report', 'in_file')]),
        (inputnode, map_seg, [('in_post', 'reference_image'),
                              ('in_seg', 'input_image'),
                              ('in_xfm', 'transforms')]),
        (map_seg, sel_wm, [('output_image', 'in_seg')]),
        (sel_wm, bold_rpt, [('out', 'wm_seg')]),
    ])

    return workflow
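
A minimal usage sketch for the reportlet workflow above; the input files (pre- and post-unwarping references, T1w segmentation, and the T1w-to-BOLD ITK transform) are hypothetical placeholders.

from fmriprep.workflows.fieldmap.base import init_fmap_unwarp_report_wf

wf = init_fmap_unwarp_report_wf(reportlets_dir='.')
wf.inputs.inputnode.in_pre = 'bold_ref_distorted.nii.gz'
wf.inputs.inputnode.in_post = 'bold_ref_unwarped.nii.gz'
wf.inputs.inputnode.in_seg = 'sub-01_dtissue.nii.gz'
wf.inputs.inputnode.in_xfm = 't1_to_bold_itk.txt'
wf.inputs.inputnode.name_source = 'sub-01_task-rest_bold.nii.gz'
wf.run()
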
Example #10
def init_bbreg_wf(bold2t1w_dof, report, reregister=True, name='bbreg_wf'):
    """
    This workflow uses FreeSurfer's ``bbregister`` to register a BOLD image to
    a T1-weighted structural image.

    It is a counterpart to :py:func:`~fmriprep.workflows.util.init_fsl_bbr_wf`,
    which performs the same task using FSL's FLIRT with a BBR cost function.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.util import init_bbreg_wf
        wf = init_bbreg_wf(bold2t1w_dof=9, report=False)


    Parameters

        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        report : bool
            Generate visual report of registration quality
        reregister : bool, optional
            Update affine registration matrix with FreeSurfer-T1w transform
            (default: True)
        name : str, optional
            Workflow name (default: bbreg_wf)


    Inputs

        in_file
            Reference BOLD image to be registered
        fs_2_t1_transform
            FSL-style affine matrix translating from FreeSurfer T1.mgz to T1w
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID (must have folder in SUBJECTS_DIR)
        t1_brain
            Unused (see :py:func:`~fmriprep.workflows.util.init_fsl_bbr_wf`)
        t1_seg
            Unused (see :py:func:`~fmriprep.workflows.util.init_fsl_bbr_wf`)


    Outputs

        out_matrix_file
            FSL-style registration matrix
        out_reg_file
            FreeSurfer-style registration matrix (.dat)
        final_cost
            Value of cost function at final registration
        out_report
            reportlet for assessing registration quality

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            'fs_2_t1_transform',
            'subjects_dir',
            'subject_id',  # BBRegister
            't1_seg',
            't1_brain'
        ]),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['out_matrix_file', 'out_reg_file', 'out_report', 'final_cost']),
                         name='outputnode')

    _BBRegister = BBRegisterRPT if report else fs.BBRegister
    bbregister = pe.Node(_BBRegister(dof=bold2t1w_dof,
                                     contrast_type='t2',
                                     init='coreg',
                                     registered_file=True,
                                     out_fsl_file=True),
                         name='bbregister')

    def apply_fs_transform(fs_2_t1_transform, bbreg_transform):
        import os
        import numpy as np
        out_file = os.path.abspath('transform.mat')
        fs_xfm = np.loadtxt(fs_2_t1_transform)
        bbrxfm = np.loadtxt(bbreg_transform)
        out_xfm = fs_xfm.dot(bbrxfm)
        assert np.allclose(out_xfm[3], [0, 0, 0, 1])
        out_xfm[3] = [0, 0, 0, 1]
        np.savetxt(out_file, out_xfm, fmt=str('%.12g'))
        return out_file

    transformer = pe.Node(niu.Function(function=apply_fs_transform),
                          name='transformer')

    def get_final_cost(in_file):
        import numpy as np
        return np.loadtxt(in_file, usecols=[0])

    get_cost = pe.Node(niu.Function(function=get_final_cost), name='get_cost')

    workflow.connect([
        (inputnode, bbregister, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id'),
                                 ('in_file', 'source_file')]),
        (bbregister, get_cost, [('min_cost_file', 'in_file')]),
        (bbregister, outputnode, [('out_reg_file', 'out_reg_file')]),
        (get_cost, outputnode, [('out', 'final_cost')]),
    ])

    if reregister:
        workflow.connect([
            (inputnode, transformer, [('fs_2_t1_transform',
                                       'fs_2_t1_transform')]),
            (bbregister, transformer, [('out_fsl_file', 'bbreg_transform')]),
            (transformer, outputnode, [('out', 'out_matrix_file')]),
        ])
    else:
        workflow.connect([
            (bbregister, outputnode, [('out_fsl_file', 'out_matrix_file')]),
        ])

    if report:
        bbregister.inputs.generate_report = True
        workflow.connect([(bbregister, outputnode, [('out_report',
                                                     'out_report')])])

    return workflow
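
A tiny numpy check of the affine composition performed by ``apply_fs_transform`` above, using hypothetical FSL-style 4x4 matrices written to temporary files.

import os
import tempfile
import numpy as np

tmpdir = tempfile.mkdtemp()
fs_xfm = np.eye(4)
fs_xfm[0, 3] = 2.0     # hypothetical FreeSurfer-to-T1w shift
bbr_xfm = np.eye(4)
bbr_xfm[1, 3] = -1.5   # hypothetical BBR registration result
fs_path = os.path.join(tmpdir, 'fs.mat')
bbr_path = os.path.join(tmpdir, 'bbr.mat')
np.savetxt(fs_path, fs_xfm)
np.savetxt(bbr_path, bbr_xfm)

out_xfm = np.loadtxt(fs_path).dot(np.loadtxt(bbr_path))
print(out_xfm)   # composed affine; the last row remains [0, 0, 0, 1]
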
Example #11
def init_nonlinear_sdc_wf(bold_file,
                          freesurfer,
                          bold2t1w_dof,
                          template,
                          omp_nthreads,
                          bold_pe='j',
                          atlas_threshold=3,
                          name='nonlinear_sdc_wf'):
    """
    This workflow takes a skull-stripped T1w image and reference BOLD image and
    estimates a susceptibility distortion correction warp, using ANTs symmetric
    normalization (SyN) and the average fieldmap atlas described in
    [Treiber2016]_.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.

    This technique is a variation on those developed in [Huntenburg2014]_ and
    [Wang2017]_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.syn import init_nonlinear_sdc_wf
        wf = init_nonlinear_sdc_wf(
            bold_file='/dataset/sub-01/func/sub-01_task-rest_bold.nii.gz',
            bold_pe='j',
            freesurfer=True,
            bold2t1w_dof=9,
            template='MNI152NLin2009cAsym',
            omp_nthreads=8)

    **Inputs**

        t1_brain
            skull-stripped, bias-corrected structural image
        bold_ref
            skull-stripped reference image
        t1_seg
            FAST segmentation white and gray matter, in native T1w space
        t1_2_mni_reverse_transform
            inverse registration transform of T1w image to MNI template

    **Outputs**

        out_reference_brain
            the ``bold_ref`` image after unwarping
        out_warp
            the corresponding :abbr:`DFM (displacements field map)` compatible with
            ANTs
        out_mask
            mask of the unwarped input file
        out_mask_report
            reportlet for the skullstripping

    .. [Huntenburg2014] Huntenburg, J. M. (2014) Evaluating Nonlinear
                        Coregistration of BOLD EPI and T1w Images. Berlin: Master
                        Thesis, Freie Universität. `PDF
                        <http://pubman.mpdl.mpg.de/pubman/item/escidoc:2327525:5/component/escidoc:2327523/master_thesis_huntenburg_4686947.pdf>`_.
    .. [Treiber2016] Treiber, J. M. et al. (2016) Characterization and Correction
                     of Geometric Distortions in 814 Diffusion Weighted Images,
                     PLoS ONE 11(3): e0152472. doi:`10.1371/journal.pone.0152472
                     <https://doi.org/10.1371/journal.pone.0152472>`_.
    .. [Wang2017] Wang S, et al. (2017) Evaluation of Field Map and Nonlinear
                  Registration Methods for Correction of Susceptibility Artifacts
                  in Diffusion MRI. Front. Neuroinform. 11:17.
                  doi:`10.3389/fninf.2017.00017
                  <https://doi.org/10.3389/fninf.2017.00017>`_.
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        ['t1_brain', 'bold_ref', 't1_2_mni_reverse_transform', 't1_seg']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface([
        'out_reference_brain', 'out_mask', 'out_warp', 'out_warp_report',
        'out_mask_report'
    ]),
                         name='outputnode')

    if bold_pe is None or bold_pe[0] not in ['i', 'j']:
        LOGGER.warning(
            'Incorrect phase-encoding direction, assuming PA (posterior-to-anterior)'
        )
        bold_pe = 'j'

    # Collect predefined data
    # Atlas image and registration affine
    atlas_img = pkgr.resource_filename('fmriprep', 'data/fmap_atlas.nii.gz')
    atlas_2_template_affine = pkgr.resource_filename(
        'fmriprep', 'data/fmap_atlas_2_{}_affine.mat'.format(template))
    # Registration specifications
    affine_transform = pkgr.resource_filename('fmriprep', 'data/affine.json')
    syn_transform = pkgr.resource_filename('fmriprep',
                                           'data/susceptibility_syn.json')

    invert_t1w = pe.Node(InvertT1w(), name='invert_t1w', mem_gb=0.3)

    ref_2_t1 = pe.Node(Registration(from_file=affine_transform),
                       name='ref_2_t1',
                       n_procs=omp_nthreads)
    t1_2_ref = pe.Node(ApplyTransforms(invert_transform_flags=[True]),
                       name='t1_2_ref',
                       n_procs=omp_nthreads)

    # 1) BOLD -> T1; 2) MNI -> T1; 3) ATLAS -> MNI
    transform_list = pe.Node(niu.Merge(3),
                             name='transform_list',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
    transform_list.inputs.in3 = atlas_2_template_affine

    # Inverting (1), then applying in reverse order:
    #
    # ATLAS -> MNI -> T1 -> BOLD
    atlas_2_ref = pe.Node(
        ApplyTransforms(invert_transform_flags=[True, False, False]),
        name='atlas_2_ref',
        n_procs=omp_nthreads,
        mem_gb=0.3)
    atlas_2_ref.inputs.input_image = atlas_img

    threshold_atlas = pe.Node(fsl.maths.MathsCommand(
        args='-thr {:.8g} -bin'.format(atlas_threshold),
        output_datatype='char'),
                              name='threshold_atlas',
                              mem_gb=0.3)

    fixed_image_masks = pe.Node(niu.Merge(2),
                                name='fixed_image_masks',
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
    fixed_image_masks.inputs.in1 = 'NULL'

    restrict = [[int(bold_pe[0] == 'i'), int(bold_pe[0] == 'j'), 0]] * 2
    syn = pe.Node(Registration(from_file=syn_transform,
                               restrict_deformation=restrict),
                  name='syn',
                  n_procs=omp_nthreads)

    seg_2_ref = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                        float=True,
                                        invert_transform_flags=[True]),
                        name='seg_2_ref',
                        n_procs=omp_nthreads,
                        mem_gb=0.3)
    sel_wm = pe.Node(niu.Function(function=extract_wm),
                     name='sel_wm',
                     mem_gb=DEFAULT_MEMORY_MIN_GB)
    syn_rpt = pe.Node(SimpleBeforeAfter(), name='syn_rpt', mem_gb=0.1)

    skullstrip_bold_wf = init_skullstrip_bold_wf()

    workflow.connect([
        (inputnode, invert_t1w, [('t1_brain', 'in_file'),
                                 ('bold_ref', 'ref_file')]),
        (inputnode, ref_2_t1, [('bold_ref', 'moving_image')]),
        (invert_t1w, ref_2_t1, [('out_file', 'fixed_image')]),
        (inputnode, t1_2_ref, [('bold_ref', 'reference_image')]),
        (invert_t1w, t1_2_ref, [('out_file', 'input_image')]),
        (ref_2_t1, t1_2_ref, [('forward_transforms', 'transforms')]),
        (ref_2_t1, transform_list, [('forward_transforms', 'in1')]),
        (inputnode, transform_list, [('t1_2_mni_reverse_transform', 'in2')]),
        (inputnode, atlas_2_ref, [('bold_ref', 'reference_image')]),
        (transform_list, atlas_2_ref, [('out', 'transforms')]),
        (atlas_2_ref, threshold_atlas, [('output_image', 'in_file')]),
        (threshold_atlas, fixed_image_masks, [('out_file', 'in2')]),
        (inputnode, syn, [('bold_ref', 'moving_image')]),
        (t1_2_ref, syn, [('output_image', 'fixed_image')]),
        (fixed_image_masks, syn, [('out', 'fixed_image_masks')]),
        (inputnode, seg_2_ref, [('t1_seg', 'input_image')]),
        (ref_2_t1, seg_2_ref, [('forward_transforms', 'transforms')]),
        (syn, seg_2_ref, [('warped_image', 'reference_image')]),
        (seg_2_ref, sel_wm, [('output_image', 'in_seg')]),
        (inputnode, syn_rpt, [('bold_ref', 'before')]),
        (syn, syn_rpt, [('warped_image', 'after')]),
        (sel_wm, syn_rpt, [('out', 'wm_seg')]),
        (syn, skullstrip_bold_wf, [('warped_image', 'inputnode.in_file')]),
        (syn, outputnode, [('forward_transforms', 'out_warp')]),
        (skullstrip_bold_wf, outputnode,
         [('outputnode.skull_stripped_file', 'out_reference_brain'),
          ('outputnode.mask_file', 'out_mask'),
          ('outputnode.out_report', 'out_mask_report')]),
        (syn_rpt, outputnode, [('out_report', 'out_warp_report')])
    ])

    return workflow
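
A quick look at how the ``restrict_deformation`` setting above follows from the phase-encoding direction: only the PE axis (i or j) is allowed to deform, with the per-axis flags repeated for the two registration stages defined in the SyN settings file.

for bold_pe in ('i', 'j', 'j-'):
    restrict = [[int(bold_pe[0] == 'i'), int(bold_pe[0] == 'j'), 0]] * 2
    print(bold_pe, restrict)
# i  -> [[1, 0, 0], [1, 0, 0]]
# j  -> [[0, 1, 0], [0, 1, 0]]
# j- -> [[0, 1, 0], [0, 1, 0]]
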
Example #12
def init_fmap_wf(reportlets_dir, omp_nthreads, fmap_bspline, name='fmap_wf'):
    """
    Fieldmap workflow - when we have a sequence that directly measures the fieldmap,
    we just need to mask it (using the corresponding magnitude image) to remove the
    noise in the surrounding air region, and ensure that units are Hz.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.fmap import init_fmap_wf
        wf = init_fmap_wf(reportlets_dir='.', omp_nthreads=6,
                          fmap_bspline=False)

    """

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['magnitude', 'fieldmap']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
                         name='outputnode')

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')
    # Merge input fieldmap images
    fmapmrg = pe.Node(IntraModalMerge(zero_based_avg=False, hmc=False),
                      name='fmapmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                         name='n4_correct')
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_fmap_mask = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='fmap_mask'), name='ds_fmap_mask')

    workflow.connect([
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (inputnode, fmapmrg, [('fieldmap', 'in_files')]),
        (magmrg, n4_correct, [('out_file', 'input_image')]),
        (n4_correct, bet, [('output_image', 'in_file')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (inputnode, ds_fmap_mask, [('fieldmap', 'source_file')]),
        (bet, ds_fmap_mask, [('out_report', 'in_file')]),
    ])

    if fmap_bspline:
        # despike_threshold=1.0, mask_erode=1),
        fmapenh = pe.Node(FieldEnhance(
            unwrap=False, despike=False, njobs=omp_nthreads),
            name='fmapenh')
        fmapenh.interface.num_threads = omp_nthreads
        fmapenh.interface.estimated_memory_gb = 4

        workflow.connect([
            (bet, fmapenh, [('mask_file', 'in_mask'),
                            ('out_file', 'in_magnitude')]),
            (fmapmrg, fmapenh, [('out_file', 'in_file')]),
            (fmapenh, outputnode, [('out_file', 'fmap')]),
        ])

    else:
        torads = pe.Node(niu.Function(output_names=['out_file', 'cutoff_hz'],
                                      function=_torads), name='torads')
        prelude = pe.Node(fsl.PRELUDE(), name='prelude')
        tohz = pe.Node(niu.Function(function=_tohz), name='tohz')

        denoise = pe.Node(fsl.SpatialFilter(operation='median', kernel_shape='sphere',
                                            kernel_size=3), name='denoise')
        demean = pe.Node(niu.Function(function=demean_image), name='demean')
        cleanup_wf = cleanup_edge_pipeline(name='cleanup_wf')

        applymsk = pe.Node(ApplyMask(), name='applymsk')

        workflow.connect([
            (bet, prelude, [('mask_file', 'mask_file'),
                            ('out_file', 'magnitude_file')]),
            (fmapmrg, torads, [('out_file', 'in_file')]),
            (torads, tohz, [('cutoff_hz', 'cutoff_hz')]),
            (torads, prelude, [('out_file', 'phase_file')]),
            (prelude, tohz, [('unwrapped_phase_file', 'in_file')]),
            (tohz, denoise, [('out', 'in_file')]),
            (denoise, demean, [('out_file', 'in_file')]),
            (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
            (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
            (cleanup_wf, applymsk, [('outputnode.out_file', 'in_file')]),
            (bet, applymsk, [('mask_file', 'in_mask')]),
            (applymsk, outputnode, [('out_file', 'fmap')]),
        ])

    return workflow
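
A minimal usage sketch following the docstring example above; the magnitude and fieldmap paths are hypothetical placeholders, and running the workflow requires FSL and ANTs.

from fmriprep.workflows.fieldmap.fmap import init_fmap_wf

wf = init_fmap_wf(reportlets_dir='.', omp_nthreads=6, fmap_bspline=False)
wf.inputs.inputnode.magnitude = 'sub-01_magnitude1.nii.gz'   # hypothetical
wf.inputs.inputnode.fieldmap = 'sub-01_fieldmap.nii.gz'      # hypothetical
wf.run()
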
Example #13
def init_bbreg_wf(bold2t1w_dof, report, reregister=True, name='bbreg_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            'fs_2_t1_transform',
            'subjects_dir',
            'subject_id',  # BBRegister
            't1_seg',
            't1_brain'
        ]),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['out_matrix_file', 'out_reg_file', 'out_report', 'final_cost']),
                         name='outputnode')

    _BBRegister = BBRegisterRPT if report else fs.BBRegister
    bbregister = pe.Node(_BBRegister(dof=bold2t1w_dof,
                                     contrast_type='t2',
                                     init='coreg',
                                     registered_file=True,
                                     out_fsl_file=True),
                         name='bbregister')

    def apply_fs_transform(fs_2_t1_transform, bbreg_transform):
        import os
        import numpy as np
        out_file = os.path.abspath('transform.mat')
        fs_xfm = np.loadtxt(fs_2_t1_transform)
        bbrxfm = np.loadtxt(bbreg_transform)
        out_xfm = fs_xfm.dot(bbrxfm)
        assert np.allclose(out_xfm[3], [0, 0, 0, 1])
        out_xfm[3] = [0, 0, 0, 1]
        np.savetxt(out_file, out_xfm, fmt=str('%.12g'))
        return out_file

    transformer = pe.Node(niu.Function(function=apply_fs_transform),
                          name='transformer')

    def get_final_cost(in_file):
        import numpy as np
        return np.loadtxt(in_file, usecols=[0])

    get_cost = pe.Node(niu.Function(function=get_final_cost),
                       name='get_cost',
                       run_without_submitting=True)

    workflow.connect([
        (inputnode, bbregister, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id'),
                                 ('in_file', 'source_file')]),
        (bbregister, get_cost, [('min_cost_file', 'in_file')]),
        (bbregister, outputnode, [('out_reg_file', 'out_reg_file')]),
        (get_cost, outputnode, [('out', 'final_cost')]),
    ])

    if reregister:
        workflow.connect([
            (inputnode, transformer, [('fs_2_t1_transform',
                                       'fs_2_t1_transform')]),
            (bbregister, transformer, [('out_fsl_file', 'bbreg_transform')]),
            (transformer, outputnode, [('out', 'out_matrix_file')]),
        ])
    else:
        workflow.connect([
            (bbregister, outputnode, [('out_fsl_file', 'out_matrix_file')]),
        ])

    if report:
        bbregister.inputs.generate_report = True
        workflow.connect([(bbregister, outputnode, [('out_report',
                                                     'out_report')])])

    return workflow
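

# A minimal usage sketch (not part of the original module), assuming FreeSurfer
# outputs already exist; every path and identifier below is a hypothetical
# placeholder.
def _example_bbreg_usage():
    bbreg_wf = init_bbreg_wf(bold2t1w_dof=9, report=True, reregister=True)
    bbreg_wf.inputs.inputnode.subjects_dir = '/data/freesurfer'     # hypothetical
    bbreg_wf.inputs.inputnode.subject_id = 'sub-01'                 # hypothetical
    bbreg_wf.inputs.inputnode.in_file = 'bold_reference.nii.gz'     # hypothetical
    bbreg_wf.inputs.inputnode.fs_2_t1_transform = 'fs_to_t1.mat'    # hypothetical
    return bbreg_wf  # call .run() to execute with the default plugin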
Example #14
def init_anat_preproc_wf(skull_strip_template, output_spaces, template, debug,
                         freesurfer, longitudinal, omp_nthreads, hires, reportlets_dir,
                         output_dir, num_t1w,
                         name='anat_preproc_wf'):
    r"""
    This workflow controls the anatomical preprocessing stages of FMRIPREP.

    This includes:

     - Creation of a structural template
     - Skull-stripping and bias correction
     - Tissue segmentation
     - Normalization
     - Surface reconstruction with FreeSurfer

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.anatomical import init_anat_preproc_wf
        wf = init_anat_preproc_wf(omp_nthreads=1,
                                  reportlets_dir='.',
                                  output_dir='.',
                                  template='MNI152NLin2009cAsym',
                                  output_spaces=['T1w', 'fsnative',
                                                 'template', 'fsaverage5'],
                                  skull_strip_template='OASIS',
                                  freesurfer=True,
                                  longitudinal=False,
                                  debug=False,
                                  hires=True,
                                  num_t1w=1)

    **Parameters**

        skull_strip_template : str
            Name of ANTs skull-stripping template ('OASIS' or 'NKI')
        output_spaces : list
            List of output spaces functional images are to be resampled to.

            Some pipeline components will only be instantiated for some output spaces.

            Valid spaces:

              - T1w
              - template
              - fsnative
              - fsaverage (or other pre-existing FreeSurfer templates)
        template : str
            Name of template targeted by `'template'` output space
        debug : bool
            Enable debugging outputs
        freesurfer : bool
            Enable FreeSurfer surface reconstruction (may increase runtime)
        longitudinal : bool
            Create unbiased structural template, regardless of number of inputs
            (may increase runtime)
        omp_nthreads : int
            Maximum number of threads an individual process may use
        hires : bool
            Enable sub-millimeter preprocessing in FreeSurfer
        reportlets_dir : str
            Directory in which to save reportlets
        output_dir : str
            Directory in which to save derivatives
        name : str, optional
            Workflow name (default: anat_preproc_wf)


    **Inputs**

        t1w
            List of T1-weighted structural images
        t2w
            List of T2-weighted structural images
        subjects_dir
            FreeSurfer SUBJECTS_DIR


    **Outputs**

        t1_preproc
            Bias-corrected structural template, defining T1w space
        t1_brain
            Skull-stripped ``t1_preproc``
        t1_mask
            Mask of the skull-stripped template image
        t1_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        t1_tpms
            List of tissue probability maps in T1w space
        t1_2_mni
            T1w template, normalized to MNI space
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        t1_2_mni_reverse_transform
            ANTs-compatible affine-and-warp transform file (inverse)
        mni_mask
            Mask of skull-stripped template, in MNI space
        mni_seg
            Segmentation, resampled into MNI space
        mni_tpms
            List of tissue probability maps in MNI space
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w
        surfaces
            GIFTI surfaces (gray/white boundary, midthickness, pial, inflated)

    **Subworkflows**

        * :py:func:`~fmriprep.workflows.anatomical.init_skullstrip_ants_wf`
        * :py:func:`~fmriprep.workflows.anatomical.init_surface_recon_wf`

    """

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['t1w', 't2w', 'subjects_dir', 'subject_id']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['t1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms',
                't1_2_mni', 't1_2_mni_forward_transform', 't1_2_mni_reverse_transform',
                'mni_mask', 'mni_seg', 'mni_tpms',
                't1_template_transforms',
                'subjects_dir', 'subject_id', 't1_2_fsnative_forward_transform',
                't1_2_fsnative_reverse_transform', 'surfaces']),
        name='outputnode')

    buffernode = pe.Node(niu.IdentityInterface(
        fields=['t1_brain', 't1_mask']), name='buffernode')
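    # The buffernode decouples downstream consumers (segmentation, spatial
    # normalization, derivatives) from whichever branch provides the brain mask
    # and skull-stripped T1w: the FreeSurfer-refined mask when ``freesurfer`` is
    # enabled, the plain ANTs skull-strip outputs otherwise.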

    anat_template_wf = init_anat_template_wf(longitudinal=longitudinal, omp_nthreads=omp_nthreads,
                                             num_t1w=num_t1w)

    # 3. Skull-stripping
    # Bias field correction is handled in skull strip workflows.
    skullstrip_ants_wf = init_skullstrip_ants_wf(name='skullstrip_ants_wf',
                                                 skull_strip_template=skull_strip_template,
                                                 debug=debug,
                                                 omp_nthreads=omp_nthreads)

    workflow.connect([
        (inputnode, anat_template_wf, [('t1w', 'inputnode.t1w')]),
        (anat_template_wf, skullstrip_ants_wf, [('outputnode.t1_template', 'inputnode.in_file')]),
        (skullstrip_ants_wf, outputnode, [('outputnode.bias_corrected', 't1_preproc')]),
        (anat_template_wf, outputnode, [
            ('outputnode.template_transforms', 't1_template_transforms')]),
        (buffernode, outputnode, [('t1_brain', 't1_brain'),
                                  ('t1_mask', 't1_mask')]),
    ])

    # 4. Surface reconstruction
    if freesurfer:
        surface_recon_wf = init_surface_recon_wf(name='surface_recon_wf',
                                                 omp_nthreads=omp_nthreads, hires=hires)
        applyrefined = pe.Node(fsl.ApplyMask(), name='applyrefined')
        workflow.connect([
            (inputnode, surface_recon_wf, [
                ('t2w', 'inputnode.t2w'),
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id')]),
            (anat_template_wf, surface_recon_wf, [('outputnode.t1_template', 'inputnode.t1w')]),
            (skullstrip_ants_wf, surface_recon_wf, [
                ('outputnode.out_file', 'inputnode.skullstripped_t1'),
                ('outputnode.out_segs', 'inputnode.ants_segs'),
                ('outputnode.bias_corrected', 'inputnode.corrected_t1')]),
            (skullstrip_ants_wf, applyrefined, [
                ('outputnode.bias_corrected', 'in_file')]),
            (surface_recon_wf, applyrefined, [
                ('outputnode.out_brainmask', 'mask_file')]),
            (surface_recon_wf, outputnode, [
                ('outputnode.subjects_dir', 'subjects_dir'),
                ('outputnode.subject_id', 'subject_id'),
                ('outputnode.t1_2_fsnative_forward_transform', 't1_2_fsnative_forward_transform'),
                ('outputnode.t1_2_fsnative_reverse_transform', 't1_2_fsnative_reverse_transform'),
                ('outputnode.surfaces', 'surfaces')]),
            (applyrefined, buffernode, [('out_file', 't1_brain')]),
            (surface_recon_wf, buffernode, [
                ('outputnode.out_brainmask', 't1_mask')]),
        ])
    else:
        workflow.connect([
            (skullstrip_ants_wf, buffernode, [
                ('outputnode.out_file', 't1_brain'),
                ('outputnode.out_mask', 't1_mask')]),
        ])

    # 5. Segmentation
    t1_seg = pe.Node(fsl.FAST(segments=True, no_bias=True, probability_maps=True),
                     name='t1_seg', mem_gb=3)

    workflow.connect([
        (buffernode, t1_seg, [('t1_brain', 'in_files')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_seg'),
                              ('probability_maps', 't1_tpms')]),
    ])

    # 6. Spatial normalization (T1w to MNI registration)
    t1_2_mni = pe.Node(
        RobustMNINormalizationRPT(
            float=True,
            generate_report=True,
            flavor='testing' if debug else 'precise',
        ),
        name='t1_2_mni',
        n_procs=omp_nthreads,
        mem_gb=2
    )

    # Resample the brain mask and the tissue probability maps into mni space
    mni_mask = pe.Node(
        ApplyTransforms(dimension=3, default_value=0, float=True,
                        interpolation='NearestNeighbor'),
        name='mni_mask'
    )

    mni_seg = pe.Node(
        ApplyTransforms(dimension=3, default_value=0, float=True,
                        interpolation='NearestNeighbor'),
        name='mni_seg'
    )

    mni_tpms = pe.MapNode(
        ApplyTransforms(dimension=3, default_value=0, float=True,
                        interpolation='Linear'),
        iterfield=['input_image'],
        name='mni_tpms'
    )

    if 'template' in output_spaces:
        template_str = nid.TEMPLATE_MAP[template]
        ref_img = op.join(nid.get_dataset(template_str), '1mm_T1.nii.gz')

        t1_2_mni.inputs.template = template_str
        mni_mask.inputs.reference_image = ref_img
        mni_seg.inputs.reference_image = ref_img
        mni_tpms.inputs.reference_image = ref_img

        workflow.connect([
            (skullstrip_ants_wf, t1_2_mni, [('outputnode.bias_corrected', 'moving_image')]),
            (buffernode, t1_2_mni, [('t1_mask', 'moving_mask')]),
            (buffernode, mni_mask, [('t1_mask', 'input_image')]),
            (t1_2_mni, mni_mask, [('composite_transform', 'transforms')]),
            (t1_seg, mni_seg, [('tissue_class_map', 'input_image')]),
            (t1_2_mni, mni_seg, [('composite_transform', 'transforms')]),
            (t1_seg, mni_tpms, [('probability_maps', 'input_image')]),
            (t1_2_mni, mni_tpms, [('composite_transform', 'transforms')]),
            (t1_2_mni, outputnode, [
                ('warped_image', 't1_2_mni'),
                ('composite_transform', 't1_2_mni_forward_transform'),
                ('inverse_composite_transform', 't1_2_mni_reverse_transform')]),
            (mni_mask, outputnode, [('output_image', 'mni_mask')]),
            (mni_seg, outputnode, [('output_image', 'mni_seg')]),
            (mni_tpms, outputnode, [('output_image', 'mni_tpms')]),
        ])

    seg2msks = pe.Node(niu.Function(function=_seg2msks), name='seg2msks')
    seg_rpt = pe.Node(ROIsPlot(colors=['r', 'magenta', 'b', 'g']), name='seg_rpt')
    anat_reports_wf = init_anat_reports_wf(
        reportlets_dir=reportlets_dir, output_spaces=output_spaces, template=template,
        freesurfer=freesurfer)
    workflow.connect([
        (inputnode, anat_reports_wf, [
            (('t1w', fix_multi_T1w_source_name), 'inputnode.source_file')]),
        (anat_template_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.t1_conform_report')]),
        (anat_template_wf, seg_rpt, [
            ('outputnode.t1_template', 'in_file')]),
        (t1_seg, seg2msks, [('tissue_class_map', 'in_file')]),
        (seg2msks, seg_rpt, [('out', 'in_rois')]),
        (outputnode, seg_rpt, [('t1_mask', 'in_mask')]),
        (seg_rpt, anat_reports_wf, [('out_report', 'inputnode.seg_report')]),
    ])

    if freesurfer:
        workflow.connect([
            (surface_recon_wf, anat_reports_wf, [
                ('outputnode.out_report', 'inputnode.recon_report')])
        ])
    if 'template' in output_spaces:
        workflow.connect([
            (t1_2_mni, anat_reports_wf, [('out_report', 'inputnode.t1_2_mni_report')]),
        ])

    anat_derivatives_wf = init_anat_derivatives_wf(output_dir=output_dir,
                                                   output_spaces=output_spaces,
                                                   template=template,
                                                   freesurfer=freesurfer)

    workflow.connect([
        (anat_template_wf, anat_derivatives_wf, [
            ('outputnode.t1w_valid_list', 'inputnode.source_files')]),
        (outputnode, anat_derivatives_wf, [
            ('t1_template_transforms', 'inputnode.t1_template_transforms'),
            ('t1_preproc', 'inputnode.t1_preproc'),
            ('t1_mask', 'inputnode.t1_mask'),
            ('t1_seg', 'inputnode.t1_seg'),
            ('t1_tpms', 'inputnode.t1_tpms'),
            ('t1_2_mni_forward_transform', 'inputnode.t1_2_mni_forward_transform'),
            ('t1_2_mni_reverse_transform', 'inputnode.t1_2_mni_reverse_transform'),
            ('t1_2_mni', 'inputnode.t1_2_mni'),
            ('mni_mask', 'inputnode.mni_mask'),
            ('mni_seg', 'inputnode.mni_seg'),
            ('mni_tpms', 'inputnode.mni_tpms'),
            ('t1_2_fsnative_forward_transform', 'inputnode.t1_2_fsnative_forward_transform'),
            ('surfaces', 'inputnode.surfaces'),
        ]),
    ])

    return workflow
Example #15
def init_anat_derivatives_wf(output_dir, output_spaces, template, freesurfer,
                             name='anat_derivatives_wf'):
    """
    Set up a battery of datasinks to store derivatives in the right location
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['source_files', 't1_template_transforms',
                    't1_preproc', 't1_mask', 't1_seg', 't1_tpms',
                    't1_2_mni_forward_transform', 't1_2_mni_reverse_transform',
                    't1_2_mni', 'mni_mask', 'mni_seg', 'mni_tpms',
                    't1_2_fsnative_forward_transform', 'surfaces']),
        name='inputnode')

    t1_name = pe.Node(niu.Function(function=fix_multi_T1w_source_name), name='t1_name')

    ds_t1_preproc = pe.Node(
        DerivativesDataSink(base_directory=output_dir, suffix='preproc'),
        name='ds_t1_preproc', run_without_submitting=True)

    ds_t1_mask = pe.Node(
        DerivativesDataSink(base_directory=output_dir, suffix='brainmask'),
        name='ds_t1_mask', run_without_submitting=True)

    ds_t1_seg = pe.Node(
        DerivativesDataSink(base_directory=output_dir, suffix='dtissue'),
        name='ds_t1_seg', run_without_submitting=True)

    ds_t1_tpms = pe.Node(
        DerivativesDataSink(base_directory=output_dir,
                            suffix='class-{extra_value}_probtissue'),
        name='ds_t1_tpms', run_without_submitting=True)
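    # FSL FAST returns tissue probability maps in CSF, GM, WM order for T1w
    # inputs, so the labels below must follow that ordering.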
    ds_t1_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    suffix_fmt = 'space-{}_{}'.format
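    # e.g. suffix_fmt('MNI152NLin2009cAsym', 'preproc') yields
    # 'space-MNI152NLin2009cAsym_preproc'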
    ds_t1_mni = pe.Node(
        DerivativesDataSink(base_directory=output_dir,
                            suffix=suffix_fmt(template, 'preproc')),
        name='ds_t1_mni', run_without_submitting=True)

    ds_mni_mask = pe.Node(
        DerivativesDataSink(base_directory=output_dir,
                            suffix=suffix_fmt(template, 'brainmask')),
        name='ds_mni_mask', run_without_submitting=True)

    ds_mni_seg = pe.Node(
        DerivativesDataSink(base_directory=output_dir,
                            suffix=suffix_fmt(template, 'dtissue')),
        name='ds_mni_seg', run_without_submitting=True)

    ds_mni_tpms = pe.Node(
        DerivativesDataSink(base_directory=output_dir,
                            suffix=suffix_fmt(template, 'class-{extra_value}_probtissue')),
        name='ds_mni_tpms', run_without_submitting=True)
    ds_mni_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    # Transforms
    suffix_fmt = 'space-{}_target-{}_{}'.format
    ds_t1_mni_inv_warp = pe.Node(
        DerivativesDataSink(base_directory=output_dir,
                            suffix=suffix_fmt(template, 'T1w', 'warp')),
        name='ds_t1_mni_inv_warp', run_without_submitting=True)

    ds_t1_template_transforms = pe.MapNode(
        DerivativesDataSink(base_directory=output_dir, suffix=suffix_fmt('orig', 'T1w', 'affine')),
        iterfield=['source_file', 'in_file'],
        name='ds_t1_template_transforms', run_without_submitting=True)

    suffix_fmt = 'target-{}_{}'.format
    ds_t1_mni_warp = pe.Node(
        DerivativesDataSink(base_directory=output_dir, suffix=suffix_fmt(template, 'warp')),
        name='ds_t1_mni_warp', run_without_submitting=True)

    lta_2_itk = pe.Node(fs.utils.LTAConvert(out_itk=True), name='lta_2_itk')

    ds_t1_fsnative = pe.Node(
        DerivativesDataSink(base_directory=output_dir, suffix=suffix_fmt('fsnative', 'affine')),
        name='ds_t1_fsnative', run_without_submitting=True)

    name_surfs = pe.MapNode(GiftiNameSource(pattern=r'(?P<LR>[lr])h.(?P<surf>.+)_converted.gii',
                                            template='{surf}.{LR}.surf'),
                            iterfield='in_file',
                            name='name_surfs',
                            run_without_submitting=True)

    ds_surfs = pe.MapNode(
        DerivativesDataSink(base_directory=output_dir),
        iterfield=['in_file', 'suffix'], name='ds_surfs', run_without_submitting=True)

    workflow.connect([
        (inputnode, t1_name, [('source_files', 'in_files')]),
        (inputnode, ds_t1_template_transforms, [('source_files', 'source_file'),
                                                ('t1_template_transforms', 'in_file')]),
        (inputnode, ds_t1_preproc, [('t1_preproc', 'in_file')]),
        (inputnode, ds_t1_mask, [('t1_mask', 'in_file')]),
        (inputnode, ds_t1_seg, [('t1_seg', 'in_file')]),
        (inputnode, ds_t1_tpms, [('t1_tpms', 'in_file')]),
        (t1_name, ds_t1_preproc, [('out', 'source_file')]),
        (t1_name, ds_t1_mask, [('out', 'source_file')]),
        (t1_name, ds_t1_seg, [('out', 'source_file')]),
        (t1_name, ds_t1_tpms, [('out', 'source_file')]),
    ])

    if freesurfer:
        workflow.connect([
            (inputnode, lta_2_itk, [('t1_2_fsnative_forward_transform', 'in_lta')]),
            (t1_name, ds_t1_fsnative, [('out', 'source_file')]),
            (lta_2_itk, ds_t1_fsnative, [('out_itk', 'in_file')]),
            (inputnode, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_surfs, [('surfaces', 'in_file')]),
            (t1_name, ds_surfs, [('out', 'source_file')]),
            (name_surfs, ds_surfs, [('out_name', 'suffix')]),
        ])
    if 'template' in output_spaces:
        workflow.connect([
            (inputnode, ds_t1_mni_warp, [('t1_2_mni_forward_transform', 'in_file')]),
            (inputnode, ds_t1_mni_inv_warp, [('t1_2_mni_reverse_transform', 'in_file')]),
            (inputnode, ds_t1_mni, [('t1_2_mni', 'in_file')]),
            (inputnode, ds_mni_mask, [('mni_mask', 'in_file')]),
            (inputnode, ds_mni_seg, [('mni_seg', 'in_file')]),
            (inputnode, ds_mni_tpms, [('mni_tpms', 'in_file')]),
            (t1_name, ds_t1_mni_warp, [('out', 'source_file')]),
            (t1_name, ds_t1_mni_inv_warp, [('out', 'source_file')]),
            (t1_name, ds_t1_mni, [('out', 'source_file')]),
            (t1_name, ds_mni_mask, [('out', 'source_file')]),
            (t1_name, ds_mni_seg, [('out', 'source_file')]),
            (t1_name, ds_mni_tpms, [('out', 'source_file')]),
        ])

    return workflow
Example #16
def compute_iqms(settings, name='ComputeIQMs'):
    """
    Workflow that actually computes the IQMs

    .. workflow::

      from mriqc.workflows.functional import compute_iqms
      wf = compute_iqms(settings={'output_dir': 'out'})


    """
    from .utils import _tofloat
    from ..interfaces.transitional import GCOR

    biggest_file_gb = settings.get("biggest_file_size_gb", 1)
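    # Used below to scale per-node memory estimates (mem_gb) with the size of
    # the largest BOLD series in the dataset.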

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras', 'epi_mean', 'brainmask', 'hmc_epi', 'hmc_fd',
        'fd_thres', 'in_tsnr', 'metadata', 'exclude_index'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
                         name='outputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = settings.get('fd_thres', 0.2)
    deriv_dir = check_folder(
        op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(save_plot=False, save_all=True),
                     name='ComputeDVARS',
                     mem_gb=biggest_file_gb * 3)

    # AFNI quality measures
    fwhm_interface = get_fwhmx()
    fwhm = pe.Node(fwhm_interface, name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True,
                                         out_file='outliers.out'),
                       name='outliers',
                       mem_gb=biggest_file_gb * 2.5)

    quality = pe.Node(afni.QualityIndex(automask=True, out_file='quality.out'),
                      name='quality',
                      mem_gb=biggest_file_gb * 3)

    gcor = pe.Node(GCOR(), name='gcor', mem_gb=biggest_file_gb * 2)

    measures = pe.Node(FunctionalQC(),
                       name='measures',
                       mem_gb=biggest_file_gb * 3)

    workflow.connect([(inputnode, dvnode, [('hmc_epi', 'in_file'),
                                           ('brainmask', 'in_mask')]),
                      (inputnode, measures, [('epi_mean', 'in_epi'),
                                             ('brainmask', 'in_mask'),
                                             ('hmc_epi', 'in_hmc'),
                                             ('hmc_fd', 'in_fd'),
                                             ('fd_thres', 'fd_thres'),
                                             ('in_tsnr', 'in_tsnr')]),
                      (inputnode, fwhm, [('epi_mean', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (inputnode, quality, [('hmc_epi', 'in_file')]),
                      (inputnode, outliers, [('hmc_epi', 'in_file'),
                                             ('brainmask', 'mask')]),
                      (inputnode, gcor, [('hmc_epi', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (dvnode, measures, [('out_all', 'in_dvars')]),
                      (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
                      (dvnode, outputnode, [('out_all', 'out_dvars')]),
                      (outliers, outputnode, [('out_file', 'outliers')])])

    # Add metadata
    meta = pe.Node(ReadSidecarJSON(), name='metadata')
    addprov = pe.Node(niu.Function(function=_add_provenance),
                      name='provenance')
    addprov.inputs.settings = {
        'fd_thres': settings.get('fd_thres', 0.2),
        'hmc_fsl': settings.get('hmc_fsl', True),
    }

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(modality='bold', out_dir=deriv_dir),
                       name='datasink')

    workflow.connect([
        (inputnode, datasink, [('exclude_index', 'dummy_trs')]),
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, addprov, [('in_file', 'in_file')]),
        (meta, datasink, [('subject_id', 'subject_id'),
                          ('session_id', 'session_id'), ('task_id', 'task_id'),
                          ('acq_id', 'acq_id'), ('rec_id', 'rec_id'),
                          ('run_id', 'run_id'), ('out_dict', 'metadata')]),
        (addprov, datasink, [('out', 'provenance')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (gcor, datasink, [(('out', _tofloat), 'gcor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    # FFT spikes finder
    if settings.get('fft_spikes_detector', False):
        from .utils import slice_wise_fft
        spikes_fft = pe.Node(niu.Function(
            input_names=['in_file'],
            output_names=['n_spikes', 'out_spikes', 'out_fft'],
            function=slice_wise_fft),
                             name='SpikesFinderFFT')

        workflow.connect([
            (inputnode, spikes_fft, [('in_ras', 'in_file')]),
            (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                      ('out_fft', 'out_fft')]),
            (spikes_fft, datasink, [('n_spikes', 'spikes_num')])
        ])
    return workflow
Example #17
def individual_reports(settings, name='ReportsWorkflow'):
    """
    Encapsulates nodes writing plots

    .. workflow::

      from mriqc.workflows.functional import individual_reports
      wf = individual_reports(settings={'output_dir': 'out'})

    """
    from ..interfaces import PlotMosaic, PlotSpikes
    from ..reports import individual_html

    verbose = settings.get('verbose_reports', False)
    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    pages = 5
    extra_pages = 0
    if verbose:
        extra_pages = 4
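    # The Merge node below is sized for the five standard report pages, plus one
    # optional slot each for the FFT-spikes and ICA reportlets, plus four extra
    # pages when verbose reporting is enabled.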

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_iqms', 'in_ras', 'hmc_epi', 'epi_mean', 'brainmask', 'hmc_fd',
        'fd_thres', 'epi_parc', 'in_dvars', 'in_stddev', 'outliers',
        'in_spikes', 'in_fft', 'mni_report', 'ica_report'
    ]),
                        name='inputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = settings.get('fd_thres', 0.2)

    spmask = pe.Node(niu.Function(input_names=['in_file', 'in_mask'],
                                  output_names=['out_file', 'out_plot'],
                                  function=spikes_mask),
                     name='SpikesMask',
                     mem_gb=biggest_file_gb * 3.5)

    spikes_bg = pe.Node(Spikes(no_zscore=True, detrend=False),
                        name='SpikesFinderBgMask',
                        mem_gb=biggest_file_gb * 2.5)

    bigplot = pe.Node(FMRISummary(),
                      name='BigPlot',
                      mem_gb=biggest_file_gb * 3.5)
    workflow.connect([
        (inputnode, spikes_bg, [('in_ras', 'in_file')]),
        (inputnode, spmask, [('in_ras', 'in_file')]),
        (inputnode, bigplot, [('hmc_epi', 'in_func'), ('brainmask', 'in_mask'),
                              ('hmc_fd', 'fd'), ('fd_thres', 'fd_thres'),
                              ('in_dvars', 'dvars'), ('epi_parc', 'in_segm'),
                              ('outliers', 'outliers')]),
        (spikes_bg, bigplot, [('out_tsz', 'in_spikes_bg')]),
        (spmask, spikes_bg, [('out_file', 'in_mask')]),
    ])

    mosaic_mean = pe.Node(PlotMosaic(out_file='plot_func_mean_mosaic1.svg',
                                     cmap='Greys_r'),
                          name='PlotMosaicMean')

    mosaic_stddev = pe.Node(PlotMosaic(
        out_file='plot_func_stddev_mosaic2_stddev.svg', cmap='viridis'),
                            name='PlotMosaicSD')

    mplots = pe.Node(
        niu.Merge(pages + extra_pages +
                  int(settings.get('fft_spikes_detector', False)) +
                  int(settings.get('ica', False))),
        name='MergePlots')
    rnode = pe.Node(niu.Function(input_names=['in_iqms', 'in_plots'],
                                 output_names=['out_file'],
                                 function=individual_html),
                    name='GenerateReport')

    # Link images that should be reported
    dsplots = pe.Node(nio.DataSink(base_directory=settings['output_dir'],
                                   parameterization=False),
                      name='dsplots')
    dsplots.inputs.container = 'reports'

    workflow.connect([
        (inputnode, rnode, [('in_iqms', 'in_iqms')]),
        (inputnode, mosaic_mean, [('epi_mean', 'in_file')]),
        (inputnode, mosaic_stddev, [('in_stddev', 'in_file')]),
        (mosaic_mean, mplots, [('out_file', 'in1')]),
        (mosaic_stddev, mplots, [('out_file', 'in2')]),
        (bigplot, mplots, [('out_file', 'in3')]),
        (mplots, rnode, [('out', 'in_plots')]),
        (rnode, dsplots, [('out_file', '@html_report')]),
    ])

    if settings.get('fft_spikes_detector', False):
        mosaic_spikes = pe.Node(PlotSpikes(out_file='plot_spikes.svg',
                                           cmap='viridis',
                                           title='High-Frequency spikes'),
                                name='PlotSpikes')

        workflow.connect([(inputnode, mosaic_spikes, [('in_ras', 'in_file'),
                                                      ('in_spikes',
                                                       'in_spikes'),
                                                      ('in_fft', 'in_fft')]),
                          (mosaic_spikes, mplots, [('out_file', 'in4')])])

    if settings.get('ica', False):
        page_number = 4
        if settings.get('fft_spikes_detector', False):
            page_number += 1
        workflow.connect([(inputnode, mplots, [('ica_report',
                                                'in%d' % page_number)])])

    if not verbose:
        return workflow

    mosaic_zoom = pe.Node(PlotMosaic(out_file='plot_anat_mosaic1_zoomed.svg',
                                     cmap='Greys_r'),
                          name='PlotMosaicZoomed')

    mosaic_noise = pe.Node(PlotMosaic(out_file='plot_anat_mosaic2_noise.svg',
                                      only_noise=True,
                                      cmap='viridis_r'),
                           name='PlotMosaicNoise')

    # Verbose-reporting goes here
    from ..interfaces.viz import PlotContours

    plot_bmask = pe.Node(PlotContours(display_mode='z',
                                      levels=[.5],
                                      colors=['r'],
                                      cut_coords=10,
                                      out_file='bmask'),
                         name='PlotBrainmask')

    workflow.connect([
        (inputnode, plot_bmask, [('epi_mean', 'in_file'),
                                 ('brainmask', 'in_contours')]),
        (inputnode, mosaic_zoom, [('epi_mean', 'in_file'),
                                  ('brainmask', 'bbox_mask_file')]),
        (inputnode, mosaic_noise, [('epi_mean', 'in_file')]),
        (mosaic_zoom, mplots, [('out_file', 'in%d' % (pages + 1))]),
        (mosaic_noise, mplots, [('out_file', 'in%d' % (pages + 2))]),
        (plot_bmask, mplots, [('out_file', 'in%d' % (pages + 3))]),
        (inputnode, mplots, [('mni_report', 'in%d' % (pages + 4))]),
    ])
    return workflow
Example #18
def init_bbreg_wf(use_bbr, bold2t1w_dof, omp_nthreads, name='bbreg_wf'):
    """
    This workflow uses FreeSurfer's ``bbregister`` to register a BOLD image to
    a T1-weighted structural image.

    It is a counterpart to :py:func:`~fmriprep.workflows.util.init_fsl_bbr_wf`,
    which performs the same task using FSL's FLIRT with a BBR cost function.

    The ``use_bbr`` option permits a high degree of control over registration.
    If ``False``, standard, affine coregistration will be performed using
    FreeSurfer's ``mri_coreg`` tool.
    If ``True``, ``bbregister`` will be seeded with the initial transform found
    by ``mri_coreg`` (equivalent to running ``bbregister --init-coreg``).
    If ``None``, after ``bbregister`` is run, the resulting affine transform
    will be compared to the initial transform found by ``mri_coreg``.
    Excessive deviation will result in rejecting the BBR refinement and
    accepting the original, affine registration (see the short usage sketch
    after this function for the three modes).

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.registration import init_bbreg_wf
        wf = init_bbreg_wf(use_bbr=True, bold2t1w_dof=9, omp_nthreads=1)


    Parameters

        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        name : str, optional
            Workflow name (default: bbreg_wf)


    Inputs

        in_file
            Reference BOLD image to be registered
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer T1.mgz to T1w
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID (must have folder in SUBJECTS_DIR)
        t1_brain
            Unused (see :py:func:`~fmriprep.workflows.util.init_fsl_bbr_wf`)
        t1_seg
            Unused (see :py:func:`~fmriprep.workflows.util.init_fsl_bbr_wf`)


    Outputs

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        itk_t1_to_bold
            Affine transform from T1 space to BOLD space (ITK format)
        out_report
            Reportlet for assessing registration quality
        fallback
            Boolean indicating whether BBR was rejected (mri_coreg registration returned)

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            't1_2_fsnative_reverse_transform', 'subjects_dir', 'subject_id',  # BBRegister
            't1_seg', 't1_brain']),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(['itk_bold_to_t1', 'itk_t1_to_bold', 'out_report', 'fallback']),
        name='outputnode')

    mri_coreg = pe.Node(
        MRICoregRPT(dof=bold2t1w_dof, sep=[4], ftol=0.0001, linmintol=0.01,
                    generate_report=not use_bbr),
        name='mri_coreg', n_procs=omp_nthreads, mem_gb=5)

    lta_concat = pe.Node(ConcatenateLTA(out_file='out.lta'), name='lta_concat')
    # XXX LTA-FSL-ITK may ultimately be able to be replaced with a straightforward
    # LTA-ITK transform, but right now the translation parameters are off.
    lta2fsl_fwd = pe.Node(fs.utils.LTAConvert(out_fsl=True), name='lta2fsl_fwd')
    lta2fsl_inv = pe.Node(fs.utils.LTAConvert(out_fsl=True, invert=True), name='lta2fsl_inv')
    fsl2itk_fwd = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_fwd', mem_gb=DEFAULT_MEMORY_MIN_GB)
    fsl2itk_inv = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_inv', mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, mri_coreg, [('subjects_dir', 'subjects_dir'),
                                ('subject_id', 'subject_id'),
                                ('in_file', 'source_file')]),
        # Output ITK transforms
        (inputnode, lta_concat, [('t1_2_fsnative_reverse_transform', 'in_lta2')]),
        (lta_concat, lta2fsl_fwd, [('out_file', 'in_lta')]),
        (lta_concat, lta2fsl_inv, [('out_file', 'in_lta')]),
        (inputnode, fsl2itk_fwd, [('t1_brain', 'reference_file'),
                                  ('in_file', 'source_file')]),
        (inputnode, fsl2itk_inv, [('in_file', 'reference_file'),
                                  ('t1_brain', 'source_file')]),
        (lta2fsl_fwd, fsl2itk_fwd, [('out_fsl', 'transform_file')]),
        (lta2fsl_inv, fsl2itk_inv, [('out_fsl', 'transform_file')]),
        (fsl2itk_fwd, outputnode, [('itk_transform', 'itk_bold_to_t1')]),
        (fsl2itk_inv, outputnode, [('itk_transform', 'itk_t1_to_bold')]),
    ])

    # Short-circuit workflow building, use initial registration
    if use_bbr is False:
        workflow.connect([
            (mri_coreg, outputnode, [('out_report', 'out_report')]),
            (mri_coreg, lta_concat, [('out_lta_file', 'in_lta1')])])
        outputnode.inputs.fallback = True

        return workflow

    bbregister = pe.Node(
        BBRegisterRPT(dof=bold2t1w_dof, contrast_type='t2', registered_file=True,
                      out_lta_file=True, generate_report=True),
        name='bbregister', mem_gb=12)

    workflow.connect([
        (inputnode, bbregister, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id'),
                                 ('in_file', 'source_file')]),
        (mri_coreg, bbregister, [('out_lta_file', 'init_reg_file')]),
    ])

    # Short-circuit workflow building, use boundary-based registration
    if use_bbr is True:
        workflow.connect([
            (bbregister, outputnode, [('out_report', 'out_report')]),
            (bbregister, lta_concat, [('out_lta_file', 'in_lta1')])])
        outputnode.inputs.fallback = False

        return workflow

    transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name='transforms')
    reports = pe.Node(niu.Merge(2), run_without_submitting=True, name='reports')

    lta_ras2ras = pe.MapNode(fs.utils.LTAConvert(out_lta=True), iterfield=['in_lta'],
                             name='lta_ras2ras', mem_gb=2)
    compare_transforms = pe.Node(niu.Function(function=compare_xforms), name='compare_transforms')

    select_transform = pe.Node(niu.Select(), run_without_submitting=True, name='select_transform')
    select_report = pe.Node(niu.Select(), run_without_submitting=True, name='select_report')

    workflow.connect([
        (bbregister, transforms, [('out_lta_file', 'in1')]),
        (mri_coreg, transforms, [('out_lta_file', 'in2')]),
        # Normalize LTA transforms to RAS2RAS (inputs are VOX2VOX) and compare
        (transforms, lta_ras2ras, [('out', 'in_lta')]),
        (lta_ras2ras, compare_transforms, [('out_lta', 'lta_list')]),
        (compare_transforms, outputnode, [('out', 'fallback')]),
        # Select output transform
        (transforms, select_transform, [('out', 'inlist')]),
        (compare_transforms, select_transform, [('out', 'index')]),
        (select_transform, lta_concat, [('out', 'in_lta1')]),
        # Select output report
        (bbregister, reports, [('out_report', 'in1')]),
        (mri_coreg, reports, [('out_report', 'in2')]),
        (reports, select_report, [('out', 'inlist')]),
        (compare_transforms, select_report, [('out', 'index')]),
        (select_report, outputnode, [('out', 'out_report')]),
    ])

    return workflow
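

# A minimal sketch (not part of the original module) of the three ``use_bbr``
# modes described in the docstring above; dof and thread counts are merely
# illustrative.
def _example_bbreg_modes():
    rigid_only = init_bbreg_wf(use_bbr=False, bold2t1w_dof=6, omp_nthreads=1)  # mri_coreg only
    forced_bbr = init_bbreg_wf(use_bbr=True, bold2t1w_dof=9, omp_nthreads=1)   # BBR seeded by mri_coreg
    auto_bbr = init_bbreg_wf(use_bbr=None, bold2t1w_dof=9, omp_nthreads=1)     # BBR with rigid fallback
    return rigid_only, forced_bbr, auto_bbr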
Example #19
def init_fsl_bbr_wf(bold2t1w_dof, report, name='fsl_bbr_wf'):
    """
    This workflow uses FSL FLIRT to register a BOLD image to a T1-weighted
    structural image, using a boundary-based registration (BBR) cost function.

    It is a counterpart to :py:func:`~fmriprep.workflows.util.init_bbreg_wf`,
    which performs the same task using FreeSurfer's ``bbregister``.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.util import init_fsl_bbr_wf
        wf = init_fsl_bbr_wf(bold2t1w_dof=9, report=False)


    Parameters

        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        report : bool
            Generate visual report of registration quality
        name : str, optional
            Workflow name (default: fsl_bbr_wf)


    Inputs

        in_file
            Reference BOLD image to be registered
        t1_brain
            Skull-stripped T1-weighted structural image
        t1_seg
            FAST segmentation of ``t1_brain``
        fs_2_t1_transform
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)
        subjects_dir
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)
        subject_id
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)


    Outputs

        out_matrix_file
            FSL-style registration matrix
        out_reg_file
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)
        final_cost
            Value of cost function at final registration
        out_report
            Reportlet for assessing registration quality

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            'fs_2_t1_transform',
            'subjects_dir',
            'subject_id',  # BBRegister
            't1_seg',
            't1_brain'
        ]),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['out_matrix_file', 'out_reg_file', 'out_report', 'final_cost']),
                         name='outputnode')

    wm_mask = pe.Node(niu.Function(function=extract_wm), name='wm_mask')
    _FLIRT = FLIRTRPT if report else fsl.FLIRT
    flt_bbr_init = pe.Node(fsl.FLIRT(dof=6), name='flt_bbr_init')
    flt_bbr = pe.Node(_FLIRT(cost_func='bbr', dof=bold2t1w_dof, save_log=True),
                      name='flt_bbr')
    flt_bbr.inputs.schedule = op.join(os.getenv('FSLDIR'),
                                      'etc/flirtsch/bbr.sch')

    def get_final_cost(in_file):
        from niworkflows.nipype import logging
        with open(in_file, 'r') as fobj:
            for line in fobj:
                if line.startswith(' >> print U:1'):
                    costs = next(fobj).split()
                    return float(costs[0])
        logger = logging.getLogger('interface')
        logger.error('No cost report found in log file. Please report this '
                     'issue, with contents of {}'.format(in_file))

    get_cost = pe.Node(niu.Function(function=get_final_cost), name='get_cost')

    workflow.connect([
        (inputnode, wm_mask, [('t1_seg', 'in_seg')]),
        (inputnode, flt_bbr_init, [('in_file', 'in_file'),
                                   ('t1_brain', 'reference')]),
        (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (inputnode, flt_bbr, [('in_file', 'in_file'),
                              ('t1_brain', 'reference')]),
        (wm_mask, flt_bbr, [('out', 'wm_seg')]),
        (flt_bbr, outputnode, [('out_matrix_file', 'out_matrix_file')]),
        (flt_bbr, get_cost, [('out_log', 'in_file')]),
        (get_cost, outputnode, [('out', 'final_cost')]),
    ])

    if report:
        flt_bbr.inputs.generate_report = True
        workflow.connect([(flt_bbr, outputnode, [('out_report', 'out_report')])
                          ])

    return workflow
Example #20
def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, name='fsl_bbr_wf'):
    """
    This workflow uses FSL FLIRT to register a BOLD image to a T1-weighted
    structural image, using a boundary-based registration (BBR) cost function.

    It is a counterpart to :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`,
    which performs the same task using FreeSurfer's ``bbregister``.

    The ``use_bbr`` option permits a high degree of control over registration.
    If ``False``, standard, rigid coregistration will be performed by FLIRT.
    If ``True``, FLIRT-BBR will be seeded with the initial transform found by
    the rigid coregistration.
    If ``None``, after FLIRT-BBR is run, the resulting affine transform
    will be compared to the initial transform found by FLIRT.
    Excessive deviation will result in rejecting the BBR refinement and
    accepting the original, affine registration.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.registration import init_fsl_bbr_wf
        wf = init_fsl_bbr_wf(use_bbr=True, bold2t1w_dof=9)


    Parameters

        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        name : str, optional
            Workflow name (default: fsl_bbr_wf)


    Inputs

        in_file
            Reference BOLD image to be registered
        t1_brain
            Skull-stripped T1-weighted structural image
        t1_seg
            FAST segmentation of ``t1_brain``
        t1_2_fsnative_reverse_transform
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)
        subjects_dir
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)
        subject_id
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)


    Outputs

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        itk_t1_to_bold
            Affine transform from T1 space to BOLD space (ITK format)
        out_report
            Reportlet for assessing registration quality
        fallback
            Boolean indicating whether BBR was rejected (rigid FLIRT registration returned)

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            't1_2_fsnative_reverse_transform', 'subjects_dir', 'subject_id',  # BBRegister
            't1_seg', 't1_brain']),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(['itk_bold_to_t1', 'itk_t1_to_bold', 'out_report', 'fallback']),
        name='outputnode')

    wm_mask = pe.Node(niu.Function(function=extract_wm), name='wm_mask')
    flt_bbr_init = pe.Node(FLIRTRPT(dof=6, generate_report=not use_bbr), name='flt_bbr_init')

    invt_bbr = pe.Node(fsl.ConvertXFM(invert_xfm=True), name='invt_bbr',
                       mem_gb=DEFAULT_MEMORY_MIN_GB)

    #  BOLD to T1 transform matrix is from fsl, using c3 tools to convert to
    #  something ANTs will like.
    fsl2itk_fwd = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_fwd', mem_gb=DEFAULT_MEMORY_MIN_GB)
    fsl2itk_inv = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_inv', mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, flt_bbr_init, [('in_file', 'in_file'),
                                   ('t1_brain', 'reference')]),
        (inputnode, fsl2itk_fwd, [('t1_brain', 'reference_file'),
                                  ('in_file', 'source_file')]),
        (inputnode, fsl2itk_inv, [('in_file', 'reference_file'),
                                  ('t1_brain', 'source_file')]),
        (invt_bbr, fsl2itk_inv, [('out_file', 'transform_file')]),
        (fsl2itk_fwd, outputnode, [('itk_transform', 'itk_bold_to_t1')]),
        (fsl2itk_inv, outputnode, [('itk_transform', 'itk_t1_to_bold')]),
    ])

    # Short-circuit workflow building, use rigid registration
    if use_bbr is False:
        workflow.connect([
            (flt_bbr_init, invt_bbr, [('out_matrix_file', 'in_file')]),
            (flt_bbr_init, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]),
            (flt_bbr_init, outputnode, [('out_report', 'out_report')]),
        ])
        outputnode.inputs.fallback = True

        return workflow

    flt_bbr = pe.Node(
        FLIRTRPT(cost_func='bbr', dof=bold2t1w_dof, generate_report=True,
                 schedule=op.join(os.getenv('FSLDIR'), 'etc/flirtsch/bbr.sch')),
        name='flt_bbr')

    workflow.connect([
        (inputnode, wm_mask, [('t1_seg', 'in_seg')]),
        (inputnode, flt_bbr, [('in_file', 'in_file'),
                              ('t1_brain', 'reference')]),
        (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (wm_mask, flt_bbr, [('out', 'wm_seg')]),
    ])

    # Short-circuit workflow building, use boundary-based registration
    if use_bbr is True:
        workflow.connect([
            (flt_bbr, invt_bbr, [('out_matrix_file', 'in_file')]),
            (flt_bbr, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]),
            (flt_bbr, outputnode, [('out_report', 'out_report')]),
        ])
        outputnode.inputs.fallback = False

        return workflow

    transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name='transforms')
    reports = pe.Node(niu.Merge(2), run_without_submitting=True, name='reports')

    compare_transforms = pe.Node(niu.Function(function=compare_xforms), name='compare_transforms')
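    # compare_xforms (defined elsewhere in this module) is expected to return a
    # boolean: True when the BBR solution deviates excessively from the initial
    # rigid registration. It doubles as the Select index below, so 0/False keeps
    # the BBR transform (in1) and 1/True falls back to the initial one (in2).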

    select_transform = pe.Node(niu.Select(), run_without_submitting=True, name='select_transform')
    select_report = pe.Node(niu.Select(), run_without_submitting=True, name='select_report')

    fsl_to_lta = pe.MapNode(fs.utils.LTAConvert(out_lta=True), iterfield=['in_fsl'],
                            name='fsl_to_lta')

    workflow.connect([
        (flt_bbr, transforms, [('out_matrix_file', 'in1')]),
        (flt_bbr_init, transforms, [('out_matrix_file', 'in2')]),
        # Convert FSL transforms to LTA (RAS2RAS) transforms and compare
        (inputnode, fsl_to_lta, [('in_file', 'source_file'),
                                 ('t1_brain', 'target_file')]),
        (transforms, fsl_to_lta, [('out', 'in_fsl')]),
        (fsl_to_lta, compare_transforms, [('out_lta', 'lta_list')]),
        (compare_transforms, outputnode, [('out', 'fallback')]),
        # Select output transform
        (transforms, select_transform, [('out', 'inlist')]),
        (compare_transforms, select_transform, [('out', 'index')]),
        (select_transform, invt_bbr, [('out', 'in_file')]),
        (select_transform, fsl2itk_fwd, [('out', 'transform_file')]),
        (flt_bbr, reports, [('out_report', 'in1')]),
        (flt_bbr_init, reports, [('out_report', 'in2')]),
        (reports, select_report, [('out', 'inlist')]),
        (compare_transforms, select_report, [('out', 'index')]),
        (select_report, outputnode, [('out', 'out_report')]),
    ])

    return workflow
Example #21
def init_phdiff_wf(reportlets_dir, omp_nthreads, name='phdiff_wf'):
    """
    Estimates the fieldmap using a phase-difference image and one or more
    magnitude images corresponding to two or more :abbr:`GRE (Gradient Echo sequence)`
    acquisitions. The `original code was taken from nipype
    <https://github.com/nipy/nipype/blob/master/nipype/workflows/dmri/fsl/artifacts.py#L514>`_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.phdiff import init_phdiff_wf
        wf = init_phdiff_wf(reportlets_dir='.', omp_nthreads=1)


    Outputs::

      outputnode.fmap_ref - The average magnitude image, skull-stripped
      outputnode.fmap_mask - The brain mask applied to the fieldmap
      outputnode.fmap - The estimated fieldmap in Hz


    """

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['magnitude', 'phasediff']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
        name='outputnode')

    def _pick1st(inlist):
        return inlist[0]

    # Read phasediff echo times
    meta = pe.Node(ReadSidecarJSON(),
                   name='meta',
                   mem_gb=0.01,
                   run_without_submitting=True)
    dte = pe.Node(niu.Function(function=_delta_te), name='dte', mem_gb=0.01)
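    # _delta_te (defined elsewhere in this module) extracts the echo-time
    # difference between the two GRE echoes from the phasediff JSON metadata.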

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                 name='n4',
                 n_procs=omp_nthreads)
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_fmap_mask = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                               suffix='fmap_mask'),
                           name='ds_fmap_mask',
                           mem_gb=0.01,
                           run_without_submitting=True)
    # uses mask from bet; outputs a mask
    # dilate = pe.Node(fsl.maths.MathsCommand(
    #     nan2zeros=True, args='-kernel sphere 5 -dilM'), name='MskDilate')

    # phase diff -> radians
    pha2rads = pe.Node(niu.Function(function=siemens2rads), name='pha2rads')

    # FSL PRELUDE will perform phase-unwrapping
    prelude = pe.Node(fsl.PRELUDE(), name='prelude')

    denoise = pe.Node(fsl.SpatialFilter(operation='median',
                                        kernel_shape='sphere',
                                        kernel_size=3),
                      name='denoise')

    demean = pe.Node(niu.Function(function=demean_image), name='demean')

    cleanup_wf = cleanup_edge_pipeline(name="cleanup_wf")

    compfmap = pe.Node(niu.Function(function=phdiff2fmap), name='compfmap')

    # The phdiff2fmap interface is equivalent to:
    # rad2rsec (using rads2radsec from nipype.workflows.dmri.fsl.utils)
    # pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='ComputeFieldmapFUGUE')
    # rsec2hz (divide by 2pi)
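    # i.e. fmap[Hz] = delta_phase[rad] / (2 * pi * delta_TE[s])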

    workflow = pe.Workflow(name=name)
    workflow.connect([
        (inputnode, meta, [('phasediff', 'in_file')]),
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (magmrg, n4, [('out_avg', 'input_image')]),
        (n4, prelude, [('output_image', 'magnitude_file')]),
        (n4, bet, [('output_image', 'in_file')]),
        (bet, prelude, [('mask_file', 'mask_file')]),
        (inputnode, pha2rads, [('phasediff', 'in_file')]),
        (pha2rads, prelude, [('out', 'phase_file')]),
        (meta, dte, [('out_dict', 'in_values')]),
        (dte, compfmap, [('out', 'delta_te')]),
        (prelude, denoise, [('unwrapped_phase_file', 'in_file')]),
        (denoise, demean, [('out_file', 'in_file')]),
        (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
        (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
        (cleanup_wf, compfmap, [('outputnode.out_file', 'in_file')]),
        (compfmap, outputnode, [('out', 'fmap')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (inputnode, ds_fmap_mask, [('phasediff', 'source_file')]),
        (bet, ds_fmap_mask, [('out_report', 'in_file')]),
    ])

    return workflow
Example #22
def compute_iqms(settings, modality='T1w', name='ComputeIQMs'):
    """
    Workflow that actually computes the IQMs

    .. workflow::

        from mriqc.workflows.anatomical import compute_iqms
        wf = compute_iqms(settings={'output_dir': 'out'})

    """
    from .utils import _tofloat
    from ..interfaces.anatomical import Harmonize

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras', 'brainmask', 'airmask', 'artmask', 'headmask',
        'rotmask', 'segmentation', 'inu_corrected', 'in_inu', 'pvms',
        'metadata', 'inverse_composite_transform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'out_noisefit']),
        name='outputnode')

    deriv_dir = check_folder(
        op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # Extract metadata
    meta = pe.Node(ReadSidecarJSON(), name='metadata')

    # Add provenance
    addprov = pe.Node(niu.Function(function=_add_provenance),
                      name='provenance')
    addprov.inputs.settings = {'testing': settings.get('testing', False)}

    # AFNI check smoothing
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16

    # Harmonize
    homog = pe.Node(Harmonize(), name='harmonize')

    # Mortamet's QI2
    getqi2 = pe.Node(ComputeQI2(erodemsk=settings.get('testing', False)),
                     name='ComputeQI2')

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(), 'measures')

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(ants.ApplyTransforms(dimension=3,
                                           default_value=0,
                                           interpolation='Linear',
                                           float=True),
                      iterfield=['input_image'],
                      name='MNItpms2t1')
    invt.inputs.input_image = [
        op.join(get_mni_icbm152_nlin_asym_09c(), fname + '.nii.gz')
        for fname in ['1mm_tpm_csf', '1mm_tpm_gm', '1mm_tpm_wm']
    ]
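    # The MNI ICBM 2009c tissue priors are mapped back into subject (T1w) space
    # with the inverse normalization transform; StructuralQC consumes them as
    # ``mni_tpms`` to compare against the subject's own segmentation.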

    datasink = pe.Node(IQMFileSink(modality=modality, out_dir=deriv_dir),
                       name='datasink')
    datasink.inputs.modality = modality

    def _getwm(inlist):
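        # pvms are ordered CSF, GM, WM; the last entry is the white-matter map
        # used for intensity harmonization.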
        return inlist[-1]

    workflow.connect([
        (inputnode, meta, [('in_file', 'in_file')]),
        (meta, datasink, [('subject_id', 'subject_id'),
                          ('session_id', 'session_id'), ('acq_id', 'acq_id'),
                          ('rec_id', 'rec_id'), ('run_id', 'run_id'),
                          ('out_dict', 'metadata')]),
        (inputnode, addprov, [('in_file', 'in_file'), ('airmask', 'air_msk'),
                              ('rotmask', 'rot_msk')]),
        (inputnode, getqi2, [('in_ras', 'in_file'), ('airmask', 'air_msk')]),
        (inputnode, homog, [('inu_corrected', 'in_file'),
                            (('pvms', _getwm), 'wm_mask')]),
        (inputnode, measures, [('in_inu', 'in_bias'), ('in_ras', 'in_file'),
                               ('airmask', 'air_msk'),
                               ('headmask', 'head_msk'),
                               ('artmask', 'artifact_msk'),
                               ('rotmask', 'rot_msk'),
                               ('segmentation', 'in_segm'),
                               ('pvms', 'in_pvms')]),
        (inputnode, fwhm, [('in_ras', 'in_file'), ('brainmask', 'mask')]),
        (inputnode, invt, [('in_ras', 'reference_image'),
                           ('inverse_composite_transform', 'transforms')]),
        (homog, measures, [('out_file', 'in_noinu')]),
        (invt, measures, [('output_image', 'mni_tpms')]),
        (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
        (measures, datasink, [('out_qc', 'root')]),
        (addprov, datasink, [('out', 'provenance')]),
        (getqi2, datasink, [('qi2', 'qi_2')]),
        (getqi2, outputnode, [('out_file', 'out_noisefit')]),
        (datasink, outputnode, [('out_file', 'out_file')]),
    ])
    return workflow
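
A minimal driver sketch for the workflow above (paths, the working directory, and the plugin settings are illustrative; only two of the inputnode fields are shown, the remaining ones would be set the same way):

# Hypothetical usage of compute_iqms(); all file names are placeholders.
wf = compute_iqms(settings={'output_dir': 'out'})
wf.base_dir = 'work'                                  # nipype working directory
wf.inputs.inputnode.in_file = 'sub-01_T1w.nii.gz'     # original BIDS image
wf.inputs.inputnode.in_ras = 'sub-01_T1w_ras.nii.gz'  # RAS-reoriented copy
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})
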
Example #23
def init_pepolar_unwarp_wf(fmaps,
                           bold_file,
                           omp_nthreads,
                           layout=None,
                           fmaps_pes=None,
                           bold_file_pe=None,
                           name="pepolar_unwarp_wf"):
    """
    This workflow takes in a set of EPI files with a phase encoding direction
    opposite to that of the target file and calculates a displacements field
    (in other words, an ANTs-compatible warp file).

    This procedure works if only one '_epi' file is present
    (as long as its phase encoding direction is opposite to that of the target
    file). The target file will be used to estimate the field distortion.
    However, if another '_epi' file with a phase encoding direction matching
    the target is present, it will be used instead.

    Currently, using different phase encoding dimensions in the target file
    and the '_epi' file(s) (for example, 'i' and 'j') is not supported.

    The warp field correcting for the distortions is estimated using AFNI's
    3dQwarp, with displacement estimation limited to the target file phase
    encoding direction.

    It also calculates a new mask for the input dataset that takes into
    account the distortions.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.unwarp import init_pepolar_unwarp_wf
        wf = init_pepolar_unwarp_wf(fmaps=['/dataset/sub-01/fmap/sub-01_epi.nii.gz'],
                                    fmaps_pes=['j-'],
                                    bold_file='/dataset/sub-01/func/sub-01_task-rest_bold.nii.gz',
                                    bold_file_pe='j',
                                    omp_nthreads=8)


    Inputs

        in_reference
            the reference image
        in_reference_brain
            the reference image skullstripped
        in_mask
            a brain mask corresponding to ``in_reference``
        name_source
            not used, kept for signature compatibility with ``init_sdc_unwarp_wf``

    Outputs

        out_reference
            the ``in_reference`` after unwarping
        out_reference_brain
            the ``in_reference`` after unwarping and skullstripping
        out_warp
            the corresponding :abbr:`DFM (displacements field map)` compatible with
            ANTs
        out_mask
            mask of the unwarped input file
        out_mask_report
            reportlet for the skullstripping

    """
    if not bold_file_pe:
        bold_file_pe = layout.get_metadata(bold_file)["PhaseEncodingDirection"]

    usable_fieldmaps_matching_pe = []
    usable_fieldmaps_opposite_pe = []
    args = '-noXdis -noYdis -noZdis'
    rm_arg = {'i': '-noXdis', 'j': '-noYdis', 'k': '-noZdis'}[bold_file_pe[0]]
    args = args.replace(rm_arg, '')

    for i, fmap in enumerate(fmaps):
        if fmaps_pes:
            fmap_pe = fmaps_pes[i]
        else:
            fmap_pe = layout.get_metadata(fmap)["PhaseEncodingDirection"]
        if fmap_pe[0] == bold_file_pe[0]:
            if len(fmap_pe) != len(bold_file_pe):
                add_list = usable_fieldmaps_opposite_pe
            else:
                add_list = usable_fieldmaps_matching_pe
            add_list.append(fmap)

    if len(usable_fieldmaps_opposite_pe) == 0:
        raise Exception("None of the discovered fieldmaps has the right "
                        "phase encoding direction. Possibly a problem with "
                        "metadata. If not, rerun with '--ignore fieldmaps' to "
                        "skip distortion correction step.")

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_reference', 'in_reference_brain', 'in_mask', 'name_source'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'out_reference', 'out_reference_brain', 'out_warp', 'out_mask',
        'out_mask_report'
    ]),
                         name='outputnode')

    prepare_epi_opposite_wf = init_prepare_epi_wf(
        ants_nthreads=omp_nthreads, name="prepare_epi_opposite_wf")
    prepare_epi_opposite_wf.inputs.inputnode.fmaps = usable_fieldmaps_opposite_pe

    qwarp = pe.Node(afni.QwarpPlusMinus(
        pblur=[0.05, 0.05],
        blur=[-1, -1],
        noweight=True,
        minpatch=9,
        nopadWARP=True,
        environ={'OMP_NUM_THREADS': str(omp_nthreads)},
        args=args),
                    name='qwarp')
    qwarp.interface.num_threads = omp_nthreads

    workflow.connect([
        (inputnode, prepare_epi_opposite_wf, [('in_reference_brain',
                                               'inputnode.ref_brain')]),
        (prepare_epi_opposite_wf, qwarp, [('outputnode.out_file', 'base_file')
                                          ]),
    ])

    if usable_fieldmaps_matching_pe:
        prepare_epi_matching_wf = init_prepare_epi_wf(
            ants_nthreads=omp_nthreads, name="prepare_epi_matching_wf")
        prepare_epi_matching_wf.inputs.inputnode.fmaps = usable_fieldmaps_matching_pe

        workflow.connect([
            (inputnode, prepare_epi_matching_wf, [('in_reference_brain',
                                                   'inputnode.ref_brain')]),
            (prepare_epi_matching_wf, qwarp, [('outputnode.out_file',
                                               'source_file')]),
        ])
    else:
        workflow.connect([(inputnode, qwarp, [('in_reference_brain',
                                               'source_file')])])

    to_ants = pe.Node(niu.Function(function=_fix_hdr), name='to_ants')

    cphdr_warp = pe.Node(CopyHeader(), name='cphdr_warp')

    unwarp_reference = pe.Node(ANTSApplyTransformsRPT(
        dimension=3,
        generate_report=False,
        float=True,
        interpolation='LanczosWindowedSinc'),
                               name='unwarp_reference')

    enhance_and_skullstrip_epi_wf = init_enhance_and_skullstrip_epi_wf()

    workflow.connect([
        (inputnode, cphdr_warp, [('in_reference', 'hdr_file')]),
        (qwarp, cphdr_warp, [('source_warp', 'in_file')]),
        (cphdr_warp, to_ants, [('out_file', 'in_file')]),
        (to_ants, unwarp_reference, [('out', 'transforms')]),
        (inputnode, unwarp_reference, [('in_reference', 'reference_image'),
                                       ('in_reference', 'input_image')]),
        (unwarp_reference, enhance_and_skullstrip_epi_wf,
         [('output_image', 'inputnode.in_file')]),
        (unwarp_reference, outputnode, [('output_image', 'out_reference')]),
        (enhance_and_skullstrip_epi_wf, outputnode,
         [('outputnode.mask_file', 'out_mask'),
          ('outputnode.out_report', 'out_mask_report'),
          ('outputnode.skull_stripped_file', 'out_reference_brain')]),
        (to_ants, outputnode, [('out', 'out_warp')]),
    ])

    return workflow
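
The `args` computation at the top of this function restricts 3dQwarp displacements to the phase-encoding axis; a standalone restatement of that step (the direction 'j' is an arbitrary example):

# Build the 3dQwarp argument string: disable displacements along every axis
# except the phase-encoding one.
bold_file_pe = 'j'   # e.g. read from the PhaseEncodingDirection metadata field
args = '-noXdis -noYdis -noZdis'
rm_arg = {'i': '-noXdis', 'j': '-noYdis', 'k': '-noZdis'}[bold_file_pe[0]]
args = args.replace(rm_arg, '')
print(args)          # '-noXdis  -noZdis' -> only warping along Y is allowed
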
Example #24
def individual_reports(settings, name='ReportsWorkflow'):
    """
    Encapsulates nodes writing plots

    .. workflow::

        from mriqc.workflows.anatomical import individual_reports
        wf = individual_reports(settings={'output_dir': 'out'})

    """
    from ..interfaces import PlotMosaic
    from ..reports import individual_html

    verbose = settings.get('verbose_reports', False)
    pages = 2
    extra_pages = 0
    if verbose:
        extra_pages = 7

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_ras', 'brainmask', 'headmask', 'airmask', 'artmask', 'rotmask',
        'segmentation', 'inu_corrected', 'noisefit', 'in_iqms', 'mni_report'
    ]),
                        name='inputnode')

    mosaic_zoom = pe.Node(PlotMosaic(out_file='plot_anat_mosaic1_zoomed.svg',
                                     title='zoomed',
                                     cmap='Greys_r'),
                          name='PlotMosaicZoomed')

    mosaic_noise = pe.Node(PlotMosaic(out_file='plot_anat_mosaic2_noise.svg',
                                      title='noise enhanced',
                                      only_noise=True,
                                      cmap='viridis_r'),
                           name='PlotMosaicNoise')

    mplots = pe.Node(niu.Merge(pages + extra_pages), name='MergePlots')
    rnode = pe.Node(niu.Function(input_names=['in_iqms', 'in_plots'],
                                 output_names=['out_file'],
                                 function=individual_html),
                    name='GenerateReport')

    # Link images that should be reported
    dsplots = pe.Node(nio.DataSink(base_directory=settings['output_dir'],
                                   parameterization=False),
                      name='dsplots')
    dsplots.inputs.container = 'reports'

    workflow.connect([
        (inputnode, rnode, [('in_iqms', 'in_iqms')]),
        (inputnode, mosaic_zoom, [('in_ras', 'in_file'),
                                  ('brainmask', 'bbox_mask_file')]),
        (inputnode, mosaic_noise, [('in_ras', 'in_file')]),
        (mosaic_zoom, mplots, [('out_file', "in1")]),
        (mosaic_noise, mplots, [('out_file', "in2")]),
        (mplots, rnode, [('out', 'in_plots')]),
        (rnode, dsplots, [('out_file', "@html_report")]),
    ])

    if not verbose:
        return workflow

    from ..interfaces.viz import PlotContours
    from ..viz.utils import plot_bg_dist
    plot_bgdist = pe.Node(niu.Function(input_names=['in_file'],
                                       output_names=['out_file'],
                                       function=plot_bg_dist),
                          name='PlotBackground')

    plot_segm = pe.Node(PlotContours(display_mode='z',
                                     levels=[.5, 1.5, 2.5],
                                     cut_coords=10,
                                     colors=['r', 'g', 'b']),
                        name='PlotSegmentation')

    plot_bmask = pe.Node(PlotContours(display_mode='z',
                                      levels=[.5],
                                      colors=['r'],
                                      cut_coords=10,
                                      out_file='bmask'),
                         name='PlotBrainmask')
    plot_airmask = pe.Node(PlotContours(display_mode='x',
                                        levels=[.5],
                                        colors=['r'],
                                        cut_coords=6,
                                        out_file='airmask'),
                           name='PlotAirmask')
    plot_headmask = pe.Node(PlotContours(display_mode='x',
                                         levels=[.5],
                                         colors=['r'],
                                         cut_coords=6,
                                         out_file='headmask'),
                            name='PlotHeadmask')
    plot_artmask = pe.Node(PlotContours(display_mode='z',
                                        levels=[.5],
                                        colors=['r'],
                                        cut_coords=10,
                                        out_file='artmask',
                                        saturate=True),
                           name='PlotArtmask')

    workflow.connect([
        (inputnode, plot_segm, [('in_ras', 'in_file'),
                                ('segmentation', 'in_contours')]),
        (inputnode, plot_bmask, [('in_ras', 'in_file'),
                                 ('brainmask', 'in_contours')]),
        (inputnode, plot_headmask, [('in_ras', 'in_file'),
                                    ('headmask', 'in_contours')]),
        (inputnode, plot_airmask, [('in_ras', 'in_file'),
                                   ('airmask', 'in_contours')]),
        (inputnode, plot_artmask, [('in_ras', 'in_file'),
                                   ('artmask', 'in_contours')]),
        (inputnode, plot_bgdist, [('noisefit', 'in_file')]),
        (inputnode, mplots, [('mni_report', "in%d" % (pages + 1))]),
        (plot_bmask, mplots, [('out_file', 'in%d' % (pages + 2))]),
        (plot_segm, mplots, [('out_file', 'in%d' % (pages + 3))]),
        (plot_artmask, mplots, [('out_file', 'in%d' % (pages + 4))]),
        (plot_headmask, mplots, [('out_file', 'in%d' % (pages + 5))]),
        (plot_airmask, mplots, [('out_file', 'in%d' % (pages + 6))]),
        (plot_bgdist, mplots, [('out_file', 'in%d' % (pages + 7))])
    ])
    return workflow
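
The verbose branch above relies on nipype's Merge utility gathering its numbered `in%d` inputs into a single ordered list; a small standalone illustration (file names are placeholders and the import assumes a standard nipype installation):

from nipype.interfaces import utility as niu

# Merge(3) exposes inputs in1..in3 and returns them, in order, as one list.
merge = niu.Merge(3)
merge.inputs.in1 = 'plot_anat_mosaic1_zoomed.svg'
merge.inputs.in2 = 'plot_anat_mosaic2_noise.svg'
merge.inputs.in3 = 'segmentation.svg'
print(merge.run().outputs.out)   # all three file names, in input order
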
Example #25
def init_anat_preproc_wf(skull_strip_ants,
                         skull_strip_template,
                         output_spaces,
                         template,
                         debug,
                         freesurfer,
                         omp_nthreads,
                         hires,
                         reportlets_dir,
                         output_dir,
                         name='anat_preproc_wf'):
    """T1w images preprocessing pipeline"""

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['t1w', 't2w', 'subjects_dir']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        't1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms', 't1_2_mni',
        't1_2_mni_forward_transform', 't1_2_mni_reverse_transform', 'mni_mask',
        'mni_seg', 'mni_tpms', 'subjects_dir', 'subject_id',
        'fs_2_t1_transform', 'surfaces'
    ]),
                         name='outputnode')

    def bidsinfo(in_file):
        from fmriprep.interfaces.bids import BIDS_NAME
        match = BIDS_NAME.search(in_file)
        params = match.groupdict() if match is not None else {}
        return tuple(
            map(params.get, [
                'subject_id', 'ses_id', 'task_id', 'acq_id', 'rec_id', 'run_id'
            ]))

    bids_info = pe.Node(niu.Function(function=bidsinfo,
                                     output_names=[
                                         'subject_id', 'ses_id', 'task_id',
                                         'acq_id', 'rec_id', 'run_id'
                                     ]),
                        name='bids_info',
                        run_without_submitting=True)

    summary = pe.Node(AnatomicalSummary(output_spaces=output_spaces,
                                        template=template),
                      name='summary')

    # 0. Reorient T1w image(s) to RAS and resample to common voxel space
    t1_conform = pe.Node(ConformSeries(), name='t1_conform')

    # 1. Align and merge if several T1w images are provided
    t1_merge = pe.Node(
        # StructuralReference is fs.RobustTemplate if > 1 volume, copying otherwise
        StructuralReference(
            auto_detect_sensitivity=True,
            initial_timepoint=1,
            fixed_timepoint=True,  # Align to first image
            intensity_scaling=True,  # 7-DOF (rigid + intensity)
            no_iteration=True,
            subsample_threshold=200,
        ),
        name='t1_merge')

    # 2. T1 Bias Field Correction
    # Bias field correction is handled in skull strip workflows.

    # 3. Skull-stripping
    #skullstrip_wf = init_skullstrip_afni_wf(name='skullstrip_afni_wf')
    skullstrip_wf = init_skullstrip_watershed_wf(
        name='skullstrip_watershed_wf')
    if skull_strip_ants:
        skullstrip_wf = init_skullstrip_ants_wf(
            name='skullstrip_ants_wf',
            debug=debug,
            omp_nthreads=omp_nthreads,
            skull_strip_template=skull_strip_template)

    # 4. Segmentation
    t1_seg = pe.Node(FASTRPT(generate_report=True,
                             segments=True,
                             no_bias=True,
                             probability_maps=True),
                     name='t1_seg')

    # 5. Spatial normalization (T1w to MNI registration)
    t1_2_mni = pe.Node(RobustMNINormalizationRPT(
        generate_report=True,
        num_threads=omp_nthreads,
        flavor='testing' if debug else 'precise',
    ),
                       name='t1_2_mni')
    # should not be necessary but does not hurt - make sure the multiproc
    # scheduler knows the resource limits
    t1_2_mni.interface.num_threads = omp_nthreads

    # Resample the brain mask and the tissue probability maps into mni space
    mni_mask = pe.Node(ants.ApplyTransforms(dimension=3,
                                            default_value=0,
                                            float=True,
                                            interpolation='NearestNeighbor'),
                       name='mni_mask')

    mni_seg = pe.Node(ants.ApplyTransforms(dimension=3,
                                           default_value=0,
                                           float=True,
                                           interpolation='NearestNeighbor'),
                      name='mni_seg')

    mni_tpms = pe.MapNode(ants.ApplyTransforms(dimension=3,
                                               default_value=0,
                                               float=True,
                                               interpolation='Linear'),
                          iterfield=['input_image'],
                          name='mni_tpms')

    workflow.connect([
        (inputnode, bids_info, [(('t1w', fix_multi_T1w_source_name), 'in_file')
                                ]),
        (inputnode, t1_conform, [('t1w', 't1w_list')]),
        (t1_conform, t1_merge, [('t1w_list', 'in_files'),
                                (('t1w_list', add_suffix, '_template'),
                                 'out_file')]),
        (t1_merge, skullstrip_wf, [('out_file', 'inputnode.in_file')]),
        (skullstrip_wf, t1_seg, [('outputnode.out_file', 'in_files')]),
        (skullstrip_wf, outputnode, [('outputnode.bias_corrected',
                                      't1_preproc'),
                                     ('outputnode.out_file', 't1_brain'),
                                     ('outputnode.out_mask', 't1_mask')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_seg'),
                              ('probability_maps', 't1_tpms')]),
        (inputnode, summary, [('t1w', 't1w')]),
    ])
    if 'template' in output_spaces:
        template_str = nid.TEMPLATE_MAP[template]
        ref_img = op.join(nid.get_dataset(template_str), '1mm_T1.nii.gz')

        t1_2_mni.inputs.template = template_str
        mni_mask.inputs.reference_image = ref_img
        mni_seg.inputs.reference_image = ref_img
        mni_tpms.inputs.reference_image = ref_img

        workflow.connect([
            (skullstrip_wf, t1_2_mni, [('outputnode.bias_corrected',
                                        'moving_image')]),
            (skullstrip_wf, t1_2_mni, [('outputnode.out_mask', 'moving_mask')
                                       ]),
            (skullstrip_wf, mni_mask, [('outputnode.out_mask', 'input_image')
                                       ]),
            (t1_2_mni, mni_mask, [('composite_transform', 'transforms')]),
            (t1_seg, mni_seg, [('tissue_class_map', 'input_image')]),
            (t1_2_mni, mni_seg, [('composite_transform', 'transforms')]),
            (t1_seg, mni_tpms, [('probability_maps', 'input_image')]),
            (t1_2_mni, mni_tpms, [('composite_transform', 'transforms')]),
            (t1_2_mni, outputnode,
             [('warped_image', 't1_2_mni'),
              ('composite_transform', 't1_2_mni_forward_transform'),
              ('inverse_composite_transform', 't1_2_mni_reverse_transform')]),
            (mni_mask, outputnode, [('output_image', 'mni_mask')]),
            (mni_seg, outputnode, [('output_image', 'mni_seg')]),
            (mni_tpms, outputnode, [('output_image', 'mni_tpms')]),
        ])

    # 6. FreeSurfer reconstruction
    if freesurfer:
        surface_recon_wf = init_surface_recon_wf(name='surface_recon_wf',
                                                 omp_nthreads=omp_nthreads,
                                                 hires=hires)

        workflow.connect([
            (inputnode, summary, [('subjects_dir', 'subjects_dir')]),
            (bids_info, summary, [('subject_id', 'subject_id')]),
            (inputnode, surface_recon_wf, [('t2w', 'inputnode.t2w'),
                                           ('subjects_dir',
                                            'inputnode.subjects_dir')]),
            (summary, surface_recon_wf, [('subject_id', 'inputnode.subject_id')
                                         ]),
            (t1_merge, surface_recon_wf, [('out_file', 'inputnode.t1w')]),
            (skullstrip_wf, surface_recon_wf,
             [('outputnode.out_file', 'inputnode.skullstripped_t1')]),
            (surface_recon_wf, outputnode,
             [('outputnode.subjects_dir', 'subjects_dir'),
              ('outputnode.subject_id', 'subject_id'),
              ('outputnode.fs_2_t1_transform', 'fs_2_t1_transform'),
              ('outputnode.surfaces', 'surfaces')]),
        ])

    anat_reports_wf = init_anat_reports_wf(reportlets_dir=reportlets_dir,
                                           skull_strip_ants=skull_strip_ants,
                                           output_spaces=output_spaces,
                                           template=template,
                                           freesurfer=freesurfer)
    workflow.connect([
        (inputnode, anat_reports_wf, [(('t1w', fix_multi_T1w_source_name),
                                       'inputnode.source_file')]),
        (t1_seg, anat_reports_wf, [('out_report', 'inputnode.t1_seg_report')]),
        (summary, anat_reports_wf, [('out_report', 'inputnode.summary_report')
                                    ]),
    ])

    if skull_strip_ants:
        workflow.connect([(skullstrip_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.t1_skull_strip_report')
        ])])
    if freesurfer:
        workflow.connect([(surface_recon_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.recon_report')
        ])])
    if 'template' in output_spaces:
        workflow.connect([
            (t1_2_mni, anat_reports_wf, [('out_report',
                                          'inputnode.t1_2_mni_report')]),
        ])

    anat_derivatives_wf = init_anat_derivatives_wf(output_dir=output_dir,
                                                   output_spaces=output_spaces,
                                                   template=template,
                                                   freesurfer=freesurfer)

    workflow.connect([
        (inputnode, anat_derivatives_wf, [(('t1w', fix_multi_T1w_source_name),
                                           'inputnode.source_file')]),
        (outputnode, anat_derivatives_wf, [
            ('t1_preproc', 'inputnode.t1_preproc'),
            ('t1_mask', 'inputnode.t1_mask'),
            ('t1_seg', 'inputnode.t1_seg'),
            ('t1_tpms', 'inputnode.t1_tpms'),
            ('t1_2_mni_forward_transform',
             'inputnode.t1_2_mni_forward_transform'),
            ('t1_2_mni', 'inputnode.t1_2_mni'),
            ('mni_mask', 'inputnode.mni_mask'),
            ('mni_seg', 'inputnode.mni_seg'),
            ('mni_tpms', 'inputnode.mni_tpms'),
            ('surfaces', 'inputnode.surfaces'),
        ]),
    ])

    return workflow
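
Like the other factory functions in these examples, init_anat_preproc_wf is parameterized at build time; a hedged construction sketch (every option value below is illustrative, not a recommendation):

# Hypothetical instantiation; adjust the options to the actual study.
anat_wf = init_anat_preproc_wf(
    skull_strip_ants=True,
    skull_strip_template='OASIS',
    output_spaces=['T1w', 'template'],
    template='MNI152NLin2009cAsym',
    debug=False,
    freesurfer=True,
    omp_nthreads=8,
    hires=True,
    reportlets_dir='reportlets',
    output_dir='out')
anat_wf.inputs.inputnode.t1w = ['sub-01_T1w.nii.gz']  # one or more T1w images
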
Example #26
def init_bold_confs_wf(mem_gb, use_aroma, ignore_aroma_err, metadata,
                       name="bold_confs_wf"):
    """
    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.

    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``CSF``, ``WhiteMatter``, ``GlobalSignal``)
    #. DVARS - standard, nonstandard, and voxel-wise standard variants
       (``stdDVARS``, ``non-stdDVARS``, ``vx-wisestdDVARS``)
    #. Framewise displacement, based on MCFLIRT motion parameters
       (``FramewiseDisplacement``)
    #. Temporal CompCor (``tCompCorXX``)
    #. Anatomical CompCor (``aCompCorXX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``CosineXX``)
    #. Non-steady-state volumes (``NonSteadyStateXX``)
    #. Estimated head-motion parameters, in mm and rad
       (``X``, ``Y``, ``Z``, ``RotX``, ``RotY``, ``RotZ``)
    #. ICA-AROMA-identified noise components, if enabled
       (``AROMAAggrCompXX``)

    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_bold_confs_wf
        wf = init_bold_confs_wf(
            mem_gb=1,
            use_aroma=True,
            ignore_aroma_err=True,
            metadata={})

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB - please note that this size
            should be calculated after resamplings that may extend
            the FoV
        use_aroma : bool
            Perform ICA-AROMA on MNI-resampled functional series
        ignore_aroma_err : bool
            Do not fail on ICA-AROMA errors
        metadata : dict
            BIDS metadata for BOLD file

    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        movpar_file
            SPM-formatted motion parameters file
        t1_mask
            Mask of the skull-stripped template image
        t1_tpms
            List of tissue probability maps in T1w space
        t1_bold_xform
            Affine matrix that maps the T1w space into alignment with
            the native BOLD space
        bold_mni
            BOLD image resampled in MNI space (only if ``use_aroma`` enabled)
        bold_mask_mni
            Brain mask corresponding to the BOLD image resampled in MNI space
            (only if ``use_aroma`` enabled)

    **Outputs**

        confounds_file
            TSV of all aggregated confounds
        confounds_list
            List of calculated confounds for reporting
        acompcor_report
            Reportlet visualizing white-matter/CSF mask used for aCompCor
        tcompcor_report
            Reportlet visualizing ROI identified in tCompCor
        ica_aroma_report
            Reportlet visualizing MELODIC ICs, with ICA-AROMA signal/noise labels
        aroma_noise_ics
            CSV of noise components identified by ICA-AROMA
        melodic_mix
            FSL MELODIC mixing matrix
        nonaggr_denoised_file
            BOLD series with non-aggressive ICA-AROMA denoising applied

    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`

    """

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bold', 'bold_mask', 'movpar_file', 't1_mask', 't1_tpms',
                't1_bold_xform', 'bold_mni', 'bold_mask_mni']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['confounds_file', 'confounds_list', 'rois_report', 'ica_aroma_report',
                'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file']),
        name='outputnode')

    # Get masks ready in T1w space
    acc_tpm = pe.Node(AddTPMs(indices=[0, 2]), name='tpms_add_csf_wm')  # acc stands for aCompCor
    csf_roi = pe.Node(TPM2ROI(erode_mm=0, mask_erode_mm=30), name='csf_roi')
    wm_roi = pe.Node(TPM2ROI(
        erode_prop=0.6, mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='wm_roi')
    acc_roi = pe.Node(TPM2ROI(
        erode_prop=0.6, mask_erode_prop=0.6**3),  # 0.6 = radius; 0.6^3 = volume
        name='acc_roi')

    # Map ROIs in T1w space into BOLD space
    csf_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                      name='csf_tfm', mem_gb=0.1)
    wm_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                     name='wm_tfm', mem_gb=0.1)
    acc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                      name='acc_tfm', mem_gb=0.1)
    tcc_tfm = pe.Node(ApplyTransforms(interpolation='NearestNeighbor', float=True),
                      name='tcc_tfm', mem_gb=0.1)

    # Ensure ROIs don't go off-limits (reduced FoV)
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_all=True, remove_zerovariance=True),
                    name="dvars", mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp", mem_gb=mem_gb)

    # a/t-CompCor
    non_steady_state = pe.Node(nac.NonSteadyStateDetector(), name='non_steady_state')
    tcompcor = pe.Node(nac.TCompCor(
        components_file='tcompcor.tsv', pre_filter='cosine', save_pre_filter=True,
        percentile_threshold=.05), name="tcompcor", mem_gb=mem_gb)

    acompcor = pe.Node(nac.ACompCor(
        components_file='acompcor.tsv', pre_filter='cosine', save_pre_filter=True),
        name="acompcor", mem_gb=mem_gb)

    # Set TR if present
    if 'RepetitionTime' in metadata:
        tcompcor.inputs.repetition_time = metadata['RepetitionTime']
        acompcor.inputs.repetition_time = metadata['RepetitionTime']

    # Global and segment regressors
    mrg_lbl = pe.Node(niu.Merge(3), name='merge_rois', run_without_submitting=True)
    signals = pe.Node(SignalExtraction(
        detrend=True, class_labels=["CSF", "WhiteMatter", "GlobalSignal"]),
        name="signals", mem_gb=mem_gb)

    # Arrange confounds
    add_header = pe.Node(AddTSVHeader(columns=["X", "Y", "Z", "RotX", "RotY", "RotZ"]),
                         name="add_header", mem_gb=0.01, run_without_submitting=True)
    concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01, run_without_submitting=True)

    # Generate reportlet
    mrg_compcor = pe.Node(niu.Merge(2), name='merge_compcor', run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(compress_report=True, colors=['r', 'b', 'magenta'],
                        generate_report=True), name='rois_plot')

    def _pick_csf(files):
        return files[0]

    def _pick_wm(files):
        return files[-1]

    workflow = pe.Workflow(name=name)
    workflow.connect([
        # Massage ROIs (in T1w space)
        (inputnode, acc_tpm, [('t1_tpms', 'in_files')]),
        (inputnode, csf_roi, [(('t1_tpms', _pick_csf), 'in_tpm'),
                              ('t1_mask', 'in_mask')]),
        (inputnode, wm_roi, [(('t1_tpms', _pick_wm), 'in_tpm'),
                             ('t1_mask', 'in_mask')]),
        (inputnode, acc_roi, [('t1_mask', 'in_mask')]),
        (acc_tpm, acc_roi, [('out_file', 'in_tpm')]),
        # Map ROIs to BOLD
        (inputnode, csf_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, csf_tfm, [('roi_file', 'input_image')]),
        (inputnode, wm_tfm, [('bold_mask', 'reference_image'),
                             ('t1_bold_xform', 'transforms')]),
        (wm_roi, wm_tfm, [('roi_file', 'input_image')]),
        (inputnode, acc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (acc_roi, acc_tfm, [('roi_file', 'input_image')]),
        (inputnode, tcc_tfm, [('bold_mask', 'reference_image'),
                              ('t1_bold_xform', 'transforms')]),
        (csf_roi, tcc_tfm, [('eroded_mask', 'input_image')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'),
                            ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),

        # Calculate nonsteady state
        (inputnode, non_steady_state, [('bold', 'in_file')]),

        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (non_steady_state, tcompcor, [('n_volumes_to_discard', 'ignore_initial_volumes')]),
        (tcc_tfm, tcc_msk, [('output_image', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),

        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (non_steady_state, acompcor, [('n_volumes_to_discard', 'ignore_initial_volumes')]),
        (acc_tfm, acc_msk, [('output_image', 'roi_file')]),
        (acc_msk, acompcor, [('out', 'mask_files')]),

        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (csf_tfm, csf_msk, [('output_image', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (wm_tfm, wm_msk, [('output_image', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),

        # Collate computed confounds together
        (inputnode, add_header, [('movpar_file', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (dvars, concat, [('out_all', 'dvars')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_header, concat, [('out_file', 'motion')]),

        # Set outputs
        (concat, outputnode, [('confounds_file', 'confounds_file'),
                              ('confounds_list', 'confounds_list')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, outputnode, [('out_report', 'rois_report')]),
    ])

    if use_aroma:
        # ICA-AROMA
        ica_aroma_wf = init_ica_aroma_wf(name='ica_aroma_wf',
                                         ignore_aroma_err=ignore_aroma_err)
        workflow.connect([
            (inputnode, ica_aroma_wf, [('bold_mni', 'inputnode.bold_mni'),
                                       ('bold_mask_mni', 'inputnode.bold_mask_mni'),
                                       ('movpar_file', 'inputnode.movpar_file')]),
            (ica_aroma_wf, concat,
                [('outputnode.aroma_confounds', 'aroma')]),
            (ica_aroma_wf, outputnode,
                [('outputnode.out_report', 'ica_aroma_report'),
                 ('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                 ('outputnode.melodic_mix', 'melodic_mix'),
                 ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')])
        ])
    return workflow
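
A hedged sketch of how the aggregated confounds file might be consumed downstream; the column names follow the headings listed in the docstring, while the file path and the chosen subset are placeholders:

import pandas as pd

# Load the TSV written by the workflow above (path is a placeholder).
confounds = pd.read_csv('confounds.tsv', sep='\t')

# Pick a typical nuisance-regressor subset for a GLM design matrix.
cols = ['X', 'Y', 'Z', 'RotX', 'RotY', 'RotZ',
        'CSF', 'WhiteMatter', 'GlobalSignal', 'FramewiseDisplacement']
design = confounds[cols].fillna(0).values  # the first FD value is typically n/a
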
Example #27
def init_gifti_surface_wf(name='gifti_surface_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(['subjects_dir', 'subject_id']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['surfaces']),
                         name='outputnode')

    get_surfaces = pe.Node(nio.FreeSurferSource(), name='get_surfaces')

    midthickness = pe.MapNode(MakeMidthickness(thickness=True,
                                               distance=0.5,
                                               out_name='midthickness'),
                              iterfield='in_file',
                              name='midthickness')

    save_midthickness = pe.Node(nio.DataSink(parameterization=False),
                                name='save_midthickness')

    surface_list = pe.Node(niu.Merge(4, ravel_inputs=True),
                           name='surface_list',
                           run_without_submitting=True)
    fs_2_gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                          iterfield='in_file',
                          name='fs_2_gii')

    def normalize_surfs(in_file):
        """ Re-center GIFTI coordinates to fit align to native T1 space

        For midthickness surfaces, add MidThickness metadata

        Coordinate update based on:
        https://github.com/Washington-University/workbench/blob/1b79e56/src/Algorithms/AlgorithmSurfaceApplyAffine.cxx#L73-L91
        and
        https://github.com/Washington-University/Pipelines/blob/ae69b9a/PostFreeSurfer/scripts/FreeSurfer2CaretConvertAndRegisterNonlinear.sh#L147
        """
        import os
        import numpy as np
        import nibabel as nib
        img = nib.load(in_file)
        pointset = img.get_arrays_from_intent('NIFTI_INTENT_POINTSET')[0]
        coords = pointset.data
        c_ras_keys = ('VolGeomC_R', 'VolGeomC_A', 'VolGeomC_S')
        ras = np.array([float(pointset.metadata[key]) for key in c_ras_keys])
        # Apply C_RAS translation to coordinates
        pointset.data = (coords + ras).astype(coords.dtype)

        secondary = nib.gifti.GiftiNVPairs('AnatomicalStructureSecondary',
                                           'MidThickness')
        geom_type = nib.gifti.GiftiNVPairs('GeometricType', 'Anatomical')
        has_ass = has_geo = False
        for nvpair in pointset.meta.data:
            # Remove C_RAS translation from metadata to avoid double-dipping in FreeSurfer
            if nvpair.name in c_ras_keys:
                nvpair.value = '0.000000'
            # Check for missing metadata
            elif nvpair.name == secondary.name:
                has_ass = True
            elif nvpair.name == geom_type.name:
                has_geo = True
        fname = os.path.basename(in_file)
        # Update metadata for MidThickness/graymid surfaces
        if 'midthickness' in fname.lower() or 'graymid' in fname.lower():
            if not has_ass:
                pointset.meta.data.insert(1, secondary)
            if not has_geo:
                pointset.meta.data.insert(2, geom_type)
        img.to_filename(fname)
        return os.path.abspath(fname)

    fix_surfs = pe.MapNode(niu.Function(function=normalize_surfs),
                           iterfield='in_file',
                           name='fix_surfs')

    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'), ('pial', 'in2'),
                                      ('inflated', 'in3')]),
        (save_midthickness, surface_list, [('out_file', 'in4')]),
        (surface_list, fs_2_gii, [('out', 'in_file')]),
        (fs_2_gii, fix_surfs, [('converted', 'in_file')]),
        (fix_surfs, outputnode, [('out', 'surfaces')]),
    ])

    return workflow
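
The C_RAS re-centering inside normalize_surfs boils down to a single translation of every vertex; a minimal numpy illustration of that step (coordinates and offset are made up):

import numpy as np

# Toy vertex coordinates plus a C_RAS offset as stored in the GIFTI metadata.
coords = np.array([[0.0, 0.0, 0.0],
                   [10.0, -5.0, 2.0]], dtype='float32')
c_ras = np.array([1.5, -2.0, 0.5])

# Same operation as in normalize_surfs: shift vertices into native T1 space.
shifted = (coords + c_ras).astype(coords.dtype)
print(shifted)
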
Example #28
def init_ica_aroma_wf(name='ica_aroma_wf', ignore_aroma_err=False):
    '''
    This workflow wraps `ICA-AROMA`_ to identify and remove motion-related
    independent components from a BOLD time series.

    The following steps are performed:

    #. Smooth data using SUSAN
    #. Run MELODIC outside of ICA-AROMA to generate the report
    #. Run ICA-AROMA
    #. Aggregate identified motion components (aggressive) to TSV
    #. Return classified_motion_ICs and melodic_mix for the user to complete
       non-aggressive denoising in T1w space

    Additionally, non-aggressive denoising is performed on the BOLD series
    resampled into MNI space.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_ica_aroma_wf
        wf = init_ica_aroma_wf()

    **Parameters**

        ignore_aroma_err : bool
            Do not fail on ICA-AROMA errors

    **Inputs**

        bold_mni
            BOLD series, resampled to template space
        movpar_file
            SPM-formatted motion parameters file
        bold_mask_mni
            BOLD series mask in template space

    **Outputs**

        aroma_confounds
            TSV of confounds identified as noise by ICA-AROMA
        aroma_noise_ics
            CSV of noise components identified by ICA-AROMA
        melodic_mix
            FSL MELODIC mixing matrix
        nonaggr_denoised_file
            BOLD series with non-aggressive ICA-AROMA denoising applied
        out_report
            Reportlet visualizing MELODIC ICs, with ICA-AROMA signal/noise labels

    .. _ICA-AROMA: https://github.com/rhr-pruim/ICA-AROMA
    '''
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_mni', 'movpar_file', 'bold_mask_mni']), name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['aroma_confounds', 'out_report',
                'aroma_noise_ics', 'melodic_mix',
                'nonaggr_denoised_file']), name='outputnode')

    calc_median_val = pe.Node(fsl.ImageStats(op_string='-k %s -p 50'), name='calc_median_val')
    calc_bold_mean = pe.Node(fsl.MeanImage(), name='calc_bold_mean')

    def getusans_func(image, thresh):
        return [tuple([image, thresh])]
    getusans = pe.Node(niu.Function(function=getusans_func, output_names=['usans']),
                       name='getusans', mem_gb=0.01)

    smooth = pe.Node(fsl.SUSAN(fwhm=6.0), name='smooth')

    # melodic node
    melodic = pe.Node(fsl.MELODIC(no_bet=True, no_mm=True), name="melodic")

    # ica_aroma node
    ica_aroma = pe.Node(ICA_AROMARPT(denoise_type='nonaggr', generate_report=True),
                        name='ica_aroma')

    # extract the confound ICs from the results
    ica_aroma_confound_extraction = pe.Node(ICAConfounds(ignore_aroma_err=ignore_aroma_err),
                                            name='ica_aroma_confound_extraction')

    def _getbtthresh(medianval):
        return 0.75 * medianval

    # connect the nodes
    workflow.connect([
        # Connect input nodes to complete smoothing
        (inputnode, calc_median_val, [('bold_mni', 'in_file'),
                                      ('bold_mask_mni', 'mask_file')]),
        (inputnode, calc_bold_mean, [('bold_mni', 'in_file')]),
        (calc_bold_mean, getusans, [('out_file', 'image')]),
        (calc_median_val, getusans, [('out_stat', 'thresh')]),
        (inputnode, smooth, [('bold_mni', 'in_file')]),
        (getusans, smooth, [('usans', 'usans')]),
        (calc_median_val, smooth, [(('out_stat', _getbtthresh), 'brightness_threshold')]),
        # connect smooth to melodic
        (smooth, melodic, [('smoothed_file', 'in_files')]),
        (inputnode, melodic, [('bold_mask_mni', 'mask')]),
        # connect nodes to ICA-AROMA
        (smooth, ica_aroma, [('smoothed_file', 'in_file')]),
        (inputnode, ica_aroma, [('bold_mask_mni', 'report_mask'),
                                ('movpar_file', 'motion_parameters')]),
        (melodic, ica_aroma, [('out_dir', 'melodic_dir')]),
        # generate tsvs from ICA-AROMA
        (ica_aroma, ica_aroma_confound_extraction, [('out_dir', 'in_directory')]),
        # output for processing and reporting
        (ica_aroma_confound_extraction, outputnode, [('aroma_confounds', 'aroma_confounds'),
                                                     ('aroma_noise_ics', 'aroma_noise_ics'),
                                                     ('melodic_mix', 'melodic_mix')]),
        # TODO change melodic report to reflect noise and non-noise components
        (ica_aroma, outputnode, [('out_report', 'out_report'),
                                 ('nonaggr_denoised_file', 'nonaggr_denoised_file')]),
    ])

    return workflow
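
The docstring notes that aroma_noise_ics and melodic_mix are returned so the user can finish non-aggressive denoising in another space; a hedged sketch of that step with FSL's fsl_regfilt through nipype (file names are placeholders, and this is not the wiring used by the workflow itself):

from nipype.interfaces import fsl

# Read the comma-separated, 1-based indices of the noise components.
with open('AROMAnoiseICs.csv') as f:
    noise_ics = [int(c) for c in f.read().strip().split(',')]

# Non-aggressive denoising: regress out only the noise components.
regfilt = fsl.FilterRegressor(
    in_file='bold_t1space.nii.gz',   # e.g. BOLD resampled into T1w space
    design_file='melodic_mix',       # MELODIC mixing matrix from the workflow
    filter_columns=noise_ics,
    out_file='bold_t1space_nonaggrDenoised.nii.gz')
result = regfilt.run()               # requires FSL to be installed
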
Example #29
def init_bold_surf_wf(mem_gb,
                      output_spaces,
                      medial_surface_nan,
                      name='bold_surf_wf'):
    """
    This workflow samples functional images to FreeSurfer surfaces

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.

    Outputs are in GIFTI format.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_surf_wf
        wf = init_bold_surf_wf(mem_gb=0.1,
                               output_spaces=['T1w', 'fsnative',
                                             'template', 'fsaverage5'],
                               medial_surface_nan=False)

    **Parameters**

        output_spaces : list
            List of output spaces functional images are to be resampled to
            Target spaces beginning with ``fs`` will be selected for resampling,
            such as ``fsaverage`` or related template spaces
            If the list contains ``fsnative``, images will be resampled to the
            individual subject's native surface
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files

    **Inputs**

        source_file
            Motion-corrected BOLD series in T1 space
        t1_preproc
            Bias-corrected structural template image
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    **Outputs**

        surfaces
            BOLD series, resampled to FreeSurfer surfaces

    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1_preproc', 'subject_id', 'subjects_dir',
        't1_2_fsnative_forward_transform'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['surfaces']),
                         name='outputnode')

    spaces = [space for space in output_spaces if space.startswith('fs')]

    def select_target(subject_id, space):
        """ Given a source subject ID and a target space, get the target subject ID """
        return subject_id if space == 'fsnative' else space

    targets = pe.MapNode(niu.Function(function=select_target),
                         iterfield=['space'],
                         name='targets',
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    targets.inputs.space = spaces

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.MapNode(niu.Rename(format_string='%(subject)s',
                                       keep_ext=True),
                            iterfield='subject',
                            name='rename_src',
                            run_without_submitting=True,
                            mem_gb=DEFAULT_MEMORY_MIN_GB)
    rename_src.inputs.subject = spaces

    resampling_xfm = pe.Node(fs.utils.LTAConvert(in_lta='identity.nofile',
                                                 out_lta=True),
                             name='resampling_xfm')
    set_xfm_source = pe.Node(ConcatenateLTA(out_type='RAS2RAS'),
                             name='set_xfm_source')

    sampler = pe.MapNode(fs.SampleToSurface(sampling_method='average',
                                            sampling_range=(0, 1, 0.2),
                                            sampling_units='frac',
                                            interp_method='trilinear',
                                            cortex_mask=True,
                                            override_reg_subj=True,
                                            out_type='gii'),
                         iterfield=['source_file', 'target_subject'],
                         iterables=('hemi', ['lh', 'rh']),
                         name='sampler',
                         mem_gb=mem_gb * 3)

    medial_nans = pe.MapNode(MedialNaNs(),
                             iterfield=['in_file', 'target_subject'],
                             name='medial_nans',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True),
                         name='merger',
                         joinsource='sampler',
                         joinfield=['in1'],
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)

    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield='in_file',
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, resampling_xfm, [('source_file', 'source_file'),
                                     ('t1_preproc', 'target_file')]),
        (inputnode, set_xfm_source, [('t1_2_fsnative_forward_transform',
                                      'in_lta2')]),
        (resampling_xfm, set_xfm_source, [('out_lta', 'in_lta1')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (set_xfm_source, sampler, [('out_file', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (merger, update_metadata, [('out', 'in_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
    ])

    if medial_surface_nan:
        workflow.connect([
            (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
            (sampler, medial_nans, [('out_file', 'in_file')]),
            (targets, medial_nans, [('out', 'target_subject')]),
            (medial_nans, merger, [('out', 'in1')]),
        ])
    else:
        workflow.connect(sampler, 'out_file', merger, 'in1')

    return workflow
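
The space selection at the top of this function is easy to see in isolation; a standalone restatement of the filtering and of select_target (subject and spaces are placeholders):

# Only FreeSurfer-style spaces are sampled to the surface.
output_spaces = ['T1w', 'fsnative', 'template', 'fsaverage5']
spaces = [space for space in output_spaces if space.startswith('fs')]

def select_target(subject_id, space):
    """Given a source subject ID and a target space, get the target subject ID."""
    return subject_id if space == 'fsnative' else space

print(spaces)                                  # ['fsnative', 'fsaverage5']
print(select_target('sub-01', spaces[0]))      # 'sub-01' (native surface)
print(select_target('sub-01', spaces[1]))      # 'fsaverage5' (template surface)
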
Example #30
def init_discover_wf(bold_file_size_gb,
                     use_aroma,
                     ignore_aroma_err,
                     name="discover_wf"):
    ''' All input fields are required.

    Calculates the global signal regressor and tCompCor
        from the motion-corrected fMRI ('inputnode.fmri_file').
    Calculates DVARS from the fMRI and an EPI brain mask ('inputnode.epi_mask').
    Calculates framewise displacement from MCFLIRT movement parameters ('inputnode.movpar_file').
    Calculates segment regressors and aCompCor
        from the fMRI and the white matter/gray matter/CSF probability maps ('inputnode.t1_tpms'),
        after applying the transform to the images. Transforms should be FSL-formatted.
    Calculates noise components identified by ICA-AROMA (if ``use_aroma=True``).
    Saves the confounds in a file ('outputnode.confounds_file').'''

    inputnode = pe.Node(utility.IdentityInterface(fields=[
        'fmri_file', 'movpar_file', 't1_tpms', 'epi_mask', 'epi_mni',
        'epi_mask_mni'
    ]),
                        name='inputnode')
    outputnode = pe.Node(utility.IdentityInterface(fields=[
        'confounds_file', 'acompcor_report', 'tcompcor_report',
        'ica_aroma_report', 'aroma_noise_ics', 'melodic_mix'
    ]),
                         name='outputnode')

    # ICA-AROMA
    if use_aroma:
        ica_aroma_wf = init_ica_aroma_wf(name='ica_aroma_wf',
                                         ignore_aroma_err=ignore_aroma_err)

    # DVARS
    dvars = pe.Node(confounds.ComputeDVARS(save_all=True,
                                           remove_zerovariance=True),
                    name="dvars")
    dvars.interface.estimated_memory_gb = bold_file_size_gb * 3
    # Frame displacement
    frame_displace = pe.Node(
        confounds.FramewiseDisplacement(parameter_source="SPM"),
        name="frame_displace")
    frame_displace.interface.estimated_memory_gb = bold_file_size_gb * 3
    # CompCor
    tcompcor = pe.Node(TCompCorRPT(components_file='tcompcor.tsv',
                                   generate_report=True,
                                   percentile_threshold=.05),
                       name="tcompcor")
    tcompcor.interface.estimated_memory_gb = bold_file_size_gb * 3

    CSF_roi = pe.Node(utility.Function(
        function=prepare_roi_from_probtissue,
        output_names=['roi_file', 'eroded_mask']),
                      name='CSF_roi')
    CSF_roi.inputs.erosion_mm = 0
    CSF_roi.inputs.epi_mask_erosion_mm = 30

    WM_roi = pe.Node(utility.Function(function=prepare_roi_from_probtissue,
                                      output_names=['roi_file',
                                                    'eroded_mask']),
                     name='WM_roi')
    WM_roi.inputs.erosion_mm = 6
    WM_roi.inputs.epi_mask_erosion_mm = 10

    def concat_rois_func(in_WM, in_mask, ref_header):
        import os
        import nibabel as nb
        from nilearn.image import resample_to_img

        WM_nii = nb.load(in_WM)
        mask_nii = nb.load(in_mask)

        # we have to do this explicitly because of potential differences in
        # qform_code between the two files that prevent SignalExtraction from
        # doing the concatenation
        concat_nii = nb.funcs.concat_images([
            resample_to_img(WM_nii, mask_nii, interpolation='nearest'),
            mask_nii
        ])
        concat_nii = nb.Nifti1Image(concat_nii.get_data(),
                                    nb.load(ref_header).affine,
                                    nb.load(ref_header).header)
        concat_nii.to_filename("concat.nii.gz")
        return os.path.abspath("concat.nii.gz")

    concat_rois = pe.Node(utility.Function(function=concat_rois_func),
                          name='concat_rois')

    # Global and segment regressors
    signals = pe.Node(SignalExtraction(
        detrend=True, class_labels=["WhiteMatter", "GlobalSignal"]),
                      name="signals")
    signals.interface.estimated_memory_gb = bold_file_size_gb * 3

    def combine_rois(in_CSF, in_WM, ref_header):
        import os
        import numpy as np
        import nibabel as nb

        CSF_nii = nb.load(in_CSF)
        CSF_data = CSF_nii.get_data()

        WM_nii = nb.load(in_WM)
        WM_data = WM_nii.get_data()

        combined = np.zeros_like(WM_data)

        combined[WM_data != 0] = 1
        combined[CSF_data != 0] = 1

        # we have to do this explicitly because potential differences in
        # qform_code between the two files would prevent aCompCor from working
        new_nii = nb.Nifti1Image(combined,
                                 nb.load(ref_header).affine,
                                 nb.load(ref_header).header)
        new_nii.to_filename("logical_or.nii.gz")
        return os.path.abspath("logical_or.nii.gz")

    combine_rois = pe.Node(utility.Function(function=combine_rois),
                           name='combine_rois')

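    # aCompCor: noise components extracted within the combined CSF+WM mask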
    acompcor = pe.Node(ACompCorRPT(components_file='acompcor.tsv',
                                   generate_report=True),
                       name="acompcor")
    acompcor.interface.estimated_memory_gb = bold_file_size_gb * 3

    # misc utilities
    concat = pe.Node(utility.Function(function=_gather_confounds),
                     name="concat")

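    # t1_tpms are assumed to be ordered [CSF, GM, WM] (FSL FAST convention)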
    def pick_csf(files):
        return files[0]

    def pick_wm(files):
        return files[2]

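    # The realignment parameters come as a headerless text file; rewrite them
    # as a TSV with named columns so they can be merged into the confounds file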
    def add_header_func(in_file):
        import numpy as np
        import pandas as pd
        import os
        from sys import version_info
        PY3 = version_info[0] > 2

        data = np.loadtxt(in_file)

        df = pd.DataFrame(data,
                          columns=["X", "Y", "Z", "RotX", "RotY", "RotZ"])
        df.to_csv("motion.tsv", sep="\t" if PY3 else '\t'.encode(), index=None)

        return os.path.abspath("motion.tsv")

    add_header = pe.Node(utility.Function(function=add_header_func),
                         name="add_header")

    workflow = pe.Workflow(name=name)
    workflow.connect([
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('fmri_file', 'in_file'),
                            ('epi_mask', 'in_mask')]),
        (inputnode, frame_displace, [('movpar_file', 'in_file')]),
        (inputnode, tcompcor, [('fmri_file', 'realigned_file')]),
        (inputnode, CSF_roi, [(('t1_tpms', pick_csf), 'in_file')]),
        (inputnode, CSF_roi, [('epi_mask', 'epi_mask')]),
        (CSF_roi, tcompcor, [('eroded_mask', 'mask_files')]),
        (inputnode, WM_roi, [(('t1_tpms', pick_wm), 'in_file')]),
        (inputnode, WM_roi, [('epi_mask', 'epi_mask')]),
        (CSF_roi, combine_rois, [('roi_file', 'in_CSF')]),
        (WM_roi, combine_rois, [('roi_file', 'in_WM')]),
        (inputnode, combine_rois, [('fmri_file', 'ref_header')]),

        # anatomical confound: aCompCor.
        (inputnode, acompcor, [('fmri_file', 'realigned_file')]),
        (combine_rois, acompcor, [('out', 'mask_files')]),
        (WM_roi, concat_rois, [('roi_file', 'in_WM')]),
        (inputnode, concat_rois, [('epi_mask', 'in_mask')]),
        (inputnode, concat_rois, [('fmri_file', 'ref_header')]),

        # anatomical confound: signal extraction
        (concat_rois, signals, [('out', 'label_files')]),
        (inputnode, signals, [('fmri_file', 'in_file')]),

        # connect the confound nodes to the concatenate node
        (signals, concat, [('out_file', 'signals')]),
        (dvars, concat, [('out_all', 'dvars')]),
        (frame_displace, concat, [('out_file', 'frame_displace')]),
        (tcompcor, concat, [('components_file', 'tcompcor')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (inputnode, add_header, [('movpar_file', 'in_file')]),
        (add_header, concat, [('out', 'motion')]),
        (concat, outputnode, [('out', 'confounds_file')]),
        (acompcor, outputnode, [('out_report', 'acompcor_report')]),
        (tcompcor, outputnode, [('out_report', 'tcompcor_report')]),
    ])
    if use_aroma:
        workflow.connect([
            (inputnode, ica_aroma_wf, [
                ('epi_mni', 'inputnode.epi_mni'),
                ('epi_mask_mni', 'inputnode.epi_mask_mni'),
                ('movpar_file', 'inputnode.movpar_file')]),
            (ica_aroma_wf, concat, [
                ('outputnode.aroma_confounds', 'aroma')]),
            (ica_aroma_wf, outputnode, [
                ('outputnode.out_report', 'ica_aroma_report'),
                ('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                ('outputnode.melodic_mix', 'melodic_mix')]),
        ])
    return workflow