Example #1
def init_filt_reg_wf(name='filt_reg_wf', regfilt=None):
    inputnode = pe.Node(IdentityInterface(
        fields=['bold', 'bold_mask', 'MELODICmix', 'AROMAnoiseICs']),
                        name='inputnode')

    outputnode = pe.Node(IdentityInterface(fields=['bold_regfilt']),
                         name='outputnode')

    workflow = pe.Workflow(name=name)
    if regfilt:

        def csv_to_list(csv_f):
            import csv
            with open(csv_f) as f:
                reader = csv.reader(f, delimiter=str(','))
                mlist = list(reader)[0]
            return [int(x) for x in mlist]

        filter_regressor = pe.Node(FilterRegressor(), name='filter_regressor')
        workflow.connect([
            (inputnode, filter_regressor, [('bold', 'in_file'),
                                           ('bold_mask', 'mask'),
                                           ('MELODICmix', 'design_file'),
                                           (('AROMAnoiseICs', csv_to_list),
                                            'filter_columns')]),
            (filter_regressor, outputnode, [('out_file', 'bold_regfilt')]),
        ])
    else:
        workflow.connect([
            (inputnode, outputnode, [('bold', 'bold_regfilt')]),
        ])

    return workflow
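The csv_to_list helper above turns the single-row AROMAnoiseICs CSV produced by ICA-AROMA into the list of integer component indices that FilterRegressor (FSL's fsl_regfilt) expects as filter_columns. A minimal standalone sketch of that conversion (the file name and its contents are hypothetical):

import csv

def csv_to_list(csv_f):
    with open(csv_f) as f:
        reader = csv.reader(f, delimiter=',')
        mlist = list(reader)[0]          # the file holds a single row of component indices
    return [int(x) for x in mlist]

with open('AROMAnoiseICs.csv', 'w') as f:   # hypothetical noise-component file
    f.write('3,7,12,25')

print(csv_to_list('AROMAnoiseICs.csv'))     # -> [3, 7, 12, 25]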
Example #2
def init_skullstrip_watershed_wf(debug, name='skullstrip_watershed_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_file', 'source_file']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bias_corrected', 'out_file', 'out_mask']),
                         name='outputnode')

    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                                    copy_header=True),
                         name='n4_correct')

    t1_skull_strip = pe.Node(fs.WatershedSkullStrip(), name='t1_skull_strip')

    create_mask = pe.Node(fs.Binarize(min=0.0, dilate=0, out_type='.nii.gz'),
                          name='create_mask')

    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    workflow.connect([
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (n4_correct, t1_skull_strip, [('bias_corrected', 'in_file')]),
        (n4_correct, outputnode, [('bias_corrected', 'bias_corrected')]),
        (t1_skull_strip, create_mask, [('out_file', 'in_file')]),
        (create_mask, apply_mask, [('binary_file', 'mask_file')]),
        (inputnode, apply_mask, [('in_file', 'in_file')]),
        (create_mask, outputnode, [('binary_file', 'out_mask')]),
        (apply_mask, outputnode, [('out_file', 'out_file')])
    ])

    return workflow
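Like the other factories in this collection, init_skullstrip_watershed_wf returns a self-contained nipype workflow that is wired into a larger pipeline through its inputnode and outputnode fields. A hedged sketch of embedding it in a parent workflow (the parent workflow and source node are hypothetical, and the example's own module-level imports such as ants, fs and fsl must already be in scope):

from nipype.pipeline import engine as pe
from nipype.interfaces import utility as niu

parent = pe.Workflow(name='anat_preproc_sketch')   # hypothetical parent workflow
t1w_source = pe.Node(niu.IdentityInterface(fields=['t1w']), name='t1w_source')
skullstrip_wf = init_skullstrip_watershed_wf(debug=False)

parent.connect([
    (t1w_source, skullstrip_wf, [('t1w', 'inputnode.in_file')]),
])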
Example #3
def init_anat_reports_wf(reportlets_dir,
                         skull_strip_ants,
                         output_spaces,
                         template,
                         freesurfer,
                         name='anat_reports_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 'summary_report', 't1_seg_report', 't1_2_mni_report',
        't1_skull_strip_report', 'recon_report'
    ]),
                        name='inputnode')

    ds_summary_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='summary'),
                                name='ds_summary_report',
                                run_without_submitting=True)

    ds_t1_seg_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='t1_seg'),
                               name='ds_t1_seg_report',
                               run_without_submitting=True)

    ds_t1_2_mni_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='t1_2_mni'),
                                 name='ds_t1_2_mni_report',
                                 run_without_submitting=True)

    ds_t1_skull_strip_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='t1_skull_strip'),
                                       name='ds_t1_skull_strip_report',
                                       run_without_submitting=True)

    ds_recon_report = pe.Node(DerivativesDataSink(
        base_directory=reportlets_dir, suffix='reconall'),
                              name='ds_recon_report',
                              run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_summary_report, [('source_file', 'source_file'),
                                        ('summary_report', 'in_file')]),
        (inputnode, ds_t1_seg_report, [('source_file', 'source_file'),
                                       ('t1_seg_report', 'in_file')]),
    ])

    if skull_strip_ants:
        workflow.connect([(inputnode, ds_t1_skull_strip_report,
                           [('source_file', 'source_file'),
                            ('t1_skull_strip_report', 'in_file')])])
    if freesurfer:
        workflow.connect([(inputnode, ds_recon_report,
                           [('source_file', 'source_file'),
                            ('recon_report', 'in_file')])])
    if 'template' in output_spaces:
        workflow.connect([(inputnode, ds_t1_2_mni_report,
                           [('source_file', 'source_file'),
                            ('t1_2_mni_report', 'in_file')])])

    return workflow
Example #4
def avgb0_wf(name='AvgB0Workflow'):
    """
    Implements workflow to calculate the average of dwi b0 images.

    .. workflow::

        from mriqc.workflows.diffusion import avgb0_wf
        wf = avgb0_wf()

    """

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_files']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                            name='outputnode')

    merge = pe.Node(fsl.Merge(dimension='t'), name='Merge')

    avg = pe.Node(fsl.maths.MeanImage(), name='AvgImage')

    workflow.connect([
        (inputnode, merge, [('in_files', 'in_files')]),
        (merge, avg, [('merged_file', 'in_file')]),
        (avg, outputnode, [('out_file', 'out_file')])
    ])
    return workflow
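A hedged usage sketch for this workflow: set the list of b0 volumes on the inputnode and run it (FSL must be installed; the paths below are hypothetical):

wf = avgb0_wf()
wf.base_dir = '/tmp/work'                                            # hypothetical working directory
wf.inputs.inputnode.in_files = ['b0_run1.nii.gz', 'b0_run2.nii.gz']  # hypothetical b0 images
wf.run()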
Example #5
def init_fsl_bbr_wf(bold2t1w_dof, report, name='fsl_bbr_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            'fs_2_t1_transform',
            'subjects_dir',
            'subject_id',  # BBRegister
            't1_seg',
            't1_brain'
        ]),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['out_matrix_file', 'out_reg_file', 'out_report', 'final_cost']),
                         name='outputnode')

    wm_mask = pe.Node(niu.Function(function=extract_wm), name='wm_mask')
    _FLIRT = FLIRTRPT if report else fsl.FLIRT
    flt_bbr_init = pe.Node(fsl.FLIRT(dof=6), name='flt_bbr_init')
    flt_bbr = pe.Node(_FLIRT(cost_func='bbr', dof=bold2t1w_dof, save_log=True),
                      name='flt_bbr')
    flt_bbr.inputs.schedule = op.join(os.getenv('FSLDIR'),
                                      'etc/flirtsch/bbr.sch')

    def get_final_cost(in_file):
        from niworkflows.nipype import logging
        with open(in_file, 'r') as fobj:
            for line in fobj:
                if line.startswith('>> print U:1'):
                    costs = next(fobj).split()
                    return float(costs[0])
        logger = logging.getLogger('interface')
        logger.error('No cost report found in log file. Please report this '
                     'issue, with contents of {}'.format(in_file))

    get_cost = pe.Node(niu.Function(function=get_final_cost),
                       name='get_cost',
                       run_without_submitting=True)

    workflow.connect([
        (inputnode, wm_mask, [('t1_seg', 'in_seg')]),
        (inputnode, flt_bbr_init, [('in_file', 'in_file'),
                                   ('t1_brain', 'reference')]),
        (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (inputnode, flt_bbr, [('in_file', 'in_file'),
                              ('t1_brain', 'reference')]),
        (wm_mask, flt_bbr, [('out', 'wm_seg')]),
        (flt_bbr, outputnode, [('out_matrix_file', 'out_matrix_file')]),
        (flt_bbr, get_cost, [('out_log', 'in_file')]),
        (get_cost, outputnode, [('out', 'final_cost')]),
    ])

    if report:
        flt_bbr.inputs.generate_report = True
        workflow.connect([(flt_bbr, outputnode, [('out_report', 'out_report')])
                          ])

    return workflow
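get_final_cost scrapes the final BBR cost out of the FLIRT log: it looks for the line beginning with '>> print U:1' and returns the first number on the following line. A standalone illustration with a synthetic two-line log excerpt (the format is inferred from the parsing logic itself, not taken from FSL documentation):

def get_final_cost(in_file):
    with open(in_file) as fobj:
        for line in fobj:
            if line.startswith('>> print U:1'):
                return float(next(fobj).split()[0])

with open('flirt_bbr.log', 'w') as f:               # synthetic log excerpt
    f.write('>> print U:1\n0.4123 0.0000 0.0000\n')

print(get_final_cost('flirt_bbr.log'))              # -> 0.4123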
Example #6
def headmsk_wf(name='HeadMaskWorkflow', use_bet=True):
    """
    Computes a head mask as in [Mortamet2009]_.

    .. workflow::

        from mriqc.workflows.anatomical import headmsk_wf
        wf = headmsk_wf()

    """

    has_dipy = False
    try:
        from dipy.denoise import nlmeans
        has_dipy = True
    except ImportError:
        pass

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'in_segm']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']), name='outputnode')

    if use_bet or not has_dipy:
        # Alternative for when dipy is not installed
        bet = pe.Node(fsl.BET(surfaces=True), name='fsl_bet')
        workflow.connect([
            (inputnode, bet, [('in_file', 'in_file')]),
            (bet, outputnode, [('outskin_mask_file', 'out_file')])
        ])

    else:
        from niworkflows.nipype.interfaces.dipy import Denoise
        enhance = pe.Node(niu.Function(
            input_names=['in_file'], output_names=['out_file'], function=_enhance), name='Enhance')
        estsnr = pe.Node(niu.Function(
            input_names=['in_file', 'seg_file'], output_names=['out_snr'],
            function=_estimate_snr), name='EstimateSNR')
        denoise = pe.Node(Denoise(), name='Denoise')
        gradient = pe.Node(niu.Function(
            input_names=['in_file', 'snr'], output_names=['out_file'], function=image_gradient), name='Grad')
        thresh = pe.Node(niu.Function(
            input_names=['in_file', 'in_segm'], output_names=['out_file'], function=gradient_threshold),
                         name='GradientThreshold')

        workflow.connect([
            (inputnode, estsnr, [('in_file', 'in_file'),
                                 ('in_segm', 'seg_file')]),
            (estsnr, denoise, [('out_snr', 'snr')]),
            (inputnode, enhance, [('in_file', 'in_file')]),
            (enhance, denoise, [('out_file', 'in_file')]),
            (estsnr, gradient, [('out_snr', 'snr')]),
            (denoise, gradient, [('out_file', 'in_file')]),
            (inputnode, thresh, [('in_segm', 'in_segm')]),
            (gradient, thresh, [('out_file', 'in_file')]),
            (thresh, outputnode, [('out_file', 'out_file')])
        ])

    return workflow
Example #7
def init_fmriprep_wf(subject_list, task_id, run_uuid,
                     ignore, debug, anat_only, omp_nthreads,
                     skull_strip_ants, skull_strip_template, reportlets_dir, output_dir, bids_dir,
                     freesurfer, output_spaces, template, hires,
                     bold2t1w_dof, fmap_bspline, fmap_demean, use_syn, force_syn,
                     use_aroma, ignore_aroma_err, output_grid_ref,):
    fmriprep_wf = pe.Workflow(name='fmriprep_wf')

    if freesurfer:
        fsdir = pe.Node(
            BIDSFreeSurferDir(
                derivatives=output_dir,
                freesurfer_home=os.getenv('FREESURFER_HOME'),
                spaces=output_spaces),
            name='fsdir')

    for subject_id in subject_list:
        single_subject_wf = init_single_subject_wf(subject_id=subject_id,
                                                   task_id=task_id,
                                                   name="single_subject_" + subject_id + "_wf",
                                                   ignore=ignore,
                                                   debug=debug,
                                                   anat_only=anat_only,
                                                   omp_nthreads=omp_nthreads,
                                                   skull_strip_ants=skull_strip_ants,
                                                   skull_strip_template=skull_strip_template,
                                                   reportlets_dir=reportlets_dir,
                                                   output_dir=output_dir,
                                                   bids_dir=bids_dir,
                                                   freesurfer=freesurfer,
                                                   output_spaces=output_spaces,
                                                   template=template,
                                                   hires=hires,
                                                   bold2t1w_dof=bold2t1w_dof,
                                                   fmap_bspline=fmap_bspline,
                                                   fmap_demean=fmap_demean,
                                                   use_syn=use_syn,
                                                   force_syn=force_syn,
                                                   output_grid_ref=output_grid_ref,
                                                   use_aroma=use_aroma,
                                                   ignore_aroma_err=ignore_aroma_err)

        single_subject_wf.config['execution']['crashdump_dir'] = (
            os.path.join(output_dir, "fmriprep", "sub-" + subject_id, 'log', run_uuid)
        )
        for node in single_subject_wf._get_all_nodes():
            node.config = deepcopy(single_subject_wf.config)
        if freesurfer:
            fmriprep_wf.connect(fsdir, 'subjects_dir',
                                single_subject_wf, 'inputnode.subjects_dir')
        else:
            fmriprep_wf.add_nodes([single_subject_wf])

    return fmriprep_wf
Example #8
def spatial_normalization(settings,
                          mod='T1w',
                          name='SpatialNormalization',
                          resolution=2.0):
    """
    A simple workflow to perform spatial normalization

    """
    from niworkflows.data import getters as niwgetters

    # Have some settings handy
    tpl_id = settings.get('template_id', 'mni_icbm152_nlin_asym_09c')
    mni_template = getattr(niwgetters, 'get_{}'.format(tpl_id))()

    # Define workflow interface
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['moving_image', 'moving_mask']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['inverse_composite_transform', 'out_report']),
                         name='outputnode')

    # Spatial normalization
    norm = pe.Node(
        RobustMNINormalization(
            flavor='testing' if settings.get('testing', False) else 'fast',
            num_threads=settings.get('ants_nthreads'),
            float=settings.get('ants_float', False),
            template=tpl_id,
            template_resolution=2,
            reference=mod[:2],
            generate_report=True,
        ),
        name='SpatialNormalization',
        # Request all MultiProc processes when ants_nthreads > n_procs
        num_threads=min(
            settings.get('ants_nthreads', DEFAULTS['ants_nthreads']),
            settings.get('n_procs', 1)),
        estimated_memory_gb=3)
    norm.inputs.reference_mask = op.join(
        mni_template, '%dmm_brainmask.nii.gz' % int(resolution))

    workflow.connect([
        (inputnode, norm, [('moving_image', 'moving_image'),
                           ('moving_mask', 'moving_mask')]),
        (norm, outputnode, [('inverse_composite_transform',
                             'inverse_composite_transform'),
                            ('out_report', 'out_report')]),
    ])
    return workflow
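The settings dictionary is only read through settings.get() calls, so a minimal configuration needs no more than the keys used above; anything else falls back to defaults (DEFAULTS is the module-level dictionary referenced in the code). A hedged sketch with illustrative values; actually running the workflow additionally requires ANTs and the niworkflows template data:

settings = {
    'template_id': 'mni_icbm152_nlin_asym_09c',   # default template, spelled out for clarity
    'testing': False,                             # True switches the ANTs flavor to a quick run
    'ants_nthreads': 4,
    'ants_float': True,
    'n_procs': 4,
}
norm_wf = spatial_normalization(settings, mod='T1w', resolution=2.0)
norm_wf.inputs.inputnode.moving_image = 'sub-01_T1w.nii.gz'        # hypothetical
norm_wf.inputs.inputnode.moving_mask = 'sub-01_brainmask.nii.gz'   # hypothetical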
Example #9
def init_anat_reports_wf(reportlets_dir, output_spaces,
                         template, freesurfer, name='anat_reports_wf'):
    """
    Set up a battery of datasinks to store reports in the right location
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['source_file', 't1_conform_report', 'seg_report',
                    't1_2_mni_report', 'recon_report']),
        name='inputnode')

    ds_t1_conform_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir, suffix='conform'),
        name='ds_t1_conform_report', run_without_submitting=True)

    ds_t1_2_mni_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir, suffix='t1_2_mni'),
        name='ds_t1_2_mni_report', run_without_submitting=True)

    ds_t1_seg_mask_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir, suffix='seg_brainmask'),
        name='ds_t1_seg_mask_report', run_without_submitting=True)

    ds_recon_report = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir, suffix='reconall'),
        name='ds_recon_report', run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_t1_conform_report, [('source_file', 'source_file'),
                                           ('t1_conform_report', 'in_file')]),
        (inputnode, ds_t1_seg_mask_report, [('source_file', 'source_file'),
                                            ('seg_report', 'in_file')]),
    ])

    if freesurfer:
        workflow.connect([
            (inputnode, ds_recon_report, [('source_file', 'source_file'),
                                          ('recon_report', 'in_file')])
        ])
    if 'template' in output_spaces:
        workflow.connect([
            (inputnode, ds_t1_2_mni_report, [('source_file', 'source_file'),
                                             ('t1_2_mni_report', 'in_file')])
        ])

    return workflow
Example #10
def init_enhance_and_skullstrip_bold_wf(name='enhance_and_skullstrip_bold_wf',
                                        omp_nthreads=1):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'mask_file', 'skull_stripped_file', 'bias_corrected_file', 'out_report'
    ]),
                         name='outputnode')
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                                    copy_header=True,
                                                    num_threads=omp_nthreads),
                         name='n4_correct',
                         n_procs=omp_nthreads)
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    unifize = pe.Node(afni.Unifize(t2=True,
                                   outputtype='NIFTI_GZ',
                                   args='-clfrac 0.4',
                                   out_file="uni.nii.gz"),
                      name='unifize')
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    workflow.connect([
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, unifize, [('out_file', 'in_file')]),
        (unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, combine_masks, [('out_file', 'operand_file')
                                                 ]),
        (unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (n4_correct, mask_reportlet, [('output_image', 'background_file')]),
        (combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (mask_reportlet, outputnode, [('out_report', 'out_report')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])

    return workflow
Example #11
def airmsk_wf(name='AirMaskWorkflow'):
    """
    Implements the Step 1 of [Mortamet2009]_.

    .. workflow::

        from mriqc.workflows.anatomical import airmsk_wf
        wf = airmsk_wf()

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_mask', 'head_mask', 'inverse_composite_transform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'artifact_msk', 'rot_mask']),
        name='outputnode')

    rotmsk = pe.Node(RotationMask(), name='RotationMask')

    invt = pe.Node(ants.ApplyTransforms(dimension=3,
                                        default_value=0,
                                        interpolation='Linear',
                                        float=True),
                   name='invert_xfm')
    invt.inputs.input_image = op.join(get_mni_icbm152_nlin_asym_09c(),
                                      '1mm_headmask.nii.gz')

    binarize = pe.Node(niu.Function(function=_binarize), name='Binarize')

    qi1 = pe.Node(ArtifactMask(), name='ArtifactMask')

    workflow.connect([(inputnode, rotmsk, [('in_file', 'in_file')]),
                      (inputnode, qi1, [('in_file', 'in_file'),
                                        ('head_mask', 'head_mask')]),
                      (rotmsk, qi1, [('out_file', 'rot_mask')]),
                      (inputnode, invt, [('in_mask', 'reference_image'),
                                         ('inverse_composite_transform',
                                          'transforms')]),
                      (invt, binarize, [('output_image', 'in_file')]),
                      (binarize, qi1, [('out', 'nasion_post_mask')]),
                      (qi1, outputnode, [('out_air_msk', 'out_file'),
                                         ('out_art_msk', 'artifact_msk')]),
                      (rotmsk, outputnode, [('out_file', 'rot_mask')])])
    return workflow
Example #12
def init_gifti_surface_wf(name='gifti_surface_wf'):
    """
    Extract surfaces from FreeSurfer derivatives folder and
    re-center GIFTI coordinates to align to native T1 space

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(['subjects_dir', 'subject_id']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['surfaces']), name='outputnode')

    get_surfaces = pe.Node(nio.FreeSurferSource(), name='get_surfaces')

    midthickness = pe.MapNode(
        MakeMidthickness(thickness=True, distance=0.5, out_name='midthickness'),
        iterfield='in_file',
        name='midthickness')

    save_midthickness = pe.Node(nio.DataSink(parameterization=False),
                                name='save_midthickness')

    surface_list = pe.Node(niu.Merge(4, ravel_inputs=True),
                           name='surface_list', run_without_submitting=True)
    fs_2_gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                          iterfield='in_file', name='fs_2_gii')
    fix_surfs = pe.MapNode(NormalizeSurf(), iterfield='in_file', name='fix_surfs')

    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'),
                                      ('pial', 'in2'),
                                      ('inflated', 'in3')]),
        (save_midthickness, surface_list, [('out_file', 'in4')]),
        (surface_list, fs_2_gii, [('out', 'in_file')]),
        (fs_2_gii, fix_surfs, [('converted', 'in_file')]),
        (fix_surfs, outputnode, [('out_file', 'surfaces')]),
    ])
    return workflow
Example #13
def init_smooth_wf(name='smooth_wf', smooth=None):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(IdentityInterface(fields=['bold', 'bold_mask']),
                        name='inputnode')

    outputnode = pe.Node(IdentityInterface(fields=['bold_smooth']),
                         name='outputnode')

    if smooth:
        calc_median_val = pe.Node(ImageStats(op_string='-k %s -p 50'),
                                  name='calc_median_val')
        calc_bold_mean = pe.Node(MeanImage(), name='calc_bold_mean')

        def getusans_func(image, thresh):
            return [tuple([image, thresh])]

        def _getbtthresh(medianval):
            return 0.75 * medianval

        getusans = pe.Node(Function(function=getusans_func,
                                    output_names=['usans']),
                           name='getusans',
                           mem_gb=0.01)

        smooth = pe.Node(SUSAN(fwhm=smooth), name='smooth')

        workflow.connect([
            (inputnode, calc_median_val, [('bold', 'in_file'),
                                          ('bold_mask', 'mask_file')]),
            (inputnode, calc_bold_mean, [('bold', 'in_file')]),
            (calc_bold_mean, getusans, [('out_file', 'image')]),
            (calc_median_val, getusans, [('out_stat', 'thresh')]),
            (inputnode, smooth, [('bold', 'in_file')]),
            (getusans, smooth, [('usans', 'usans')]),
            (calc_median_val, smooth, [(('out_stat', _getbtthresh),
                                        'brightness_threshold')]),
            (smooth, outputnode, [('smoothed_file', 'bold_smooth')]),
        ])
    else:
        workflow.connect([
            (inputnode, outputnode, [('bold', 'bold_smooth')]),
        ])

    return workflow
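The two small helpers encode how FSL SUSAN is parameterized here: the brightness threshold is 75% of the median within-mask intensity, and each USAN is the pair (mean BOLD image, median intensity). A standalone illustration with a made-up median value:

def getusans_func(image, thresh):
    return [tuple([image, thresh])]

def _getbtthresh(medianval):
    return 0.75 * medianval

median_val = 800.0                                     # hypothetical within-mask median intensity
print(_getbtthresh(median_val))                        # -> 600.0, used as SUSAN brightness_threshold
print(getusans_func('mean_bold.nii.gz', median_val))   # -> [('mean_bold.nii.gz', 800.0)]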
Example #14
def hmc_mcflirt(settings, name='fMRI_HMC_mcflirt'):
    """
    An :abbr:`HMC (head motion correction)` for functional scans
    using FSL MCFLIRT

    .. workflow::

      from mriqc.workflows.functional import hmc_mcflirt
      wf = hmc_mcflirt({'biggest_file_size_gb': 1})

    """

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_fd']),
                         name='outputnode')

    gen_ref = pe.Node(nwr.EstimateReferenceImage(mc_method="AFNI"),
                      name="gen_ref")

    mcflirt = pe.Node(fsl.MCFLIRT(save_plots=True, interpolation='sinc'),
                      name='MCFLIRT',
                      mem_gb=settings['biggest_file_size_gb'] * 2.5)

    fdnode = pe.Node(nac.FramewiseDisplacement(normalize=False,
                                               parameter_source="FSL"),
                     name='ComputeFD')

    workflow.connect([
        (inputnode, gen_ref, [('in_file', 'in_file')]),
        (gen_ref, mcflirt, [('ref_image', 'ref_file')]),
        (inputnode, mcflirt, [('in_file', 'in_file')]),
        (inputnode, fdnode, [('fd_radius', 'radius')]),
        (mcflirt, fdnode, [('par_file', 'in_file')]),
        (mcflirt, outputnode, [('out_file', 'out_file')]),
        (fdnode, outputnode, [('out_file', 'out_fd')]),
    ])

    return workflow
Example #15
def init_skullstrip_ants_wf(debug, omp_nthreads, name='skullstrip_ants_wf'):
    from niworkflows.data import get_ants_oasis_template_ras

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_file', 'source_file']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bias_corrected', 'out_file', 'out_mask', 'out_report']),
                         name='outputnode')

    t1_skull_strip = pe.Node(BrainExtractionRPT(dimension=3,
                                                use_floatingpoint_precision=1,
                                                debug=debug,
                                                generate_report=True,
                                                num_threads=omp_nthreads,
                                                keep_temporary_files=1),
                             name='t1_skull_strip')

    # should not be necessary but does not hurt - make sure the multiproc
    # scheduler knows the resource limits
    t1_skull_strip.interface.num_threads = omp_nthreads

    t1_skull_strip.inputs.brain_template = op.join(
        get_ants_oasis_template_ras(), 'T_template0.nii.gz')
    t1_skull_strip.inputs.brain_probability_mask = op.join(
        get_ants_oasis_template_ras(),
        'T_template0_BrainCerebellumProbabilityMask.nii.gz')
    t1_skull_strip.inputs.extraction_registration_mask = op.join(
        get_ants_oasis_template_ras(),
        'T_template0_BrainCerebellumRegistrationMask.nii.gz')

    workflow.connect([
        (inputnode, t1_skull_strip, [('in_file', 'anatomical_image')]),
        (t1_skull_strip, outputnode, [('BrainExtractionMask', 'out_mask'),
                                      ('BrainExtractionBrain', 'out_file'),
                                      ('N4Corrected0', 'bias_corrected'),
                                      ('out_report', 'out_report')])
    ])

    return workflow
Example #16
def fmri_bmsk_workflow(name='fMRIBrainMask', use_bet=False):
    """
    Computes a brain mask for the input :abbr:`fMRI (functional MRI)`
    dataset

    .. workflow::

      from mriqc.workflows.functional import fmri_bmsk_workflow
      wf = fmri_bmsk_workflow()


    """

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                         name='outputnode')

    if not use_bet:
        afni_msk = pe.Node(afni.Automask(
            outputtype='NIFTI_GZ'), name='afni_msk')

        # Connect brain mask extraction
        workflow.connect([
            (inputnode, afni_msk, [('in_file', 'in_file')]),
            (afni_msk, outputnode, [('out_file', 'out_file')])
        ])

    else:
        bet_msk = pe.Node(fsl.BET(mask=True, functional=True), name='bet_msk')
        erode = pe.Node(fsl.ErodeImage(), name='erode')

        # Connect brain mask extraction
        workflow.connect([
            (inputnode, bet_msk, [('in_file', 'in_file')]),
            (bet_msk, erode, [('mask_file', 'in_file')]),
            (erode, outputnode, [('out_file', 'out_file')])
        ])

    return workflow
Example #17
def init_derivatives_wf(result_dir, name='nuisance_regression_wf'):
    """
    Set up a battery of datasinks to store derivatives in the right location
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        IdentityInterface(fields=['source_file', 'bold_clean']),
        name='inputnode')

    ds_bold_clean = pe.Node(DerivativesDataSink(
        base_directory=os.path.dirname(result_dir),
        suffix='clean',
        out_path_base=os.path.basename(result_dir)),
                            name='ds_bold_clean')

    workflow.connect([(inputnode, ds_bold_clean, [('source_file',
                                                   'source_file'),
                                                  ('bold_clean', 'in_file')])])

    return workflow
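Note how result_dir is split: its dirname becomes the sink's base_directory and its basename becomes out_path_base, so derivatives end up inside result_dir itself. A small standalone illustration (the path is hypothetical):

import os

result_dir = '/data/derivatives/nuisance_regression'   # hypothetical output directory
print(os.path.dirname(result_dir))    # -> '/data/derivatives'       (base_directory)
print(os.path.basename(result_dir))   # -> 'nuisance_regression'     (out_path_base)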
Example #18
def init_skullstrip_epi_wf(name='skullstrip_epi_wf'):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_file', 'skull_stripped_file', 'out_report']),
                         name='outputnode')
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    workflow.connect([
        (inputnode, skullstrip_first_pass, [('in_file', 'in_file')]),
        (skullstrip_first_pass, skullstrip_second_pass, [('out_file',
                                                          'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, combine_masks, [('out_file', 'operand_file')
                                                 ]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        # Masked file
        (inputnode, apply_mask, [('in_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        # Reportlet
        (inputnode, mask_reportlet, [('in_file', 'background_file')]),
        (combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
        (mask_reportlet, outputnode, [('out_report', 'out_report')]),
    ])

    return workflow
Example #19
def init_bold_preproc_report_wf(mem_gb,
                                reportlets_dir,
                                name='bold_preproc_report_wf'):
    """
    This workflow generates and saves a reportlet showing the effect of resampling
    the BOLD signal using the standard deviation maps.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.resampling import init_bold_preproc_report_wf
        wf = init_bold_preproc_report_wf(mem_gb=1, reportlets_dir='.')

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB
        reportlets_dir : str
            Directory in which to save reportlets
        name : str, optional
            Workflow name (default: bold_preproc_report_wf)

    **Inputs**

        in_pre
            BOLD time-series, before resampling
        in_post
            BOLD time-series, after resampling
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing

    """

    from niworkflows.nipype.algorithms.confounds import TSNR
    from niworkflows.interfaces import SimpleBeforeAfter
    from ...interfaces import DerivativesDataSink

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_pre', 'in_post', 'name_source']),
        name='inputnode')

    pre_tsnr = pe.Node(TSNR(), name='pre_tsnr', mem_gb=mem_gb * 4.5)
    pos_tsnr = pe.Node(TSNR(), name='pos_tsnr', mem_gb=mem_gb * 4.5)

    bold_rpt = pe.Node(SimpleBeforeAfter(), name='bold_rpt', mem_gb=0.1)
    bold_rpt_ds = pe.Node(DerivativesDataSink(base_directory=reportlets_dir,
                                              suffix='variant-preproc'),
                          name='bold_rpt_ds',
                          mem_gb=DEFAULT_MEMORY_MIN_GB,
                          run_without_submitting=True)

    workflow.connect([
        (inputnode, bold_rpt_ds, [('name_source', 'source_file')]),
        (inputnode, pre_tsnr, [('in_pre', 'in_file')]),
        (inputnode, pos_tsnr, [('in_post', 'in_file')]),
        (pre_tsnr, bold_rpt, [('stddev_file', 'before')]),
        (pos_tsnr, bold_rpt, [('stddev_file', 'after')]),
        (bold_rpt, bold_rpt_ds, [('out_report', 'in_file')]),
    ])

    return workflow
Example #20
def init_bold_surf_wf(mem_gb,
                      output_spaces,
                      medial_surface_nan,
                      name='bold_surf_wf'):
    """
    This workflow samples functional images to FreeSurfer surfaces

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.

    Outputs are in GIFTI format.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_surf_wf
        wf = init_bold_surf_wf(mem_gb=0.1,
                               output_spaces=['T1w', 'fsnative',
                                             'template', 'fsaverage5'],
                               medial_surface_nan=False)

    **Parameters**

        output_spaces : list
            List of output spaces functional images are to be resampled to
            Target spaces beginning with ``fs`` will be selected for resampling,
            such as ``fsaverage`` or related template spaces
            If the list contains ``fsnative``, images will be resampled to the
            individual subject's native surface
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files

    **Inputs**

        source_file
            Motion-corrected BOLD series in T1 space
        t1_preproc
            Bias-corrected structural template image
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    **Outputs**

        surfaces
            BOLD series, resampled to FreeSurfer surfaces

    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1_preproc', 'subject_id', 'subjects_dir',
        't1_2_fsnative_forward_transform'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['surfaces']),
                         name='outputnode')

    spaces = [space for space in output_spaces if space.startswith('fs')]

    def select_target(subject_id, space):
        """ Given a source subject ID and a target space, get the target subject ID """
        return subject_id if space == 'fsnative' else space

    targets = pe.MapNode(niu.Function(function=select_target),
                         iterfield=['space'],
                         name='targets',
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    targets.inputs.space = spaces

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.MapNode(niu.Rename(format_string='%(subject)s',
                                       keep_ext=True),
                            iterfield='subject',
                            name='rename_src',
                            run_without_submitting=True,
                            mem_gb=DEFAULT_MEMORY_MIN_GB)
    rename_src.inputs.subject = spaces

    resampling_xfm = pe.Node(fs.utils.LTAConvert(in_lta='identity.nofile',
                                                 out_lta=True),
                             name='resampling_xfm')
    set_xfm_source = pe.Node(ConcatenateLTA(out_type='RAS2RAS'),
                             name='set_xfm_source')

    sampler = pe.MapNode(fs.SampleToSurface(sampling_method='average',
                                            sampling_range=(0, 1, 0.2),
                                            sampling_units='frac',
                                            interp_method='trilinear',
                                            cortex_mask=True,
                                            override_reg_subj=True,
                                            out_type='gii'),
                         iterfield=['source_file', 'target_subject'],
                         iterables=('hemi', ['lh', 'rh']),
                         name='sampler',
                         mem_gb=mem_gb * 3)

    medial_nans = pe.MapNode(MedialNaNs(),
                             iterfield=['in_file', 'target_subject'],
                             name='medial_nans',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True),
                         name='merger',
                         joinsource='sampler',
                         joinfield=['in1'],
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)

    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield='in_file',
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, resampling_xfm, [('source_file', 'source_file'),
                                     ('t1_preproc', 'target_file')]),
        (inputnode, set_xfm_source, [('t1_2_fsnative_forward_transform',
                                      'in_lta2')]),
        (resampling_xfm, set_xfm_source, [('out_lta', 'in_lta1')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (set_xfm_source, sampler, [('out_file', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (merger, update_metadata, [('out', 'in_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
    ])

    if medial_surface_nan:
        workflow.connect([
            (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
            (sampler, medial_nans, [('out_file', 'in_file')]),
            (targets, medial_nans, [('out', 'target_subject')]),
            (medial_nans, merger, [('out', 'in1')]),
        ])
    else:
        workflow.connect(sampler, 'out_file', merger, 'in1')

    return workflow
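select_target maps each requested output space to the FreeSurfer subject onto which the BOLD series is sampled: 'fsnative' resolves to the subject's own surface, while any other fs* space names a template subject such as fsaverage5. A standalone illustration:

def select_target(subject_id, space):
    return subject_id if space == 'fsnative' else space

print(select_target('sub-01', 'fsnative'))    # -> 'sub-01' (the subject's native surface)
print(select_target('sub-01', 'fsaverage5'))  # -> 'fsaverage5'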
Example #21
def init_bold_preproc_trans_wf(mem_gb,
                               omp_nthreads,
                               name='bold_preproc_trans_wf',
                               use_compression=True,
                               use_fieldwarp=False,
                               split_file=False,
                               interpolation='LanczosWindowedSinc'):
    """
    This workflow resamples the input fMRI in its native (original)
    space in a "single shot" from the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_preproc_trans_wf
        wf = init_bold_preproc_trans_wf(mem_gb=3, omp_nthreads=1)

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_preproc_trans_wf``)
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI
        split_file : bool
            Whether the input file should be split (i.e., it is a 4D file),
            or it is already a list of 3D files (default ``False``, do not split)
        interpolation : str
            Interpolation type to be used by ANTs' ``applyTransforms``
            (default ``'LanczosWindowedSinc'``)

    **Inputs**

        bold_file
            Individual 3D volumes, not motion corrected
        bold_mask
            Skull-stripping mask of reference image
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold
            BOLD series, resampled in native space, including all preprocessing
        bold_mask
            BOLD series mask calculated with the new time-series
        bold_ref
            BOLD reference image: an average-like 3D image of the time-series
        bold_ref_brain
            Same as ``bold_ref``, but once the brain mask has been applied

    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'name_source', 'bold_file', 'bold_mask', 'hmc_xforms', 'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bold', 'bold_mask', 'bold_ref', 'bold_ref_brain']),
                         name='outputnode')

    bold_transform = pe.Node(MultiApplyTransforms(interpolation=interpolation,
                                                  float=True,
                                                  copy_dtype=True),
                             name='bold_transform',
                             mem_gb=mem_gb * 3 * omp_nthreads,
                             n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    # Generate a new BOLD reference
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)

    workflow.connect([
        (inputnode, merge, [('name_source', 'header_source')]),
        (bold_transform, merge, [('out_files', 'in_files')]),
        (merge, bold_reference_wf, [('out_file', 'inputnode.bold_file')]),
        (merge, outputnode, [('out_file', 'bold')]),
        (bold_reference_wf, outputnode,
         [('outputnode.ref_image', 'bold_ref'),
          ('outputnode.ref_image_brain', 'bold_ref_brain'),
          ('outputnode.bold_mask', 'bold_mask')]),
    ])

    # If the input is a single 4D file, split it into 3D volumes first
    if split_file:
        bold_split = pe.Node(FSLSplit(dimension='t'),
                             name='bold_split',
                             mem_gb=mem_gb * 3)
        workflow.connect([(inputnode, bold_split, [('bold_file', 'in_file')]),
                          (bold_split, bold_transform, [
                              ('out_files', 'input_image'),
                              (('out_files', _first), 'reference_image'),
                          ])])
    else:
        workflow.connect([
            (inputnode, bold_transform, [('bold_file', 'input_image'),
                                         (('bold_file', _first),
                                          'reference_image')]),
        ])

    if use_fieldwarp:
        merge_xforms = pe.Node(niu.Merge(2),
                               name='merge_xforms',
                               run_without_submitting=True,
                               mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([
            (inputnode, merge_xforms, [('fieldwarp', 'in1'),
                                       ('hmc_xforms', 'in2')]),
            (merge_xforms, bold_transform, [('out', 'transforms')]),
        ])
    else:

        def _aslist(val):
            return [val]

        workflow.connect([
            (inputnode, bold_transform, [(('hmc_xforms', _aslist),
                                          'transforms')]),
        ])

    # Code ready to generate a pre/post processing report
    # bold_bold_report_wf = init_bold_preproc_report_wf(
    #     mem_gb=mem_gb['resampled'],
    #     reportlets_dir=reportlets_dir
    # )
    # workflow.connect([
    #     (inputnode, bold_bold_report_wf, [
    #         ('bold_file', 'inputnode.name_source'),
    #         ('bold_file', 'inputnode.in_pre')]),  # This should be after STC
    #     (bold_bold_trans_wf, bold_bold_report_wf, [
    #         ('outputnode.bold', 'inputnode.in_post')]),
    # ])

    return workflow
Example #22
def init_bold_mni_trans_wf(template,
                           mem_gb,
                           omp_nthreads,
                           name='bold_mni_trans_wf',
                           template_out_grid='2mm',
                           use_compression=True,
                           use_fieldwarp=False):
    """
    This workflow samples functional images to the MNI template in a "single shot"
    from the original BOLD series.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_mni_trans_wf
        wf = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym',
                                    mem_gb=3,
                                    omp_nthreads=1,
                                    template_out_grid='native')

    **Parameters**

        template : str
            Name of template targeted by ``template`` output space
        mem_gb : float
            Size of BOLD file in GB
        omp_nthreads : int
            Maximum number of threads an individual process may use
        name : str
            Name of workflow (default: ``bold_mni_trans_wf``)
        template_out_grid : str
            Keyword ('native', '1mm' or '2mm') or path of custom reference
            image for normalization.
        use_compression : bool
            Save registered BOLD series as ``.nii.gz``
        use_fieldwarp : bool
            Include SDC warp in single-shot transform from BOLD to MNI

    **Inputs**

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        bold_split
            Individual 3D volumes, not motion corrected
        bold_mask
            Skull-stripping mask of reference image
        name_source
            BOLD series NIfTI file
            Used to recover original information lost during processing
        hmc_xforms
            List of affine transforms aligning each volume to ``ref_image`` in ITK format
        fieldwarp
            a :abbr:`DFM (displacements field map)` in ITK format

    **Outputs**

        bold_mni
            BOLD series, resampled to template space
        bold_mask_mni
            BOLD series mask in template space

    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'itk_bold_to_t1', 't1_2_mni_forward_transform', 'name_source',
        'bold_split', 'bold_mask', 'hmc_xforms', 'fieldwarp'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['bold_mni', 'bold_mask_mni']),
        name='outputnode')

    def _aslist(in_value):
        if isinstance(in_value, list):
            return in_value
        return [in_value]

    gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref',
                      mem_gb=0.3)  # 256x256x256 * 64 / 8 ~ 150MB
    template_str = nid.TEMPLATE_MAP[template]
    gen_ref.inputs.fixed_image = op.join(nid.get_dataset(template_str),
                                         '1mm_T1.nii.gz')

    mask_mni_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                           float=True),
                           name='mask_mni_tfm',
                           mem_gb=1)

    # Write corrected file in the designated output dir
    mask_merge_tfms = pe.Node(niu.Merge(2),
                              name='mask_merge_tfms',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)

    nxforms = 4 if use_fieldwarp else 3
    merge_xforms = pe.Node(niu.Merge(nxforms),
                           name='merge_xforms',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([(inputnode, merge_xforms, [('hmc_xforms',
                                                  'in%d' % nxforms)])])

    if use_fieldwarp:
        workflow.connect([(inputnode, merge_xforms, [('fieldwarp', 'in3')])])

    workflow.connect([
        (inputnode, gen_ref, [(('bold_split', _first), 'moving_image')]),
        (inputnode, mask_merge_tfms, [('t1_2_mni_forward_transform', 'in1'),
                                      (('itk_bold_to_t1', _aslist), 'in2')]),
        (mask_merge_tfms, mask_mni_tfm, [('out', 'transforms')]),
        (mask_mni_tfm, outputnode, [('output_image', 'bold_mask_mni')]),
        (inputnode, mask_mni_tfm, [('bold_mask', 'input_image')])
    ])

    bold_to_mni_transform = pe.Node(MultiApplyTransforms(
        interpolation="LanczosWindowedSinc", float=True, copy_dtype=True),
                                    name='bold_to_mni_transform',
                                    mem_gb=mem_gb * 3 * omp_nthreads,
                                    n_procs=omp_nthreads)

    merge = pe.Node(Merge(compress=use_compression),
                    name='merge',
                    mem_gb=mem_gb * 3)

    workflow.connect([
        (inputnode, merge_xforms, [('t1_2_mni_forward_transform', 'in1'),
                                   (('itk_bold_to_t1', _aslist), 'in2')]),
        (merge_xforms, bold_to_mni_transform, [('out', 'transforms')]),
        (inputnode, merge, [('name_source', 'header_source')]),
        (inputnode, bold_to_mni_transform, [('bold_split', 'input_image')]),
        (bold_to_mni_transform, merge, [('out_files', 'in_files')]),
        (merge, outputnode, [('out_file', 'bold_mni')]),
    ])

    if template_out_grid == 'native':
        workflow.connect([
            (gen_ref, mask_mni_tfm, [('out_file', 'reference_image')]),
            (gen_ref, bold_to_mni_transform, [('out_file', 'reference_image')
                                              ]),
        ])
    elif template_out_grid == '1mm' or template_out_grid == '2mm':
        mask_mni_tfm.inputs.reference_image = op.join(
            nid.get_dataset(template_str),
            '%s_brainmask.nii.gz' % template_out_grid)
        bold_to_mni_transform.inputs.reference_image = op.join(
            nid.get_dataset(template_str), '%s_T1.nii.gz' % template_out_grid)
    else:
        mask_mni_tfm.inputs.reference_image = template_out_grid
        bold_to_mni_transform.inputs.reference_image = template_out_grid
    return workflow
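template_out_grid selects the reference grid for the MNI-space outputs: 'native' uses the reference computed by the gen_ref node from the first BOLD volume, '1mm'/'2mm' use the template's own grids, and any other string is passed through as a path to a custom reference image. A hedged sketch of the three call patterns (mem_gb and omp_nthreads values are illustrative):

wf_native = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym', mem_gb=3,
                                   omp_nthreads=1, template_out_grid='native')
wf_2mm = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym', mem_gb=3,
                                omp_nthreads=1, template_out_grid='2mm')
wf_custom = init_bold_mni_trans_wf(template='MNI152NLin2009cAsym', mem_gb=3,
                                   omp_nthreads=1,
                                   template_out_grid='custom_grid.nii.gz')  # hypothetical reference image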
Example #23
def init_anat_derivatives_wf(output_dir,
                             output_spaces,
                             template,
                             freesurfer,
                             name='anat_derivatives_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1_preproc', 't1_mask', 't1_seg', 't1_tpms',
        't1_2_mni_forward_transform', 't1_2_mni', 'mni_mask', 'mni_seg',
        'mni_tpms', 'surfaces'
    ]),
                        name='inputnode')

    ds_t1_preproc = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                suffix='preproc'),
                            name='ds_t1_preproc',
                            run_without_submitting=True)

    ds_t1_mask = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix='brainmask'),
                         name='ds_t1_mask',
                         run_without_submitting=True)

    ds_t1_seg = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            suffix='dtissue'),
                        name='ds_t1_seg',
                        run_without_submitting=True)

    ds_t1_tpms = pe.Node(DerivativesDataSink(
        base_directory=output_dir, suffix='class-{extra_value}_probtissue'),
                         name='ds_t1_tpms',
                         run_without_submitting=True)
    ds_t1_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    suffix_fmt = 'space-{}_{}'.format
    ds_t1_mni = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            suffix=suffix_fmt(
                                                template, 'preproc')),
                        name='ds_t1_mni',
                        run_without_submitting=True)

    ds_mni_mask = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              suffix=suffix_fmt(
                                                  template, 'brainmask')),
                          name='ds_mni_mask',
                          run_without_submitting=True)

    ds_mni_seg = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix=suffix_fmt(
                                                 template, 'dtissue')),
                         name='ds_mni_seg',
                         run_without_submitting=True)

    ds_mni_tpms = pe.Node(DerivativesDataSink(
        base_directory=output_dir,
        suffix=suffix_fmt(template, 'class-{extra_value}_probtissue')),
                          name='ds_mni_tpms',
                          run_without_submitting=True)
    ds_mni_tpms.inputs.extra_values = ['CSF', 'GM', 'WM']

    ds_t1_mni_warp = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                 suffix=suffix_fmt(
                                                     template, 'warp')),
                             name='ds_t1_mni_warp',
                             run_without_submitting=True)

    def get_gifti_name(in_file):
        import os
        import re
        in_format = re.compile(r'(?P<LR>[lr])h.(?P<surf>.+)_converted.gii')
        name = os.path.basename(in_file)
        info = in_format.match(name).groupdict()
        info['LR'] = info['LR'].upper()
        return '{surf}.{LR}.surf'.format(**info)

    name_surfs = pe.MapNode(niu.Function(function=get_gifti_name),
                            iterfield='in_file',
                            name='name_surfs')

    ds_surfs = pe.MapNode(DerivativesDataSink(base_directory=output_dir),
                          iterfield=['in_file', 'suffix'],
                          name='ds_surfs',
                          run_without_submitting=True)

    workflow.connect([
        (inputnode, ds_t1_preproc, [('source_file', 'source_file'),
                                    ('t1_preproc', 'in_file')]),
        (inputnode, ds_t1_mask, [('source_file', 'source_file'),
                                 ('t1_mask', 'in_file')]),
        (inputnode, ds_t1_seg, [('source_file', 'source_file'),
                                ('t1_seg', 'in_file')]),
        (inputnode, ds_t1_tpms, [('source_file', 'source_file'),
                                 ('t1_tpms', 'in_file')]),
    ])

    if freesurfer:
        workflow.connect([
            (inputnode, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_surfs, [('source_file', 'source_file'),
                                   ('surfaces', 'in_file')]),
            (name_surfs, ds_surfs, [('out', 'suffix')]),
        ])
    if 'template' in output_spaces:
        workflow.connect([
            (inputnode, ds_t1_mni_warp, [('source_file', 'source_file'),
                                         ('t1_2_mni_forward_transform',
                                          'in_file')]),
            (inputnode, ds_t1_mni, [('source_file', 'source_file'),
                                    ('t1_2_mni', 'in_file')]),
            (inputnode, ds_mni_mask, [('source_file', 'source_file'),
                                      ('mni_mask', 'in_file')]),
            (inputnode, ds_mni_seg, [('source_file', 'source_file'),
                                     ('mni_seg', 'in_file')]),
            (inputnode, ds_mni_tpms, [('source_file', 'source_file'),
                                      ('mni_tpms', 'in_file')]),
        ])

    return workflow
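
A minimal usage sketch for the datasink workflow above (all paths are hypothetical, and running it requires fmriprep and its dependencies to be importable); in practice it is wired up by ``init_anat_preproc_wf`` rather than driven by hand:

anat_derivatives_wf = init_anat_derivatives_wf(
    output_dir='/data/derivatives/fmriprep',   # hypothetical path
    output_spaces=['T1w', 'template'],
    template='MNI152NLin2009cAsym',
    freesurfer=False)
# The remaining inputnode fields (t1_preproc, t1_mask, t1_seg, ...) are set the same way.
anat_derivatives_wf.inputs.inputnode.source_file = 'sub-01/anat/sub-01_T1w.nii.gz'  # hypothetical
# anat_derivatives_wf.run()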
Example #24
def init_autorecon_resume_wf(omp_nthreads, name='autorecon_resume_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['subjects_dir', 'subject_id', 'use_T2']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['subjects_dir', 'subject_id', 'out_report']),
                         name='outputnode')

    autorecon2_vol = pe.Node(fs.ReconAll(directive='autorecon2-volonly',
                                         openmp=omp_nthreads),
                             n_procs=omp_nthreads,
                             name='autorecon2_vol')

    autorecon_surfs = pe.MapNode(fs.ReconAll(
        directive='autorecon-hemi',
        flags=[
            '-noparcstats', '-nocortparc2', '-noparcstats2', '-nocortparc3',
            '-noparcstats3', '-nopctsurfcon', '-nohyporelabel',
            '-noaparc2aseg', '-noapas2aseg', '-nosegstats', '-nowmparc',
            '-nobalabels'
        ],
        openmp=omp_nthreads),
                                 iterfield='hemi',
                                 n_procs=omp_nthreads,
                                 name='autorecon_surfs')
    autorecon_surfs.inputs.hemi = ['lh', 'rh']

    autorecon3 = pe.MapNode(fs.ReconAll(directive='autorecon3',
                                        openmp=omp_nthreads),
                            iterfield='hemi',
                            n_procs=omp_nthreads,
                            name='autorecon3')
    autorecon3.inputs.hemi = ['lh', 'rh']

    # Only generate the report once; should be nothing to do
    recon_report = pe.Node(ReconAllRPT(directive='autorecon3',
                                       generate_report=True),
                           name='recon_report')

    def _dedup(in_list):
        vals = set(in_list)
        if len(vals) > 1:
            raise ValueError(
                "Non-identical values can't be deduplicated:\n{!r}".format(
                    in_list))
        return vals.pop()

    workflow.connect([
        (inputnode, autorecon3, [('use_T2', 'use_T2')]),
        (inputnode, autorecon2_vol, [('subjects_dir', 'subjects_dir'),
                                     ('subject_id', 'subject_id')]),
        (autorecon2_vol, autorecon_surfs, [('subjects_dir', 'subjects_dir'),
                                           ('subject_id', 'subject_id')]),
        (autorecon_surfs, autorecon3,
         [(('subjects_dir', _dedup), 'subjects_dir'),
          (('subject_id', _dedup), 'subject_id')]),
        (autorecon3, outputnode, [(('subjects_dir', _dedup), 'subjects_dir'),
                                  (('subject_id', _dedup), 'subject_id')]),
        (autorecon3, recon_report, [(('subjects_dir', _dedup), 'subjects_dir'),
                                    (('subject_id', _dedup), 'subject_id')]),
        (recon_report, outputnode, [('out_report', 'out_report')]),
    ])

    return workflow
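
A minimal sketch of driving the resume workflow directly (hypothetical paths; requires FreeSurfer and the fmriprep/niworkflows stack), with ``use_T2`` typically coming from ``detect_inputs`` in ``init_surface_recon_wf``:

autorecon_resume_wf = init_autorecon_resume_wf(omp_nthreads=8)
autorecon_resume_wf.inputs.inputnode.subjects_dir = '/data/freesurfer'  # hypothetical
autorecon_resume_wf.inputs.inputnode.subject_id = 'sub-01'              # hypothetical
autorecon_resume_wf.inputs.inputnode.use_T2 = False
# autorecon_resume_wf.run()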
Example #25
def init_gifti_surface_wf(name='gifti_surface_wf'):
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(['subjects_dir', 'subject_id']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['surfaces']),
                         name='outputnode')

    get_surfaces = pe.Node(nio.FreeSurferSource(), name='get_surfaces')

    midthickness = pe.MapNode(MakeMidthickness(thickness=True,
                                               distance=0.5,
                                               out_name='midthickness'),
                              iterfield='in_file',
                              name='midthickness')

    save_midthickness = pe.Node(nio.DataSink(parameterization=False),
                                name='save_midthickness')

    surface_list = pe.Node(niu.Merge(4, ravel_inputs=True),
                           name='surface_list',
                           run_without_submitting=True)
    fs_2_gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                          iterfield='in_file',
                          name='fs_2_gii')

    def normalize_surfs(in_file):
        """ Re-center GIFTI coordinates to fit align to native T1 space

        For midthickness surfaces, add MidThickness metadata

        Coordinate update based on:
        https://github.com/Washington-University/workbench/blob/1b79e56/src/Algorithms/AlgorithmSurfaceApplyAffine.cxx#L73-L91
        and
        https://github.com/Washington-University/Pipelines/blob/ae69b9a/PostFreeSurfer/scripts/FreeSurfer2CaretConvertAndRegisterNonlinear.sh#L147
        """
        import os
        import numpy as np
        import nibabel as nib
        img = nib.load(in_file)
        pointset = img.get_arrays_from_intent('NIFTI_INTENT_POINTSET')[0]
        coords = pointset.data
        c_ras_keys = ('VolGeomC_R', 'VolGeomC_A', 'VolGeomC_S')
        ras = np.array([float(pointset.metadata[key]) for key in c_ras_keys])
        # Apply C_RAS translation to coordinates
        pointset.data = (coords + ras).astype(coords.dtype)

        secondary = nib.gifti.GiftiNVPairs('AnatomicalStructureSecondary',
                                           'MidThickness')
        geom_type = nib.gifti.GiftiNVPairs('GeometricType', 'Anatomical')
        has_ass = has_geo = False
        for nvpair in pointset.meta.data:
            # Remove C_RAS translation from metadata to avoid double-dipping in FreeSurfer
            if nvpair.name in c_ras_keys:
                nvpair.value = '0.000000'
            # Check for missing metadata
            elif nvpair.name == secondary.name:
                has_ass = True
            elif nvpair.name == geom_type.name:
                has_geo = True
        fname = os.path.basename(in_file)
        # Update metadata for MidThickness/graymid surfaces
        if 'midthickness' in fname.lower() or 'graymid' in fname.lower():
            if not has_ass:
                pointset.meta.data.insert(1, secondary)
            if not has_geo:
                pointset.meta.data.insert(2, geom_type)
        img.to_filename(fname)
        return os.path.abspath(fname)

    fix_surfs = pe.MapNode(niu.Function(function=normalize_surfs),
                           iterfield='in_file',
                           name='fix_surfs')

    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'), ('pial', 'in2'),
                                      ('inflated', 'in3')]),
        (save_midthickness, surface_list, [('out_file', 'in4')]),
        (surface_list, fs_2_gii, [('out', 'in_file')]),
        (fs_2_gii, fix_surfs, [('converted', 'in_file')]),
        (fix_surfs, outputnode, [('out', 'surfaces')]),
    ])

    return workflow
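
The heart of ``normalize_surfs`` is the C_RAS re-centering: FreeSurfer stores surface vertices relative to the volume center, so the translation recorded in the ``VolGeomC_R/A/S`` metadata is added to every coordinate and then zeroed in the metadata so it cannot be applied twice. A toy, numpy-only illustration of that shift (all values made up):

import numpy as np

coords = np.array([[10.0, -5.0, 3.0],
                   [ 0.0,  0.0, 0.0]], dtype='float32')  # surface vertices
ras = np.array([1.5, -2.0, 0.5])                         # VolGeomC_R, VolGeomC_A, VolGeomC_S
shifted = (coords + ras).astype(coords.dtype)            # what gets written back to the GIFTI
print(shifted)
# [[11.5 -7.   3.5]
#  [ 1.5 -2.   0.5]]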
Example #26
def init_surface_recon_wf(omp_nthreads, hires, name='surface_recon_wf'):

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        't1w', 't2w', 'skullstripped_t1', 'subjects_dir', 'subject_id'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'subjects_dir', 'subject_id', 'fs_2_t1_transform', 'surfaces',
        'out_report'
    ]),
                         name='outputnode')

    def detect_inputs(t1w_list, t2w_list=[], hires_enabled=True):
        from niworkflows.nipype.interfaces.base import isdefined
        from niworkflows.nipype.utils.filemanip import filename_to_list
        from niworkflows.nipype.interfaces.traits_extension import Undefined
        import nibabel as nib
        t1w_list = filename_to_list(t1w_list)
        t2w_list = filename_to_list(t2w_list) if isdefined(t2w_list) else []
        t1w_ref = nib.load(t1w_list[0])
        # Use high resolution preprocessing if voxel size < 1.0mm
        # Tolerance of 0.05mm: the largest voxel dimension must be below 0.95mm
        hires = hires_enabled and max(t1w_ref.header.get_zooms()) < 1 - 0.05

        t2w = Undefined
        if t2w_list and max(nib.load(t2w_list[0]).header.get_zooms()) < 1.2:
            t2w = t2w_list[0]

        # https://surfer.nmr.mgh.harvard.edu/fswiki/SubmillimeterRecon
        mris_inflate = '-n 50' if hires else Undefined
        return (t2w, isdefined(t2w), hires, mris_inflate)

    recon_config = pe.Node(niu.Function(
        function=detect_inputs,
        output_names=['t2w', 'use_T2', 'hires', 'mris_inflate']),
                           name='recon_config')
    recon_config.inputs.hires_enabled = hires

    autorecon1 = pe.Node(fs.ReconAll(directive='autorecon1',
                                     flags='-noskullstrip',
                                     openmp=omp_nthreads),
                         name='autorecon1')
    autorecon1.interface._can_resume = False
    autorecon1.interface.num_threads = omp_nthreads

    def inject_skullstripped(subjects_dir, subject_id, skullstripped):
        import os
        import nibabel as nib
        from nilearn.image import resample_to_img, new_img_like
        from niworkflows.nipype.utils.filemanip import copyfile
        mridir = os.path.join(subjects_dir, subject_id, 'mri')
        t1 = os.path.join(mridir, 'T1.mgz')
        bm_auto = os.path.join(mridir, 'brainmask.auto.mgz')
        bm = os.path.join(mridir, 'brainmask.mgz')

        if not os.path.exists(bm_auto):
            img = nib.load(t1)
            mask = nib.load(skullstripped)
            bmask = new_img_like(mask, mask.get_data() > 0)
            resampled_mask = resample_to_img(bmask, img, 'nearest')
            masked_image = new_img_like(
                img,
                img.get_data() * resampled_mask.get_data())
            masked_image.to_filename(bm_auto)

        if not os.path.exists(bm):
            copyfile(bm_auto, bm, copy=True, use_hardlink=True)

        return subjects_dir, subject_id

    skull_strip_extern = pe.Node(niu.Function(
        function=inject_skullstripped,
        output_names=['subjects_dir', 'subject_id']),
                                 name='skull_strip_extern')

    fs_transform = pe.Node(fs.Tkregister2(fsl_out='freesurfer2subT1.mat',
                                          reg_header=True),
                           name='fs_transform')

    autorecon_resume_wf = init_autorecon_resume_wf(omp_nthreads=omp_nthreads)
    gifti_surface_wf = init_gifti_surface_wf()

    workflow.connect([
        # Configuration
        (inputnode, recon_config, [('t1w', 't1w_list'), ('t2w', 't2w_list')]),
        # Passing subjects_dir / subject_id enforces serial order
        (inputnode, autorecon1, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id')]),
        (autorecon1, skull_strip_extern, [('subjects_dir', 'subjects_dir'),
                                          ('subject_id', 'subject_id')]),
        (skull_strip_extern, autorecon_resume_wf,
         [('subjects_dir', 'inputnode.subjects_dir'),
          ('subject_id', 'inputnode.subject_id')]),
        (autorecon_resume_wf, gifti_surface_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        # Reconstruction phases
        (inputnode, autorecon1, [('t1w', 'T1_files')]),
        (recon_config, autorecon1, [
            ('t2w', 'T2_file'),
            ('hires', 'hires'),
            # First run only (recon-all saves expert options)
            ('mris_inflate', 'mris_inflate')]),
        (inputnode, skull_strip_extern,
         [('skullstripped_t1', 'skullstripped')]),
        (recon_config, autorecon_resume_wf, [('use_T2', 'inputnode.use_T2')]),
        # Construct transform from FreeSurfer conformed image to FMRIPREP
        # reoriented image
        (inputnode, fs_transform, [('t1w', 'target_image')]),
        (autorecon1, fs_transform, [('T1', 'moving_image')]),
        # Output
        (autorecon_resume_wf, outputnode,
         [('outputnode.subjects_dir', 'subjects_dir'),
          ('outputnode.subject_id', 'subject_id'),
          ('outputnode.out_report', 'out_report')]),
        (gifti_surface_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        (fs_transform, outputnode, [('fsl_file', 'fs_2_t1_transform')]),
    ])

    return workflow
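
The ``detect_inputs`` helper boils down to two thresholds: submillimeter (``hires``) reconstruction is enabled when the largest T1w voxel dimension is below 0.95 mm, and a T2w image is only passed to ``recon-all`` when its largest voxel dimension is below 1.2 mm. A standalone sketch of that decision (function name and example voxel sizes are made up; the real code reads the zooms from the NIfTI headers with nibabel):

def pick_recon_options(t1w_zooms, t2w_zooms=None, hires_enabled=True):
    # Mirrors the thresholds used in detect_inputs above
    hires = hires_enabled and max(t1w_zooms) < 1 - 0.05
    use_t2 = t2w_zooms is not None and max(t2w_zooms) < 1.2
    mris_inflate = '-n 50' if hires else None
    return hires, use_t2, mris_inflate

print(pick_recon_options((0.8, 0.8, 0.8), (1.0, 1.0, 1.0)))  # (True, True, '-n 50')
print(pick_recon_options((1.0, 1.0, 1.0)))                   # (False, False, None)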
Example #27
def init_prepare_epi_wf(ants_nthreads, name="prepare_epi_wf"):
    """
    This workflow takes in a set of EPI files with the same phase
    encoding direction and returns a single 3D volume ready to be used in
    field distortion estimation.

    The procedure involves: estimating a robust template using FreeSurfer's
    'mri_robust_template', bias field correction using ANTs N4BiasFieldCorrection
    and AFNI 3dUnifize, skullstripping using FSL BET and AFNI 3dAutomask,
    and rigid coregistration to the reference using ANTs.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.unwarp import init_prepare_epi_wf
        wf = init_prepare_epi_wf(ants_nthreads=8)


    Inputs

        fmaps
            list of 3D or 4D NIfTI images
        ref_brain
            coregistration reference (skullstripped and bias field corrected)

    Outputs

        out_file
            single 3D NIfTI file

    """
    inputnode = pe.Node(niu.IdentityInterface(fields=['fmaps', 'ref_brain']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                         name='outputnode')

    split = pe.MapNode(fsl.Split(dimension='t'),
                       iterfield='in_file',
                       name='split')

    merge = pe.Node(
        StructuralReference(
            auto_detect_sensitivity=True,
            initial_timepoint=1,
            fixed_timepoint=True,  # Align to first image
            intensity_scaling=True,  # 7-DOF (rigid + intensity)
            no_iteration=True,
            subsample_threshold=200,
            out_file='template.nii.gz'),
        name='merge')

    enhance_and_skullstrip_epi_wf = init_enhance_and_skullstrip_epi_wf()

    ants_settings = pkgr.resource_filename('fmriprep',
                                           'data/translation_rigid.json')
    fmap2ref_reg = pe.Node(ants.Registration(from_file=ants_settings,
                                             output_warped_image=True,
                                             num_threads=ants_nthreads),
                           name='fmap2ref_reg')
    fmap2ref_reg.interface.num_threads = ants_nthreads

    workflow = pe.Workflow(name=name)

    def _flatten(l):
        return [item for sublist in l for item in sublist]

    workflow.connect([
        (inputnode, split, [('fmaps', 'in_file')]),
        (split, merge, [(('out_files', _flatten), 'in_files')]),
        (merge, enhance_and_skullstrip_epi_wf, [('out_file',
                                                 'inputnode.in_file')]),
        (enhance_and_skullstrip_epi_wf, fmap2ref_reg,
         [('outputnode.skull_stripped_file', 'moving_image')]),
        (inputnode, fmap2ref_reg, [('ref_brain', 'fixed_image')]),
        (fmap2ref_reg, outputnode, [('warped_image', 'out_file')]),
    ])

    return workflow
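
A minimal instantiation sketch for the EPI-preparation workflow above (hypothetical file names; FSL, FreeSurfer, ANTs and the fmriprep package are assumed to be available):

prepare_epi_wf = init_prepare_epi_wf(ants_nthreads=4)
prepare_epi_wf.inputs.inputnode.fmaps = ['sub-01_dir-AP_epi.nii.gz']       # hypothetical
prepare_epi_wf.inputs.inputnode.ref_brain = 'sub-01_boldref_brain.nii.gz'  # hypothetical
# prepare_epi_wf.run()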
Example #28
def init_anat_preproc_wf(skull_strip_ants,
                         skull_strip_template,
                         output_spaces,
                         template,
                         debug,
                         freesurfer,
                         omp_nthreads,
                         hires,
                         reportlets_dir,
                         output_dir,
                         name='anat_preproc_wf'):
    """T1w images preprocessing pipeline"""

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['t1w', 't2w', 'subjects_dir']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        't1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms', 't1_2_mni',
        't1_2_mni_forward_transform', 't1_2_mni_reverse_transform', 'mni_mask',
        'mni_seg', 'mni_tpms', 'subjects_dir', 'subject_id',
        'fs_2_t1_transform', 'surfaces'
    ]),
                         name='outputnode')

    def bidsinfo(in_file):
        from fmriprep.interfaces.bids import BIDS_NAME
        match = BIDS_NAME.search(in_file)
        params = match.groupdict() if match is not None else {}
        return tuple(
            map(params.get, [
                'subject_id', 'ses_id', 'task_id', 'acq_id', 'rec_id', 'run_id'
            ]))

    bids_info = pe.Node(niu.Function(function=bidsinfo,
                                     output_names=[
                                         'subject_id', 'ses_id', 'task_id',
                                         'acq_id', 'rec_id', 'run_id'
                                     ]),
                        name='bids_info',
                        run_without_submitting=True)

    summary = pe.Node(AnatomicalSummary(output_spaces=output_spaces,
                                        template=template),
                      name='summary')

    # 0. Reorient T1w image(s) to RAS and resample to common voxel space
    t1_conform = pe.Node(ConformSeries(), name='t1_conform')

    # 1. Align and merge if several T1w images are provided
    t1_merge = pe.Node(
        # StructuralReference is fs.RobustTemplate if > 1 volume, copying otherwise
        StructuralReference(
            auto_detect_sensitivity=True,
            initial_timepoint=1,
            fixed_timepoint=True,  # Align to first image
            intensity_scaling=True,  # 7-DOF (rigid + intensity)
            no_iteration=True,
            subsample_threshold=200,
        ),
        name='t1_merge')

    # 2. T1 Bias Field Correction
    # Bias field correction is handled in skull strip workflows.

    # 3. Skull-stripping
    # skullstrip_wf = init_skullstrip_afni_wf(name='skullstrip_afni_wf')
    if skull_strip_ants:
        skullstrip_wf = init_skullstrip_ants_wf(
            name='skullstrip_ants_wf',
            debug=debug,
            omp_nthreads=omp_nthreads,
            skull_strip_template=skull_strip_template)
    else:
        skullstrip_wf = init_skullstrip_watershed_wf(
            name='skullstrip_watershed_wf')

    # 4. Segmentation
    t1_seg = pe.Node(FASTRPT(generate_report=True,
                             segments=True,
                             no_bias=True,
                             probability_maps=True),
                     name='t1_seg')

    # 5. Spatial normalization (T1w to MNI registration)
    t1_2_mni = pe.Node(RobustMNINormalizationRPT(
        generate_report=True,
        num_threads=omp_nthreads,
        flavor='testing' if debug else 'precise',
    ),
                       name='t1_2_mni')
    # Should not be necessary, but does not hurt: make sure the MultiProc
    # scheduler knows the resource limits
    t1_2_mni.interface.num_threads = omp_nthreads

    # Resample the brain mask and the tissue probability maps into mni space
    mni_mask = pe.Node(ants.ApplyTransforms(dimension=3,
                                            default_value=0,
                                            float=True,
                                            interpolation='NearestNeighbor'),
                       name='mni_mask')

    mni_seg = pe.Node(ants.ApplyTransforms(dimension=3,
                                           default_value=0,
                                           float=True,
                                           interpolation='NearestNeighbor'),
                      name='mni_seg')

    mni_tpms = pe.MapNode(ants.ApplyTransforms(dimension=3,
                                               default_value=0,
                                               float=True,
                                               interpolation='Linear'),
                          iterfield=['input_image'],
                          name='mni_tpms')

    workflow.connect([
        (inputnode, bids_info,
         [(('t1w', fix_multi_T1w_source_name), 'in_file')]),
        (inputnode, t1_conform, [('t1w', 't1w_list')]),
        (t1_conform, t1_merge, [('t1w_list', 'in_files'),
                                (('t1w_list', add_suffix, '_template'),
                                 'out_file')]),
        (t1_merge, skullstrip_wf, [('out_file', 'inputnode.in_file')]),
        (skullstrip_wf, t1_seg, [('outputnode.out_file', 'in_files')]),
        (skullstrip_wf, outputnode, [('outputnode.bias_corrected',
                                      't1_preproc'),
                                     ('outputnode.out_file', 't1_brain'),
                                     ('outputnode.out_mask', 't1_mask')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_seg'),
                              ('probability_maps', 't1_tpms')]),
        (inputnode, summary, [('t1w', 't1w')]),
    ])
    if 'template' in output_spaces:
        template_str = nid.TEMPLATE_MAP[template]
        ref_img = op.join(nid.get_dataset(template_str), '1mm_T1.nii.gz')

        t1_2_mni.inputs.template = template_str
        mni_mask.inputs.reference_image = ref_img
        mni_seg.inputs.reference_image = ref_img
        mni_tpms.inputs.reference_image = ref_img

        workflow.connect([
            (skullstrip_wf, t1_2_mni, [('outputnode.bias_corrected',
                                        'moving_image')]),
            (skullstrip_wf, t1_2_mni,
             [('outputnode.out_mask', 'moving_mask')]),
            (skullstrip_wf, mni_mask,
             [('outputnode.out_mask', 'input_image')]),
            (t1_2_mni, mni_mask, [('composite_transform', 'transforms')]),
            (t1_seg, mni_seg, [('tissue_class_map', 'input_image')]),
            (t1_2_mni, mni_seg, [('composite_transform', 'transforms')]),
            (t1_seg, mni_tpms, [('probability_maps', 'input_image')]),
            (t1_2_mni, mni_tpms, [('composite_transform', 'transforms')]),
            (t1_2_mni, outputnode,
             [('warped_image', 't1_2_mni'),
              ('composite_transform', 't1_2_mni_forward_transform'),
              ('inverse_composite_transform', 't1_2_mni_reverse_transform')]),
            (mni_mask, outputnode, [('output_image', 'mni_mask')]),
            (mni_seg, outputnode, [('output_image', 'mni_seg')]),
            (mni_tpms, outputnode, [('output_image', 'mni_tpms')]),
        ])

    # 6. FreeSurfer reconstruction
    if freesurfer:
        surface_recon_wf = init_surface_recon_wf(name='surface_recon_wf',
                                                 omp_nthreads=omp_nthreads,
                                                 hires=hires)

        workflow.connect([
            (inputnode, summary, [('subjects_dir', 'subjects_dir')]),
            (bids_info, summary, [('subject_id', 'subject_id')]),
            (inputnode, surface_recon_wf, [('t2w', 'inputnode.t2w'),
                                           ('subjects_dir',
                                            'inputnode.subjects_dir')]),
            (summary, surface_recon_wf,
             [('subject_id', 'inputnode.subject_id')]),
            (t1_merge, surface_recon_wf, [('out_file', 'inputnode.t1w')]),
            (skullstrip_wf, surface_recon_wf,
             [('outputnode.out_file', 'inputnode.skullstripped_t1')]),
            (surface_recon_wf, outputnode,
             [('outputnode.subjects_dir', 'subjects_dir'),
              ('outputnode.subject_id', 'subject_id'),
              ('outputnode.fs_2_t1_transform', 'fs_2_t1_transform'),
              ('outputnode.surfaces', 'surfaces')]),
        ])

    anat_reports_wf = init_anat_reports_wf(reportlets_dir=reportlets_dir,
                                           skull_strip_ants=skull_strip_ants,
                                           output_spaces=output_spaces,
                                           template=template,
                                           freesurfer=freesurfer)
    workflow.connect([
        (inputnode, anat_reports_wf, [(('t1w', fix_multi_T1w_source_name),
                                       'inputnode.source_file')]),
        (t1_seg, anat_reports_wf, [('out_report', 'inputnode.t1_seg_report')]),
        (summary, anat_reports_wf,
         [('out_report', 'inputnode.summary_report')]),
    ])

    if skull_strip_ants:
        workflow.connect([(skullstrip_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.t1_skull_strip_report')
        ])])
    if freesurfer:
        workflow.connect([(surface_recon_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.recon_report')
        ])])
    if 'template' in output_spaces:
        workflow.connect([
            (t1_2_mni, anat_reports_wf, [('out_report',
                                          'inputnode.t1_2_mni_report')]),
        ])

    anat_derivatives_wf = init_anat_derivatives_wf(output_dir=output_dir,
                                                   output_spaces=output_spaces,
                                                   template=template,
                                                   freesurfer=freesurfer)

    workflow.connect([
        (inputnode, anat_derivatives_wf, [(('t1w', fix_multi_T1w_source_name),
                                           'inputnode.source_file')]),
        (outputnode, anat_derivatives_wf, [
            ('t1_preproc', 'inputnode.t1_preproc'),
            ('t1_mask', 'inputnode.t1_mask'),
            ('t1_seg', 'inputnode.t1_seg'),
            ('t1_tpms', 'inputnode.t1_tpms'),
            ('t1_2_mni_forward_transform',
             'inputnode.t1_2_mni_forward_transform'),
            ('t1_2_mni', 'inputnode.t1_2_mni'),
            ('mni_mask', 'inputnode.mni_mask'),
            ('mni_seg', 'inputnode.mni_seg'),
            ('mni_tpms', 'inputnode.mni_tpms'),
            ('surfaces', 'inputnode.surfaces'),
        ]),
    ])

    return workflow
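
A minimal instantiation sketch for the anatomical pipeline above (paths are hypothetical and the option values merely illustrate plausible FMRIPREP settings, not requirements of the code):

anat_preproc_wf = init_anat_preproc_wf(
    skull_strip_ants=True,
    skull_strip_template='OASIS',              # illustrative choice
    output_spaces=['T1w', 'template'],
    template='MNI152NLin2009cAsym',
    debug=False,
    freesurfer=False,
    omp_nthreads=8,
    hires=True,
    reportlets_dir='/data/reportlets',         # hypothetical
    output_dir='/data/derivatives/fmriprep')   # hypothetical
anat_preproc_wf.inputs.inputnode.t1w = ['sub-01/anat/sub-01_T1w.nii.gz']   # hypothetical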
Example #29
def init_func_derivatives_wf(output_dir, output_spaces, template, freesurfer,
                             use_aroma, cifti_output, name='func_derivatives_wf'):
    """
    Set up a battery of datasinks to store derivatives in the right location
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['source_file', 'bold_t1', 'bold_mask_t1', 'bold_mni', 'bold_mask_mni',
                    'bold_aseg_t1', 'bold_aparc_t1', 'cifti_variant_key',
                    'confounds', 'surfaces', 'aroma_noise_ics', 'melodic_mix',
                    'nonaggr_denoised_file', 'bold_cifti', 'cifti_variant']),
        name='inputnode')

    suffix_fmt = 'space-{}_{}'.format
    variant_suffix_fmt = 'space-{}_variant-{}_{}'.format

    ds_confounds = pe.Node(DerivativesDataSink(
        base_directory=output_dir, suffix='confounds'),
        name="ds_confounds", run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([
        (inputnode, ds_confounds, [('source_file', 'source_file'),
                                   ('confounds', 'in_file')]),
    ])

    # Resample to T1w space
    if 'T1w' in output_spaces:
        ds_bold_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix=suffix_fmt('T1w', 'preproc'), compress=True),
            name='ds_bold_t1', run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)

        ds_bold_mask_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix=suffix_fmt('T1w', 'brainmask')),
            name='ds_bold_mask_t1', run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([
            (inputnode, ds_bold_t1, [('source_file', 'source_file'),
                                     ('bold_t1', 'in_file')]),
            (inputnode, ds_bold_mask_t1, [('source_file', 'source_file'),
                                          ('bold_mask_t1', 'in_file')]),
        ])

    # Resample to template (default: MNI)
    if 'template' in output_spaces:
        ds_bold_mni = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix=suffix_fmt(template, 'preproc')),
            name='ds_bold_mni', run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_mask_mni = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix=suffix_fmt(template, 'brainmask')),
            name='ds_bold_mask_mni', run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([
            (inputnode, ds_bold_mni, [('source_file', 'source_file'),
                                      ('bold_mni', 'in_file')]),
            (inputnode, ds_bold_mask_mni, [('source_file', 'source_file'),
                                           ('bold_mask_mni', 'in_file')]),
        ])

    if freesurfer:
        ds_bold_aseg_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix='space-T1w_label-aseg_roi'),
            name='ds_bold_aseg_t1', run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_aparc_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix='space-T1w_label-aparcaseg_roi'),
            name='ds_bold_aparc_t1', run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)
        workflow.connect([
            (inputnode, ds_bold_aseg_t1, [('source_file', 'source_file'),
                                          ('bold_aseg_t1', 'in_file')]),
            (inputnode, ds_bold_aparc_t1, [('source_file', 'source_file'),
                                           ('bold_aparc_t1', 'in_file')]),
        ])

    # fsaverage space
    if freesurfer and any(space.startswith('fs') for space in output_spaces):
        name_surfs = pe.MapNode(GiftiNameSource(
            pattern=r'(?P<LR>[lr])h.(?P<space>\w+).gii', template='space-{space}.{LR}.func'),
            iterfield='in_file', name='name_surfs', mem_gb=DEFAULT_MEMORY_MIN_GB,
            run_without_submitting=True)
        ds_bold_surfs = pe.MapNode(DerivativesDataSink(base_directory=output_dir),
                                   iterfield=['in_file', 'suffix'], name='ds_bold_surfs',
                                   run_without_submitting=True,
                                   mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_bold_surfs, [('source_file', 'source_file'),
                                        ('surfaces', 'in_file')]),
            (name_surfs, ds_bold_surfs, [('out_name', 'suffix')]),
        ])

        # CIFTI output
        if cifti_output and 'template' in output_spaces:
            name_cifti = pe.MapNode(
                CiftiNameSource(), iterfield=['variant'], name='name_cifti',
                mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True)
            cifti_bolds = pe.MapNode(DerivativesDataSink(
                base_directory=output_dir, compress=False),
                iterfield=['in_file', 'suffix'], name='cifti_bolds',
                run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB)
            cifti_key = pe.MapNode(DerivativesDataSink(
                base_directory=output_dir), iterfield=['in_file', 'suffix'],
                name='cifti_key', run_without_submitting=True,
                mem_gb=DEFAULT_MEMORY_MIN_GB)
            workflow.connect([
                (inputnode, name_cifti, [('cifti_variant', 'variant')]),
                (inputnode, cifti_bolds, [('bold_cifti', 'in_file'),
                                          ('source_file', 'source_file')]),
                (name_cifti, cifti_bolds, [('out_name', 'suffix')]),
                (name_cifti, cifti_key, [('out_name', 'suffix')]),
                (inputnode, cifti_key, [('source_file', 'source_file'),
                                        ('cifti_variant_key', 'in_file')]),
            ])

    if use_aroma:
        ds_aroma_noise_ics = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix='AROMAnoiseICs'),
            name="ds_aroma_noise_ics", run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_melodic_mix = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix='MELODICmix'),
            name="ds_melodic_mix", run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_aroma_mni = pe.Node(DerivativesDataSink(
            base_directory=output_dir, suffix=variant_suffix_fmt(
                template, 'smoothAROMAnonaggr', 'preproc')),
            name='ds_aroma_mni', run_without_submitting=True,
            mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, ds_aroma_noise_ics, [('source_file', 'source_file'),
                                             ('aroma_noise_ics', 'in_file')]),
            (inputnode, ds_melodic_mix, [('source_file', 'source_file'),
                                         ('melodic_mix', 'in_file')]),
            (inputnode, ds_aroma_mni, [('source_file', 'source_file'),
                                       ('nonaggr_denoised_file', 'in_file')]),
        ])

    return workflow
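
And a matching sketch for the functional datasinks (hypothetical output directory; the flags map directly onto which branches above get connected):

func_derivatives_wf = init_func_derivatives_wf(
    output_dir='/data/derivatives/fmriprep',   # hypothetical
    output_spaces=['T1w', 'template'],
    template='MNI152NLin2009cAsym',
    freesurfer=False,
    use_aroma=False,
    cifti_output=False)
# source_file, bold_t1, bold_mni, confounds, ... are then fed through inputnode,
# exactly as init_func_preproc_wf does below.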
Example #30
def init_func_preproc_wf(bold_file, ignore, freesurfer,
                         use_bbr, t2s_coreg, bold2t1w_dof, reportlets_dir,
                         output_spaces, template, output_dir, omp_nthreads,
                         fmap_bspline, fmap_demean, use_syn, force_syn,
                         use_aroma, ignore_aroma_err, aroma_melodic_dim,
                         medial_surface_nan, cifti_output,
                         debug, low_mem, template_out_grid, layout=None):
    """
    This workflow controls the functional preprocessing stages of FMRIPREP.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold import init_func_preproc_wf
        wf = init_func_preproc_wf('/completely/made/up/path/sub-01_task-nback_bold.nii.gz',
                                  omp_nthreads=1,
                                  ignore=[],
                                  freesurfer=True,
                                  reportlets_dir='.',
                                  output_dir='.',
                                  template='MNI152NLin2009cAsym',
                                  output_spaces=['T1w', 'fsnative',
                                                 'template', 'fsaverage5'],
                                  debug=False,
                                  use_bbr=True,
                                  t2s_coreg=False,
                                  bold2t1w_dof=9,
                                  fmap_bspline=True,
                                  fmap_demean=True,
                                  use_syn=True,
                                  force_syn=True,
                                  low_mem=False,
                                  template_out_grid='native',
                                  medial_surface_nan=False,
                                  cifti_output=False,
                                  use_aroma=False,
                                  ignore_aroma_err=False,
                                  aroma_melodic_dim=None)

    **Parameters**

        bold_file : str
            BOLD series NIfTI file
        ignore : list
            Preprocessing steps to skip (may include "slicetiming", "fieldmaps")
        freesurfer : bool
            Enable FreeSurfer functional registration (bbregister) and resampling
            BOLD series to FreeSurfer surface meshes.
        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
        t2s_coreg : bool
            Use multiple BOLD echos to create T2*-map for T2*-driven coregistration
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        reportlets_dir : str
            Directory in which to save reportlets
        output_spaces : list
            List of output spaces functional images are to be resampled to.
            Some parts of the pipeline will only be instantiated for some output spaces.

            Valid spaces:

                - T1w
                - template
                - fsnative
                - fsaverage (or other pre-existing FreeSurfer templates)
        template : str
            Name of template targeted by ``template`` output space
        output_dir : str
            Directory in which to save derivatives
        omp_nthreads : int
            Maximum number of threads an individual process may use
        fmap_bspline : bool
            **Experimental**: Fit B-Spline field using least-squares
        fmap_demean : bool
            Demean voxel-shift map during unwarp
        use_syn : bool
            **Experimental**: Enable ANTs SyN-based susceptibility distortion correction (SDC).
            If fieldmaps are present and enabled, this is not run by default.
        force_syn : bool
            **Temporary**: Always run SyN-based SDC
        use_aroma : bool
            Perform ICA-AROMA on MNI-resampled functional series
        ignore_aroma_err : bool
            Do not fail on ICA-AROMA errors
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files
        cifti_output : bool
            Generate bold CIFTI file in output spaces
        debug : bool
            Enable debugging outputs
        low_mem : bool
            Write uncompressed .nii files in some cases to reduce memory usage
        template_out_grid : str
            Keyword ('native', '1mm' or '2mm') or path of custom reference
            image for normalization
        layout : BIDSLayout
            BIDSLayout structure to enable metadata retrieval

    **Inputs**

        bold_file
            BOLD series NIfTI file
        t1_preproc
            Bias-corrected structural template image
        t1_brain
            Skull-stripped ``t1_preproc``
        t1_mask
            Mask of the skull-stripped template image
        t1_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        t1_tpms
            List of tissue probability maps in T1w space
        t1_2_mni_forward_transform
            ANTs-compatible affine-and-warp transform file
        t1_2_mni_reverse_transform
            ANTs-compatible affine-and-warp transform file (inverse)
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w


    **Outputs**

        bold_t1
            BOLD series, resampled to T1w space
        bold_mask_t1
            BOLD series mask in T1w space
        bold_mni
            BOLD series, resampled to template space
        bold_mask_mni
            BOLD series mask in template space
        confounds
            TSV of confounds
        surfaces
            BOLD series, resampled to FreeSurfer surfaces
        aroma_noise_ics
            Noise components identified by ICA-AROMA
        melodic_mix
            FSL MELODIC mixing matrix
        bold_cifti
            BOLD CIFTI image
        cifti_variant
            Combination of target spaces for ``bold_cifti``


    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.util.init_bold_reference_wf`
        * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
        * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
        * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
        * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf`
        * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_mni_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
        * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.pepolar.init_pepolar_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_fmap_estimator_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_sdc_unwarp_wf`
        * :py:func:`~fmriprep.workflows.fieldmap.init_nonlinear_sdc_wf`

    """
    from ..fieldmap.base import init_sdc_wf  # Avoid circular dependency (#1066)

    ref_file = bold_file
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
    bold_tlen = 10
    multiecho = isinstance(bold_file, list)

    if multiecho:
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]

    if os.path.isfile(ref_file):
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    LOGGER.log(25, ('Creating bold processing workflow for "%s" (%.2f GB / %d TRs). '
                    'Memory resampled/largemem=%.2f/%.2f GB.'),
               ref_file, mem_gb['filesize'], bold_tlen, mem_gb['resampled'], mem_gb['largemem'])

    # For doc building purposes
    if layout is None or bold_file == 'bold_preprocesing':
        LOGGER.log(25, 'No valid layout: building empty workflow.')
        metadata = {
            'RepetitionTime': 2.0,
            'SliceTiming': [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
            'PhaseEncodingDirection': 'j',
        }
        fmaps = [{
            'type': 'phasediff',
            'phasediff': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_phasediff.nii.gz',
            'magnitude1': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude1.nii.gz',
            'magnitude2': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude2.nii.gz',
        }]
        run_stc = True
        multiecho = False
    else:
        metadata = layout.get_metadata(ref_file)

        # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
        fmaps = []
        if 'fieldmaps' not in ignore:
            fmaps = layout.get_fieldmap(ref_file, return_list=True)
            for fmap in fmaps:
                fmap['metadata'] = layout.get_metadata(fmap[fmap['type']])

        # Run SyN if forced or in the absence of fieldmap correction
        if force_syn or (use_syn and not fmaps):
            fmaps.append({'type': 'syn'})

        # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
        run_stc = ("SliceTiming" in metadata and
                   'slicetiming' not in ignore and
                   (_get_series_len(ref_file) > 4 or "TooShort"))

    # Use T2* as target for ME-EPI in co-registration
    if t2s_coreg and not multiecho:
        LOGGER.warning("No multiecho BOLD images found for T2* coregistration. "
                       "Using standard EPI-T1 coregistration.")
        t2s_coreg = False

    # Switch stc off
    if multiecho and run_stc is True:
        LOGGER.warning('Slice-timing correction is not available for '
                       'multiecho BOLD data (not implemented).')
        run_stc = False

    # Build workflow
    workflow = pe.Workflow(name=wf_name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_file', 'subjects_dir', 'subject_id',
                't1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms',
                't1_aseg', 't1_aparc',
                't1_2_mni_forward_transform', 't1_2_mni_reverse_transform',
                't1_2_fsnative_forward_transform', 't1_2_fsnative_reverse_transform']),
        name='inputnode')
    inputnode.inputs.bold_file = bold_file

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bold_t1', 'bold_mask_t1', 'bold_aseg_t1', 'bold_aparc_t1', 'cifti_variant',
                'bold_mni', 'bold_mask_mni', 'bold_cifti', 'confounds', 'surfaces',
                't2s_map', 'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file',
                'cifti_variant_key']),
        name='outputnode')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']), name='boldbuffer')

    summary = pe.Node(
        FunctionalSummary(output_spaces=output_spaces,
                          slice_timing=run_stc,
                          registration='FreeSurfer' if freesurfer else 'FSL',
                          registration_dof=bold2t1w_dof,
                          pe_direction=metadata.get("PhaseEncodingDirection")),
        name='summary', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True)

    func_derivatives_wf = init_func_derivatives_wf(output_dir=output_dir,
                                                   output_spaces=output_spaces,
                                                   template=template,
                                                   freesurfer=freesurfer,
                                                   use_aroma=use_aroma,
                                                   cifti_output=cifti_output)

    workflow.connect([
        (inputnode, func_derivatives_wf, [('bold_file', 'inputnode.source_file')]),
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_mni', 'inputnode.bold_mni'),
            ('bold_mask_mni', 'inputnode.bold_mask_mni'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surfaces'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_variant_key', 'inputnode.cifti_variant_key')
        ]),
    ])

    # The first reference uses T2 contrast enhancement
    bold_reference_wf = init_bold_reference_wf(
        omp_nthreads=omp_nthreads, enhance_t2=True)

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'), name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # mean BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=use_bbr,
                                   bold2t1w_dof=bold2t1w_dof,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False,
                                   use_fieldwarp=(fmaps is not None or use_syn))

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not low_mem,
        use_fieldwarp=(fmaps is not None or use_syn),
        name='bold_bold_trans_wf'
    )

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [('outputnode.bold_file', 'inputnode.bold_file'),
                                              ('outputnode.skip_vols', 'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
    else:  # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([
            (bold_reference_wf, boldbuffer, [
                ('outputnode.bold_file', 'bold_file')]),
        ])

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_wf(
        fmaps, metadata, omp_nthreads=omp_nthreads,
        debug=debug, fmap_demean=fmap_demean, fmap_bspline=fmap_bspline)
    bold_sdc_wf.inputs.inputnode.template = template

    if not fmaps:
        LOGGER.warning('SDC: no fieldmaps found or they were ignored (%s).',
                       ref_file)
    elif fmaps[0]['type'] == 'syn':
        LOGGER.warning(
            'SDC: no fieldmaps found or they were ignored. '
            'Using EXPERIMENTAL "fieldmap-less SyN" correction '
            'for dataset %s.', ref_file)
    else:
        LOGGER.log(25, 'SDC: fieldmap estimation of type "%s" intended for %s found.',
                   fmaps[0]['type'], ref_file)

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf, [
            ('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
            ('outputnode.bold_file', 'inputnode.bold_file')]),
        # EPI-T1 registration workflow
        (inputnode, bold_reg_wf, [
            ('bold_file', 'inputnode.name_source'),
            ('t1_preproc', 'inputnode.t1_preproc'),
            ('t1_brain', 'inputnode.t1_brain'),
            ('t1_mask', 'inputnode.t1_mask'),
            ('t1_seg', 'inputnode.t1_seg'),
            ('t1_aseg', 'inputnode.t1_aseg'),
            ('t1_aparc', 'inputnode.t1_aparc'),
            # Undefined if --no-freesurfer, but this is safe
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id'),
            ('t1_2_fsnative_reverse_transform', 'inputnode.t1_2_fsnative_reverse_transform')]),
        (bold_split, bold_reg_wf, [('out_files', 'inputnode.bold_split')]),
        (bold_hmc_wf, bold_reg_wf, [('outputnode.xforms', 'inputnode.hmc_xforms')]),
        (bold_reg_wf, outputnode, [('outputnode.bold_t1', 'bold_t1'),
                                   ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
                                   ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (inputnode, bold_sdc_wf, [
            ('t1_brain', 'inputnode.t1_brain'),
            ('t1_2_mni_reverse_transform', 'inputnode.t1_2_mni_reverse_transform')]),
        (bold_reference_wf, bold_sdc_wf, [
            ('outputnode.ref_image', 'inputnode.bold_ref'),
            ('outputnode.ref_image_brain', 'inputnode.bold_ref_brain'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, bold_reg_wf, [
            ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
            ('outputnode.bold_mask', 'inputnode.ref_bold_mask'),
            ('outputnode.out_warp', 'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction')]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [('t1_tpms', 'inputnode.t1_tpms'),
                                        ('t1_mask', 'inputnode.t1_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [
            ('outputnode.movpar_file', 'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [
            ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        # Connect bold_bold_trans_wf
        (inputnode, bold_bold_trans_wf, [
            ('bold_file', 'inputnode.name_source')]),
        (bold_split, bold_bold_trans_wf, [
            ('out_files', 'inputnode.bold_file')]),
        (bold_hmc_wf, bold_bold_trans_wf, [
            ('outputnode.xforms', 'inputnode.hmc_xforms')]),
        (bold_bold_trans_wf, bold_confounds_wf, [
            ('outputnode.bold', 'inputnode.bold'),
            ('outputnode.bold_mask', 'inputnode.bold_mask')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])
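    # Each connection above follows the Nipype convention
    # (source_node, destination_node, [(source_port, destination_port), ...]);
    # ports prefixed with 'inputnode.'/'outputnode.' address the boundary nodes
    # of the nested sub-workflows.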

    if fmaps:
        from ..fieldmap.unwarp import init_fmap_unwarp_report_wf
        sdc_type = fmaps[0]['type']

        # Report on BOLD correction
        fmap_unwarp_report_wf = init_fmap_unwarp_report_wf(
            suffix='sdc_%s' % sdc_type)
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [
                ('t1_seg', 'inputnode.in_seg')]),
            (bold_reference_wf, fmap_unwarp_report_wf, [
                ('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [
                ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [
                ('outputnode.bold_ref', 'inputnode.in_post')]),
        ])

        if force_syn and sdc_type != 'syn':
            syn_unwarp_report_wf = init_fmap_unwarp_report_wf(
                suffix='forcedsyn', name='syn_unwarp_report_wf')
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [
                    ('t1_seg', 'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf, [
                    ('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf, [
                    ('outputnode.syn_bold_ref', 'inputnode.in_post')]),
            ])

    # If multi-echo data, select the first echo for head-motion correction (HMC)
    if multiecho:
        inputnode.iterables = ('bold_file', bold_file)

        me_first_echo = pe.Node(FirstEcho(
            te_list=tes, in_files=bold_file, ref_imgs=bold_file),
            name='me_first_echo')
        # Replace the reference input with the echo selected by FirstEcho
        workflow.disconnect([
            (inputnode, bold_reference_wf, [
                ('bold_file', 'inputnode.bold_file')]),
            (bold_reference_wf, boldbuffer, [
                ('outputnode.bold_file', 'bold_file')]),
        ])
        workflow.connect([
            (me_first_echo, bold_reference_wf, [
                ('first_image', 'inputnode.bold_file')]),
            (inputnode, boldbuffer, [
                ('bold_file', 'bold_file')]),
        ])
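        # Net effect of the rewiring above: bold_reference_wf is now driven by the
        # first echo selected by FirstEcho, while the BOLD buffer takes its input
        # directly from inputnode (iterating over the echoes) instead of from the
        # reference workflow.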

        if t2s_coreg:
            # Create a T2* map to serve as the registration reference
            bold_t2s_wf = init_bold_t2s_wf(bold_echos=bold_file,
                                           echo_times=tes,
                                           mem_gb=mem_gb['resampled'],
                                           omp_nthreads=omp_nthreads,
                                           name='bold_t2s_wf')
            bold_t2s_wf.inputs.inputnode.name_source = ref_file

            # Replace EPI-to-T1w registration inputs
            workflow.disconnect([
                (bold_sdc_wf, bold_reg_wf, [
                    ('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
                    ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
            ])
            workflow.connect([
                (bold_hmc_wf, bold_t2s_wf, [
                    ('outputnode.xforms', 'inputnode.hmc_xforms')]),
                (bold_t2s_wf, bold_reg_wf, [
                    ('outputnode.t2s_map', 'inputnode.ref_bold_brain'),
                    ('outputnode.oc_mask', 'inputnode.ref_bold_mask')]),
            ])

    # Map final BOLD mask into T1w space (if required)
    if 'T1w' in output_spaces:
        from niworkflows.interfaces.fixes import (
            FixHeaderApplyTransforms as ApplyTransforms
        )

        boldmask_to_t1w = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True),
            name='boldmask_to_t1w', mem_gb=0.1
        )
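        # MultiLabel interpolation keeps the resampled mask discrete
        # (no fractional, interpolated values at the mask edges).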
        workflow.connect([
            (bold_bold_trans_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask', 'input_image')]),
            (bold_reg_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask_t1', 'reference_image'),
                ('outputnode.itk_bold_to_t1', 'transforms')]),
            (boldmask_to_t1w, outputnode, [
                ('output_image', 'bold_mask_t1')]),
        ])

    if 'template' in output_spaces:
        # Apply HMC, SDC, and anatomical transforms in a single interpolation step
        # Only use uncompressed output if AROMA is to be run
        bold_mni_trans_wf = init_bold_mni_trans_wf(
            template=template,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            template_out_grid=template_out_grid,
            use_compression=not low_mem,
            use_fieldwarp=(fmaps is not None),
            name='bold_mni_trans_wf'
        )
        carpetplot_wf = init_carpetplot_wf(
            mem_gb=mem_gb['resampled'],
            metadata=metadata,
            name='carpetplot_wf')
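        # The carpet plot is generated in native BOLD space: it reuses the
        # BOLD-space series and mask, the confounds file, and the MNI->T1 and
        # T1->BOLD transforms (presumably to map a segmentation/atlas into
        # BOLD space for the plot rows).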

        workflow.connect([
            (inputnode, bold_mni_trans_wf, [
                ('bold_file', 'inputnode.name_source'),
                ('t1_2_mni_forward_transform', 'inputnode.t1_2_mni_forward_transform')]),
            (bold_split, bold_mni_trans_wf, [
                ('out_files', 'inputnode.bold_split')]),
            (bold_hmc_wf, bold_mni_trans_wf, [
                ('outputnode.xforms', 'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_mni_trans_wf, [
                ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf, bold_mni_trans_wf, [
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_mni_trans_wf, [
                ('outputnode.out_warp', 'inputnode.fieldwarp')]),
            (bold_mni_trans_wf, outputnode, [('outputnode.bold_mni', 'bold_mni'),
                                             ('outputnode.bold_mask_mni', 'bold_mask_mni')]),
            (bold_bold_trans_wf, carpetplot_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (inputnode, carpetplot_wf, [
                ('t1_2_mni_reverse_transform', 'inputnode.t1_2_mni_reverse_transform')]),
            (bold_reg_wf, carpetplot_wf, [
                ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
            (bold_confounds_wf, carpetplot_wf, [
                ('outputnode.confounds_file', 'inputnode.confounds_file')]),
        ])

        if use_aroma:
            # ICA-AROMA workflow
            # Internally resamples to MNI152 Linear (2006)
            from .confounds import init_ica_aroma_wf
            from ...interfaces import JoinTSVColumns

            ica_aroma_wf = init_ica_aroma_wf(
                template=template,
                metadata=metadata,
                mem_gb=mem_gb['resampled'],
                omp_nthreads=omp_nthreads,
                use_fieldwarp=(fmaps is not None),
                ignore_aroma_err=ignore_aroma_err,
                aroma_melodic_dim=aroma_melodic_dim,
                name='ica_aroma_wf')

            join = pe.Node(JoinTSVColumns(), name='aroma_confounds')

            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
            ])
            workflow.connect([
                (inputnode, ica_aroma_wf, [
                    ('bold_file', 'inputnode.name_source'),
                    ('t1_2_mni_forward_transform', 'inputnode.t1_2_mni_forward_transform')]),
                (bold_split, ica_aroma_wf, [
                    ('out_files', 'inputnode.bold_split')]),
                (bold_hmc_wf, ica_aroma_wf, [
                    ('outputnode.movpar_file', 'inputnode.movpar_file'),
                    ('outputnode.xforms', 'inputnode.hmc_xforms')]),
                (bold_reg_wf, ica_aroma_wf, [
                    ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
                (bold_bold_trans_wf, ica_aroma_wf, [
                    ('outputnode.bold_mask', 'inputnode.bold_mask')]),
                (bold_sdc_wf, ica_aroma_wf, [
                    ('outputnode.out_warp', 'inputnode.fieldwarp')]),
                (bold_confounds_wf, join, [
                    ('outputnode.confounds_file', 'in_file')]),
                (ica_aroma_wf, join,
                    [('outputnode.aroma_confounds', 'join_file')]),
                (ica_aroma_wf, outputnode,
                    [('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                     ('outputnode.melodic_mix', 'melodic_mix'),
                     ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')]),
                (join, outputnode, [('out_file', 'confounds')]),
            ])
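            # The direct confounds connection was disconnected above so that the
            # AROMA regressors (via JoinTSVColumns) can be appended to the
            # confounds file before it reaches the outputnode.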

    # SURFACES ##################################################################################
    if freesurfer and any(space.startswith('fs') for space in output_spaces):
        LOGGER.log(25, 'Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(mem_gb=mem_gb['resampled'],
                                         output_spaces=output_spaces,
                                         medial_surface_nan=medial_surface_nan,
                                         name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf, [
                ('t1_preproc', 'inputnode.t1_preproc'),
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id'),
                ('t1_2_fsnative_forward_transform', 'inputnode.t1_2_fsnative_forward_transform')]),
            (bold_reg_wf, bold_surf_wf, [('outputnode.bold_t1', 'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        ])

        # CIFTI output
        if cifti_output and 'template' in output_spaces:
            gen_cifti = pe.MapNode(GenerateCifti(), iterfield=["surface_target", "gifti_files"],
                                   name="gen_cifti")
            gen_cifti.inputs.TR = metadata.get("RepetitionTime")
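            # GenerateCifti combines the MNI-space BOLD series (volume) with the
            # surface-sampled GIFTIs to produce the CIFTI output.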

            workflow.connect([
                (bold_surf_wf, gen_cifti, [
                    ('targets.out', 'surface_target'),
                    ('outputnode.surfaces', 'gifti_files')]),
                (inputnode, gen_cifti, [('subjects_dir', 'subjects_dir')]),
                (bold_mni_trans_wf, gen_cifti, [('outputnode.bold_mni', 'bold_file')]),
                (gen_cifti, outputnode, [('out_file', 'bold_cifti'),
                                         ('variant', 'cifti_variant'),
                                         ('variant_key', 'cifti_variant_key')]),
            ])

    # REPORTING ############################################################
    ds_report_summary = pe.Node(
        DerivativesDataSink(suffix='summary'),
        name='ds_report_summary', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='validation'),
        name='ds_report_validation', run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation, [
            ('outputnode.validation_report', 'in_file')]),
    ])

    # Fill in the base directory and source file of the reportlet datasinks created so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
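
The multi-echo and ICA-AROMA branches above rewire the graph in place with Workflow.disconnect()/connect(), and the closing loop configures every 'ds_report*' datasink after the graph has been assembled. The following minimal sketch (a toy workflow with made-up node and field names, not part of the code above) illustrates those two Nipype idioms in isolation.

from nipype.pipeline import engine as pe
from nipype.interfaces import utility as niu

wf = pe.Workflow(name='rewire_demo')

source = pe.Node(niu.IdentityInterface(fields=['value']), name='source')
alt_source = pe.Node(niu.IdentityInterface(fields=['value']), name='alt_source')
sink = pe.Node(niu.IdentityInterface(fields=['value']), name='ds_report_demo')

# Initial wiring: source -> sink
wf.connect([(source, sink, [('value', 'value')])])

# Swap in a different upstream node, mirroring the multi-echo branch above:
# disconnect the old edge first, then connect the replacement.
wf.disconnect([(source, sink, [('value', 'value')])])
wf.connect([(alt_source, sink, [('value', 'value')])])

# Configure every 'ds_report*' node after the graph is built, mirroring the
# reportlet-datasink loop at the end of the workflow above.
for node_name in wf.list_node_names():
    if node_name.split('.')[-1].startswith('ds_report'):
        wf.get_node(node_name).inputs.value = 'report-input-placeholder'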