Example #1
def warp_dwi(name='WarpDWIWorkflow'):
    """
    Distorts the split DWIs given at the input following the theoretically
    correct warping that corresponds to the ``bmap`` input.
    """
    from nipype.pipeline import engine as pe
    from nipype.interfaces import utility as niu
    from nipype.interfaces import io as nio
    from nipype.algorithms.mesh import WarpPoints

    from .registration import apply_dfm
    from .fieldmap import vsm_fmb

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_dwis', 'dwi_mask', 'surf', 'mr_param', 'bmap']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['dwis', 'dwi_mask', 'surf']),
        name='outputnode')

    params = pe.Node(nio.JSONFileGrabber(), name='DWIparams')
    vsm = vsm_fmb(phase_unwrap=False)

    warp_data = apply_dfm(name='WarpData')
    warp_surf = pe.MapNode(WarpPoints(),
                           iterfield=['points'],
                           name='WarpSurfs')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, params, [('mr_param', 'in_file')]),
                (inputnode, vsm, [('bmap', 'inputnode.in_bmap'),
                                  ('dwi_mask', 'inputnode.in_mask')]),
                (inputnode, warp_surf, [('surf', 'points')]),
                (inputnode, warp_data, [('in_dwis', 'inputnode.in_files'),
                                        ('dwi_mask', 'inputnode.in_mask')]),
                (params, vsm, [('delta_te', 'inputnode.delta_te'),
                               ('echospacing', 'inputnode.echospacing'),
                               ('epi_acc', 'inputnode.acc_factor'),
                               ('enc_dir', 'inputnode.enc_dir')]),
                (vsm, warp_data, [('outputnode.dfm_inv', 'inputnode.dfm'),
                                  ('outputnode.jac_inv', 'inputnode.jac')]),
                (vsm, warp_surf, [('outputnode.dfm', 'warp')]),
                (warp_data, outputnode, [('outputnode.out_files', 'dwis'),
                                         ('outputnode.out_mask', 'dwi_mask')]),
                (warp_surf, outputnode, [('out_points', 'surf')])])
    return wf
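
A minimal usage sketch for the workflow above (an illustration, not part of the source module; all file names are placeholders):

# Hypothetical invocation; the JSON file is expected to provide the keys read
# above (delta_te, echospacing, epi_acc, enc_dir).
wf = warp_dwi(name='WarpDWIExample')
wf.inputs.inputnode.in_dwis = ['dwi_0000.nii.gz', 'dwi_0001.nii.gz']  # split DWI volumes
wf.inputs.inputnode.dwi_mask = 'dwi_mask.nii.gz'
wf.inputs.inputnode.surf = ['lh.white.vtk', 'rh.white.vtk']
wf.inputs.inputnode.mr_param = 'mr_params.json'
wf.inputs.inputnode.bmap = 'fieldmap.nii.gz'
wf.run()  # doctest: +SKIP
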
Example #2
def map_energy(name='EnergyMapping', out_csv='energiesmapping.csv'):
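    """
    Computes the reference (zero-error) energy of ``surfaces0`` with respect to
    ``reference``, logs it as the ``error=0.0`` row of ``out_csv``, computes the
    point-wise warp between ``surfaces0`` and ``surfaces1``, and sweeps scaled
    versions of that warp through :func:`warp_n_map`.
    """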

    out_csv = op.abspath(out_csv)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'reference', 'surfaces0', 'surfaces1', 'in_mask', 'subject_id'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['desc_zero', 'out_diff']),
        name='outputnode')

    ref_e = pe.Node(ComputeEnergy(), name='ComputeZeroEnergy')
    diff = pe.MapNode(namesh.ComputeMeshWarp(),
                      name='ComputeError',
                      iterfield=['surface1', 'surface2'])

    getval = pe.Node(nio.JSONFileGrabber(), name='GetZeroEnergy')
    csv = pe.Node(namisc.AddCSVRow(in_file=out_csv), name="AddReferenceRow")
    csv.inputs.error = 0.0

    mapper = warp_n_map(out_csv=out_csv)
    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, ref_e, [('reference', 'reference'),
                                    ('surfaces0', 'surfaces'),
                                    ('in_mask', 'in_mask')]),
                (ref_e, outputnode, [('out_file', 'desc_zero')]),
                (ref_e, getval, [('out_file', 'in_file')]),
                (inputnode, csv, [('subject_id', 'subject_id')]),
                (getval, csv, [('total', 'total')]),
                (inputnode, diff, [('surfaces0', 'surface1'),
                                   ('surfaces1', 'surface2')]),
                (diff, outputnode, [('out_warp', 'out_diff')]),
                (inputnode, mapper, [('subject_id', 'inputnode.subject_id'),
                                     ('reference', 'inputnode.reference'),
                                     ('in_mask', 'inputnode.in_mask')]),
                (diff, mapper, [('out_warp', 'inputnode.surf_warp')]),
                (ref_e, mapper, [('out_desc', 'inputnode.descriptors')])])
    return wf
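
A hedged usage sketch of ``map_energy`` (file names are placeholders; the module-level imports of the original package are assumed to be in place):

# Illustrative only.
wf = map_energy(out_csv='energies_subj001.csv')
wf.inputs.inputnode.subject_id = 'subj001'
wf.inputs.inputnode.reference = 'reference.nii.gz'
wf.inputs.inputnode.in_mask = 'brain_mask.nii.gz'
wf.inputs.inputnode.surfaces0 = ['lh.ref.vtk', 'rh.ref.vtk']      # reference surfaces
wf.inputs.inputnode.surfaces1 = ['lh.moved.vtk', 'rh.moved.vtk']  # surfaces to compare
wf.run()  # doctest: +SKIP
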
Example #3
def warp_n_map(name='EnergyWarpAndMap', out_csv='energies.csv'):
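    """
    Scales the input surface warp by an ``errfactor`` iterated over
    ``np.linspace(-1.2, 1.2, num=100)``, recomputes the deformation energy for
    each scaled warp, and appends one row per factor to ``out_csv``.
    """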
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'reference', 'surf_warp', 'in_mask', 'errfactor', 'descriptors',
        'subject_id'
    ]),
                        name='inputnode')
    inputnode.iterables = ('errfactor',
                           np.linspace(-1.2, 1.2, num=100,
                                       endpoint=True).tolist())

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_energy']),
                         name='outputnode')

    applyef = pe.MapNode(namesh.MeshWarpMaths(operation='mul'),
                         name='MeshMaths',
                         iterfield=['in_surf'])
    mapeneg = pe.Node(ComputeEnergy(), name='ComputeEnergy')
    getval = pe.Node(nio.JSONFileGrabber(), name='GetEnergy')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, applyef, [('surf_warp', 'in_surf'),
                                      ('errfactor', 'operator')]),
                (applyef, mapeneg, [('out_file', 'surfaces')]),
                (inputnode, mapeneg, [('reference', 'reference'),
                                      ('in_mask', 'in_mask'),
                                      ('descriptors', 'descriptors')]),
                (mapeneg, getval, [('out_file', 'in_file')]),
                (mapeneg, outputnode, [('out_file', 'out_energy')])])

    csv = pe.Node(namisc.AddCSVRow(in_file=out_csv), name="AddRow")

    wf.connect([(getval, csv, [('total', 'total')]),
                (inputnode, csv, [('errfactor', 'error'),
                                  ('subject_id', 'subject_id')])])

    return wf
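
The ``errfactor`` sweep above relies on nipype iterables; the minimal, self-contained pattern (with a hypothetical node name) looks like this:

import numpy as np
from nipype.pipeline import engine as pe
from nipype.interfaces import utility as niu

# Each value of the iterable expands the node (and everything downstream of it)
# into one independent run, exactly as inputnode.iterables does above.
sweep = pe.Node(niu.IdentityInterface(fields=['errfactor']), name='sweep')
sweep.iterables = ('errfactor', np.linspace(-1.2, 1.2, num=5).tolist())
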
Example #4
def t1w_preprocessing(name='t1w_preprocessing', settings=None):
    """T1w images preprocessing pipeline"""

    if settings is None:
        raise RuntimeError('Workflow settings are missing')

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['t1w']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['t1_seg', 'bias_corrected_t1', 't1_brain', 't1_2_mni',
                't1_2_mni_forward_transform', 't1_2_mni_reverse_transform',
                't1_segmentation']), name='outputnode')

    # 1. Reorient T1
    arw = pe.Node(fs.MRIConvert(out_type='niigz', out_orientation='RAS'), name='Reorient')

    # 2. T1 Bias Field Correction
    inu_n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias_Field_Correction')

    # 3. Skull-stripping
    asw = skullstrip_wf()
    if settings.get('skull_strip_ants', False):
        asw = skullstrip_ants(settings=settings)

    # 4. Segmentation
    t1_seg = pe.Node(fsl.FAST(no_bias=True, probability_maps=True), name='T1_Segmentation')

    # 5. T1w to MNI registration
    t1_2_mni = pe.Node(ants.Registration(), name='T1_2_MNI_Registration')
    t1_2_mni.inputs.collapse_output_transforms = False
    t1_2_mni.inputs.write_composite_transform = False
    t1_2_mni.inputs.output_transform_prefix = 'T1_to_MNI_'
    t1_2_mni.inputs.output_warped_image = 't1_to_mni.nii.gz'
    t1_2_mni.inputs.num_threads = 4
    t1_2_mni.inputs.fixed_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')
    t1_2_mni.inputs.fixed_image_mask = op.join(
        get_mni_template_ras(), 'MNI152_T1_1mm_brain_mask.nii.gz')

    # Hack to avoid re-running ANTs every time
    grabber_interface = nio.JSONFileGrabber()
    setattr(grabber_interface, '_always_run', False)
    t1_2_mni_params = pe.Node(grabber_interface, name='t1_2_mni_params')
    t1_2_mni_params.inputs.in_file = (
        pkgr.resource_filename('fmriprep', 'data/{}.json'.format(
            settings.get('ants_t1-mni_settings', 't1-mni_registration2')))
    )

    # Resample the brain mask and the tissue probability maps into MNI space
    bmask_mni = pe.Node(ants.ApplyTransforms(
        dimension=3, default_value=0, interpolation='NearestNeighbor'), name='brain_mni_warp')
    bmask_mni.inputs.reference_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')
    tpms_mni = pe.MapNode(ants.ApplyTransforms(dimension=3, default_value=0, interpolation='Linear'),
                          iterfield=['input_image'], name='tpms_mni_warp')
    tpms_mni.inputs.reference_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')


    workflow.connect([
        (inputnode, arw, [('t1w', 'in_file')]),
        (arw, inu_n4, [('out_file', 'input_image')]),
        (inu_n4, asw, [('output_image', 'inputnode.in_file')]),
        (asw, t1_seg, [('outputnode.out_file', 'in_files')]),
        (inu_n4, t1_2_mni, [('output_image', 'moving_image')]),
        (asw, t1_2_mni, [('outputnode.out_mask', 'moving_image_mask')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_seg')]),
        (inu_n4, outputnode, [('output_image', 'bias_corrected_t1')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_segmentation')]),
        (t1_2_mni, outputnode, [
            ('warped_image', 't1_2_mni'),
            ('forward_transforms', 't1_2_mni_forward_transform'),
            ('reverse_transforms', 't1_2_mni_reverse_transform')
        ]),
        (asw, bmask_mni, [('outputnode.out_mask', 'input_image')]),
        (t1_2_mni, bmask_mni, [('forward_transforms', 'transforms'),
                               ('forward_invert_flags', 'invert_transform_flags')]),
        (t1_seg, tpms_mni, [('probability_maps', 'input_image')]),
        (t1_2_mni, tpms_mni, [('forward_transforms', 'transforms'),
                              ('forward_invert_flags', 'invert_transform_flags')]),
        (asw, outputnode, [
            ('outputnode.out_file', 't1_brain')]),
    ])

    # Connect reporting nodes
    t1_stripped_overlay = pe.Node(niu.Function(
        input_names=['in_file', 'overlay_file', 'out_file'], output_names=['out_file'],
        function=stripped_brain_overlay), name='PNG_T1_SkullStrip')
    t1_stripped_overlay.inputs.out_file = 't1_stripped_overlay.png'

    # The T1-to-MNI registration is plotted using the segmentation, so transform it into MNI space first
    seg_2_mni = pe.Node(ants.ApplyTransforms(
        dimension=3, default_value=0, interpolation='NearestNeighbor'), name='T1_2_MNI_warp')
    seg_2_mni.inputs.reference_image = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')

    t1_2_mni_overlay = pe.Node(niu.Function(
        input_names=['in_file', 'overlay_file', 'out_file'], output_names=['out_file'],
        function=stripped_brain_overlay), name='PNG_T1_to_MNI')
    t1_2_mni_overlay.inputs.out_file = 't1_to_mni_overlay.png'
    t1_2_mni_overlay.inputs.overlay_file = op.join(get_mni_template_ras(), 'MNI152_T1_1mm.nii.gz')

    datasink = pe.Node(
        interface=nio.DataSink(
            base_directory=op.join(settings['output_dir'], 'images')),
        name='datasink',
        parameterization=False
    )

    workflow.connect([
        (inu_n4, t1_stripped_overlay, [('output_image', 'overlay_file')]),
        (asw, t1_stripped_overlay, [('outputnode.out_mask', 'in_file')]),
        (t1_stripped_overlay, datasink, [('out_file', '@t1_stripped_overlay')]),
        (t1_seg, seg_2_mni, [('tissue_class_map', 'input_image')]),
        (t1_2_mni, seg_2_mni, [('forward_transforms', 'transforms'),
                               ('forward_invert_flags', 'invert_transform_flags')]),
        (seg_2_mni, t1_2_mni_overlay, [('output_image', 'in_file')]),
        (t1_2_mni_overlay, datasink, [('out_file', '@t1_2_mni_overlay')]),
    ])

    # Write the corrected files to the designated output directory
    ds_t1_bias = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='inu'), name='DerivT1_inu')
    ds_t1_seg = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='inu_seg'), name='DerivT1_seg')
    ds_mask = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='bmask'), name='DerivT1_mask')

    ds_t1_mni = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='mni'), name='DerivT1w_MNI')
    ds_t1_mni_aff = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='mni_affine'), name='DerivT1w_MNI_affine')

    ds_bmask_mni = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='bmask_mni'), name='DerivT1_Mask_MNI')
    ds_tpms_mni = pe.Node(
        DerivativesDataSink(base_directory=settings['output_dir'],
            suffix='tpm_mni'), name='DerivT1_TPMs_MNI')

    if settings.get('debug', False):
        workflow.connect([
            (t1_2_mni, ds_t1_mni_aff, [('forward_transforms', 'in_file')])
        ])
    else:
        ds_t1_mni_warp = pe.Node(
            DerivativesDataSink(base_directory=settings['output_dir'],
                suffix='mni_warp'), name='DerivT1w_MNI_warp')

        def _get_aff(inlist):
            return inlist[:-1]

        def _get_warp(inlist):
            return inlist[-1]

        workflow.connect([
            (inputnode, ds_t1_mni_warp, [('t1w', 'source_file')]),
            (t1_2_mni, ds_t1_mni_aff, [
                (('forward_transforms', _get_aff), 'in_file')]),
            (t1_2_mni, ds_t1_mni_warp, [
                (('forward_transforms', _get_warp), 'in_file')])
        ])

    workflow.connect([
        (inputnode, ds_t1_bias, [('t1w', 'source_file')]),
        (inputnode, ds_t1_seg, [('t1w', 'source_file')]),
        (inputnode, ds_mask, [('t1w', 'source_file')]),
        (inputnode, ds_t1_mni, [('t1w', 'source_file')]),
        (inputnode, ds_t1_mni_aff, [('t1w', 'source_file')]),
        (inputnode, ds_bmask_mni, [('t1w', 'source_file')]),
        (inputnode, ds_tpms_mni, [('t1w', 'source_file')]),
        (asw, ds_t1_bias, [('outputnode.out_file', 'in_file')]),
        (t1_seg, ds_t1_seg, [('tissue_class_map', 'in_file')]),
        (asw, ds_mask, [('outputnode.out_mask', 'in_file')]),
        (t1_2_mni, ds_t1_mni, [('warped_image', 'in_file')]),
        (bmask_mni, ds_bmask_mni, [('output_image', 'in_file')]),
        (tpms_mni, ds_tpms_mni, [('output_image', 'in_file')])
    ])

    # ANTs inputs connected here for clarity
    workflow.connect([
        (t1_2_mni_params, t1_2_mni, [
            ('metric', 'metric'),
            ('metric_weight', 'metric_weight'),
            ('dimension', 'dimension'),
            ('write_composite_transform', 'write_composite_transform'),
            ('radius_or_number_of_bins', 'radius_or_number_of_bins'),
            ('shrink_factors', 'shrink_factors'),
            ('smoothing_sigmas', 'smoothing_sigmas'),
            ('sigma_units', 'sigma_units'),
            ('output_transform_prefix', 'output_transform_prefix'),
            ('transforms', 'transforms'),
            ('transform_parameters', 'transform_parameters'),
            ('initial_moving_transform_com', 'initial_moving_transform_com'),
            ('number_of_iterations', 'number_of_iterations'),
            ('convergence_threshold', 'convergence_threshold'),
            ('convergence_window_size', 'convergence_window_size'),
            ('sampling_strategy', 'sampling_strategy'),
            ('sampling_percentage', 'sampling_percentage'),
            ('output_warped_image', 'output_warped_image'),
            ('use_histogram_matching', 'use_histogram_matching'),
            ('use_estimate_learning_rate_once',
             'use_estimate_learning_rate_once'),
            ('collapse_output_transforms', 'collapse_output_transforms'),
            ('winsorize_lower_quantile', 'winsorize_lower_quantile'),
            ('winsorize_upper_quantile', 'winsorize_upper_quantile'),
        ])
    ])

    return workflow
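
A minimal usage sketch; the ``settings`` keys shown are the ones the function reads, and all paths are placeholders:

# Hypothetical invocation; only 'output_dir' is strictly required, the other
# keys are the optional switches read via settings.get() above.
settings = {
    'output_dir': '/tmp/derivatives',
    'skull_strip_ants': False,
    'ants_t1-mni_settings': 't1-mni_registration2',
    'debug': False,
}
wf = t1w_preprocessing(settings=settings)
wf.inputs.inputnode.t1w = 'sub-01_T1w.nii.gz'
wf.run()  # doctest: +SKIP
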
Example #5
def sdc_t2b(name='SDC_T2B', icorr=True, num_threads=1):
    """
    The T2w-registration based method (T2B) implements an SDC by nonlinear
    registration of the anatomically correct *T2w* image to the *b0* image
    of the *dMRI* dataset. The implementation here tries to reproduce the one
    included in ExploreDTI `(Leemans et al., 2009)
    <http://www.exploredti.com/ref/ExploreDTI_ISMRM_2009.pdf>`_, which is
    also used by `(Irfanoglu et al., 2012)
    <http://dx.doi.org/10.1016/j.neuroimage.2012.02.054>`_.

    :param str name: a unique name for the workflow.

    :inputs:

        * inputnode.in_dwi: the input dMRI dataset
        * inputnode.in_bval: the b-values of the dMRI dataset
        * inputnode.in_t2w: the reference (anatomically correct) T2w image
        * inputnode.dwi_mask: a brain mask in dMRI space
        * inputnode.t2w_mask: a brain mask in T2w space
        * inputnode.in_param: a JSON file with acquisition parameters (provides
          ``enc_dir``)
        * inputnode.in_surf: surfaces to be unwarped along with the dMRI data

    :outputs:

        * outputnode.dwi: the dMRI dataset after correction
        * outputnode.dwi_mask: the brain mask after correction
        * outputnode.out_surf: the unwarped surfaces
        * outputnode.jacobian: the Jacobian determinant map of the displacement field


    Example::

        >>> t2b = sdc_t2b()
        >>> t2b.inputs.inputnode.in_dwi = 'dwi_brain.nii'
        >>> t2b.inputs.inputnode.in_bval = 'dwi.bval'
        >>> t2b.inputs.inputnode.dwi_mask = 'b0_mask.nii'
        >>> t2b.inputs.inputnode.in_t2w = 't2w_brain.nii'
        >>> t2b.inputs.inputnode.in_param = 'parameters.txt'
        >>> t2b.run() # doctest: +SKIP

    """
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_dwi', 'in_bval', 'in_t2w', 'dwi_mask', 't2w_mask',
                'in_param', 'in_surf']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['dwi', 'dwi_mask', 'out_surf', 'jacobian']), name='outputnode')

    avg_b0 = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
        function=b0_average), name='AverageB0')
    n4_b0 = pe.Node(N4BiasFieldCorrection(dimension=3), name='BiasB0')
    n4_t2 = pe.Node(N4BiasFieldCorrection(dimension=3), name='BiasT2')

    getparam = pe.Node(nio.JSONFileGrabber(defaults={'enc_dir': 'y'}),
                       name='GetEncDir')
    reg = pe.Node(nex.Registration(num_threads=1), name='Elastix')
    tfx_b0 = pe.Node(nex.EditTransform(), name='tfm_b0')
    split_dwi = pe.Node(fsl.utils.Split(dimension='t'), name='split_dwi')
    warp = pe.MapNode(nex.ApplyWarp(), iterfield=['moving_image'],
                      name='UnwarpDWIs')
    warp_prop = pe.Node(nex.AnalyzeWarp(), name='DisplFieldAnalysis')
    warpbuff = pe.Node(niu.IdentityInterface(fields=['unwarped']),
                       name='UnwarpedCache')
    mskdwis = pe.MapNode(fs.ApplyMask(), iterfield='in_file', name='MaskDWIs')
    thres = pe.MapNode(Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegs')
    merge_dwi = pe.Node(fsl.utils.Merge(dimension='t'), name='merge_dwis')
    tfx_msk = pe.Node(nex.EditTransform(
        interpolation='nearest', output_type='unsigned char'),
        name='MSKInterpolator')
    corr_msk = pe.Node(nex.ApplyWarp(), name='UnwarpMsk')
    closmsk = pe.Node(fsl.maths.MathsCommand(
        nan2zeros=True, args='-kernel sphere 3 -dilM -kernel sphere 2 -ero'),
        name='MaskClosing')

    swarp = pe.MapNode(nex.PointsWarp(), iterfield=['points_file'],
                       name='UnwarpSurfs')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,     avg_b0, [('in_dwi', 'in_dwi'),
                                 ('in_bval', 'in_bval')]),
        (inputnode,   getparam, [('in_param', 'in_file')]),
        (inputnode,  split_dwi, [('in_dwi', 'in_file')]),
        (inputnode,   corr_msk, [('dwi_mask', 'moving_image')]),
        (inputnode,      swarp, [('in_surf', 'points_file')]),
        (inputnode,        reg, [('t2w_mask', 'fixed_mask'),
                                 ('dwi_mask', 'moving_mask')]),
        (inputnode,      n4_t2, [('in_t2w', 'input_image'),
                                 ('t2w_mask', 'mask_image')]),
        (inputnode,      n4_b0, [('dwi_mask', 'mask_image')]),
        (avg_b0,         n4_b0, [('out_file', 'input_image')]),
        (getparam,         reg, [
            (('enc_dir', _default_params), 'parameters')]),
        (n4_t2,            reg, [('output_image', 'fixed_image')]),
        (n4_b0,            reg, [('output_image', 'moving_image')]),
        (reg,           tfx_b0, [
            (('transform', _get_last), 'transform_file')]),
        (avg_b0,        tfx_b0, [('out_file', 'reference_image')]),
        (tfx_b0,     warp_prop, [('output_file', 'transform_file')]),
        (tfx_b0,          warp, [('output_file', 'transform_file')]),
        (split_dwi,       warp, [('out_files', 'moving_image')]),
        (warpbuff,     mskdwis, [('unwarped', 'in_file')]),
        (closmsk,      mskdwis, [('out_file', 'mask_file')]),
        (mskdwis,        thres, [('out_file', 'in_file')]),
        (thres,      merge_dwi, [('out_file', 'in_files')]),
        (reg,          tfx_msk, [
            (('transform', _get_last), 'transform_file')]),
        (tfx_b0,         swarp, [('output_file', 'transform_file')]),
        (avg_b0,       tfx_msk, [('out_file', 'reference_image')]),
        (tfx_msk,     corr_msk, [('output_file', 'transform_file')]),
        (corr_msk,     closmsk, [('warped_file', 'in_file')]),
        (merge_dwi, outputnode, [('merged_file', 'dwi')]),
        (closmsk,   outputnode, [('out_file', 'dwi_mask')]),
        (warp_prop, outputnode, [('jacdet_map', 'jacobian')]),
        (swarp,     outputnode, [('warped_file', 'out_surf')])
    ])

    if icorr:
        jac_mask = pe.Node(fs.ApplyMask(), name='mask_jac')
        mult = pe.MapNode(MultiImageMaths(op_string='-mul %s'),
                          iterfield=['in_file'], name='ModulateDWIs')
        wf.connect([
            (closmsk,      jac_mask, [('out_file', 'mask_file')]),
            (warp_prop,    jac_mask, [('jacdet_map', 'in_file')]),
            (warp,             mult, [('warped_file', 'in_file')]),
            (jac_mask,         mult, [('out_file', 'operand_files')]),
            (mult,         warpbuff, [('out_file', 'unwarped')])
        ])
    else:
        wf.connect([
            (warp,         warpbuff, [('warped_file', 'unwarped')])
        ])

    return wf
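
When ``icorr`` is enabled, every unwarped DWI is modulated voxel-wise by the masked Jacobian determinant of the displacement field. A stand-alone sketch of that operation (an illustration using nibabel, not the FSL command line actually used above):

import nibabel as nb

def modulate_by_jacobian(dwi_file, jac_file, out_file='dwi_modulated.nii.gz'):
    """Intensity modulation equivalent to the '-mul %s' operation above."""
    dwi = nb.load(dwi_file)
    jac = nb.load(jac_file).get_fdata()
    # Scale intensities by the local volume change introduced by the unwarping.
    data = dwi.get_fdata() * jac
    nb.Nifti1Image(data, dwi.affine, dwi.header).to_filename(out_file)
    return out_file
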
Example #6
def sdc_unwarp(name=SDC_UNWARP_NAME, ref_vol=None, method='jac'):
    """
    This workflow takes an estimated fieldmap and a target image and applies TOPUP,
    an :abbr:`SDC (susceptibility-derived distortion correction)` method in FSL to
    unwarp the target image.

    Input fields:
    ~~~~~~~~~~~~~

      inputnode.in_file - the image(s) to which this correction will be applied
      inputnode.fmap_ref - the fieldmap reference (generally, a *magnitude* image or the
                           resulting SE image)
      inputnode.fmap_mask - a brain mask in fieldmap-space
      inputnode.fmap - a fieldmap in Hz
      inputnode.hmc_movpar - the head motion parameters (only if inputnode.in_file is a
                             single 4D file)

    Output fields:
    ~~~~~~~~~~~~~~

      outputnode.out_file - the in_file after susceptibility-distortion correction.

    """

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fmap_ref', 'fmap_mask', 'fmap',
                'hmc_movpar']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']), name='outputnode')

    # Compute movpar file iff we have several images with different
    # PE directions.
    align = pe.Node(niu.Function(
        input_names=['in_files', 'in_movpar', 'in_ref'],
        output_names=['out_file', 'ref_vol', 'ref_mask', 'out_movpar'],
        function=_multiple_pe_hmc), name='AlignMultiplePE')
    align.inputs.in_ref = ref_vol

    # Read metadata
    meta = pe.MapNode(niu.Function(
        input_names=['in_file'], output_names=['out_dict'], function=_get_metadata),
        iterfield=['in_file'], name='metadata')

    encfile = pe.Node(interface=niu.Function(
        input_names=['input_images', 'in_dict'], output_names=['parameters_file'],
        function=create_encoding_file), name='TopUp_encfile', updatehash=True)

    fslsplit = pe.Node(fsl.Split(dimension='t'), name='ImageHMCSplit')

    # Register the reference of the fieldmap to the reference
    # of the target image (the one that shall be corrected)
    fmap2ref = pe.Node(ants.Registration(output_warped_image=True),
                       name='Fieldmap2ImageRegistration')

    grabber = nio.JSONFileGrabber()
    setattr(grabber, '_always_run', False)
    fmap2ref_params = pe.Node(grabber, name='Fieldmap2ImageRegistration_params')
    fmap2ref_params.inputs.in_file = (
        pkgr.resource_filename('fmriprep', 'data/fmap-any_registration.json'))

    applyxfm = pe.Node(ants.ApplyTransforms(
        dimension=3, interpolation='Linear'), name='Fieldmap2ImageApply')

    topup_adapt = pe.Node(niu.Function(
        input_names=['in_file', 'in_ref', 'in_movpar'],
        output_names=['out_fieldcoef', 'out_movpar'],
        function=_gen_coeff), name='TopUpAdapt')

    # Use the least-squares method to correct the dropout of the SBRef images
    unwarp = pe.Node(fsl.ApplyTOPUP(method=method), name='TopUpApply')


    workflow.connect([
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, align, [('in_file', 'in_files'),
                            ('hmc_movpar', 'in_movpar')]),
        (inputnode, applyxfm, [('fmap', 'input_image')]),
        (inputnode, encfile, [('in_file', 'input_images')]),
        (inputnode, fmap2ref, [('fmap_ref', 'moving_image'),
                               ('fmap_mask', 'moving_image_mask')]),

        (align, fmap2ref, [('ref_vol', 'fixed_image'),
                           ('ref_mask', 'fixed_image_mask')]),
        (align, applyxfm, [('ref_vol', 'reference_image')]),
        (align, topup_adapt, [('ref_vol', 'in_ref'),
                              ('out_movpar', 'in_movpar')]),

        (meta, encfile, [('out_dict', 'in_dict')]),

        (fmap2ref, applyxfm, [
            ('forward_transforms', 'transforms'),
            ('forward_invert_flags', 'invert_transform_flags')]),
        (align, fslsplit, [('out_file', 'in_file')]),
        (applyxfm, topup_adapt, [('output_image', 'in_file')]),
        (fslsplit, unwarp, [('out_files', 'in_files'),
                            (('out_files', gen_list), 'in_index')]),
        (topup_adapt, unwarp, [('out_fieldcoef', 'in_topup_fieldcoef'),
                               ('out_movpar', 'in_topup_movpar')]),
        (encfile, unwarp, [('parameters_file', 'encoding_file')]),
        (unwarp, outputnode, [('out_corrected', 'out_file')])
    ])

    # Connect registration settings in the end, not to clutter the code
    workflow.connect([
        (fmap2ref_params, fmap2ref, [
            ('transforms', 'transforms'),
            ('transform_parameters', 'transform_parameters'),
            ('number_of_iterations', 'number_of_iterations'),
            ('dimension', 'dimension'),
            ('metric', 'metric'),
            ('metric_weight', 'metric_weight'),
            ('radius_or_number_of_bins', 'radius_or_number_of_bins'),
            ('sampling_strategy', 'sampling_strategy'),
            ('sampling_percentage', 'sampling_percentage'),
            ('convergence_threshold', 'convergence_threshold'),
            ('convergence_window_size', 'convergence_window_size'),
            ('smoothing_sigmas', 'smoothing_sigmas'),
            ('sigma_units', 'sigma_units'),
            ('shrink_factors', 'shrink_factors'),
            ('use_estimate_learning_rate_once', 'use_estimate_learning_rate_once'),
            ('use_histogram_matching', 'use_histogram_matching'),
            ('initial_moving_transform_com', 'initial_moving_transform_com'),
            ('collapse_output_transforms', 'collapse_output_transforms'),
            ('winsorize_upper_quantile', 'winsorize_upper_quantile'),
            ('winsorize_lower_quantile', 'winsorize_lower_quantile')
        ])
    ])

    return workflow
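
A hedged usage sketch of ``sdc_unwarp`` (paths are placeholders; ``SDC_UNWARP_NAME`` is a module-level constant of the original package, so a name is passed explicitly here):

# Illustrative only.
wf = sdc_unwarp(name='sdc_unwarp_example')
wf.inputs.inputnode.in_file = 'sub-01_task-rest_bold.nii.gz'
wf.inputs.inputnode.fmap = 'fieldmap_hz.nii.gz'        # fieldmap in Hz
wf.inputs.inputnode.fmap_ref = 'magnitude.nii.gz'      # fieldmap reference
wf.inputs.inputnode.fmap_mask = 'fmap_brainmask.nii.gz'
wf.inputs.inputnode.hmc_movpar = 'movpar.txt'          # head-motion parameters
wf.run()  # doctest: +SKIP
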
def airmsk_wf(name='AirMaskWorkflow', save_memory=False, ants_settings=None):
    """Implements the Step 1 of [Mortamet2009]_."""
    import pkg_resources as pkgr
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'in_noinu', 'in_mask', 'head_mask']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'artifact_msk']),
        name='outputnode')

    antsparms = pe.Node(nio.JSONFileGrabber(), name='ants_settings')
    antsparms.inputs.in_file = (
        ants_settings if ants_settings is not None else pkgr.resource_filename(
            'structural_dhcp_mriqc', 'data/ants_settings.json'))

    def _invt_flags(transforms):
        return [True] * len(transforms)

    # Spatial normalization, using ANTs
    norm = pe.Node(ants.Registration(dimension=3), name='normalize')

    if save_memory:
        norm.inputs.fixed_image = op.join(get_mni_template(),
                                          'MNI152_T1_2mm.nii.gz')
        norm.inputs.fixed_image_mask = op.join(
            get_mni_template(), 'MNI152_T1_2mm_brain_mask.nii.gz')
    else:
        norm.inputs.fixed_image = op.join(get_mni_template(),
                                          'MNI152_T1_1mm.nii.gz')
        norm.inputs.fixed_image_mask = op.join(
            get_mni_template(), 'MNI152_T1_1mm_brain_mask.nii.gz')

    invt = pe.Node(ants.ApplyTransforms(dimension=3,
                                        default_value=1,
                                        interpolation='NearestNeighbor'),
                   name='invert_xfm')
    invt.inputs.input_image = op.join(get_mni_template(),
                                      'MNI152_T1_1mm_brain_bottom.nii.gz')

    # Combine and invert mask
    combine = pe.Node(niu.Function(input_names=['head_mask', 'artifact_msk'],
                                   output_names=['out_file'],
                                   function=combine_masks),
                      name='combine_masks')

    qi1 = pe.Node(ArtifactMask(), name='ArtifactMask')

    workflow.connect([(antsparms, norm, [
        ('initial_moving_transform_com', 'initial_moving_transform_com'),
        ('winsorize_lower_quantile', 'winsorize_lower_quantile'),
        ('winsorize_upper_quantile', 'winsorize_upper_quantile'),
        ('float', 'float'), ('transforms', 'transforms'),
        ('transform_parameters', 'transform_parameters'),
        ('number_of_iterations', 'number_of_iterations'),
        ('convergence_window_size', 'convergence_window_size'),
        ('metric', 'metric'), ('metric_weight', 'metric_weight'),
        ('radius_or_number_of_bins', 'radius_or_number_of_bins'),
        ('sampling_strategy', 'sampling_strategy'),
        ('sampling_percentage', 'sampling_percentage'),
        ('smoothing_sigmas', 'smoothing_sigmas'),
        ('shrink_factors', 'shrink_factors'),
        ('convergence_threshold', 'convergence_threshold'),
        ('sigma_units', 'sigma_units'),
        ('use_estimate_learning_rate_once', 'use_estimate_learning_rate_once'),
        ('use_histogram_matching', 'use_histogram_matching')
    ]), (inputnode, qi1, [('in_file', 'in_file')]),
                      (inputnode, norm, [('in_noinu', 'moving_image'),
                                         ('in_mask', 'moving_image_mask')]),
                      (norm, invt, [('forward_transforms', 'transforms'),
                                    (('forward_transforms', _invt_flags),
                                     'invert_transform_flags')]),
                      (inputnode, invt, [('in_mask', 'reference_image')]),
                      (inputnode, combine, [('head_mask', 'head_mask')]),
                      (invt, combine, [('output_image', 'artifact_msk')]),
                      (combine, qi1, [('out_file', 'air_msk')]),
                      (qi1, outputnode, [('out_air_msk', 'out_file'),
                                         ('out_art_msk', 'artifact_msk')])])
    return workflow
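
A minimal usage sketch for ``airmsk_wf`` (all inputs are placeholder paths):

# Illustrative only.
wf = airmsk_wf(save_memory=True)   # True selects the 2mm MNI template above
wf.inputs.inputnode.in_file = 'sub-01_T1w.nii.gz'
wf.inputs.inputnode.in_noinu = 'sub-01_T1w_inu.nii.gz'   # INU-corrected T1w (assumed from the field name)
wf.inputs.inputnode.in_mask = 'brain_mask.nii.gz'
wf.inputs.inputnode.head_mask = 'head_mask.nii.gz'
wf.run()  # doctest: +SKIP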