Example #1
def init_rodent_brain_extraction_wf(
    ants_affine_init=False,
    factor=20,
    arc=0.12,
    step=4,
    grid=(0, 4, 4),
    debug=False,
    interim_checkpoints=True,
    mem_gb=3.0,
    mri_scheme="T2w",
    name="rodent_brain_extraction_wf",
    omp_nthreads=None,
    output_dir=None,
    template_id="Fischer344",
    template_specs=None,
    use_float=True,
):
    """
    Build an atlas-based brain extraction pipeline for rodent T1w and T2w MRI data.

    Parameters
    ----------
    ants_affine_init : :obj:`bool`, optional
        Set-up a pre-initialization step with ``antsAI`` to account for mis-oriented images.

    """
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_files", "in_mask"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["out_corrected", "out_brain", "out_mask"]),
        name="outputnode",
    )

    template_specs = template_specs or {}
    if template_id == "WHS" and "resolution" not in template_specs:
        template_specs["resolution"] = 2

    # Find a suitable target template in TemplateFlow
    tpl_target_path = get_template(
        template_id,
        suffix=mri_scheme,
        **template_specs,
    )
    if not tpl_target_path:
        raise RuntimeError(
            f"An instance of template <tpl-{template_id}> with MR scheme '{mri_scheme}'"
            " could not be found.")

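    # Prefer a probabilistic brain mask (probseg) from the template; fall back to
    # a plain binary mask when no probseg is available.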
    tpl_brainmask_path = get_template(
        template_id,
        atlas=None,
        hemi=None,
        desc="brain",
        suffix="probseg",
        **template_specs,
    ) or get_template(
        template_id,
        atlas=None,
        hemi=None,
        desc="brain",
        suffix="mask",
        **template_specs,
    )

    tpl_regmask_path = get_template(
        template_id,
        atlas=None,
        desc="BrainCerebellumExtraction",
        suffix="mask",
        **template_specs,
    )

    denoise = pe.Node(DenoiseImage(dimension=3, copy_header=True),
                      name="denoise",
                      n_procs=omp_nthreads)

    # Resample template to a controlled, isotropic resolution
    res_tmpl = pe.Node(RegridToZooms(zooms=HIRES_ZOOMS, smooth=True),
                       name="res_tmpl")

    # Create Laplacian images
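    # The Laplacians of template and target are later merged with the intensity
    # images (mrg_tmpl / mrg_target) so registration runs on two channels,
    # sharpening edge-driven alignment.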
    lap_tmpl = pe.Node(ImageMath(operation="Laplacian", copy_header=True),
                       name="lap_tmpl")
    tmpl_sigma = pe.Node(niu.Function(function=_lap_sigma),
                         name="tmpl_sigma",
                         run_without_submitting=True)
    norm_lap_tmpl = pe.Node(niu.Function(function=_norm_lap),
                            name="norm_lap_tmpl")

    lap_target = pe.Node(ImageMath(operation="Laplacian", copy_header=True),
                         name="lap_target")
    target_sigma = pe.Node(niu.Function(function=_lap_sigma),
                           name="target_sigma",
                           run_without_submitting=True)
    norm_lap_target = pe.Node(niu.Function(function=_norm_lap),
                              name="norm_lap_target")

    # Set up initial spatial normalization
    ants_params = "testing" if debug else "precise"
    norm = pe.Node(
        Registration(from_file=pkgr_fn(
            "nirodents",
            f"data/artsBrainExtraction_{ants_params}_{mri_scheme}.json")),
        name="norm",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm.inputs.float = use_float

    # main workflow
    wf = pe.Workflow(name)

    # truncate target intensity for N4 correction
    clip_target = pe.Node(IntensityClip(p_min=15, p_max=99.9),
                          name="clip_target")

    # truncate template intensity to match target
    clip_tmpl = pe.Node(IntensityClip(p_min=5, p_max=98), name="clip_tmpl")
    clip_tmpl.inputs.in_file = _pop(tpl_target_path)

    # set INU bspline grid based on voxel size
    bspline_grid = pe.Node(niu.Function(function=_bspline_grid),
                           name="bspline_grid")

    # INU correction of the target image
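    # Note: ``debug`` is a bool, so ``[50] * (4 - debug)`` requests three N4
    # resolution levels in debug mode and four otherwise.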
    init_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=False,
            copy_header=True,
            n_iterations=[50] * (4 - debug),
            convergence_threshold=1e-7,
            shrink_factor=4,
            rescale_intensities=True,
        ),
        n_procs=omp_nthreads,
        name="init_n4",
    )
    clip_inu = pe.Node(IntensityClip(p_min=1, p_max=99.8), name="clip_inu")

    # Create a buffer interface as a cache for the actual inputs to registration
    buffernode = pe.Node(niu.IdentityInterface(fields=["hires_target"]),
                         name="buffernode")

    # Merge image nodes
    mrg_target = pe.Node(niu.Merge(2), name="mrg_target")
    mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")

    # fmt: off
    wf.connect([
        # Target image massaging
        (inputnode, denoise, [(("in_files", _pop), "input_image")]),
        (inputnode, bspline_grid, [(("in_files", _pop), "in_file")]),
        (bspline_grid, init_n4, [("out", "args")]),
        (denoise, clip_target, [("output_image", "in_file")]),
        (clip_target, init_n4, [("out_file", "input_image")]),
        (init_n4, clip_inu, [("output_image", "in_file")]),
        (clip_inu, target_sigma, [("out_file", "in_file")]),
        (clip_inu, buffernode, [("out_file", "hires_target")]),
        (buffernode, lap_target, [("hires_target", "op1")]),
        (target_sigma, lap_target, [("out", "op2")]),
        (lap_target, norm_lap_target, [("output_image", "in_file")]),
        (buffernode, mrg_target, [("hires_target", "in1")]),
        (norm_lap_target, mrg_target, [("out", "in2")]),
        # Template massaging
        (clip_tmpl, res_tmpl, [("out_file", "in_file")]),
        (res_tmpl, tmpl_sigma, [("out_file", "in_file")]),
        (res_tmpl, lap_tmpl, [("out_file", "op1")]),
        (tmpl_sigma, lap_tmpl, [("out", "op2")]),
        (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]),
        (res_tmpl, mrg_tmpl, [("out_file", "in1")]),
        (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]),
        # Setup inputs to spatial normalization
        (mrg_target, norm, [("out", "moving_image")]),
        (mrg_tmpl, norm, [("out", "fixed_image")]),
    ])
    # fmt: on

    # Graft a template registration-mask if present
    if tpl_regmask_path:
        hires_mask = pe.Node(
            ApplyTransforms(
                input_image=_pop(tpl_regmask_path),
                transforms="identity",
                interpolation="Gaussian",
                float=True,
            ),
            name="hires_mask",
            mem_gb=1,
        )

        # fmt: off
        wf.connect([
            (res_tmpl, hires_mask, [("out_file", "reference_image")]),
            (hires_mask, norm, [("output_image", "fixed_image_masks")]),
        ])
        # fmt: on

    # Finally project brain mask and refine INU correction
    map_brainmask = pe.Node(
        ApplyTransforms(interpolation="Gaussian", float=True),
        name="map_brainmask",
        mem_gb=1,
    )
    map_brainmask.inputs.input_image = str(tpl_brainmask_path)

    thr_brainmask = pe.Node(Binarize(thresh_low=0.50), name="thr_brainmask")

    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 4,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    final_mask = pe.Node(ApplyMask(), name="final_mask")

    # fmt: off
    wf.connect([
        (inputnode, map_brainmask, [(("in_files", _pop), "reference_image")]),
        (bspline_grid, final_n4, [("out", "args")]),
        (denoise, final_n4, [("output_image", "input_image")]),
        # Project template's brainmask into subject space
        (norm, map_brainmask, [("reverse_transforms", "transforms"),
                               ("reverse_invert_flags",
                                "invert_transform_flags")]),
        (map_brainmask, thr_brainmask, [("output_image", "in_file")]),
        # take a second pass of N4
        (map_brainmask, final_n4, [("output_image", "mask_image")]),
        (final_n4, final_mask, [("output_image", "in_file")]),
        (thr_brainmask, final_mask, [("out_mask", "in_mask")]),
        (final_n4, outputnode, [("output_image", "out_corrected")]),
        (thr_brainmask, outputnode, [("out_mask", "out_mask")]),
        (final_mask, outputnode, [("out_file", "out_brain")]),
    ])
    # fmt: on

    if interim_checkpoints:
        final_apply = pe.Node(
            ApplyTransforms(interpolation="BSpline", float=True),
            name="final_apply",
            mem_gb=1,
        )
        final_report = pe.Node(
            SimpleBeforeAfter(after_label="target",
                              before_label=f"tpl-{template_id}"),
            name="final_report",
        )
        # fmt: off
        wf.connect([
            (inputnode, final_apply, [(("in_files", _pop), "reference_image")
                                      ]),
            (res_tmpl, final_apply, [("out_file", "input_image")]),
            (norm, final_apply, [("reverse_transforms", "transforms"),
                                 ("reverse_invert_flags",
                                  "invert_transform_flags")]),
            (final_apply, final_report, [("output_image", "before")]),
            (outputnode, final_report, [("out_corrected", "after"),
                                        ("out_mask", "wm_seg")]),
        ])
        # fmt: on

    if ants_affine_init:
        # Initialize transforms with antsAI
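        # search_factor is (angular step in degrees, arc fraction to explore) and
        # search_grid is (translation step, per-axis grid extents) as handed to
        # antsAI's search options -- an interpretation of those options, stated
        # here as an aid rather than a guarantee.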
        lowres_tmpl = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS, smooth=True),
                              name="lowres_tmpl")
        lowres_trgt = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS, smooth=True),
                              name="lowres_trgt")

        init_aff = pe.Node(
            AI(
                convergence=(100, 1e-6, 10),
                metric=("Mattes", 32, "Random", 0.25),
                principal_axes=False,
                search_factor=(factor, arc),
                search_grid=(step, grid),
                transform=("Affine", 0.1),
                verbose=True,
            ),
            name="init_aff",
            n_procs=omp_nthreads,
        )
        # fmt: off
        wf.connect([
            (clip_inu, lowres_trgt, [("out_file", "in_file")]),
            (lowres_trgt, init_aff, [("out_file", "moving_image")]),
            (clip_tmpl, lowres_tmpl, [("out_file", "in_file")]),
            (lowres_tmpl, init_aff, [("out_file", "fixed_image")]),
            (init_aff, norm, [("output_transform", "initial_moving_transform")
                              ]),
        ])
        # fmt: on

        if tpl_regmask_path:
            lowres_mask = pe.Node(
                ApplyTransforms(
                    input_image=_pop(tpl_regmask_path),
                    transforms="identity",
                    interpolation="MultiLabel",
                ),
                name="lowres_mask",
                mem_gb=1,
            )
            # fmt: off
            wf.connect([
                (lowres_tmpl, lowres_mask, [("out_file", "reference_image")]),
                (lowres_mask, init_aff, [("output_image", "fixed_image_mask")
                                         ]),
            ])
            # fmt: on

        if interim_checkpoints:
            init_apply = pe.Node(
                ApplyTransforms(interpolation="BSpline",
                                invert_transform_flags=[True]),
                name="init_apply",
                mem_gb=1,
            )
            init_mask = pe.Node(
                ApplyTransforms(interpolation="Gaussian",
                                invert_transform_flags=[True]),
                name="init_mask",
                mem_gb=1,
            )
            init_mask.inputs.input_image = str(tpl_brainmask_path)
            init_report = pe.Node(
                SimpleBeforeAfter(
                    out_report="init_report.svg",
                    before_label="target",
                    after_label="template",
                ),
                name="init_report",
            )
            # fmt: off
            wf.connect([
                (lowres_trgt, init_apply, [("out_file", "reference_image")]),
                (lowres_tmpl, init_apply, [("out_file", "input_image")]),
                (init_aff, init_apply, [("output_transform", "transforms")]),
                (lowres_trgt, init_report, [("out_file", "before")]),
                (init_apply, init_report, [("output_image", "after")]),
                (lowres_trgt, init_mask, [("out_file", "reference_image")]),
                (init_aff, init_mask, [("output_transform", "transforms")]),
                (init_mask, init_report, [("output_image", "wm_seg")]),
            ])
            # fmt: on
    else:
        norm.inputs.initial_moving_transform_com = 1

    if output_dir:
        ds_final_inu = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir),
            desc="preproc",
            compress=True,
        ),
                               name="ds_final_inu",
                               run_without_submitting=True)
        ds_final_msk = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir),
            desc="brain",
            suffix="mask",
            compress=True,
        ),
                               name="ds_final_msk",
                               run_without_submitting=True)

        # fmt: off
        wf.connect([
            (inputnode, ds_final_inu, [("in_files", "source_file")]),
            (inputnode, ds_final_msk, [("in_files", "source_file")]),
            (outputnode, ds_final_inu, [("out_corrected", "in_file")]),
            (outputnode, ds_final_msk, [("out_mask", "in_file")]),
        ])
        # fmt: on

        if interim_checkpoints:
            ds_report = pe.Node(DerivativesDataSink(
                base_directory=str(output_dir),
                desc="brain",
                suffix="mask",
                datatype="figures"),
                                name="ds_report",
                                run_without_submitting=True)
            # fmt: off
            wf.connect([
                (inputnode, ds_report, [("in_files", "source_file")]),
                (final_report, ds_report, [("out_report", "in_file")]),
            ])
            # fmt: on

        if ants_affine_init and interim_checkpoints:
            ds_report_init = pe.Node(DerivativesDataSink(
                base_directory=str(output_dir),
                desc="init",
                suffix="mask",
                datatype="figures"),
                                     name="ds_report_init",
                                     run_without_submitting=True)
            # fmt: off
            wf.connect([
                (inputnode, ds_report_init, [("in_files", "source_file")]),
                (init_report, ds_report_init, [("out_report", "in_file")]),
            ])
            # fmt: on

    return wf
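
# Illustrative usage sketch: build the rodent brain-extraction workflow and render
# its graph without running it. Parameter values and the working directory are
# assumptions chosen for demonstration, not values taken from the original source.
if __name__ == "__main__":
    rodent_wf = init_rodent_brain_extraction_wf(
        ants_affine_init=True,
        omp_nthreads=4,
        template_id="Fischer344",
    )
    rodent_wf.base_dir = "/tmp/rodent_work"  # hypothetical working directory
    rodent_wf.write_graph(graph2use="colored", format="svg")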
Example #2
def init_syn_sdc_wf(omp_nthreads,
                    bold_pe=None,
                    atlas_threshold=3,
                    name='syn_sdc_wf'):
    """
    This workflow takes a skull-stripped T1w image and reference BOLD image and
    estimates a susceptibility distortion correction warp, using ANTs symmetric
    normalization (SyN) and the average fieldmap atlas described in
    [Treiber2016]_.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.

    This technique is a variation on those developed in [Huntenburg2014]_ and
    [Wang2017]_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.syn import init_syn_sdc_wf
        wf = init_syn_sdc_wf(
            bold_pe='j',
            omp_nthreads=8)

    **Inputs**

        bold_ref
            reference image
        bold_ref_brain
            skull-stripped reference image
        template : str
            Name of template targeted by ``template`` output space
        t1_brain
            skull-stripped, bias-corrected structural image
        t1_2_mni_reverse_transform
            inverse registration transform of T1w image to MNI template

    **Outputs**

        out_reference
            the ``bold_ref`` image after unwarping
        out_reference_brain
            the ``bold_ref_brain`` image after unwarping
        out_warp
            the corresponding :abbr:`DFM (displacements field map)` compatible with
            ANTs
        out_mask
            mask of the unwarped input file

    """

    if bold_pe is None or bold_pe[0] not in ['i', 'j']:
        LOGGER.warning(
            'Incorrect phase-encoding direction, assuming PA (posterior-to-anterior).'
        )
        bold_pe = 'j'

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on *fMRIPrep*'s *fieldmap-less* approach.
The deformation field is that resulting from co-registering the BOLD reference
to the same-subject T1w-reference with its intensity inverted [@fieldmapless1;
@fieldmapless2].
Registration is performed with `antsRegistration` (ANTs {ants_ver}), and
the process regularized by constraining deformation to be nonzero only
along the phase-encoding direction, and modulated with an average fieldmap
template [@fieldmapless3].
""".format(ants_ver=Registration().version or '<ver>')
    inputnode = pe.Node(niu.IdentityInterface([
        'bold_ref', 'bold_ref_brain', 'template', 't1_brain',
        't1_2_mni_reverse_transform'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['out_reference', 'out_reference_brain', 'out_mask', 'out_warp']),
                         name='outputnode')

    # Collect predefined data
    # Atlas image and registration affine
    atlas_img = pkgr.resource_filename('fmriprep', 'data/fmap_atlas.nii.gz')
    # Registration specifications
    affine_transform = pkgr.resource_filename('fmriprep', 'data/affine.json')
    syn_transform = pkgr.resource_filename('fmriprep',
                                           'data/susceptibility_syn.json')

    invert_t1w = pe.Node(Rescale(invert=True), name='invert_t1w', mem_gb=0.3)

    ref_2_t1 = pe.Node(Registration(from_file=affine_transform),
                       name='ref_2_t1',
                       n_procs=omp_nthreads)
    t1_2_ref = pe.Node(ApplyTransforms(invert_transform_flags=[True]),
                       name='t1_2_ref',
                       n_procs=omp_nthreads)

    # 1) BOLD -> T1; 2) MNI -> T1; 3) ATLAS -> MNI
    transform_list = pe.Node(niu.Merge(3),
                             name='transform_list',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Inverting (1), then applying in reverse order:
    #
    # ATLAS -> MNI -> T1 -> BOLD
    atlas_2_ref = pe.Node(
        ApplyTransforms(invert_transform_flags=[True, False, False]),
        name='atlas_2_ref',
        n_procs=omp_nthreads,
        mem_gb=0.3)
    atlas_2_ref.inputs.input_image = atlas_img

    threshold_atlas = pe.Node(fsl.maths.MathsCommand(
        args='-thr {:.8g} -bin'.format(atlas_threshold),
        output_datatype='char'),
                              name='threshold_atlas',
                              mem_gb=0.3)

    fixed_image_masks = pe.Node(niu.Merge(2),
                                name='fixed_image_masks',
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
    fixed_image_masks.inputs.in1 = 'NULL'

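    # Restrict the SyN deformation to the in-plane phase-encoding axis ('i' -> x,
    # 'j' -> y); one weight triplet is required per stage of the transform
    # configuration, hence the ``* 2``.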
    restrict = [[int(bold_pe[0] == 'i'), int(bold_pe[0] == 'j'), 0]] * 2
    syn = pe.Node(Registration(from_file=syn_transform,
                               restrict_deformation=restrict),
                  name='syn',
                  n_procs=omp_nthreads)

    unwarp_ref = pe.Node(ApplyTransforms(dimension=3,
                                         float=True,
                                         interpolation='LanczosWindowedSinc'),
                         name='unwarp_ref')

    skullstrip_bold_wf = init_skullstrip_bold_wf()

    workflow.connect([
        (inputnode, invert_t1w, [('t1_brain', 'in_file'),
                                 ('bold_ref', 'ref_file')]),
        (inputnode, ref_2_t1, [('bold_ref_brain', 'moving_image')]),
        (invert_t1w, ref_2_t1, [('out_file', 'fixed_image')]),
        (inputnode, t1_2_ref, [('bold_ref', 'reference_image')]),
        (invert_t1w, t1_2_ref, [('out_file', 'input_image')]),
        (ref_2_t1, t1_2_ref, [('forward_transforms', 'transforms')]),
        (ref_2_t1, transform_list, [('forward_transforms', 'in1')]),
        (inputnode, transform_list, [('t1_2_mni_reverse_transform', 'in2'),
                                     (('template', _prior_path), 'in3')]),
        (inputnode, atlas_2_ref, [('bold_ref', 'reference_image')]),
        (transform_list, atlas_2_ref, [('out', 'transforms')]),
        (atlas_2_ref, threshold_atlas, [('output_image', 'in_file')]),
        (threshold_atlas, fixed_image_masks, [('out_file', 'in2')]),
        (inputnode, syn, [('bold_ref_brain', 'moving_image')]),
        (t1_2_ref, syn, [('output_image', 'fixed_image')]),
        (fixed_image_masks, syn, [('out', 'fixed_image_masks')]),
        (syn, outputnode, [('forward_transforms', 'out_warp')]),
        (syn, unwarp_ref, [('forward_transforms', 'transforms')]),
        (inputnode, unwarp_ref, [('bold_ref', 'reference_image'),
                                 ('bold_ref', 'input_image')]),
        (unwarp_ref, skullstrip_bold_wf, [('output_image', 'inputnode.in_file')
                                          ]),
        (unwarp_ref, outputnode, [('output_image', 'out_reference')]),
        (skullstrip_bold_wf, outputnode,
         [('outputnode.skull_stripped_file', 'out_reference_brain'),
          ('outputnode.mask_file', 'out_mask')]),
    ])

    return workflow
Example #3
def init_anat_seg_wf(
    age_months=None,
    anat_modality="T1w",
    template_dir=None,
    sloppy=False,
    omp_nthreads=1,
    name="anat_seg_wf",
):
    """
    Calculate brain tissue segmentations from either:
     A) a collection of manually segmented templates
     B) FSL's FAST
    """

    if anat_modality != "T1w":
        raise NotImplementedError(
            "Only T1w images are currently accepted for the segmentation workflow."
        )

    wf = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=["anat_brain"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["anat_aseg", "anat_dseg", "anat_tpms"]),
        name="outputnode",
    )

    # Coerce segmentation labels to BIDS
    lut_anat_dseg = pe.Node(niu.Function(function=_apply_bids_lut),
                            name="lut_anat_dseg")

    if template_dir is None:
        # Use FSL FAST for segmentations
        anat_dseg = pe.Node(fsl.FAST(segments=True,
                                     no_bias=True,
                                     probability_maps=True),
                            name='anat_dseg',
                            mem_gb=3)
        lut_anat_dseg.inputs.lut = (0, 3, 1, 2)  # Maps: 0 -> 0, 3 -> 1, 1 -> 2, 2 -> 3.
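        # Assuming _apply_bids_lut indexes the LUT by the input label, FAST's
        # CSF=1 / GM=2 / WM=3 labels become the BIDS convention GM=1 / WM=2 / CSF=3.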
        fast2bids = pe.Node(niu.Function(function=_probseg_fast2bids),
                            name="fast2bids",
                            run_without_submitting=True)

        wf.connect([
            (inputnode, anat_dseg, [
                ('anat_brain', 'in_files'),
            ]),
            (anat_dseg, lut_anat_dseg, [
                ('partial_volume_map', 'in_dseg'),
            ]),
            (lut_anat_dseg, outputnode, [
                ('out', 'anat_dseg'),
            ]),
            (anat_dseg, fast2bids, [
                ('partial_volume_files', 'inlist'),
            ]),
            (fast2bids, outputnode, [
                ('out', 'anat_tpms'),
            ]),
        ])
        return wf

    # Otherwise, register to templates and run ANTs JointFusion
    lut_anat_dseg.inputs.lut = _aseg_to_three()
    tmpl_anats, tmpl_segs = _parse_segmentation_atlases(
        anat_modality, template_dir)

    ants_params = "testing" if sloppy else "precise"
    # Register each template anatomical to the subject's brain (subject space)
    norm = pe.MapNode(
        Registration(from_file=pkgr_fn(
            "niworkflows.data", f"antsBrainExtraction_{ants_params}.json")),
        name="norm",
        iterfield=["moving_image"],
        n_procs=omp_nthreads,
        mem_gb=DEFAULT_MEMORY_MIN_GB,
    )
    norm.inputs.moving_image = tmpl_anats
    norm.inputs.float = True

    apply_atlas = pe.MapNode(
        ApplyTransforms(
            dimension=3,
            interpolation="NearestNeighbor",
            float=True,
        ),
        iterfield=["transforms", "input_image"],
        name="apply_atlas",
    )
    apply_atlas.inputs.input_image = tmpl_anats

    apply_seg = pe.MapNode(
        ApplyTransforms(dimension=3,
                        interpolation="MultiLabel"),  # NearestNeighbor?
        name="apply_seg",
        iterfield=["transforms", "input_image"],
    )
    apply_seg.inputs.input_image = tmpl_segs

    jointfusion = pe.Node(
        JointFusion(
            dimension=3,
            out_label_fusion="fusion_labels.nii.gz",
            num_threads=omp_nthreads,
        ),
        name="jointfusion",
    )

    jf_label = pe.Node(niu.Function(function=_to_dtype), name="jf_label")

    # split each tissue into individual masks
    split_seg = pe.Node(niu.Function(function=_split_segments),
                        name="split_seg")
    to_list = pe.Node(niu.Function(function=_to_list), name='to_list')

    # fmt: off
    wf.connect([
        (inputnode, norm, [('anat_brain', 'fixed_image')]),
        (norm, apply_atlas, [('forward_transforms', 'transforms')]),
        (inputnode, apply_atlas, [('anat_brain', 'reference_image')]),
        (norm, apply_seg, [('forward_transforms', 'transforms')]),
        (inputnode, apply_seg, [('anat_brain', 'reference_image')]),
        (inputnode, to_list, [('anat_brain', 'in_file')]),
        (to_list, jointfusion, [('out', 'target_image')]),
        (apply_atlas, jointfusion, [('output_image', 'atlas_image')]),
        (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')
                                  ]),
        (jointfusion, jf_label, [('out_label_fusion', 'in_file')]),
        (jf_label, outputnode, [('out', 'anat_aseg')]),
        (jf_label, lut_anat_dseg, [('out', 'in_dseg')]),
        (lut_anat_dseg, outputnode, [('out', 'anat_dseg')]),
        (lut_anat_dseg, split_seg, [('out', 'in_file')]),
        (split_seg, outputnode, [('out', 'anat_tpms')]),
    ])
    # fmt: on

    return wf
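
# Illustrative usage sketch: with template_dir=None the FSL FAST branch is built,
# whereas a directory of manually segmented templates would switch to the
# JointFusion branch. The file name below is hypothetical.
if __name__ == "__main__":
    seg_wf = init_anat_seg_wf(template_dir=None, omp_nthreads=4)
    seg_wf.get_node("inputnode").inputs.anat_brain = "sub-01_desc-brain_T1w.nii.gz"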
Example #4
def init_enhance_and_skullstrip_bold_wf(
        name='enhance_and_skullstrip_bold_wf',
        pre_mask=False,
        omp_nthreads=1):
    """
    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependent)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g. a reference image
    averaging non-steady-state timepoints), and sharpens the histogram
    with the application of the N4 algorithm for removing the
    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
    mask.

    Steps of this workflow are:

      1. Calculate a tentative mask by registering (9-parameters) to *fMRIPrep*'s
         :abbr:`EPI (echo-planar imaging)` -*boldref* template, which
         is in MNI space.
         The tentative mask is obtained by resampling the MNI template's
         brainmask into *boldref*-space.
      2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
      3. Run ANTs' ``N4BiasFieldCorrection`` on the input
         :abbr:`BOLD (blood-oxygen level-dependent)` average, using the
         dilated mask generated in 2) instead of the internal Otsu thresholding.
      4. Calculate a loose mask using FSL's ``bet``, followed by one mathematical
         morphology dilation using a sphere of 6mm as structuring element.
      5. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the dilated mask calculated in 4), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 5).
      7. Calculate a final mask as the intersection of 4) and 6).
      8. Apply final mask on the enhanced reference.

    Step 1 can be skipped if the ``pre_mask`` argument is set to ``True`` and
    a tentative mask is passed to the workflow through the ``pre_mask``
    Nipype input.


    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.util import init_enhance_and_skullstrip_bold_wf
        wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=1)

    **Parameters**
        name : str
            Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
        pre_mask : bool
            Indicates whether the ``pre_mask`` input will be set (and thus, step 1
            should be skipped).
        omp_nthreads : int
            number of threads available to parallel nodes

    **Inputs**

        in_file
            BOLD image (single volume)
        pre_mask
            A tentative brain mask to initialize the workflow (requires the
            ``pre_mask`` parameter to be set ``True``).


    **Outputs**

        bias_corrected_file
            the ``in_file`` after `N4BiasFieldCorrection`_
        skull_stripped_file
            the ``bias_corrected_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053
    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'mask_file', 'skull_stripped_file', 'bias_corrected_file']), name='outputnode')

    # Dilate pre_mask
    pre_dilate = pe.Node(fsl.DilateImage(
        operation='max', kernel_shape='sphere', kernel_size=3.0,
        internal_datatype='char'), name='pre_mask_dilate')

    # Ensure mask's header matches reference's
    check_hdr = pe.Node(MatchHeader(), name='check_hdr',
                        run_without_submitting=True)

    # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                         name='n4_correct', n_procs=1)

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    bet_dilate = pe.Node(fsl.DilateImage(
        operation='max', kernel_shape='sphere', kernel_size=6.0,
        internal_datatype='char'), name='skullstrip_first_dilate')
    bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(afni.Unifize(
        t2=True, outputtype='NIFTI_GZ',
        # Default -clfrac is 0.1, 0.4 was too conservative
        # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
        args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
        out_file="uni.nii.gz"), name='unifize')
    fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2', mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    if not pre_mask:
        bold_template = get_template('MNI152NLin2009cAsym', 'res-02_desc-fMRIPrep_boldref.nii.gz')
        brain_mask = get_template('MNI152NLin2009cAsym', 'res-02_desc-brain_mask.nii.gz')

        # Initialize transforms with antsAI
        init_aff = pe.Node(AI(
            fixed_image=str(bold_template),
            fixed_image_mask=str(brain_mask),
            metric=('Mattes', 32, 'Regular', 0.2),
            transform=('Affine', 0.1),
            search_factor=(20, 0.12),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            verbose=True),
            name='init_aff',
            n_procs=omp_nthreads)

        # Registration().version may be None
        if parseversion(Registration().version or '0.0.0') > Version('2.2.0'):
            init_aff.inputs.search_grid = (40, (0, 40, 40))

        # Set up spatial normalization
        norm = pe.Node(Registration(
            from_file=pkgr_fn(
                'fmriprep.data',
                'epi_atlasbased_brainmask.json')),
            name='norm',
            n_procs=omp_nthreads)
        norm.inputs.fixed_image = str(bold_template)
        map_brainmask = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True, input_image=str(brain_mask)),
            name='map_brainmask'
        )
        workflow.connect([
            (inputnode, init_aff, [('in_file', 'moving_image')]),
            (inputnode, map_brainmask, [('in_file', 'reference_image')]),
            (inputnode, norm, [('in_file', 'moving_image')]),
            (init_aff, norm, [('output_transform', 'initial_moving_transform')]),
            (norm, map_brainmask, [
                ('reverse_invert_flags', 'invert_transform_flags'),
                ('reverse_transforms', 'transforms')]),
            (map_brainmask, pre_dilate, [('output_image', 'in_file')]),
        ])
    else:
        workflow.connect([
            (inputnode, pre_dilate, [('pre_mask', 'in_file')]),
        ])

    workflow.connect([
        (inputnode, check_hdr, [('in_file', 'reference')]),
        (pre_dilate, check_hdr, [('out_file', 'in_file')]),
        (check_hdr, n4_correct, [('out_file', 'mask_image')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
        (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
        (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
        (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
        (bet_mask, unifize, [('out_file', 'in_file')]),
        (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
        (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [('out_file', 'in_file')]),
        (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])

    return workflow
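
# Illustrative usage sketch: instantiate the workflow with a precomputed tentative
# mask so that the template-registration of step 1 is skipped. File names below
# are hypothetical.
if __name__ == "__main__":
    enhance_wf = init_enhance_and_skullstrip_bold_wf(pre_mask=True, omp_nthreads=2)
    enhance_wf.get_node("inputnode").inputs.in_file = "sub-01_boldref.nii.gz"
    enhance_wf.get_node("inputnode").inputs.pre_mask = "sub-01_desc-brain_mask.nii.gz"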
Example #5
def init_syn_sdc_wf(
    *,
    atlas_threshold=3,
    sloppy=False,
    debug=False,
    name="syn_sdc_wf",
    omp_nthreads=1,
):
    """
    Build the *fieldmap-less* susceptibility-distortion estimation workflow.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.


    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fit.syn import init_syn_sdc_wf
            wf = init_syn_sdc_wf(omp_nthreads=8)

    Parameters
    ----------
    atlas_threshold : :obj:`float`
        Exclude from the registration metric computation areas with average distortions
        below this threshold (in mm).
    sloppy : :obj:`bool`
        Whether a fast (less accurate) configuration of the workflow should be applied.
    debug : :obj:`bool`
        Run in debug mode
    name : :obj:`str`
        Name for this workflow
    omp_nthreads : :obj:`int`
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    epi_ref : :obj:`tuple` (:obj:`str`, :obj:`dict`)
        A tuple, where the first element is the path of the distorted EPI
        reference map (e.g., an average of *b=0* volumes), and the second
        element is a dictionary of associated metadata.
    epi_mask : :obj:`str`
        A path to a brain mask corresponding to ``epi_ref``.
    anat_ref : :obj:`str`
        A preprocessed, skull-stripped anatomical (T1w or T2w) image resampled in EPI space.
    anat_mask : :obj:`str`
        Path to the brain mask corresponding to ``anat_ref`` in EPI space.
    sd_prior : :obj:`str`
        A template map of areas with strong susceptibility distortions (SD) to regularize
        the cost function of SyN

    Outputs
    -------
    fmap : :obj:`str`
        The path of the estimated fieldmap.
    fmap_ref : :obj:`str`
        The path of an unwarped conversion of files in ``epi_ref``.
    fmap_coeff : :obj:`str` or :obj:`list` of :obj:`str`
        The path(s) of the B-Spline coefficients supporting the fieldmap.
    out_warp : :obj:`str`
        The path of the corresponding displacements field transform to unwarp
        susceptibility distortions.
    method: :obj:`str`
        Short description of the estimation method that was run.

    """
    from pkg_resources import resource_filename as pkgrf
    from packaging.version import parse as parseversion, Version
    from nipype.interfaces.ants import ImageMath
    from niworkflows.interfaces.fixes import (
        FixHeaderApplyTransforms as ApplyTransforms,
        FixHeaderRegistration as Registration,
    )
    from niworkflows.interfaces.nibabel import (
        Binarize,
        IntensityClip,
        RegridToZooms,
    )
    from ...utils.misc import front as _pop, last as _pull
    from ...interfaces.epi import GetReadoutTime
    from ...interfaces.fmap import DisplacementsField2Fieldmap
    from ...interfaces.bspline import (
        ApplyCoeffsField,
        BSplineApprox,
        DEFAULT_LF_ZOOMS_MM,
        DEFAULT_HF_ZOOMS_MM,
        DEFAULT_ZOOMS_MM,
    )
    from ...interfaces.brainmask import BinaryDilation, Union

    ants_version = Registration().version
    if ants_version and parseversion(ants_version) < Version("2.2.0"):
        raise RuntimeError(
            f"Please upgrade ANTs to 2.2 or older ({ants_version} found)."
        )

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""\
A deformation field to correct for susceptibility distortions was estimated
based on *fMRIPrep*'s *fieldmap-less* approach.
The deformation field is that resulting from co-registering the EPI reference
to the same-subject T1w-reference with its intensity inverted [@fieldmapless1;
@fieldmapless2].
Registration is performed with `antsRegistration`
(ANTs {ants_version or "-- version unknown"}), and
the process regularized by constraining deformation to be nonzero only
along the phase-encoding direction, and modulated with an average fieldmap
template [@fieldmapless3].
"""
    inputnode = pe.Node(niu.IdentityInterface(INPUT_FIELDS), name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            ["fmap", "fmap_ref", "fmap_coeff", "fmap_mask", "out_warp", "method"]
        ),
        name="outputnode",
    )
    outputnode.inputs.method = 'FLB ("fieldmap-less", SyN-based)'

    readout_time = pe.Node(
        GetReadoutTime(),
        name="readout_time",
        run_without_submitting=True,
    )

    warp_dir = pe.Node(
        niu.Function(function=_warp_dir),
        run_without_submitting=True,
        name="warp_dir",
    )
    warp_dir.inputs.nlevels = 2
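    # _warp_dir (helper not shown here) is expected to emit one restrict_deformation
    # triplet per registration level (nlevels=2), limiting SyN to the PE axis.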
    atlas_msk = pe.Node(Binarize(thresh_low=atlas_threshold), name="atlas_msk")
    anat_dilmsk = pe.Node(BinaryDilation(), name="anat_dilmsk")
    amask2epi = pe.Node(
        ApplyTransforms(interpolation="MultiLabel", transforms="identity"),
        name="amask2epi",
    )

    # Calculate laplacian maps
    lap_anat = pe.Node(
        ImageMath(operation="Laplacian", op2="1.5 1", copy_header=True), name="lap_anat"
    )
    lap_anat_norm = pe.Node(niu.Function(function=_norm_lap), name="lap_anat_norm")
    anat_merge = pe.Node(
        niu.Merge(2),
        name="anat_merge",
        run_without_submitting=True,
    )

    clip_epi = pe.Node(IntensityClip(p_min=35.0, p_max=99.9), name="clip_epi")
    lap_epi = pe.Node(
        ImageMath(operation="Laplacian", op2="1.5 1", copy_header=True), name="lap_epi"
    )
    lap_epi_norm = pe.Node(niu.Function(function=_norm_lap), name="lap_epi_norm")
    epi_merge = pe.Node(
        niu.Merge(2),
        name="epi_merge",
        run_without_submitting=True,
    )

    epi_umask = pe.Node(Union(), name="epi_umask")
    moving_masks = pe.Node(
        niu.Merge(3),
        name="moving_masks",
        run_without_submitting=True,
    )

    fixed_masks = pe.Node(
        niu.Merge(3),
        name="fixed_masks",
        mem_gb=DEFAULT_MEMORY_MIN_GB,
        run_without_submitting=True,
    )

    # Set a manageable size for the epi reference
    find_zooms = pe.Node(niu.Function(function=_adjust_zooms), name="find_zooms")
    zooms_epi = pe.Node(RegridToZooms(), name="zooms_epi")

    # SyN Registration Core
    syn = pe.Node(
        Registration(
            from_file=pkgrf("sdcflows", f"data/sd_syn{'_sloppy' * sloppy}.json")
        ),
        name="syn",
        n_procs=omp_nthreads,
    )
    syn.inputs.output_warped_image = debug
    syn.inputs.output_inverse_warped_image = debug

    if debug:
        syn.inputs.args = "--write-interval-volumes 2"

    # Extract the corresponding fieldmap in Hz
    extract_field = pe.Node(
        DisplacementsField2Fieldmap(demean=True), name="extract_field"
    )

    unwarp = pe.Node(ApplyCoeffsField(), name="unwarp")

    # Check zooms (avoid very expensive B-Splines fitting)
    zooms_field = pe.Node(
        ApplyTransforms(
            interpolation="BSpline", transforms="identity", args="-u float"
        ),
        name="zooms_field",
    )
    zooms_bmask = pe.Node(
        ApplyTransforms(
            interpolation="MultiLabel", transforms="identity", args="-u uchar"
        ),
        name="zooms_bmask",
    )

    # Regularize with B-Splines
    bs_filter = pe.Node(BSplineApprox(), n_procs=omp_nthreads, name="bs_filter")
    bs_filter.interface._always_run = debug
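    # In sloppy mode a single, coarse control-point spacing is fitted; otherwise
    # both a low-frequency and a high-frequency B-Spline grid are used.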
    bs_filter.inputs.bs_spacing = (
        [DEFAULT_LF_ZOOMS_MM, DEFAULT_HF_ZOOMS_MM] if not sloppy else [DEFAULT_ZOOMS_MM]
    )
    bs_filter.inputs.extrapolate = not debug

    # fmt: off
    workflow.connect([
        (inputnode, readout_time, [(("epi_ref", _pop), "in_file"),
                                   (("epi_ref", _pull), "metadata")]),
        (inputnode, atlas_msk, [("sd_prior", "in_file")]),
        (inputnode, clip_epi, [(("epi_ref", _pop), "in_file")]),
        (inputnode, unwarp, [(("epi_ref", _pop), "in_data")]),
        (inputnode, amask2epi, [("epi_mask", "reference_image")]),
        (inputnode, zooms_bmask, [("anat_mask", "input_image")]),
        (inputnode, fixed_masks, [("anat_mask", "in1"),
                                  ("anat_mask", "in2")]),
        (inputnode, anat_dilmsk, [("anat_mask", "in_file")]),
        (inputnode, warp_dir, [("anat_ref", "fixed_image")]),
        (inputnode, anat_merge, [("anat_ref", "in1")]),
        (inputnode, lap_anat, [("anat_ref", "op1")]),
        (inputnode, find_zooms, [("anat_ref", "in_anat"),
                                 (("epi_ref", _pop), "in_epi")]),
        (inputnode, zooms_field, [(("epi_ref", _pop), "reference_image")]),
        (inputnode, epi_umask, [("epi_mask", "in1")]),
        (lap_anat, lap_anat_norm, [("output_image", "in_file")]),
        (lap_anat_norm, anat_merge, [("out", "in2")]),
        (epi_umask, moving_masks, [("out_file", "in1"),
                                   ("out_file", "in2"),
                                   ("out_file", "in3")]),
        (clip_epi, epi_merge, [("out_file", "in1")]),
        (clip_epi, lap_epi, [("out_file", "op1")]),
        (clip_epi, zooms_epi, [("out_file", "in_file")]),
        (lap_epi, lap_epi_norm, [("output_image", "in_file")]),
        (lap_epi_norm, epi_merge, [("out", "in2")]),
        (find_zooms, zooms_epi, [("out", "zooms")]),
        (atlas_msk, fixed_masks, [("out_mask", "in3")]),
        (anat_dilmsk, amask2epi, [("out_file", "input_image")]),
        (amask2epi, epi_umask, [("output_image", "in2")]),
        (readout_time, warp_dir, [("pe_direction", "pe_dir")]),
        (warp_dir, syn, [("out", "restrict_deformation")]),
        (anat_merge, syn, [("out", "fixed_image")]),
        (fixed_masks, syn, [("out", "fixed_image_masks")]),
        (epi_merge, syn, [("out", "moving_image")]),
        (moving_masks, syn, [("out", "moving_image_masks")]),
        (syn, extract_field, [(("forward_transforms", _pop), "transform")]),
        (readout_time, extract_field, [("readout_time", "ro_time"),
                                       ("pe_direction", "pe_dir")]),
        (extract_field, zooms_field, [("out_file", "input_image")]),
        (zooms_field, zooms_bmask, [("output_image", "reference_image")]),
        (zooms_field, bs_filter, [("output_image", "in_data")]),
        # Setting a mask ends up over-fitting the field
        # - it's better to have all those ~zero around.
        # (zooms_bmask, bs_filter, [("output_image", "in_mask")]),
        (bs_filter, unwarp, [("out_coeff", "in_coeff")]),
        (readout_time, unwarp, [("readout_time", "ro_time"),
                                ("pe_direction", "pe_dir")]),
        (zooms_bmask, outputnode, [("output_image", "fmap_mask")]),
        (bs_filter, outputnode, [("out_coeff", "fmap_coeff")]),
        (unwarp, outputnode, [("out_corrected", "fmap_ref"),
                              ("out_field", "fmap"),
                              ("out_warp", "out_warp")]),
    ])
    # fmt: on

    return workflow
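
# Illustrative usage sketch: wiring the inputs documented above. File names and
# metadata values are hypothetical placeholders, not values from the original source.
if __name__ == "__main__":
    sdc_wf = init_syn_sdc_wf(omp_nthreads=8, sloppy=True)
    inputspec = sdc_wf.get_node("inputnode").inputs
    inputspec.epi_ref = (
        "sub-01_dir-AP_epiref.nii.gz",
        {"PhaseEncodingDirection": "j-", "TotalReadoutTime": 0.065},
    )
    inputspec.epi_mask = "sub-01_dir-AP_desc-brain_mask.nii.gz"
    inputspec.anat_ref = "sub-01_space-epi_desc-preproc_T1w.nii.gz"
    inputspec.anat_mask = "sub-01_space-epi_desc-brain_mask.nii.gz"
    inputspec.sd_prior = "tpl-MNI152NLin2009cAsym_label-fieldmap_probseg.nii.gz"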
Example #6
def init_nonlinear_sdc_wf(bold_file,
                          freesurfer,
                          bold2t1w_dof,
                          template,
                          omp_nthreads,
                          bold_pe='j',
                          atlas_threshold=3,
                          name='nonlinear_sdc_wf'):
    """
    This workflow takes a skull-stripped T1w image and reference BOLD image and
    estimates a susceptibility distortion correction warp, using ANTs symmetric
    normalization (SyN) and the average fieldmap atlas described in
    [Treiber2016]_.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.

    This technique is a variation on those developed in [Huntenburg2014]_ and
    [Wang2017]_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.syn import init_nonlinear_sdc_wf
        wf = init_nonlinear_sdc_wf(
            bold_file='/dataset/sub-01/func/sub-01_task-rest_bold.nii.gz',
            bold_pe='j',
            freesurfer=True,
            bold2t1w_dof=9,
            template='MNI152NLin2009cAsym',
            omp_nthreads=8)

    **Inputs**

        t1_brain
            skull-stripped, bias-corrected structural image
        bold_ref
            skull-stripped reference image
        t1_seg
            FAST segmentation white and gray matter, in native T1w space
        t1_2_mni_reverse_transform
            inverse registration transform of T1w image to MNI template

    **Outputs**

        out_reference_brain
            the ``bold_ref`` image after unwarping
        out_warp
            the corresponding :abbr:`DFM (displacements field map)` compatible with
            ANTs
        out_mask
            mask of the unwarped input file
        out_mask_report
            reportlet for the skullstripping

    .. [Huntenburg2014] Huntenburg, J. M. (2014) Evaluating Nonlinear
                        Coregistration of BOLD EPI and T1w Images. Berlin: Master
                        Thesis, Freie Universität. `PDF
                        <http://pubman.mpdl.mpg.de/pubman/item/escidoc:2327525:5/component/escidoc:2327523/master_thesis_huntenburg_4686947.pdf>`_.
    .. [Treiber2016] Treiber, J. M. et al. (2016) Characterization and Correction
                     of Geometric Distortions in 814 Diffusion Weighted Images,
                     PLoS ONE 11(3): e0152472. doi:`10.1371/journal.pone.0152472
                     <https://doi.org/10.1371/journal.pone.0152472>`_.
    .. [Wang2017] Wang S, et al. (2017) Evaluation of Field Map and Nonlinear
                  Registration Methods for Correction of Susceptibility Artifacts
                  in Diffusion MRI. Front. Neuroinform. 11:17.
                  doi:`10.3389/fninf.2017.00017
                  <https://doi.org/10.3389/fninf.2017.00017>`_.
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        ['t1_brain', 'bold_ref', 't1_2_mni_reverse_transform', 't1_seg']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface([
        'out_reference_brain', 'out_mask', 'out_warp', 'out_warp_report',
        'out_mask_report'
    ]),
                         name='outputnode')

    if bold_pe is None or bold_pe[0] not in ['i', 'j']:
        LOGGER.warning(
            'Incorrect phase-encoding direction, assuming PA (posterior-to-anterior).'
        )
        bold_pe = 'j'

    # Collect predefined data
    # Atlas image and registration affine
    atlas_img = pkgr.resource_filename('fmriprep', 'data/fmap_atlas.nii.gz')
    atlas_2_template_affine = pkgr.resource_filename(
        'fmriprep', 'data/fmap_atlas_2_{}_affine.mat'.format(template))
    # Registration specifications
    affine_transform = pkgr.resource_filename('fmriprep', 'data/affine.json')
    syn_transform = pkgr.resource_filename('fmriprep',
                                           'data/susceptibility_syn.json')

    invert_t1w = pe.Node(InvertT1w(), name='invert_t1w', mem_gb=0.3)

    ref_2_t1 = pe.Node(Registration(from_file=affine_transform),
                       name='ref_2_t1',
                       n_procs=omp_nthreads)
    t1_2_ref = pe.Node(ApplyTransforms(invert_transform_flags=[True]),
                       name='t1_2_ref',
                       n_procs=omp_nthreads)

    # 1) BOLD -> T1; 2) MNI -> T1; 3) ATLAS -> MNI
    transform_list = pe.Node(niu.Merge(3),
                             name='transform_list',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
    transform_list.inputs.in3 = atlas_2_template_affine

    # Inverting (1), then applying in reverse order:
    #
    # ATLAS -> MNI -> T1 -> BOLD
    atlas_2_ref = pe.Node(
        ApplyTransforms(invert_transform_flags=[True, False, False]),
        name='atlas_2_ref',
        n_procs=omp_nthreads,
        mem_gb=0.3)
    atlas_2_ref.inputs.input_image = atlas_img

    threshold_atlas = pe.Node(fsl.maths.MathsCommand(
        args='-thr {:.8g} -bin'.format(atlas_threshold),
        output_datatype='char'),
                              name='threshold_atlas',
                              mem_gb=0.3)

    fixed_image_masks = pe.Node(niu.Merge(2),
                                name='fixed_image_masks',
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
    fixed_image_masks.inputs.in1 = 'NULL'

    restrict = [[int(bold_pe[0] == 'i'), int(bold_pe[0] == 'j'), 0]] * 2
    syn = pe.Node(Registration(from_file=syn_transform,
                               restrict_deformation=restrict),
                  name='syn',
                  n_procs=omp_nthreads)

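    # Resample the T1w segmentation into the BOLD reference space and extract the
    # WM mask so the before/after reportlet can overlay a white-matter contour.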
    seg_2_ref = pe.Node(ApplyTransforms(interpolation='NearestNeighbor',
                                        float=True,
                                        invert_transform_flags=[True]),
                        name='seg_2_ref',
                        n_procs=omp_nthreads,
                        mem_gb=0.3)
    sel_wm = pe.Node(niu.Function(function=extract_wm),
                     name='sel_wm',
                     mem_gb=DEFAULT_MEMORY_MIN_GB)
    syn_rpt = pe.Node(SimpleBeforeAfter(), name='syn_rpt', mem_gb=0.1)

    skullstrip_bold_wf = init_skullstrip_bold_wf()

    workflow.connect([
        (inputnode, invert_t1w, [('t1_brain', 'in_file'),
                                 ('bold_ref', 'ref_file')]),
        (inputnode, ref_2_t1, [('bold_ref', 'moving_image')]),
        (invert_t1w, ref_2_t1, [('out_file', 'fixed_image')]),
        (inputnode, t1_2_ref, [('bold_ref', 'reference_image')]),
        (invert_t1w, t1_2_ref, [('out_file', 'input_image')]),
        (ref_2_t1, t1_2_ref, [('forward_transforms', 'transforms')]),
        (ref_2_t1, transform_list, [('forward_transforms', 'in1')]),
        (inputnode, transform_list, [('t1_2_mni_reverse_transform', 'in2')]),
        (inputnode, atlas_2_ref, [('bold_ref', 'reference_image')]),
        (transform_list, atlas_2_ref, [('out', 'transforms')]),
        (atlas_2_ref, threshold_atlas, [('output_image', 'in_file')]),
        (threshold_atlas, fixed_image_masks, [('out_file', 'in2')]),
        (inputnode, syn, [('bold_ref', 'moving_image')]),
        (t1_2_ref, syn, [('output_image', 'fixed_image')]),
        (fixed_image_masks, syn, [('out', 'fixed_image_masks')]),
        (inputnode, seg_2_ref, [('t1_seg', 'input_image')]),
        (ref_2_t1, seg_2_ref, [('forward_transforms', 'transforms')]),
        (syn, seg_2_ref, [('warped_image', 'reference_image')]),
        (seg_2_ref, sel_wm, [('output_image', 'in_seg')]),
        (inputnode, syn_rpt, [('bold_ref', 'before')]),
        (syn, syn_rpt, [('warped_image', 'after')]),
        (sel_wm, syn_rpt, [('out', 'wm_seg')]),
        (syn, skullstrip_bold_wf, [('warped_image', 'inputnode.in_file')]),
        (syn, outputnode, [('forward_transforms', 'out_warp')]),
        (skullstrip_bold_wf, outputnode,
         [('outputnode.skull_stripped_file', 'out_reference_brain'),
          ('outputnode.mask_file', 'out_mask'),
          ('outputnode.out_report', 'out_mask_report')]),
        (syn_rpt, outputnode, [('out_report', 'out_warp_report')])
    ])

    return workflow
Example #7
def init_infant_brain_extraction_wf(
    age_months=None,
    ants_affine_init=False,
    bspline_fitting_distance=200,
    sloppy=False,
    skull_strip_template="UNCInfant",
    template_specs=None,
    mem_gb=3.0,
    debug=False,
    name="infant_brain_extraction_wf",
    omp_nthreads=None,
):
    """
    Build an atlas-based brain extraction pipeline for infant T2w MRI data.

    Pros/Cons of available templates
    --------------------------------
    * MNIInfant
     + More cohorts available for finer-grain control
     + T1w/T2w images available
     - Template masks are poor

    * UNCInfant
     + Accurate masks
     - No T2w image available


    Parameters
    ----------
    age_months : :obj:`int`
        Age of this participant, in months.
    ants_affine_init : :obj:`bool`, optional
        Set-up a pre-initialization step with ``antsAI`` to account for mis-oriented images.
    bspline_fitting_distance : :obj:`float`
        Distance in mm between B-Spline control points for N4 INU estimation.
    sloppy : :obj:`bool`
        Run in *sloppy* mode.
    skull_strip_template : :obj:`str`
        A TemplateFlow ID indicating which template will be used as target for atlas-based
        segmentation.
    template_specs : :obj:`dict`
        Additional template specifications (e.g., resolution or cohort) to correctly select
        the adequate template instance.
    mem_gb : :obj:`float`
        Base memory fingerprint unit.
    name : :obj:`str`
        This particular workflow's unique name (Nipype requirement).
    omp_nthreads : :obj:`int`
        The number of threads for individual processes in this workflow.
    debug : :obj:`bool`
        Produce intermediate registration files

    Inputs
    ------
    in_t2w : :obj:`str`
        The unprocessed input T2w image.

    Outputs
    -------
    t2w_preproc : :obj:`str`
        The preprocessed T2w image (INU and clipping).
    t2w_brain : :obj:`str`
        The preprocessed, brain-extracted T2w image.
    out_mask : :obj:`str`
        The brainmask projected from the template into the T2w, after
        binarization.
    out_probmap : :obj:`str`
        The same as above, before binarization.

    """
    from nipype.interfaces.ants import N4BiasFieldCorrection, ImageMath

    # niworkflows
    from niworkflows.interfaces.nibabel import ApplyMask, Binarize, IntensityClip
    from niworkflows.interfaces.fixes import (
        FixHeaderRegistration as Registration,
        FixHeaderApplyTransforms as ApplyTransforms,
    )
    from templateflow.api import get as get_template

    from ...interfaces.nibabel import BinaryDilation
    from ...utils.misc import cohort_by_months

    # handle template specifics
    template_specs = template_specs or {}
    if skull_strip_template == "MNIInfant":
        template_specs["resolution"] = 2 if sloppy else 1

    if not template_specs.get("cohort"):
        if age_months is None:
            raise KeyError(
                f"Age or cohort for {skull_strip_template} must be provided!")
        template_specs["cohort"] = cohort_by_months(skull_strip_template,
                                                    age_months)

    tpl_target_path = get_template(
        skull_strip_template,
        suffix="T1w",  # no T2w template
        desc=None,
        **template_specs,
    )
    if not tpl_target_path:
        raise RuntimeError(
            f"An instance of template <tpl-{skull_strip_template}> with T1w suffix "
            "could not be found.")

    tpl_brainmask_path = get_template(skull_strip_template,
                                      label="brain",
                                      suffix="probseg",
                                      **template_specs) or get_template(
                                          skull_strip_template,
                                          desc="brain",
                                          suffix="mask",
                                          **template_specs)

    tpl_regmask_path = get_template(
        skull_strip_template,
        label="BrainCerebellumExtraction",
        suffix="mask",
        **template_specs,
    )

    # main workflow
    workflow = pe.Workflow(name)

    inputnode = pe.Node(niu.IdentityInterface(fields=["in_t2w"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["t2w_preproc", "t2w_brain", "out_mask", "out_probmap"]),
        name="outputnode",
    )

    # Ensure template comes with a range of intensities ANTs will like
    clip_tmpl = pe.Node(IntensityClip(p_max=99), name="clip_tmpl")
    clip_tmpl.inputs.in_file = _pop(tpl_target_path)

    # Generate laplacian registration targets
    lap_tmpl = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"),
                       name="lap_tmpl")
    lap_t2w = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"),
                      name="lap_t2w")
    norm_lap_tmpl = pe.Node(niu.Function(function=_norm_lap),
                            name="norm_lap_tmpl")
    norm_lap_t2w = pe.Node(niu.Function(function=_norm_lap),
                           name="norm_lap_t2w")

    # Merge image nodes
    mrg_tmpl = pe.Node(niu.Merge(2),
                       name="mrg_tmpl",
                       run_without_submitting=True)
    mrg_t2w = pe.Node(niu.Merge(2),
                      name="mrg_t2w",
                      run_without_submitting=True)
    bin_regmask = pe.Node(Binarize(thresh_low=0.20), name="bin_regmask")
    bin_regmask.inputs.in_file = str(tpl_brainmask_path)
    refine_mask = pe.Node(BinaryDilation(radius=3, iterations=2),
                          name="refine_mask")

    fixed_masks = pe.Node(niu.Merge(4),
                          name="fixed_masks",
                          run_without_submitting=True)
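    # One entry per antsRegistration level ("NULL" disables masking at that
    # level): the coarse levels run unmasked, while the final levels use the
    # extraction regmask (if available) and the dilated, refined brainmask.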
    fixed_masks.inputs.in1 = "NULL"
    fixed_masks.inputs.in2 = "NULL"
    fixed_masks.inputs.in3 = "NULL" if not tpl_regmask_path else _pop(
        tpl_regmask_path)

    # Set up initial spatial normalization
    ants_params = "testing" if sloppy else "precise"
    norm = pe.Node(
        Registration(from_file=pkgr_fn(
            "nibabies.data", f"antsBrainExtraction_{ants_params}.json")),
        name="norm",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm.inputs.float = sloppy
    if debug:
        norm.inputs.args = "--write-interval-volumes 5"

    map_mask_t2w = pe.Node(
        ApplyTransforms(interpolation="Gaussian", float=True),
        name="map_mask_t2w",
        mem_gb=1,
    )

    # map template brainmask to t2w space
    map_mask_t2w.inputs.input_image = str(tpl_brainmask_path)

    thr_t2w_mask = pe.Node(Binarize(thresh_low=0.80), name="thr_t2w_mask")

    # Refine INU correction
    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            bspline_fitting_distance=bspline_fitting_distance,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 5,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5),
                         name="final_clip")
    apply_mask = pe.Node(ApplyMask(), name="apply_mask")

    # fmt:off
    workflow.connect([
        (inputnode, final_n4, [("in_t2w", "input_image")]),
        # 1. Massage T2w
        (inputnode, mrg_t2w, [("in_t2w", "in1")]),
        (inputnode, lap_t2w, [("in_t2w", "op1")]),
        (inputnode, map_mask_t2w, [("in_t2w", "reference_image")]),
        (bin_regmask, refine_mask, [("out_file", "in_file")]),
        (refine_mask, fixed_masks, [("out_file", "in4")]),
        (lap_t2w, norm_lap_t2w, [("output_image", "in_file")]),
        (norm_lap_t2w, mrg_t2w, [("out", "in2")]),
        # 2. Prepare template
        (clip_tmpl, lap_tmpl, [("out_file", "op1")]),
        (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]),
        (clip_tmpl, mrg_tmpl, [("out_file", "in1")]),
        (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]),
        # 3. Set normalization node inputs
        (mrg_tmpl, norm, [("out", "fixed_image")]),
        (mrg_t2w, norm, [("out", "moving_image")]),
        (fixed_masks, norm, [("out", "fixed_image_masks")]),
        # 4. Map template brainmask into T2w space
        (norm, map_mask_t2w, [("reverse_transforms", "transforms"),
                              ("reverse_invert_flags",
                               "invert_transform_flags")]),
        (map_mask_t2w, thr_t2w_mask, [("output_image", "in_file")]),
        (thr_t2w_mask, apply_mask, [("out_mask", "in_mask")]),
        (final_n4, apply_mask, [("output_image", "in_file")]),
        # 5. Refine T2w INU correction with brain mask
        (map_mask_t2w, final_n4, [("output_image", "weight_image")]),
        (final_n4, final_clip, [("output_image", "in_file")]),
        # 6. Outputs
        (final_clip, outputnode, [("out_file", "t2w_preproc")]),
        (map_mask_t2w, outputnode, [("output_image", "out_probmap")]),
        (thr_t2w_mask, outputnode, [("out_mask", "out_mask")]),
        (apply_mask, outputnode, [("out_file", "t2w_brain")]),
    ])
    # fmt:on

    if ants_affine_init:
        from nipype.interfaces.ants.utils import AI

        ants_kwargs = dict(
            metric=("Mattes", 32, "Regular", 0.2),
            transform=("Affine", 0.1),
            search_factor=(20, 0.12),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            search_grid=(40, (0, 40, 40)),
            verbose=True,
        )

        if ants_affine_init == "random":
            ants_kwargs["metric"] = ("Mattes", 32, "Random", 0.2)
        if ants_affine_init == "search":
            ants_kwargs["search_grid"] = (20, (20, 40, 40))

        init_aff = pe.Node(
            AI(**ants_kwargs),
            name="init_aff",
            n_procs=omp_nthreads,
        )
        if tpl_regmask_path:
            init_aff.inputs.fixed_image_mask = _pop(tpl_regmask_path)

        # fmt:off
        workflow.connect([
            (clip_tmpl, init_aff, [("out_file", "fixed_image")]),
            (inputnode, init_aff, [("in_t2w", "moving_image")]),
            (init_aff, norm, [("output_transform", "initial_moving_transform")
                              ]),
        ])
        # fmt:on

    return workflow
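
A minimal usage sketch (not part of the scraped example) for the workflow defined above, assuming the function is in scope; the participant age, thread count, and file path are placeholder assumptions, and running the graph is left commented out.

# Hypothetical usage of init_infant_brain_extraction_wf (Example #7).
brain_extraction_wf = init_infant_brain_extraction_wf(
    age_months=6,                      # used to pick the template cohort
    skull_strip_template="MNIInfant",  # see the Pros/Cons list in the docstring
    sloppy=True,                       # fast "testing" registration parameters
    omp_nthreads=4,
)
brain_extraction_wf.get_node("inputnode").inputs.in_t2w = "sub-01_T2w.nii.gz"  # placeholder path
# brain_extraction_wf.base_dir = "work"
# brain_extraction_wf.run()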
Example #8
def init_coeff2epi_wf(
    omp_nthreads,
    debug=False,
    write_coeff=False,
    name="coeff2epi_wf",
):
    """
    Move the field coefficients on to the target (distorted) EPI space.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.apply.registration import init_coeff2epi_wf
            wf = init_coeff2epi_wf(omp_nthreads=2)

    Parameters
    ----------
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use.
    debug : :obj:`bool`
        Run fast configurations of registrations.
    name : :obj:`str`
        Unique name of this workflow.
    write_coeff : :obj:`bool`
        Whether to also map the coefficients file on to the target EPI space.

    Inputs
    ------
    target_ref
        the target EPI reference image
    target_mask
        a brain mask corresponding to ``target_ref``
    fmap_ref
        the reference (anatomical) image corresponding to ``fmap``
    fmap_mask
        a brain mask corresponding to ``fmap``
    fmap_coeff
        fieldmap coefficients

    Outputs
    -------
    fmap_coeff
        fieldmap coefficients in the space of the target reference EPI
    target_ref
        the target reference EPI resampled into the fieldmap reference for
        quality control purposes.

    """
    from packaging.version import parse as parseversion, Version
    from niworkflows.interfaces.fixes import FixHeaderRegistration as Registration
    from ...interfaces.bspline import TransformCoefficients
    from ...utils.misc import front as _pop

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The estimated *fieldmap* was then aligned with rigid-registration to the target
EPI (echo-planar imaging) reference run.
The field coefficients were mapped on to the reference EPI using the transform.
"""
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "target_ref", "target_mask", "fmap_ref", "fmap_mask", "fmap_coeff"
        ]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["target_ref", "fmap_coeff"]),
        name="outputnode")

    # Register the reference of the fieldmap to the reference
    # of the target image (the one that shall be corrected)
    ants_settings = pkgrf(
        "sdcflows", f"data/fmap-any_registration{'_testing' * debug}.json")

    coregister = pe.Node(
        Registration(
            from_file=ants_settings,
            output_warped_image=True,
        ),
        name="coregister",
        n_procs=omp_nthreads,
    )

    ver = coregister.interface.version or "2.2.0"
    mask_trait_s = "s" if parseversion(ver) >= Version("2.2.0") else ""

    # fmt: off
    workflow.connect([
        (inputnode, coregister, [
            ("target_ref", "moving_image"),
            ("fmap_ref", "fixed_image"),
            ("target_mask", f"moving_image_mask{mask_trait_s}"),
            ("fmap_mask", f"fixed_image_mask{mask_trait_s}"),
        ]),
        (coregister, outputnode, [("warped_image", "target_ref")]),
    ])
    # fmt: on

    if not write_coeff:
        return workflow

    # Map the coefficients into the EPI space
    map_coeff = pe.Node(TransformCoefficients(), name="map_coeff")
    map_coeff.interface._always_run = debug

    # fmt: off
    workflow.connect([
        (inputnode, map_coeff, [("fmap_coeff", "in_coeff"),
                                ("fmap_ref", "fmap_ref")]),
        (coregister, map_coeff, [(("forward_transforms", _pop), "transform")]),
        (map_coeff, outputnode, [("out_coeff", "fmap_coeff")]),
    ])
    # fmt: on

    return workflow
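
A minimal usage sketch (assumed, not from the source) showing the inputs the workflow above expects and the ``write_coeff=True`` path; all file names are placeholders.

# Hypothetical usage of init_coeff2epi_wf (Example #8) with coefficient mapping.
coeff2epi_wf = init_coeff2epi_wf(omp_nthreads=2, write_coeff=True)
inputnode = coeff2epi_wf.get_node("inputnode")
inputnode.inputs.target_ref = "sub-01_task-rest_boldref.nii.gz"        # placeholder
inputnode.inputs.target_mask = "sub-01_task-rest_desc-brain_mask.nii.gz"
inputnode.inputs.fmap_ref = "sub-01_magnitude1.nii.gz"
inputnode.inputs.fmap_mask = "sub-01_desc-brain_mask.nii.gz"
inputnode.inputs.fmap_coeff = ["sub-01_desc-coeff_fieldmap.nii.gz"]    # list of coefficient files
# coeff2epi_wf.run()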
Example #9
def init_coregistration_wf(
    *,
    bspline_fitting_distance=200,
    mem_gb=3.0,
    name="coregistration_wf",
    omp_nthreads=None,
    sloppy=False,
    debug=False,
):
    """
    Set-up a T2w-to-T1w within-baby co-registration framework.

    See the ANTs' registration config file (under ``nibabies/data``) for further
    details.
    The main surprise in it is that, for some participants, accurate registration
    requires extra degrees of freedom (one affine level and one SyN level) to ensure
    that the T1w and T2w images align well.
    I attribute this requirement to the following potential reasons:

      * The T1w image and the T2w image were acquired in different sessions, far
        enough apart in time for growth to happen.
        Although this is possible in theory, the images we have tested do not appear
        to have been acquired in different sessions.
      * The skull is still so malleable that a change in the baby's position inside
        the coil produces an actual change in the overall shape of the head.
      * Nonlinear distortions of the T1w and T2w images are, for some reason, more
        pronounced in babies than in adults.
        We would need to look into each sequence's details to confirm this.

    Parameters
    ----------
    bspline_fitting_distance : :obj:`float`
        Distance in mm between B-Spline control points for N4 INU estimation.
    mem_gb : :obj:`float`
        Base memory fingerprint unit.
    name : :obj:`str`
        This particular workflow's unique name (Nipype requirement).
    omp_nthreads : :obj:`int`
        The number of threads for individual processes in this workflow.
    sloppy : :obj:`bool`
        Run in *sloppy* mode.
    debug : :obj:`bool`
        Produce intermediate registration files


    Inputs
    ------
    in_t1w : :obj:`str`
        The unprocessed input T1w image.
    in_t2w_preproc : :obj:`str`
        The preprocessed input T2w image, from the brain extraction workflow.
    in_mask : :obj:`str`
        The brainmask, as obtained in T2w space.
    in_probmap : :obj:`str`
        The probabilistic brainmask, as obtained in T2w space.

    Outputs
    -------
    t1w_preproc : :obj:`str`
        The preprocessed T1w image (INU and clipping).
    t2w_preproc : :obj:`str`
        The preprocessed T2w image (INU and clipping), aligned into the T1w's space.
    t1w_brain : :obj:`str`
        The preprocessed, brain-extracted T1w image.
    t1w_mask : :obj:`str`
        The binary brainmask projected from the T2w.
    t1w2t2w_xfm : :obj:`str`
        The T1w-to-T2w mapping.

    """
    from nipype.interfaces.ants import N4BiasFieldCorrection
    from niworkflows.interfaces.fixes import (
        FixHeaderRegistration as Registration,
        FixHeaderApplyTransforms as ApplyTransforms,
    )
    from niworkflows.interfaces.nibabel import ApplyMask, Binarize
    from ...interfaces.nibabel import BinaryDilation

    workflow = pe.Workflow(name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=["in_t1w", "in_t2w_preproc", "in_mask", "in_probmap"]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "t1w_preproc",
            "t1w_brain",
            "t1w_mask",
            "t1w2t2w_xfm",
            "t2w_preproc",
        ]),
        name="outputnode",
    )

    fixed_masks_arg = pe.Node(niu.Merge(3),
                              name="fixed_masks_arg",
                              run_without_submitting=True)

    # Dilate t2w mask for easier t1->t2 registration
    reg_mask = pe.Node(BinaryDilation(radius=8, iterations=3), name="reg_mask")
    refine_mask = pe.Node(BinaryDilation(radius=8, iterations=1),
                          name="refine_mask")

    # Set up T2w -> T1w within-subject registration
    coreg = pe.Node(
        Registration(
            from_file=pkgr_fn("nibabies.data", "within_subject_t1t2.json")),
        name="coreg",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    coreg.inputs.float = sloppy
    if debug:
        coreg.inputs.args = "--write-interval-volumes 5"
        coreg.inputs.output_inverse_warped_image = sloppy
        coreg.inputs.output_warped_image = sloppy

    map_mask = pe.Node(ApplyTransforms(interpolation="Gaussian"),
                       name="map_mask",
                       mem_gb=1)
    map_t2w = pe.Node(ApplyTransforms(interpolation="BSpline"),
                      name="map_t2w",
                      mem_gb=1)
    thr_mask = pe.Node(Binarize(thresh_low=0.80), name="thr_mask")

    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            bspline_fitting_distance=bspline_fitting_distance,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 5,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    apply_mask = pe.Node(ApplyMask(), name="apply_mask")

    # fmt:off
    workflow.connect([
        (inputnode, map_mask, [("in_t1w", "reference_image")]),
        (inputnode, final_n4, [("in_t1w", "input_image")]),
        (inputnode, coreg, [("in_t1w", "moving_image"),
                            ("in_t2w_preproc", "fixed_image")]),
        (inputnode, map_mask, [("in_probmap", "input_image")]),
        (inputnode, reg_mask, [("in_mask", "in_file")]),
        (inputnode, refine_mask, [("in_mask", "in_file")]),
        (reg_mask, fixed_masks_arg, [("out_file", "in1")]),
        (reg_mask, fixed_masks_arg, [("out_file", "in2")]),
        (refine_mask, fixed_masks_arg, [("out_file", "in3")]),
        (inputnode, map_t2w, [("in_t1w", "reference_image")]),
        (inputnode, map_t2w, [("in_t2w_preproc", "input_image")]),
        (fixed_masks_arg, coreg, [("out", "fixed_image_masks")]),
        (coreg, map_mask, [
            ("reverse_transforms", "transforms"),
            ("reverse_invert_flags", "invert_transform_flags"),
        ]),
        (coreg, map_t2w, [
            ("reverse_transforms", "transforms"),
            ("reverse_invert_flags", "invert_transform_flags"),
        ]),
        (map_mask, thr_mask, [("output_image", "in_file")]),
        (map_mask, final_n4, [("output_image", "weight_image")]),
        (final_n4, apply_mask, [("output_image", "in_file")]),
        (thr_mask, apply_mask, [("out_mask", "in_mask")]),
        (final_n4, outputnode, [("output_image", "t1w_preproc")]),
        (map_t2w, outputnode, [("output_image", "t2w_preproc")]),
        (thr_mask, outputnode, [("out_mask", "t1w_mask")]),
        (apply_mask, outputnode, [("out_file", "t1w_brain")]),
        (coreg, outputnode, [("forward_transforms", "t1w2t2w_xfm")]),
    ])
    # fmt:on
    return workflow
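
A minimal sketch (assumed) of how this co-registration workflow could be chained to the brain-extraction workflow of Example #7 inside a parent workflow, following the input/output names documented above; the parent workflow name and all file paths are placeholders.

# Hypothetical wiring of Examples #7 and #9 inside a parent workflow.
from nipype.pipeline import engine as pe

anat_wf = pe.Workflow(name="anat_preproc_sketch")
be_wf = init_infant_brain_extraction_wf(age_months=6, omp_nthreads=4)
coreg_wf = init_coregistration_wf(omp_nthreads=4)
be_wf.get_node("inputnode").inputs.in_t2w = "sub-01_T2w.nii.gz"     # placeholder
coreg_wf.get_node("inputnode").inputs.in_t1w = "sub-01_T1w.nii.gz"  # placeholder
anat_wf.connect([
    (be_wf, coreg_wf, [
        ("outputnode.t2w_preproc", "inputnode.in_t2w_preproc"),
        ("outputnode.out_mask", "inputnode.in_mask"),
        ("outputnode.out_probmap", "inputnode.in_probmap"),
    ]),
])
# anat_wf.run()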
Example #10
def init_syn_sdc_wf(
    *,
    atlas_threshold=3,
    debug=False,
    name="syn_sdc_wf",
    omp_nthreads=1,
):
    """
    Build the *fieldmap-less* susceptibility-distortion estimation workflow.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.


    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fit.syn import init_syn_sdc_wf
            wf = init_syn_sdc_wf(omp_nthreads=8)

    Parameters
    ----------
    atlas_threshold : :obj:`float`
        Exclude from the registration metric computation areas with average distortions
        below this threshold (in mm).
    debug : :obj:`bool`
        Whether a fast (less accurate) configuration of the workflow should be applied.
    name : :obj:`str`
        Name for this workflow
    omp_nthreads : :obj:`int`
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    epi_ref : :obj:`tuple` (:obj:`str`, :obj:`dict`)
        A tuple, where the first element is the path of the distorted EPI
        reference map (e.g., an average of *b=0* volumes), and the second
        element is a dictionary of associated metadata.
    epi_mask : :obj:`str`
        A path to a brain mask corresponding to ``epi_ref``.
    anat_brain : :obj:`str`
        A preprocessed, skull-stripped anatomical (T1w or T2w) image.
    std2anat_xfm : :obj:`str`
        inverse registration transform of T1w image to MNI template
    anat2epi_xfm : :obj:`str`
        transform mapping coordinates from the EPI space to the anatomical
        space (i.e., the transform to resample anatomical info into EPI space).

    Outputs
    -------
    fmap : :obj:`str`
        The path of the estimated fieldmap.
    fmap_ref : :obj:`str`
        The path of an unwarped conversion of files in ``epi_ref``.
    fmap_coeff : :obj:`str` or :obj:`list` of :obj:`str`
        The path(s) of the B-Spline coefficients supporting the fieldmap.

    """
    from pkg_resources import resource_filename as pkgrf
    from packaging.version import parse as parseversion, Version
    from nipype.interfaces.image import Rescale
    from niworkflows.interfaces.fixes import (
        FixHeaderApplyTransforms as ApplyTransforms,
        FixHeaderRegistration as Registration,
    )
    from niworkflows.interfaces.nibabel import Binarize
    from ...utils.misc import front as _pop
    from ...interfaces.utils import Deoblique, Reoblique
    from ...interfaces.bspline import (
        BSplineApprox,
        DEFAULT_LF_ZOOMS_MM,
        DEFAULT_HF_ZOOMS_MM,
        DEFAULT_ZOOMS_MM,
    )
    from ..ancillary import init_brainextraction_wf

    ants_version = Registration().version
    if ants_version and parseversion(ants_version) < Version("2.2.0"):
        raise RuntimeError(
            f"Please upgrade ANTs to 2.2 or older ({ants_version} found).")

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""\
A deformation field to correct for susceptibility distortions was estimated
based on *fMRIPrep*'s *fieldmap-less* approach.
The deformation field is that resulting from co-registering the EPI reference
to the same-subject T1w-reference with its intensity inverted [@fieldmapless1;
@fieldmapless2].
Registration is performed with `antsRegistration`
(ANTs {ants_version or "-- version unknown"}), and
the process regularized by constraining deformation to be nonzero only
along the phase-encoding direction, and modulated with an average fieldmap
template [@fieldmapless3].
"""
    inputnode = pe.Node(
        niu.IdentityInterface([
            "epi_ref", "epi_mask", "anat_brain", "std2anat_xfm", "anat2epi_xfm"
        ]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(["fmap", "fmap_ref", "fmap_coeff", "fmap_mask"]),
        name="outputnode",
    )

    invert_t1w = pe.Node(Rescale(invert=True), name="invert_t1w", mem_gb=0.3)
    anat2epi = pe.Node(ApplyTransforms(interpolation="BSpline"),
                       name="anat2epi",
                       n_procs=omp_nthreads)

    # Mapping & preparing prior knowledge
    # Concatenate transform files:
    # 1) anat -> EPI; 2) MNI -> anat; 3) ATLAS -> MNI
    transform_list = pe.Node(niu.Merge(3),
                             name="transform_list",
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
    transform_list.inputs.in3 = pkgrf(
        "sdcflows", "data/fmap_atlas_2_MNI152NLin2009cAsym_affine.mat")
    prior2epi = pe.Node(
        ApplyTransforms(
            input_image=pkgrf("sdcflows", "data/fmap_atlas.nii.gz")),
        name="prior2epi",
        n_procs=omp_nthreads,
        mem_gb=0.3,
    )
    atlas_msk = pe.Node(Binarize(thresh_low=atlas_threshold), name="atlas_msk")

    deoblique = pe.Node(Deoblique(), name="deoblique")
    reoblique = pe.Node(Reoblique(), name="reoblique")

    # SyN Registration Core
    syn = pe.Node(
        Registration(
            from_file=pkgrf("sdcflows", "data/susceptibility_syn.json")),
        name="syn",
        n_procs=omp_nthreads,
    )

    unwarp_ref = pe.Node(
        ApplyTransforms(interpolation="BSpline"),
        name="unwarp_ref",
    )

    brainextraction_wf = init_brainextraction_wf()

    # Extract nonzero component
    extract_field = pe.Node(niu.Function(function=_extract_field),
                            name="extract_field")

    # Regularize with B-Splines
    bs_filter = pe.Node(BSplineApprox(),
                        n_procs=omp_nthreads,
                        name="bs_filter")
    bs_filter.interface._always_run = debug
    bs_filter.inputs.bs_spacing = ([DEFAULT_LF_ZOOMS_MM, DEFAULT_HF_ZOOMS_MM]
                                   if not debug else [DEFAULT_ZOOMS_MM])
    bs_filter.inputs.extrapolate = not debug

    # fmt: off
    workflow.connect([
        (inputnode, transform_list, [("anat2epi_xfm", "in1"),
                                     ("std2anat_xfm", "in2")]),
        (inputnode, invert_t1w, [("anat_brain", "in_file"),
                                 (("epi_ref", _pop), "ref_file")]),
        (inputnode, anat2epi, [(("epi_ref", _pop), "reference_image"),
                               ("anat2epi_xfm", "transforms")]),
        (inputnode, deoblique, [(("epi_ref", _pop), "in_epi"),
                                ("epi_mask", "mask_epi")]),
        (inputnode, reoblique, [(("epi_ref", _pop), "in_epi")]),
        (inputnode, syn, [(("epi_ref", _warp_dir), "restrict_deformation")]),
        (inputnode, unwarp_ref, [(("epi_ref", _pop), "reference_image"),
                                 (("epi_ref", _pop), "input_image")]),
        (inputnode, prior2epi, [(("epi_ref", _pop), "reference_image")]),
        (inputnode, extract_field, [("epi_ref", "epi_meta")]),
        (invert_t1w, anat2epi, [("out_file", "input_image")]),
        (transform_list, prior2epi, [("out", "transforms")]),
        (prior2epi, atlas_msk, [("output_image", "in_file")]),
        (anat2epi, deoblique, [("output_image", "in_anat")]),
        (atlas_msk, deoblique, [("out_mask", "mask_anat")]),
        (deoblique, syn, [("out_epi", "moving_image"),
                          ("out_anat", "fixed_image"),
                          ("mask_epi", "moving_image_masks"),
                          (("mask_anat", _fixed_masks_arg),
                           "fixed_image_masks")]),
        (syn, extract_field, [("forward_transforms", "in_file")]),
        (syn, unwarp_ref, [("forward_transforms", "transforms")]),
        (unwarp_ref, reoblique, [("output_image", "in_plumb")]),
        (reoblique, brainextraction_wf, [("out_epi", "inputnode.in_file")]),
        (extract_field, reoblique, [("out", "in_field")]),
        (reoblique, bs_filter, [("out_field", "in_data")]),
        (brainextraction_wf, bs_filter, [("outputnode.out_mask", "in_mask")]),
        (reoblique, outputnode, [("out_epi", "fmap_ref")]),
        (brainextraction_wf, outputnode, [("outputnode.out_mask", "fmap_mask")
                                          ]),
        (bs_filter, outputnode,
         [("out_extrapolated" if not debug else "out_field", "fmap"),
          ("out_coeff", "fmap_coeff")]),
    ])
    # fmt: on

    return workflow
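
A minimal sketch (assumed) illustrating the ``epi_ref`` input, which the docstring above describes as a (path, metadata) tuple; the BIDS ``PhaseEncodingDirection`` key and all paths are placeholder assumptions.

# Hypothetical usage of init_syn_sdc_wf (Example #10).
syn_wf = init_syn_sdc_wf(omp_nthreads=4, debug=True)
inputnode = syn_wf.get_node("inputnode")
inputnode.inputs.epi_ref = (
    "sub-01_task-rest_boldref.nii.gz",   # distorted EPI reference (placeholder)
    {"PhaseEncodingDirection": "j-"},    # metadata assumed to drive restrict_deformation
)
inputnode.inputs.epi_mask = "sub-01_task-rest_desc-brain_mask.nii.gz"
inputnode.inputs.anat_brain = "sub-01_desc-brain_T1w.nii.gz"
inputnode.inputs.std2anat_xfm = "sub-01_from-MNI152NLin2009cAsym_to-T1w_xfm.h5"
inputnode.inputs.anat2epi_xfm = "sub-01_from-T1w_to-bold_xfm.txt"
# syn_wf.run()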
Example #11
def init_templateflow_wf(
    bids_dir,
    output_dir,
    participant_label,
    mov_template,
    ref_template='MNI152NLin2009cAsym',
    use_float=True,
    omp_nthreads=None,
    mem_gb=3.0,
    modality='T1w',
    normalization_quality='precise',
    name='templateflow_wf',
    fs_subjects_dir=None,
):
    """
    A Nipype workflow to perform image registration between two templates
    *R* and *M*. *R* is the *reference template*, selected by a templateflow
    identifier such as ``MNI152NLin2009cAsym``, and *M* is the *moving
    template* (e.g., ``MNI152Lin``). This workflow maps data defined in
    template-*M* space onto template-*R* space.


    1. Run the surrogate images through ``antsBrainExtraction``.
    2. Recompute :abbr:`INU (intensity non-uniformity)` correction using
       the mask obtained in 1).
    3. Independently, run spatial normalization of every
       :abbr:`INU (intensity non-uniformity)` corrected image
       (supplied via ``in_files``) to both templates.
    4. Calculate an initialization between both templates, using them directly.
    5. Run multi-channel image registration of the images resulting from
       3). Both sets of images (one registered to *R* and another to *M*)
       are then used as reference and moving images in the registration
       framework.

    **Parameters**

    in_files: list of files
        a list of paths pointing to the images that will be used as surrogates
    mov_template: str
        a templateflow identifier for template-*M*
    ref_template: str
        a templateflow identifier for template-*R* (default: ``MNI152NLin2009cAsym``).


    """
    # number of participants
    ninputs = len(participant_label)
    ants_env = {
        'NSLOTS': '%d' % omp_nthreads,
        'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS': '%d' % omp_nthreads,
        'OMP_NUM_THREADS': '%d' % omp_nthreads,
    }

    # Get path to templates
    tpl_ref = str(
        get_template(ref_template, suffix=modality, desc=None, resolution=1))
    tpl_ref_mask = str(
        get_template(ref_template, suffix='mask', desc='brain', resolution=1))
    tpl_mov = str(
        get_template(mov_template, suffix=modality, desc=None, resolution=1))
    tpl_mov_mask = str(
        get_template(mov_template, suffix='mask', desc='brain', resolution=1))

    wf = pe.Workflow(name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['participant_label']),
                        name='inputnode')
    inputnode.iterables = ('participant_label',
                           sorted(list(participant_label)))

    pick_file = pe.Node(niu.Function(function=_bids_pick),
                        name='pick_file',
                        run_without_submitting=True)
    pick_file.inputs.bids_root = bids_dir

    ref_bex = init_brain_extraction_wf(
        in_template=ref_template,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        bids_suffix=modality,
        name='reference_bex',
    )

    mov_bex = init_brain_extraction_wf(
        in_template=mov_template,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        bids_suffix=modality,
        name='moving_bex',
    )

    ref_norm = pe.Node(Registration(from_file=pkgr.resource_filename(
        'niworkflows.data', 't1w-mni_registration_%s_000.json' %
        normalization_quality)),
                       name='ref_norm',
                       n_procs=omp_nthreads)
    ref_norm.inputs.fixed_image = tpl_ref
    ref_norm.inputs.fixed_image_masks = tpl_ref_mask
    ref_norm.inputs.environ = ants_env

    # Register the INU-corrected image to the other template
    mov_norm = pe.Node(Registration(from_file=pkgr.resource_filename(
        'niworkflows.data', 't1w-mni_registration_%s_000.json' %
        normalization_quality)),
                       name='mov_norm',
                       n_procs=omp_nthreads)
    mov_norm.inputs.fixed_image = tpl_mov
    mov_norm.inputs.fixed_image_masks = tpl_mov_mask
    mov_norm.inputs.environ = ants_env

    # Initialize between-templates transform with antsAI
    init_aff = pe.Node(AI(
        metric=('Mattes', 32, 'Regular', 0.2),
        transform=('Affine', 0.1),
        search_factor=(20, 0.12),
        principal_axes=False,
        convergence=(10, 1e-6, 10),
        verbose=True,
        fixed_image=tpl_ref,
        fixed_image_mask=tpl_ref_mask,
        moving_image=tpl_mov,
        moving_image_mask=tpl_mov_mask,
        environ=ants_env,
    ),
                       name='init_aff',
                       n_procs=omp_nthreads)

    ref_buffer = pe.JoinNode(niu.IdentityInterface(fields=['fixed_image']),
                             joinsource='inputnode',
                             joinfield='fixed_image',
                             name='ref_buffer')

    mov_buffer = pe.JoinNode(niu.IdentityInterface(fields=['moving_image']),
                             joinsource='inputnode',
                             joinfield='moving_image',
                             name='mov_buffer')

    flow = pe.Node(
        Registration(from_file=pkgr.resource_filename(
            'niworkflows.data', 't1w-mni_registration_%s_000.json' %
            normalization_quality)),
        name='flow_norm',
        n_procs=omp_nthreads,
    )
    flow.inputs.fixed_image_masks = tpl_ref_mask
    flow.inputs.moving_image_masks = tpl_mov_mask
    flow.inputs.metric = [[v] * ninputs for v in flow.inputs.metric]
    flow.inputs.metric_weight = [[1 / ninputs] * ninputs
                                 for _ in flow.inputs.metric_weight]
    flow.inputs.radius_or_number_of_bins = [
        [v] * ninputs for v in flow.inputs.radius_or_number_of_bins
    ]
    flow.inputs.sampling_percentage = [[v] * ninputs
                                       for v in flow.inputs.sampling_percentage
                                       ]
    flow.inputs.sampling_strategy = [[v] * ninputs
                                     for v in flow.inputs.sampling_strategy]
    flow.inputs.environ = ants_env

    # Datasinking
    ref_norm_ds = pe.Node(DerivativesDataSink(base_directory=str(
        output_dir.parent),
                                              out_path_base=output_dir.name,
                                              space=ref_template,
                                              desc='preproc',
                                              keep_dtype=True),
                          name='ref_norm_ds',
                          run_without_submitting=True)

    mov_norm_ds = pe.Node(DerivativesDataSink(base_directory=str(
        output_dir.parent),
                                              out_path_base=output_dir.name,
                                              space=mov_template,
                                              desc='preproc',
                                              keep_dtype=True),
                          name='mov_norm_ds',
                          run_without_submitting=True)

    xfm_ds = pe.Node(DerivativesDataSink(
        base_directory=str(output_dir.parent),
        out_path_base=output_dir.name,
        allowed_entities=['from', 'mode'],
        mode='image',
        suffix='xfm',
        source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template),
        **{'from': mov_template}),
                     name='xfm_ds',
                     run_without_submitting=True)

    wf.connect([
        (inputnode, pick_file, [('participant_label', 'participant_label')]),
        (pick_file, ref_bex, [('out', 'inputnode.in_files')]),
        (pick_file, mov_bex, [('out', 'inputnode.in_files')]),
        (ref_bex, ref_norm, [('outputnode.bias_corrected', 'moving_image'),
                             ('outputnode.out_mask', 'moving_image_masks'),
                             ('norm.forward_transforms',
                              'initial_moving_transform')]),
        (ref_bex, mov_norm, [('outputnode.bias_corrected', 'moving_image')]),
        (mov_bex, mov_norm, [('outputnode.out_mask', 'moving_image_masks'),
                             ('norm.forward_transforms',
                              'initial_moving_transform')]),
        (init_aff, flow, [('output_transform', 'initial_moving_transform')]),
        (ref_norm, ref_buffer, [('warped_image', 'fixed_image')]),
        (mov_norm, mov_buffer, [('warped_image', 'moving_image')]),
        (ref_buffer, flow, [('fixed_image', 'fixed_image')]),
        (mov_buffer, flow, [('moving_image', 'moving_image')]),
        (pick_file, ref_norm_ds, [('out', 'source_file')]),
        (ref_norm, ref_norm_ds, [('warped_image', 'in_file')]),
        (pick_file, mov_norm_ds, [('out', 'source_file')]),
        (mov_norm, mov_norm_ds, [('warped_image', 'in_file')]),
        (flow, xfm_ds, [('composite_transform', 'in_file')]),
    ])

    if fs_subjects_dir:
        fssource = pe.Node(FreeSurferSource(subjects_dir=str(fs_subjects_dir)),
                           name='fssource',
                           run_without_submitting=True)
        tonative = pe.Node(fs.Label2Vol(subjects_dir=str(fs_subjects_dir)),
                           name='tonative')
        tonii = pe.Node(fs.MRIConvert(out_type='niigz',
                                      resample_type='nearest'),
                        name='tonii')

        ref_aparc = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                            float=True,
                                            reference_image=tpl_ref,
                                            environ=ants_env),
                            name='ref_aparc',
                            mem_gb=1,
                            n_procs=omp_nthreads)

        mov_aparc = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                            float=True,
                                            reference_image=tpl_mov,
                                            environ=ants_env),
                            name='mov_aparc',
                            mem_gb=1,
                            n_procs=omp_nthreads)

        ref_aparc_buffer = pe.JoinNode(niu.IdentityInterface(fields=['aparc']),
                                       joinsource='inputnode',
                                       joinfield='aparc',
                                       name='ref_aparc_buffer')

        ref_join_labels = pe.Node(AntsJointFusion(
            target_image=[tpl_ref],
            out_label_fusion='merged_aparc.nii.gz',
            out_intensity_fusion_name_format='merged_aparc_intensity_%d.nii.gz',
            out_label_post_prob_name_format='merged_aparc_posterior_%d.nii.gz',
            out_atlas_voting_weight_name_format='merged_aparc_weight_%d.nii.gz',
            environ=ants_env,
        ),
                                  name='ref_join_labels',
                                  n_procs=omp_nthreads)

        ref_join_labels_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                                     name='ref_join_labels_ds',
                                     run_without_submitting=True)

        ref_join_probs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='probtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                                    name='ref_join_probs_ds',
                                    run_without_submitting=True)

        # ref_join_voting_ds = pe.Node(
        #     DerivativesDataSink(
        #         base_directory=str(output_dir.parent),
        #         out_path_base=output_dir.name, space=ref_template,
        #         suffix='probtissue', desc='aparcvoting', keep_dtype=False,
        #         source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
        #     name='ref_join_voting_ds', run_without_submitting=True)

        mov_aparc_buffer = pe.JoinNode(niu.IdentityInterface(fields=['aparc']),
                                       joinsource='inputnode',
                                       joinfield='aparc',
                                       name='mov_aparc_buffer')

        mov_join_labels = pe.Node(AntsJointFusion(
            target_image=[tpl_mov],
            out_label_fusion='merged_aparc.nii.gz',
            out_intensity_fusion_name_format='merged_aparc_intensity_%d.nii.gz',
            out_label_post_prob_name_format='merged_aparc_posterior_%d.nii.gz',
            out_atlas_voting_weight_name_format='merged_aparc_weight_%d.nii.gz',
            environ=ants_env,
        ),
                                  name='mov_join_labels',
                                  n_procs=omp_nthreads)

        mov_join_labels_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                                     name='mov_join_labels_ds',
                                     run_without_submitting=True)

        mov_join_probs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='probtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                                    name='mov_join_probs_ds',
                                    run_without_submitting=True)

        ref_aparc_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False),
                               name='ref_aparc_ds',
                               run_without_submitting=True)

        mov_aparc_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False),
                               name='mov_aparc_ds',
                               run_without_submitting=True)
        # Extract surfaces
        cifti_wf = init_gifti_surface_wf(name='cifti_surfaces',
                                         subjects_dir=str(fs_subjects_dir))

        # Move surfaces to template spaces
        gii2csv = pe.MapNode(GiftiToCSV(itk_lps=True),
                             iterfield=['in_file'],
                             name='gii2csv')
        ref_map_surf = pe.MapNode(ApplyTransformsToPoints(dimension=3,
                                                          environ=ants_env),
                                  n_procs=omp_nthreads,
                                  name='ref_map_surf',
                                  iterfield=['input_file'])
        ref_csv2gii = pe.MapNode(CSVToGifti(itk_lps=True),
                                 name='ref_csv2gii',
                                 iterfield=['in_file', 'gii_file'])
        ref_surfs_buffer = pe.JoinNode(
            niu.IdentityInterface(fields=['surfaces']),
            joinsource='inputnode',
            joinfield='surfaces',
            name='ref_surfs_buffer')
        ref_surfs_unzip = pe.Node(UnzipJoinedSurfaces(),
                                  name='ref_surfs_unzip',
                                  run_without_submitting=True)
        ref_ply = pe.MapNode(SurfacesToPointCloud(),
                             name='ref_ply',
                             iterfield=['in_files'])
        ref_recon = pe.MapNode(PoissonRecon(),
                               name='ref_recon',
                               iterfield=['in_file'])
        ref_avggii = pe.MapNode(PLYtoGifti(),
                                name='ref_avggii',
                                iterfield=['in_file', 'surf_key'])
        ref_smooth = pe.MapNode(fs.SmoothTessellation(),
                                name='ref_smooth',
                                iterfield=['in_file'])

        ref_surfs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            keep_dtype=False,
            compress=False),
                               name='ref_surfs_ds',
                               run_without_submitting=True)
        ref_avg_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            keep_dtype=False,
            compress=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                             name='ref_avg_ds',
                             run_without_submitting=True)

        mov_map_surf = pe.MapNode(ApplyTransformsToPoints(dimension=3,
                                                          environ=ants_env),
                                  n_procs=omp_nthreads,
                                  name='mov_map_surf',
                                  iterfield=['input_file'])
        mov_csv2gii = pe.MapNode(CSVToGifti(itk_lps=True),
                                 name='mov_csv2gii',
                                 iterfield=['in_file', 'gii_file'])
        mov_surfs_buffer = pe.JoinNode(
            niu.IdentityInterface(fields=['surfaces']),
            joinsource='inputnode',
            joinfield='surfaces',
            name='mov_surfs_buffer')
        mov_surfs_unzip = pe.Node(UnzipJoinedSurfaces(),
                                  name='mov_surfs_unzip',
                                  run_without_submitting=True)
        mov_ply = pe.MapNode(SurfacesToPointCloud(),
                             name='mov_ply',
                             iterfield=['in_files'])
        mov_recon = pe.MapNode(PoissonRecon(),
                               name='mov_recon',
                               iterfield=['in_file'])
        mov_avggii = pe.MapNode(PLYtoGifti(),
                                name='mov_avggii',
                                iterfield=['in_file', 'surf_key'])
        mov_smooth = pe.MapNode(fs.SmoothTessellation(),
                                name='mov_smooth',
                                iterfield=['in_file'])

        mov_surfs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            keep_dtype=False,
            compress=False),
                               name='mov_surfs_ds',
                               run_without_submitting=True)
        mov_avg_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            keep_dtype=False,
            compress=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                             name='mov_avg_ds',
                             run_without_submitting=True)

        wf.connect([
            (inputnode, fssource, [(('participant_label', _sub_decorate),
                                    'subject_id')]),
            (inputnode, cifti_wf, [(('participant_label', _sub_decorate),
                                    'inputnode.subject_id')]),
            (pick_file, cifti_wf, [('out', 'inputnode.in_t1w')]),
            (pick_file, tonii, [('out', 'reslice_like')]),
            # Select DKT aparc
            (fssource, tonative, [(('aparc_aseg', _last), 'seg_file'),
                                  ('rawavg', 'template_file'),
                                  ('aseg', 'reg_header')]),
            (tonative, tonii, [('vol_label_file', 'in_file')]),
            (tonii, ref_aparc, [('out_file', 'input_image')]),
            (tonii, mov_aparc, [('out_file', 'input_image')]),
            (ref_norm, ref_aparc, [('composite_transform', 'transforms')]),
            (mov_norm, mov_aparc, [('composite_transform', 'transforms')]),
            (ref_buffer, ref_join_labels, [('fixed_image', 'atlas_image')]),
            (ref_aparc, ref_aparc_buffer, [('output_image', 'aparc')]),
            (ref_aparc_buffer, ref_join_labels,
             [('aparc', 'atlas_segmentation_image')]),
            (mov_buffer, mov_join_labels, [('moving_image', 'atlas_image')]),
            (mov_aparc, mov_aparc_buffer, [('output_image', 'aparc')]),
            (mov_aparc_buffer, mov_join_labels,
             [('aparc', 'atlas_segmentation_image')]),
            # Datasinks
            (ref_join_labels, ref_join_labels_ds, [('out_label_fusion',
                                                    'in_file')]),
            (ref_join_labels, ref_join_probs_ds,
             [('out_label_post_prob', 'in_file'),
              (('out_label_post_prob', _get_extra), 'extra_values')]),
            # (ref_join_labels, ref_join_voting_ds, [
            #     ('out_atlas_voting_weight_name_format', 'in_file')]),
            (mov_join_labels, mov_join_labels_ds, [('out_label_fusion',
                                                    'in_file')]),
            (mov_join_labels, mov_join_probs_ds,
             [('out_label_post_prob', 'in_file'),
              (('out_label_post_prob', _get_extra), 'extra_values')]),
            (pick_file, ref_aparc_ds, [('out', 'source_file')]),
            (ref_aparc, ref_aparc_ds, [('output_image', 'in_file')]),
            (pick_file, mov_aparc_ds, [('out', 'source_file')]),
            (mov_aparc, mov_aparc_ds, [('output_image', 'in_file')]),
            # Mapping ref surfaces
            (cifti_wf, gii2csv, [(('outputnode.surf_norm', _discard_inflated),
                                  'in_file')]),
            (gii2csv, ref_map_surf, [('out_file', 'input_file')]),
            (ref_norm, ref_map_surf, [(('inverse_composite_transform',
                                        _ensure_list), 'transforms')]),
            (ref_map_surf, ref_csv2gii, [('output_file', 'in_file')]),
            (cifti_wf, ref_csv2gii, [(('outputnode.surf_norm',
                                       _discard_inflated), 'gii_file')]),
            (pick_file, ref_surfs_ds, [('out', 'source_file')]),
            (ref_csv2gii, ref_surfs_ds, [('out_file', 'in_file'),
                                         (('out_file', _get_surf_extra),
                                          'extra_values')]),
            (ref_csv2gii, ref_surfs_buffer, [('out_file', 'surfaces')]),
            (ref_surfs_buffer, ref_surfs_unzip, [('surfaces', 'in_files')]),
            (ref_surfs_unzip, ref_ply, [('out_files', 'in_files')]),
            (ref_ply, ref_recon, [('out_file', 'in_file')]),
            (ref_recon, ref_avggii, [('out_file', 'in_file')]),
            (ref_surfs_unzip, ref_avggii, [('surf_keys', 'surf_key')]),
            (ref_avggii, ref_smooth, [('out_file', 'in_file')]),
            (ref_smooth, ref_avg_ds, [('surface', 'in_file'),
                                      (('surface', _get_surf_extra),
                                       'extra_values')]),

            # Mapping mov surfaces
            (gii2csv, mov_map_surf, [('out_file', 'input_file')]),
            (mov_norm, mov_map_surf, [(('inverse_composite_transform',
                                        _ensure_list), 'transforms')]),
            (mov_map_surf, mov_csv2gii, [('output_file', 'in_file')]),
            (cifti_wf, mov_csv2gii, [(('outputnode.surf_norm',
                                       _discard_inflated), 'gii_file')]),
            (pick_file, mov_surfs_ds, [('out', 'source_file')]),
            (mov_csv2gii, mov_surfs_ds, [('out_file', 'in_file'),
                                         (('out_file', _get_surf_extra),
                                          'extra_values')]),
            (mov_csv2gii, mov_surfs_buffer, [('out_file', 'surfaces')]),
            (mov_surfs_buffer, mov_surfs_unzip, [('surfaces', 'in_files')]),
            (mov_surfs_unzip, mov_ply, [('out_files', 'in_files')]),
            (mov_ply, mov_recon, [('out_file', 'in_file')]),
            (mov_recon, mov_avggii, [('out_file', 'in_file')]),
            (mov_surfs_unzip, mov_avggii, [('surf_keys', 'surf_key')]),
            (mov_avggii, mov_smooth, [('out_file', 'in_file')]),
            (mov_smooth, mov_avg_ds, [('surface', 'in_file'),
                                      (('surface', _get_surf_extra),
                                       'extra_values')]),
        ])

    return wf
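
A minimal usage sketch (assumed) for the template-to-template registration workflow above; the paths and moving-template identifier are placeholders, and ``output_dir`` is passed as a ``pathlib.Path`` because the workflow reads its ``.parent`` and ``.name``.

# Hypothetical usage of init_templateflow_wf (Example #11).
from pathlib import Path

tf_wf = init_templateflow_wf(
    bids_dir="/data/bids",                                  # placeholder BIDS root
    output_dir=Path("/data/derivatives/tpl_registration"),  # placeholder output tree
    participant_label=["01", "02"],                         # surrogate subjects
    mov_template="MNI152Lin",                               # template-M
    omp_nthreads=8,
)
# tf_wf.run(plugin="MultiProc")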
Example #12
def init_infant_brain_extraction_wf(
    age_months=None,
    ants_affine_init=False,
    bspline_fitting_distance=200,
    sloppy=False,
    skull_strip_template="UNCInfant",
    template_specs=None,
    interim_checkpoints=True,
    mem_gb=3.0,
    mri_scheme="T1w",
    name="infant_brain_extraction_wf",
    atropos_model=None,
    omp_nthreads=None,
    output_dir=None,
    use_float=True,
    use_t2w=False,
):
    """
    Build an atlas-based brain extraction pipeline for infant T1w/T2w MRI data.

    Pros/Cons of available templates
    --------------------------------
    * MNIInfant
     + More cohorts available for finer-grain control
     + T1w/T2w images available
     - Template masks are poor

    * UNCInfant
     + Accurate masks
     - No T2w image available


    Parameters
    ----------
    ants_affine_init : :obj:`bool`, optional
        Set-up a pre-initialization step with ``antsAI`` to account for mis-oriented images.

    """
    # handle template specifics
    template_specs = template_specs or {}
    if skull_strip_template == 'MNIInfant':
        template_specs['resolution'] = 2 if sloppy else 1

    if not template_specs.get('cohort'):
        if age_months is None:
            raise KeyError(
                f"Age or cohort for {skull_strip_template} must be provided!")
        template_specs['cohort'] = cohort_by_months(skull_strip_template,
                                                    age_months)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=["t1w", "t2w", "in_mask"]),
        name="inputnode")
    outputnode = pe.Node(niu.IdentityInterface(
        fields=["t1w_corrected", "t1w_corrected_brain", "t1w_mask"]),
                         name="outputnode")

    if not use_t2w:
        raise RuntimeError("A T2w image is currently required.")

    tpl_target_path = get_template(
        skull_strip_template,
        suffix='T1w',  # no T2w template
        desc=None,
        **template_specs,
    )
    if not tpl_target_path:
        raise RuntimeError(
            f"An instance of template <tpl-{skull_strip_template}> with MR scheme "
            f"'{'T1w' or mri_scheme}' could not be found.")

    tpl_brainmask_path = get_template(skull_strip_template,
                                      label="brain",
                                      suffix="probseg",
                                      **template_specs) or get_template(
                                          skull_strip_template,
                                          desc="brain",
                                          suffix="mask",
                                          **template_specs)

    tpl_regmask_path = get_template(skull_strip_template,
                                    label="BrainCerebellumExtraction",
                                    suffix="mask",
                                    **template_specs)

    # validate images
    val_tmpl = pe.Node(ValidateImage(), name='val_tmpl')
    val_t1w = val_tmpl.clone("val_t1w")
    val_t2w = val_tmpl.clone("val_t2w")
    val_tmpl.inputs.in_file = _pop(tpl_target_path)

    gauss_tmpl = pe.Node(niu.Function(function=_gauss_filter),
                         name="gauss_tmpl")

    # Spatial normalization step
    lap_tmpl = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"),
                       name="lap_tmpl")
    lap_t1w = lap_tmpl.clone("lap_t1w")
    lap_t2w = lap_tmpl.clone("lap_t2w")

    # Merge image nodes
    mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")
    mrg_t2w = mrg_tmpl.clone("mrg_t2w")
    mrg_t1w = mrg_tmpl.clone("mrg_t1w")

    norm_lap_tmpl = pe.Node(niu.Function(function=_trunc),
                            name="norm_lap_tmpl")
    norm_lap_tmpl.inputs.dtype = "float32"
    norm_lap_tmpl.inputs.out_max = 1.0
    norm_lap_tmpl.inputs.percentile = (0.01, 99.99)
    norm_lap_tmpl.inputs.clip_max = None

    norm_lap_t1w = norm_lap_tmpl.clone('norm_lap_t1w')
    norm_lap_t2w = norm_lap_t1w.clone('norm_lap_t2w')

    # Set up initial spatial normalization
    ants_params = "testing" if sloppy else "precise"
    norm = pe.Node(
        Registration(from_file=pkgr_fn(
            "niworkflows.data", f"antsBrainExtraction_{ants_params}.json")),
        name="norm",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm.inputs.float = use_float
    if tpl_regmask_path:
        norm.inputs.fixed_image_masks = tpl_regmask_path

    # Set up T2w -> T1w within-subject registration
    norm_subj = pe.Node(
        Registration(
            from_file=pkgr_fn("nibabies.data", "within_subject_t1t2.json")),
        name="norm_subj",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm_subj.inputs.float = use_float

    # main workflow
    wf = pe.Workflow(name)
    # Create a buffer interface as a cache for the actual inputs to registration
    buffernode = pe.Node(
        niu.IdentityInterface(fields=["hires_target", "smooth_target"]),
        name="buffernode")

    # truncate target intensity for N4 correction
    clip_tmpl = pe.Node(niu.Function(function=_trunc), name="clip_tmpl")
    clip_t2w = clip_tmpl.clone('clip_t2w')
    clip_t1w = clip_tmpl.clone('clip_t1w')

    # INU correction of the T2w (cloned below for the T1w)
    init_t2w_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=False,
            copy_header=True,
            n_iterations=[50] * (4 - sloppy),
            convergence_threshold=1e-7,
            shrink_factor=4,
            bspline_fitting_distance=bspline_fitting_distance,
        ),
        n_procs=omp_nthreads,
        name="init_t2w_n4",
    )
    init_t1w_n4 = init_t2w_n4.clone("init_t1w_n4")

    clip_t2w_inu = pe.Node(niu.Function(function=_trunc), name="clip_t2w_inu")
    clip_t1w_inu = clip_t2w_inu.clone("clip_t1w_inu")

    map_mask_t2w = pe.Node(ApplyTransforms(interpolation="Gaussian",
                                           float=True),
                           name="map_mask_t2w",
                           mem_gb=1)
    map_mask_t1w = map_mask_t2w.clone("map_mask_t1w")

    # map template brainmask to t2w space
    map_mask_t2w.inputs.input_image = str(tpl_brainmask_path)

    thr_t2w_mask = pe.Node(Binarize(thresh_low=0.80), name="thr_t2w_mask")
    thr_t1w_mask = thr_t2w_mask.clone('thr_t1w_mask')

    bspline_grid = pe.Node(niu.Function(function=_bspline_distance),
                           name="bspline_grid")

    # Refine INU correction
    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            bspline_fitting_distance=bspline_fitting_distance,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 5,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    final_mask = pe.Node(ApplyMask(), name="final_mask")

    if atropos_model is None:
        atropos_model = tuple(ATROPOS_MODELS[mri_scheme].values())

    atropos_wf = init_atropos_wf(
        use_random_seed=False,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        in_segmentation_model=atropos_model,
    )
    # if tpl_regmask_path:
    #     atropos_wf.get_node('inputnode').inputs.in_mask_dilated = tpl_regmask_path

    sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1),
                     name='sel_wm',
                     run_without_submitting=True)
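    # The last entry of the Atropos model tuple is taken as the 1-based label of
    # the WM class; Select uses 0-based indexing, hence the `- 1`.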

    wf.connect([
        # 1. massage template
        (val_tmpl, clip_tmpl, [("out_file", "in_file")]),
        (clip_tmpl, lap_tmpl, [("out", "op1")]),
        (clip_tmpl, mrg_tmpl, [("out", "in1")]),
        (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]),
        (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]),
        # 2. massage T2w
        (inputnode, val_t2w, [('t2w', 'in_file')]),
        (val_t2w, clip_t2w, [('out_file', 'in_file')]),
        (clip_t2w, init_t2w_n4, [('out', 'input_image')]),
        (init_t2w_n4, clip_t2w_inu, [("output_image", "in_file")]),
        (clip_t2w_inu, lap_t2w, [('out', 'op1')]),
        (clip_t2w_inu, mrg_t2w, [('out', 'in1')]),
        (lap_t2w, norm_lap_t2w, [("output_image", "in_file")]),
        (norm_lap_t2w, mrg_t2w, [("out", "in2")]),
        # 3. normalize T2w to target template (UNC)
        (mrg_t2w, norm, [("out", "moving_image")]),
        (mrg_tmpl, norm, [("out", "fixed_image")]),
        # 4. map template brainmask to T2w space
        (inputnode, map_mask_t2w, [('t2w', 'reference_image')]),
        (norm, map_mask_t2w, [("reverse_transforms", "transforms"),
                              ("reverse_invert_flags",
                               "invert_transform_flags")]),
        (map_mask_t2w, thr_t2w_mask, [("output_image", "in_file")]),
        # 5. massage T1w
        (inputnode, val_t1w, [("t1w", "in_file")]),
        (val_t1w, clip_t1w, [("out_file", "in_file")]),
        (clip_t1w, init_t1w_n4, [("out", "input_image")]),
        (init_t1w_n4, clip_t1w_inu, [("output_image", "in_file")]),
        (clip_t1w_inu, lap_t1w, [('out', 'op1')]),
        (clip_t1w_inu, mrg_t1w, [('out', 'in1')]),
        (lap_t1w, norm_lap_t1w, [("output_image", "in_file")]),
        (norm_lap_t1w, mrg_t1w, [("out", "in2")]),
        # 6. normalize within subject T1w to T2w
        (mrg_t1w, norm_subj, [("out", "moving_image")]),
        (mrg_t2w, norm_subj, [("out", "fixed_image")]),
        (thr_t2w_mask, norm_subj, [("out_mask", "fixed_image_mask")]),
        # 7. map mask to T1w space
        (thr_t2w_mask, map_mask_t1w, [("out_mask", "input_image")]),
        (inputnode, map_mask_t1w, [("t1w", "reference_image")]),
        (norm_subj, map_mask_t1w, [
            ("reverse_transforms", "transforms"),
            ("reverse_invert_flags", "invert_transform_flags"),
        ]),
        (map_mask_t1w, thr_t1w_mask, [("output_image", "in_file")]),
        # 8. T1w INU
        (inputnode, final_n4, [("t1w", "input_image")]),
        (inputnode, bspline_grid, [("t1w", "in_file")]),
        (bspline_grid, final_n4, [("out", "args")]),
        (map_mask_t1w, final_n4, [("output_image", "weight_image")]),
        (final_n4, final_mask, [("output_image", "in_file")]),
        (thr_t1w_mask, final_mask, [("out_mask", "in_mask")]),
        # 9. Outputs
        (final_n4, outputnode, [("output_image", "t1w_corrected")]),
        (thr_t1w_mask, outputnode, [("out_mask", "t1w_mask")]),
        (final_mask, outputnode, [("out_file", "t1w_corrected_brain")]),
    ])

    if ants_affine_init:
        ants_kwargs = dict(
            metric=("Mattes", 32, "Regular", 0.2),
            transform=("Affine", 0.1),
            search_factor=(20, 0.12),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            search_grid=(40, (0, 40, 40)),
            verbose=True,
        )

        if ants_affine_init == 'random':
            ants_kwargs['metric'] = ("Mattes", 32, "Random", 0.2)
        if ants_affine_init == 'search':
            ants_kwargs['search_grid'] = (20, (20, 40, 40))

        init_aff = pe.Node(
            AI(**ants_kwargs),
            name="init_aff",
            n_procs=omp_nthreads,
        )
        if tpl_regmask_path:
            init_aff.inputs.fixed_image_mask = _pop(tpl_regmask_path)

        wf.connect([
            (clip_tmpl, init_aff, [("out", "fixed_image")]),
            (clip_t2w_inu, init_aff, [("out", "moving_image")]),
            (init_aff, norm, [("output_transform", "initial_moving_transform")
                              ]),
        ])

    return wf
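A minimal usage sketch for the workflow factory above. It assumes ``age_months`` and ``sloppy`` are among the keyword arguments that precede the ones shown here (both are referenced in the body); all file paths are hypothetical.

# Minimal usage sketch; `age_months`/`sloppy` are assumed keyword arguments of the
# factory (both are referenced in its body), and the file paths are placeholders.
wf = init_infant_brain_extraction_wf(
    age_months=6,
    sloppy=False,
    skull_strip_template="UNCInfant",
    use_t2w=True,   # a T2w image is currently required (see the RuntimeError above)
    omp_nthreads=4,
)
wf.base_dir = "work"
inputs = wf.get_node("inputnode").inputs
inputs.t1w = "sub-01_T1w.nii.gz"  # hypothetical
inputs.t2w = "sub-01_T2w.nii.gz"  # hypothetical
wf.run()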
Example #13
0
    def make_registration_wf(input_file,
                             name,
                             subject=subject,
                             target=target,
                             target_mask=target_mask,
                             init_reg=init_reg,
                             t1w_to_mni_transform=t1w_to_mni_transform,
                             t1w_in_mni=t1w_in_mni,
                             mni_brain_mask=mni_brain_mask,
                             ants_numthreads=8):

        workflow = pe.Workflow(base_dir='/tmp/workflow_folders', name=name)

        input_node = pe.Node(niu.IdentityInterface(fields=[
            'input_file', 'target', 'target_mask', 'init_reg',
            't1w_to_mni_transform', 't1w_in_mni', 'mni_brain_mask'
        ]),
                             name='inputspec')
        input_node.inputs.input_file = input_file
        input_node.inputs.target = target
        input_node.inputs.target_mask = target_mask
        input_node.inputs.init_reg = init_reg
        input_node.inputs.t1w_to_mni_transform = t1w_to_mni_transform
        input_node.inputs.t1w_in_mni = t1w_in_mni
        input_node.inputs.mni_brain_mask = mni_brain_mask

        convert_dtype = pe.Node(fsl.maths.MathsCommand(), name='convert_dtype')
        convert_dtype.inputs.output_datatype = 'double'

        workflow.connect(input_node, 'input_file', convert_dtype, 'in_file')

        inu_n4 = pe.Node(
            N4BiasFieldCorrection(
                dimension=3,
                save_bias=True,
                num_threads=ants_numthreads,
                rescale_intensities=True,
                copy_header=True,
            ),
            n_procs=ants_numthreads,
            name="inu_n4",
        )

        workflow.connect(convert_dtype, 'out_file', inu_n4, 'input_image')

        register = pe.Node(Registration(from_file=registration_scheme,
                                        num_threads=ants_numthreads,
                                        verbose=True),
                           name='registration')

        workflow.connect(inu_n4, 'output_image', register, 'moving_image')

        if init_reg:
            workflow.connect(input_node, 'init_reg', register,
                             'initial_moving_transform')

        workflow.connect(input_node, 'target', register, 'fixed_image')
        workflow.connect(input_node, 'target_mask', register,
                         'fixed_image_masks')

        def get_mask(input_image):
            from nilearn import image
            from nipype.utils.filemanip import split_filename
            import os.path as op

            _, fn, _ = split_filename(input_image)
            mask = image.math_img('im != 0', im=input_image)
            new_fn = op.abspath(fn + '_mask.nii.gz')
            mask.to_filename(new_fn)

            return new_fn

        mask_node = pe.Node(niu.Function(function=get_mask,
                                         input_names=['input_image'],
                                         output_names=['mask']),
                            name='mask_node')

        workflow.connect(register, 'warped_image', mask_node, 'input_image')

        gen_grid_node = pe.Node(GenerateSamplingReference(),
                                name='gen_grid_node')

        workflow.connect(mask_node, 'mask', gen_grid_node, 'fov_mask')
        workflow.connect(inu_n4, 'output_image', gen_grid_node, 'moving_image')
        workflow.connect(input_node, 'target', gen_grid_node, 'fixed_image')

        datasink_image_t1w = pe.Node(DerivativesDataSink(
            out_path_base='registration',
            compress=True,
            base_directory=op.join(bids_folder, 'derivatives')),
                                     name='datasink_image_t1w')
        workflow.connect(input_node, 'input_file', datasink_image_t1w,
                         'source_file')
        datasink_image_t1w.inputs.space = 'T1w'
        datasink_image_t1w.inputs.desc = 'registered'

        datasink_report_t1w = pe.Node(DerivativesDataSink(
            out_path_base='registration',
            space='T1w',
            base_directory=op.join(bids_folder, 'derivatives'),
            datatype='figures'),
                                      name='datasink_report_t1w')

        workflow.connect(input_node, 'input_file', datasink_report_t1w,
                         'source_file')
        datasink_report_t1w.inputs.space = 'T1w'

        transformer = pe.Node(ApplyTransforms(
            interpolation='LanczosWindowedSinc',
            generate_report=True,
            num_threads=ants_numthreads),
                              n_procs=ants_numthreads,
                              name='transformer')
        workflow.connect(transformer, 'output_image', datasink_image_t1w,
                         'in_file')
        workflow.connect(transformer, 'out_report', datasink_report_t1w,
                         'in_file')
        workflow.connect(inu_n4, 'output_image', transformer, 'input_image')
        workflow.connect(gen_grid_node, 'out_file', transformer,
                         'reference_image')
        workflow.connect(register, 'composite_transform', transformer,
                         'transforms')

        concat_transforms = pe.Node(niu.Merge(2), name='concat_transforms')

        workflow.connect(register, 'composite_transform', concat_transforms,
                         'in2')
        workflow.connect(input_node, 't1w_to_mni_transform', concat_transforms,
                         'in1')

        transformer_to_mni1 = pe.Node(ApplyTransforms(
            interpolation='LanczosWindowedSinc',
            generate_report=False,
            num_threads=ants_numthreads),
                                      n_procs=ants_numthreads,
                                      name='transformer_to_mni1')
        workflow.connect(inu_n4, 'output_image', transformer_to_mni1,
                         'input_image')
        workflow.connect(input_node, 't1w_in_mni', transformer_to_mni1,
                         'reference_image')
        workflow.connect(concat_transforms, 'out', transformer_to_mni1,
                         'transforms')

        mask_node_mni = pe.Node(niu.Function(function=get_mask,
                                             input_names=['input_image'],
                                             output_names=['mask']),
                                name='mask_node_mni')
        workflow.connect(transformer_to_mni1, 'output_image', mask_node_mni,
                         'input_image')

        def join_masks(mask1, mask2):
            from nilearn import image
            from nipype.utils.filemanip import split_filename
            import os.path as op

            _, fn, _ = split_filename(mask1)

            new_mask = image.math_img('(im1 > 0) & (im2 > 0)',
                                      im1=mask1,
                                      im2=mask2)

            new_fn = op.abspath(fn + '_jointmask' + '.nii.gz')

            new_mask.to_filename(new_fn)

            return new_fn

        combine_masks_node = pe.Node(niu.Function(
            function=join_masks,
            input_names=['mask1', 'mask2'],
            output_names=['combined_mask']),
                                     name='combine_mask_node')

        workflow.connect(mask_node_mni, 'mask', combine_masks_node, 'mask1')
        workflow.connect(input_node, 'mni_brain_mask', combine_masks_node,
                         'mask2')

        gen_grid_node_mni = pe.Node(GenerateSamplingReference(),
                                    name='gen_grid_node_mni')
        workflow.connect(combine_masks_node, 'combined_mask',
                         gen_grid_node_mni, 'fov_mask')
        workflow.connect(inu_n4, 'output_image', gen_grid_node_mni,
                         'moving_image')
        workflow.connect(input_node, 't1w_in_mni', gen_grid_node_mni,
                         'fixed_image')

        transformer_to_mni2 = pe.Node(ApplyTransforms(
            interpolation='LanczosWindowedSinc',
            generate_report=False,
            num_threads=ants_numthreads),
                                      n_procs=ants_numthreads,
                                      name='transformer_to_mni2')
        workflow.connect(inu_n4, 'output_image', transformer_to_mni2,
                         'input_image')
        workflow.connect(gen_grid_node_mni, 'out_file', transformer_to_mni2,
                         'reference_image')
        workflow.connect(concat_transforms, 'out', transformer_to_mni2,
                         'transforms')

        datasink_image_mni = pe.Node(DerivativesDataSink(
            out_path_base='registration',
            compress=True,
            base_directory=op.join(bids_folder, 'derivatives')),
                                     name='datasink_mni')
        datasink_image_mni.inputs.source_file = input_file
        datasink_image_mni.inputs.space = 'MNI152NLin2009cAsym'
        datasink_image_mni.inputs.desc = 'registered'

        workflow.connect(input_node, 'input_file', datasink_image_mni,
                         'source_file')
        workflow.connect(transformer_to_mni2, 'output_image',
                         datasink_image_mni, 'in_file')

        return workflow
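The ``get_mask`` and ``join_masks`` helpers above build masks with ``nilearn.image.math_img``; a small standalone sketch of the same pattern follows (file names are hypothetical, and both images are assumed to share the same grid).

# Standalone sketch of the masking pattern used by get_mask/join_masks above.
# File names are hypothetical; math_img evaluates a NumPy expression over the
# voxel data of the named images (which must live on the same grid).
import os.path as op
from nilearn import image

warped = "sub-01_space-T1w_bold.nii.gz"        # hypothetical warped image
brainmask = "sub-01_desc-brain_mask.nii.gz"    # hypothetical brain mask

fov_mask = image.math_img("im != 0", im=warped)             # nonzero field of view
joint = image.math_img("(im1 > 0) & (im2 > 0)",             # intersection of masks
                       im1=fov_mask, im2=brainmask)
joint.to_filename(op.abspath("jointmask.nii.gz"))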
Example #14
0
def init_3dQwarp_wf(omp_nthreads=1, debug=False, name="pepolar_estimate_wf"):
    """
    Create the PEPOLAR field estimation workflow based on AFNI's ``3dQwarp``.

    This workflow takes in two EPI files that MUST have opposed
    :abbr:`PE (phase-encoding)` direction.
    Therefore, EPIs with orthogonal PE directions are not supported.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fit.pepolar import init_3dQwarp_wf
            wf = init_3dQwarp_wf()

    Parameters
    ----------
    debug : :obj:`bool`
        Whether a fast (less accurate) configuration of the workflow should be applied.
    name : :obj:`str`
        Name for this workflow
    omp_nthreads : :obj:`int`
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    in_data : :obj:`list` of :obj:`str`
        A list of two EPI files, the first of which will be taken as reference.

    Outputs
    -------
    fmap : :obj:`str`
        The path of the estimated fieldmap.
    fmap_ref : :obj:`str`
        The path of an unwarped conversion of the first element of ``in_data``.

    """
    from nipype.interfaces import afni
    from niworkflows.interfaces.header import CopyHeader
    from niworkflows.interfaces.fixes import (
        FixHeaderRegistration as Registration,
        FixHeaderApplyTransforms as ApplyTransforms,
    )
    from niworkflows.interfaces.freesurfer import StructuralReference
    from niworkflows.func.util import init_enhance_and_skullstrip_bold_wf
    from ...utils.misc import front as _front, last as _last
    from ...interfaces.utils import Flatten, ConvertWarp

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""{_PEPOLAR_DESC} \
with `3dQwarp` (@afni; AFNI {''.join(['%02d' % v for v in afni.Info().version() or []])}).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=["in_data", "metadata"]),
                        name="inputnode")

    outputnode = pe.Node(niu.IdentityInterface(fields=["fmap", "fmap_ref"]),
                         name="outputnode")

    flatten = pe.Node(Flatten(), name="flatten")
    sort_pe = pe.Node(
        niu.Function(function=_sorted_pe,
                     output_names=["sorted", "qwarp_args"]),
        name="sort_pe",
        run_without_submitting=True,
    )

    merge_pes = pe.MapNode(
        StructuralReference(
            auto_detect_sensitivity=True,
            initial_timepoint=1,
            fixed_timepoint=True,  # Align to first image
            intensity_scaling=True,
            # 7-DOF (rigid + intensity)
            no_iteration=True,
            subsample_threshold=200,
            out_file="template.nii.gz",
        ),
        name="merge_pes",
        iterfield=["in_files"],
    )

    pe0_wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=omp_nthreads,
                                                 name="pe0_wf")
    pe1_wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=omp_nthreads,
                                                 name="pe1_wf")

    align_pes = pe.Node(
        Registration(
            from_file=_pkg_fname("sdcflows", "data/translation_rigid.json"),
            output_warped_image=True,
        ),
        name="align_pes",
        n_procs=omp_nthreads,
    )

    qwarp = pe.Node(
        afni.QwarpPlusMinus(
            blur=[-1, -1],
            environ={"OMP_NUM_THREADS": f"{min(omp_nthreads, 4)}"},
            minpatch=9,
            nopadWARP=True,
            noweight=True,
            pblur=[0.05, 0.05],
        ),
        name="qwarp",
        n_procs=min(omp_nthreads, 4),
    )

    to_ants = pe.Node(ConvertWarp(), name="to_ants", mem_gb=0.01)

    cphdr_warp = pe.Node(CopyHeader(), name="cphdr_warp", mem_gb=0.01)

    unwarp_reference = pe.Node(
        ApplyTransforms(
            dimension=3,
            float=True,
            interpolation="LanczosWindowedSinc",
        ),
        name="unwarp_reference",
    )

    # fmt: off
    workflow.connect([
        (inputnode, flatten, [("in_data", "in_data"),
                              ("metadata", "in_meta")]),
        (flatten, sort_pe, [("out_list", "inlist")]),
        (sort_pe, qwarp, [("qwarp_args", "args")]),
        (sort_pe, merge_pes, [("sorted", "in_files")]),
        (merge_pes, pe0_wf, [(("out_file", _front), "inputnode.in_file")]),
        (merge_pes, pe1_wf, [(("out_file", _last), "inputnode.in_file")]),
        (pe0_wf, align_pes, [("outputnode.skull_stripped_file", "fixed_image")
                             ]),
        (pe1_wf, align_pes, [("outputnode.skull_stripped_file", "moving_image")
                             ]),
        (pe0_wf, qwarp, [("outputnode.skull_stripped_file", "in_file")]),
        (align_pes, qwarp, [("warped_image", "base_file")]),
        (inputnode, cphdr_warp, [(("in_data", _front), "hdr_file")]),
        (qwarp, cphdr_warp, [("source_warp", "in_file")]),
        (cphdr_warp, to_ants, [("out_file", "in_file")]),
        (to_ants, unwarp_reference, [("out_file", "transforms")]),
        (inputnode, unwarp_reference, [("in_reference", "reference_image"),
                                       ("in_reference", "input_image")]),
        (unwarp_reference, outputnode, [("output_image", "fmap_ref")]),
        (to_ants, outputnode, [("out_file", "fmap")]),
    ])
    # fmt: on
    return workflow
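A short usage sketch for the estimator above, following the import path shown in the docstring's workflow-graph example; the EPI files and metadata below are hypothetical placeholders.

# Usage sketch; import path taken from the docstring example, inputs are placeholders.
from sdcflows.workflows.fit.pepolar import init_3dQwarp_wf

wf = init_3dQwarp_wf(omp_nthreads=4)
wf.base_dir = "work"
inputs = wf.get_node("inputnode").inputs
inputs.in_data = [
    "sub-01_dir-AP_epi.nii.gz",   # reference (first element)
    "sub-01_dir-PA_epi.nii.gz",   # opposed phase-encoding acquisition
]
inputs.metadata = [
    {"PhaseEncodingDirection": "j-"},
    {"PhaseEncodingDirection": "j"},
]
wf.run()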
Example #15
0
def init_syn_preprocessing_wf(
    *,
    debug=False,
    name="syn_preprocessing_wf",
    omp_nthreads=1,
    auto_bold_nss=False,
    t1w_inversion=False,
):
    """
    Prepare EPI references and co-registration to anatomical for SyN.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.fit.syn import init_syn_preprocessing_wf
            wf = init_syn_preprocessing_wf(omp_nthreads=8)

    Parameters
    ----------
    debug : :obj:`bool`
        Whether a fast (less accurate) configuration of the workflow should be applied.
    name : :obj:`str`
        Name for this workflow
    omp_nthreads : :obj:`int`
        Parallelize internal tasks across the number of CPUs given by this option.
    auto_bold_nss : :obj:`bool`
        Set up the reference workflow to automatically execute nonsteady states detection
        of BOLD images.
    t1w_inversion : :obj:`bool`
        Run T1w intensity inversion so that it looks more like a T2 contrast.

    Inputs
    ------
    in_epis : :obj:`list` of :obj:`str`
        Distorted EPI images that will be merged together to create the
        EPI reference file.
    t_masks : :obj:`list` of :obj:`bool`
        (optional) mask of timepoints for calculating an EPI reference.
        Not used if ``auto_bold_nss=True``.
    in_meta : :obj:`list` of :obj:`dict`
        Metadata dictionaries corresponding to the ``in_epis`` input.
    in_anat : :obj:`str`
        A preprocessed anatomical (T1w or T2w) image.
    mask_anat : :obj:`str`
        A brainmask corresponding to the anatomical (T1w or T2w) image.
    std2anat_xfm : :obj:`str`
        Inverse of the T1w-to-MNI registration transform (i.e., the mapping from the
        MNI template into the T1w image).

    Outputs
    -------
    epi_ref : :obj:`tuple` (:obj:`str`, :obj:`dict`)
        A tuple, where the first element is the path of the distorted EPI
        reference map (e.g., an average of *b=0* volumes), and the second
        element is a dictionary of associated metadata.
    anat_ref : :obj:`str`
        Path to the anatomical, skull-stripped reference in EPI space.
    anat_mask : :obj:`str`
        Path to the brain mask corresponding to ``anat_ref`` in EPI space.
    sd_prior : :obj:`str`
        A template map of areas with strong susceptibility distortions (SD) to regularize
        the cost function of SyN.

    """
    from pkg_resources import resource_filename as pkgrf
    from niworkflows.interfaces.nibabel import (
        IntensityClip,
        ApplyMask,
        GenerateSamplingReference,
    )
    from niworkflows.interfaces.fixes import (
        FixHeaderApplyTransforms as ApplyTransforms,
        FixHeaderRegistration as Registration,
    )
    from niworkflows.workflows.epi.refmap import init_epi_reference_wf
    from ...interfaces.utils import Deoblique, DenoiseImage
    from ...interfaces.brainmask import BrainExtraction, BinaryDilation

    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=[
                "in_epis",
                "t_masks",
                "in_meta",
                "in_anat",
                "mask_anat",
                "std2anat_xfm",
            ]
        ),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["epi_ref", "epi_mask", "anat_ref", "anat_mask", "sd_prior"]
        ),
        name="outputnode",
    )

    deob_epi = pe.Node(Deoblique(), name="deob_epi")

    # Mapping & preparing prior knowledge
    # Concatenate transform files:
    # 1) EPI -> anat (inverted on application); 2) MNI -> anat; 3) ATLAS -> MNI
    transform_list = pe.Node(
        niu.Merge(3),
        name="transform_list",
        mem_gb=DEFAULT_MEMORY_MIN_GB,
        run_without_submitting=True,
    )
    transform_list.inputs.in3 = pkgrf(
        "sdcflows", "data/fmap_atlas_2_MNI152NLin2009cAsym_affine.mat"
    )
    prior2epi = pe.Node(
        ApplyTransforms(
            invert_transform_flags=[True, False, False],
            input_image=pkgrf("sdcflows", "data/fmap_atlas.nii.gz"),
        ),
        name="prior2epi",
        n_procs=omp_nthreads,
        mem_gb=0.3,
    )

    anat2epi = pe.Node(
        ApplyTransforms(invert_transform_flags=[True]),
        name="anat2epi",
        n_procs=omp_nthreads,
        mem_gb=0.3,
    )
    mask2epi = pe.Node(
        ApplyTransforms(invert_transform_flags=[True], interpolation="MultiLabel"),
        name="mask2epi",
        n_procs=omp_nthreads,
        mem_gb=0.3,
    )
    mask_dtype = pe.Node(
        niu.Function(function=_set_dtype, input_names=["in_file", "dtype"]),
        name="mask_dtype",
    )
    mask_dtype.inputs.dtype = "uint8"

    epi_reference_wf = init_epi_reference_wf(
        omp_nthreads=omp_nthreads,
        auto_bold_nss=auto_bold_nss,
    )
    epi_brain = pe.Node(BrainExtraction(), name="epi_brain")
    merge_output = pe.Node(
        niu.Function(function=_merge_meta),
        name="merge_output",
        run_without_submitting=True,
    )
    mask_anat = pe.Node(ApplyMask(), name="mask_anat")
    clip_anat = pe.Node(IntensityClip(p_min=0.0, p_max=99.8), name="clip_anat")
    ref_anat = pe.Node(
        DenoiseImage(copy_header=True), name="ref_anat", n_procs=omp_nthreads
    )

    epi2anat = pe.Node(
        Registration(from_file=pkgrf("sdcflows", "data/affine.json")),
        name="epi2anat",
        n_procs=omp_nthreads,
    )
    epi2anat.inputs.output_warped_image = debug
    epi2anat.inputs.output_inverse_warped_image = debug
    if debug:
        epi2anat.inputs.args = "--write-interval-volumes 5"

    def _remove_first_mask(in_file):
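        # antsRegistration accepts one mask per registration stage; "NULL"
        # disables masking for a stage, so prepending it leaves the first stage
        # unmasked and applies the dilated EPI mask only to the later stage(s).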
        if not isinstance(in_file, list):
            in_file = [in_file]

        in_file.insert(0, "NULL")
        return in_file

    anat_dilmsk = pe.Node(BinaryDilation(), name="anat_dilmsk")
    epi_dilmsk = pe.Node(BinaryDilation(), name="epi_dilmsk")

    sampling_ref = pe.Node(GenerateSamplingReference(), name="sampling_ref")

    # fmt:off
    workflow.connect([
        (inputnode, transform_list, [("std2anat_xfm", "in2")]),
        (inputnode, epi_reference_wf, [("in_epis", "inputnode.in_files")]),
        (inputnode, merge_output, [("in_meta", "meta_list")]),
        (inputnode, anat_dilmsk, [("mask_anat", "in_file")]),
        (inputnode, mask_anat, [("in_anat", "in_file"),
                                ("mask_anat", "in_mask")]),
        (inputnode, mask2epi, [("mask_anat", "input_image")]),
        (epi_reference_wf, deob_epi, [("outputnode.epi_ref_file", "in_file")]),
        (deob_epi, merge_output, [("out_file", "epi_ref")]),
        (mask_anat, clip_anat, [("out_file", "in_file")]),
        (clip_anat, ref_anat, [("out_file", "input_image")]),
        (deob_epi, epi_brain, [("out_file", "in_file")]),
        (epi_brain, epi_dilmsk, [("out_mask", "in_file")]),
        (ref_anat, epi2anat, [("output_image", "fixed_image")]),
        (anat_dilmsk, epi2anat, [("out_file", "fixed_image_masks")]),
        (deob_epi, epi2anat, [("out_file", "moving_image")]),
        (epi_dilmsk, epi2anat, [
            (("out_file", _remove_first_mask), "moving_image_masks")]),
        (deob_epi, sampling_ref, [("out_file", "fixed_image")]),
        (epi2anat, transform_list, [("forward_transforms", "in1")]),
        (transform_list, prior2epi, [("out", "transforms")]),
        (sampling_ref, prior2epi, [("out_file", "reference_image")]),
        (ref_anat, anat2epi, [("output_image", "input_image")]),
        (epi2anat, anat2epi, [("forward_transforms", "transforms")]),
        (sampling_ref, anat2epi, [("out_file", "reference_image")]),
        (epi2anat, mask2epi, [("forward_transforms", "transforms")]),
        (sampling_ref, mask2epi, [("out_file", "reference_image")]),
        (mask2epi, mask_dtype, [("output_image", "in_file")]),
        (anat2epi, outputnode, [("output_image", "anat_ref")]),
        (mask_dtype, outputnode, [("out", "anat_mask")]),
        (merge_output, outputnode, [("out", "epi_ref")]),
        (epi_brain, outputnode, [("out_mask", "epi_mask")]),
        (prior2epi, outputnode, [("output_image", "sd_prior")]),
    ])
    # fmt:on

    if debug:
        from niworkflows.interfaces.nibabel import RegridToZooms

        regrid_anat = pe.Node(
            RegridToZooms(zooms=(2.0, 2.0, 2.0), smooth=True), name="regrid_anat"
        )
        # fmt:off
        workflow.connect([
            (inputnode, regrid_anat, [("in_anat", "in_file")]),
            (regrid_anat, sampling_ref, [("out_file", "moving_image")]),
        ])
        # fmt:on
    else:
        # fmt:off
        workflow.connect([
            (inputnode, sampling_ref, [("in_anat", "moving_image")]),
        ])
        # fmt:on

    if not auto_bold_nss:
        workflow.connect(inputnode, "t_masks", epi_reference_wf, "inputnode.t_masks")

    return workflow
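A usage sketch for the preprocessing workflow above. The import path follows the docstring's workflow-graph example; with ``auto_bold_nss=True`` the ``t_masks`` input is not needed, and all file paths are hypothetical.

# Usage sketch; import path per the docstring example, file paths are placeholders.
from sdcflows.workflows.fit.syn import init_syn_preprocessing_wf

wf = init_syn_preprocessing_wf(omp_nthreads=4, auto_bold_nss=True)
wf.base_dir = "work"
inputs = wf.get_node("inputnode").inputs
inputs.in_epis = ["sub-01_task-rest_bold.nii.gz"]        # hypothetical
inputs.in_meta = [{"PhaseEncodingDirection": "j-"}]      # hypothetical
inputs.in_anat = "sub-01_desc-preproc_T1w.nii.gz"        # hypothetical
inputs.mask_anat = "sub-01_desc-brain_mask.nii.gz"       # hypothetical
inputs.std2anat_xfm = "sub-01_from-MNI_to-T1w_xfm.h5"    # hypothetical
wf.run()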
Example #16
0
def init_syn_sdc_wf(omp_nthreads, epi_pe=None,
                    atlas_threshold=3, name='syn_sdc_wf'):
    """
    Build the *fieldmap-less* susceptibility-distortion estimation workflow.

    This workflow takes a skull-stripped T1w image and reference BOLD image and
    estimates a susceptibility distortion correction warp, using ANTs symmetric
    normalization (SyN) and the average fieldmap atlas described in
    [Treiber2016]_.

    SyN deformation is restricted to the phase-encoding (PE) direction.
    If no PE direction is specified, anterior-posterior PE is assumed.

    SyN deformation is also restricted to regions that are expected to have a
    >3mm (approximately 1 voxel) warp, based on the fieldmap atlas.

    This technique is a variation on those developed in [Huntenburg2014]_ and
    [Wang2017]_.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.syn import init_syn_sdc_wf
            wf = init_syn_sdc_wf(
                epi_pe='j',
                omp_nthreads=8)

    Inputs
    ------
    in_reference
        reference image
    in_reference_brain
        skull-stripped reference image
    t1w_brain
        skull-stripped, bias-corrected structural image
    std2anat_xfm
        inverse registration transform of T1w image to MNI template

    Outputs
    -------
    out_reference
        the ``in_reference`` image after unwarping
    out_reference_brain
        the ``in_reference_brain`` image after unwarping
    out_warp
        the corresponding :abbr:`DFM (displacements field map)` compatible with
        ANTs
    out_mask
        mask of the unwarped input file

    References
    ----------
    .. [Treiber2016] Treiber, J. M. et al. (2016) Characterization and Correction
        of Geometric Distortions in 814 Diffusion Weighted Images,
        PLoS ONE 11(3): e0152472. doi:`10.1371/journal.pone.0152472
        <https://doi.org/10.1371/journal.pone.0152472>`_.
    .. [Wang2017] Wang S, et al. (2017) Evaluation of Field Map and Nonlinear
        Registration Methods for Correction of Susceptibility Artifacts
        in Diffusion MRI. Front. Neuroinform. 11:17.
        doi:`10.3389/fninf.2017.00017
        <https://doi.org/10.3389/fninf.2017.00017>`_.
    .. [Huntenburg2014] Huntenburg, J. M. (2014) Evaluating Nonlinear
        Coregistration of BOLD EPI and T1w Images. Berlin: Master
        Thesis, Freie Universität. `PDF
        <http://pubman.mpdl.mpg.de/pubman/item/escidoc:2327525:5/component/escidoc:2327523/master_thesis_huntenburg_4686947.pdf>`_.

    """
    if epi_pe is None or epi_pe[0] not in ['i', 'j']:
        LOGGER.warning('Incorrect phase-encoding direction, assuming PA (posterior-to-anterior).')
        epi_pe = 'j'

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on *fMRIPrep*'s *fieldmap-less* approach.
The deformation field is that resulting from co-registering the BOLD reference
to the same-subject T1w-reference with its intensity inverted [@fieldmapless1;
@fieldmapless2].
Registration is performed with `antsRegistration` (ANTs {ants_ver}), and
the process regularized by constraining deformation to be nonzero only
along the phase-encoding direction, and modulated with an average fieldmap
template [@fieldmapless3].
""".format(ants_ver=Registration().version or '<ver>')
    inputnode = pe.Node(
        niu.IdentityInterface(['in_reference', 'in_reference_brain',
                               't1w_brain', 'std2anat_xfm']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(['out_reference', 'out_reference_brain',
                               'out_mask', 'out_warp']),
        name='outputnode')

    # Collect predefined data
    # Atlas image and registration affine
    atlas_img = resource_filename('sdcflows', 'data/fmap_atlas.nii.gz')
    # Registration specifications
    affine_transform = resource_filename('sdcflows', 'data/affine.json')
    syn_transform = resource_filename('sdcflows', 'data/susceptibility_syn.json')

    invert_t1w = pe.Node(Rescale(invert=True), name='invert_t1w',
                         mem_gb=0.3)

    ref_2_t1 = pe.Node(Registration(from_file=affine_transform),
                       name='ref_2_t1', n_procs=omp_nthreads)
    t1_2_ref = pe.Node(ApplyTransforms(invert_transform_flags=[True]),
                       name='t1_2_ref', n_procs=omp_nthreads)

    # 1) BOLD -> T1; 2) MNI -> T1; 3) ATLAS -> MNI
    transform_list = pe.Node(niu.Merge(3), name='transform_list',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
    transform_list.inputs.in3 = resource_filename(
        'sdcflows', 'data/fmap_atlas_2_MNI152NLin2009cAsym_affine.mat')

    # Inverting (1), then applying in reverse order:
    #
    # ATLAS -> MNI -> T1 -> BOLD
    atlas_2_ref = pe.Node(
        ApplyTransforms(invert_transform_flags=[True, False, False]),
        name='atlas_2_ref', n_procs=omp_nthreads,
        mem_gb=0.3)
    atlas_2_ref.inputs.input_image = atlas_img

    threshold_atlas = pe.Node(
        fsl.maths.MathsCommand(args='-thr {:.8g} -bin'.format(atlas_threshold),
                               output_datatype='char'),
        name='threshold_atlas', mem_gb=0.3)

    fixed_image_masks = pe.Node(niu.Merge(2), name='fixed_image_masks',
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
    fixed_image_masks.inputs.in1 = 'NULL'

    restrict = [[int(epi_pe[0] == 'i'), int(epi_pe[0] == 'j'), 0]] * 2
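    # For example, epi_pe='j' yields restrict == [[0, 1, 0], [0, 1, 0]], i.e.,
    # deformation is allowed only along the j (phase-encoding) axis for each of
    # the two stages in the SyN registration spec.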
    syn = pe.Node(
        Registration(from_file=syn_transform, restrict_deformation=restrict),
        name='syn', n_procs=omp_nthreads)

    unwarp_ref = pe.Node(ApplyTransforms(
        dimension=3, float=True, interpolation='LanczosWindowedSinc'),
        name='unwarp_ref')

    skullstrip_bold_wf = init_skullstrip_bold_wf()

    workflow.connect([
        (inputnode, invert_t1w, [('t1w_brain', 'in_file'),
                                 ('in_reference', 'ref_file')]),
        (inputnode, ref_2_t1, [('in_reference_brain', 'moving_image')]),
        (invert_t1w, ref_2_t1, [('out_file', 'fixed_image')]),
        (inputnode, t1_2_ref, [('in_reference', 'reference_image')]),
        (invert_t1w, t1_2_ref, [('out_file', 'input_image')]),
        (ref_2_t1, t1_2_ref, [('forward_transforms', 'transforms')]),
        (ref_2_t1, transform_list, [('forward_transforms', 'in1')]),
        (inputnode, transform_list, [
            ('std2anat_xfm', 'in2')]),
        (inputnode, atlas_2_ref, [('in_reference', 'reference_image')]),
        (transform_list, atlas_2_ref, [('out', 'transforms')]),
        (atlas_2_ref, threshold_atlas, [('output_image', 'in_file')]),
        (threshold_atlas, fixed_image_masks, [('out_file', 'in2')]),
        (inputnode, syn, [('in_reference_brain', 'moving_image')]),
        (t1_2_ref, syn, [('output_image', 'fixed_image')]),
        (fixed_image_masks, syn, [('out', 'fixed_image_masks')]),
        (syn, outputnode, [('forward_transforms', 'out_warp')]),
        (syn, unwarp_ref, [('forward_transforms', 'transforms')]),
        (inputnode, unwarp_ref, [('in_reference', 'reference_image'),
                                 ('in_reference', 'input_image')]),
        (unwarp_ref, skullstrip_bold_wf, [
            ('output_image', 'inputnode.in_file')]),
        (unwarp_ref, outputnode, [('output_image', 'out_reference')]),
        (skullstrip_bold_wf, outputnode, [
            ('outputnode.skull_stripped_file', 'out_reference_brain'),
            ('outputnode.mask_file', 'out_mask')]),
    ])

    return workflow
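A usage sketch wiring the inputs listed in the docstring (import path as in the docstring's workflow-graph example; file names are hypothetical).

# Usage sketch; inputs follow the docstring's Inputs section, paths are placeholders.
from sdcflows.workflows.syn import init_syn_sdc_wf

wf = init_syn_sdc_wf(epi_pe="j", omp_nthreads=8)
wf.base_dir = "work"
inputs = wf.get_node("inputnode").inputs
inputs.in_reference = "sub-01_boldref.nii.gz"                    # hypothetical
inputs.in_reference_brain = "sub-01_desc-brain_boldref.nii.gz"   # hypothetical
inputs.t1w_brain = "sub-01_desc-brain_T1w.nii.gz"                # hypothetical
inputs.std2anat_xfm = "sub-01_from-MNI_to-T1w_xfm.h5"            # hypothetical
wf.run()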
Example #17
0
def init_infant_brain_extraction_wf(
    ants_affine_init=False,
    bspline_fitting_distance=200,
    debug=False,
    in_template="MNIInfant",
    template_specs=None,
    interim_checkpoints=True,
    mem_gb=3.0,
    mri_scheme="T2w",
    name="infant_brain_extraction_wf",
    atropos_model=None,
    omp_nthreads=None,
    output_dir=None,
    use_float=True,
):
    """
    Build an atlas-based brain extraction pipeline for infant T2w MRI data.

    Parameters
    ----------
    ants_affine_init : :obj:`bool`, optional
        Set-up a pre-initialization step with ``antsAI`` to account for mis-oriented images.

    """
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_files", "in_mask"]),
                        name="inputnode")
    outputnode = pe.Node(niu.IdentityInterface(
        fields=["out_corrected", "out_brain", "out_mask"]),
                         name="outputnode")

    template_specs = template_specs or {}
    # Find a suitable target template in TemplateFlow
    tpl_target_path = get_template(in_template,
                                   suffix=mri_scheme,
                                   **template_specs)
    if not tpl_target_path:
        raise RuntimeError(
            f"An instance of template <tpl-{in_template}> with MR scheme '{mri_scheme}'"
            " could not be found.")

    # tpl_brainmask_path = get_template(
    #     in_template, desc="brain", suffix="probseg", **template_specs
    # )
    # if not tpl_brainmask_path:

    # ignore probseg for the time being
    tpl_brainmask_path = get_template(in_template,
                                      desc="brain",
                                      suffix="mask",
                                      **template_specs)

    tpl_regmask_path = get_template(in_template,
                                    desc="BrainCerebellumExtraction",
                                    suffix="mask",
                                    **template_specs)

    # validate images
    val_tmpl = pe.Node(ValidateImage(), name='val_tmpl')
    val_tmpl.inputs.in_file = _pop(tpl_target_path)

    val_target = pe.Node(ValidateImage(), name='val_target')

    # Resample both target and template to a controlled, isotropic resolution
    res_tmpl = pe.Node(RegridToZooms(zooms=HIRES_ZOOMS),
                       name="res_tmpl")  # testing
    res_target = pe.Node(RegridToZooms(zooms=HIRES_ZOOMS),
                         name="res_target")  # testing
    gauss_tmpl = pe.Node(niu.Function(function=_gauss_filter),
                         name="gauss_tmpl")

    # Spatial normalization step
    lap_tmpl = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"),
                       name="lap_tmpl")
    lap_target = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"),
                         name="lap_target")

    # Merge image nodes
    mrg_target = pe.Node(niu.Merge(2), name="mrg_target")
    mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")

    norm_lap_tmpl = pe.Node(niu.Function(function=_trunc),
                            name="norm_lap_tmpl")
    norm_lap_tmpl.inputs.dtype = "float32"
    norm_lap_tmpl.inputs.out_max = 1.0
    norm_lap_tmpl.inputs.percentile = (0.01, 99.99)
    norm_lap_tmpl.inputs.clip_max = None

    norm_lap_target = pe.Node(niu.Function(function=_trunc),
                              name="norm_lap_target")
    norm_lap_target.inputs.dtype = "float32"
    norm_lap_target.inputs.out_max = 1.0
    norm_lap_target.inputs.percentile = (0.01, 99.99)
    norm_lap_target.inputs.clip_max = None

    # Set up initial spatial normalization
    ants_params = "testing" if debug else "precise"
    norm = pe.Node(
        Registration(from_file=pkgr_fn(
            "niworkflows.data", f"antsBrainExtraction_{ants_params}.json")),
        name="norm",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm.inputs.float = use_float

    # main workflow
    wf = pe.Workflow(name)
    # Create a buffer interface as a cache for the actual inputs to registration
    buffernode = pe.Node(
        niu.IdentityInterface(fields=["hires_target", "smooth_target"]),
        name="buffernode")

    # truncate target intensity for N4 correction
    clip_target = pe.Node(
        niu.Function(function=_trunc),
        name="clip_target",
    )
    clip_tmpl = pe.Node(
        niu.Function(function=_trunc),
        name="clip_tmpl",
    )
    #clip_tmpl.inputs.in_file = _pop(tpl_target_path)

    # INU correction of the target image
    init_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=False,
            copy_header=True,
            n_iterations=[50] * (4 - debug),
            convergence_threshold=1e-7,
            shrink_factor=4,
            bspline_fitting_distance=bspline_fitting_distance,
        ),
        n_procs=omp_nthreads,
        name="init_n4",
    )
    clip_inu = pe.Node(
        niu.Function(function=_trunc),
        name="clip_inu",
    )
    gauss_target = pe.Node(niu.Function(function=_gauss_filter),
                           name="gauss_target")
    wf.connect([
        # truncation, resampling, and initial N4
        (inputnode, val_target, [(("in_files", _pop), "in_file")]),
        # (inputnode, res_target, [(("in_files", _pop), "in_file")]),
        (val_target, res_target, [("out_file", "in_file")]),
        (res_target, clip_target, [("out_file", "in_file")]),
        (val_tmpl, clip_tmpl, [("out_file", "in_file")]),
        (clip_tmpl, res_tmpl, [("out", "in_file")]),
        (clip_target, init_n4, [("out", "input_image")]),
        (init_n4, clip_inu, [("output_image", "in_file")]),
        (clip_inu, gauss_target, [("out", "in_file")]),
        (clip_inu, buffernode, [("out", "hires_target")]),
        (gauss_target, buffernode, [("out", "smooth_target")]),
        (res_tmpl, gauss_tmpl, [("out_file", "in_file")]),
        # (clip_tmpl, gauss_tmpl, [("out", "in_file")]),
    ])

    # Graft a template registration-mask if present
    if tpl_regmask_path:
        hires_mask = pe.Node(ApplyTransforms(
            input_image=_pop(tpl_regmask_path),
            transforms="identity",
            interpolation="NearestNeighbor",
            float=True),
                             name="hires_mask",
                             mem_gb=1)
        wf.connect([
            (res_tmpl, hires_mask, [("out_file", "reference_image")]),
        ])

    map_brainmask = pe.Node(ApplyTransforms(interpolation="Gaussian",
                                            float=True),
                            name="map_brainmask",
                            mem_gb=1)
    map_brainmask.inputs.input_image = str(tpl_brainmask_path)

    thr_brainmask = pe.Node(Binarize(thresh_low=0.80), name="thr_brainmask")
    bspline_grid = pe.Node(niu.Function(function=_bspline_distance),
                           name="bspline_grid")

    # Refine INU correction
    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 5,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    final_mask = pe.Node(ApplyMask(), name="final_mask")

    if atropos_model is None:
        atropos_model = tuple(ATROPOS_MODELS[mri_scheme].values())

    atropos_wf = init_atropos_wf(
        use_random_seed=False,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        in_segmentation_model=atropos_model,
    )
    # if tpl_regmask_path:
    #     atropos_wf.get_node('inputnode').inputs.in_mask_dilated = tpl_regmask_path

    sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1),
                     name='sel_wm',
                     run_without_submitting=True)

    wf.connect([
        (inputnode, map_brainmask, [(("in_files", _pop), "reference_image")]),
        (inputnode, final_n4, [(("in_files", _pop), "input_image")]),
        (inputnode, bspline_grid, [(("in_files", _pop), "in_file")]),
        # (bspline_grid, final_n4, [("out", "bspline_fitting_distance")]),
        (bspline_grid, final_n4, [("out", "args")]),
        # merge laplacian and original images
        (buffernode, lap_target, [("smooth_target", "op1")]),
        (buffernode, mrg_target, [("hires_target", "in1")]),
        (lap_target, norm_lap_target, [("output_image", "in_file")]),
        (norm_lap_target, mrg_target, [("out", "in2")]),
        # Template massaging
        (res_tmpl, lap_tmpl, [("out_file", "op1")]),
        (res_tmpl, mrg_tmpl, [("out_file", "in1")]),
        (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]),
        (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]),
        # spatial normalization
        (mrg_target, norm, [("out", "moving_image")]),
        (mrg_tmpl, norm, [("out", "fixed_image")]),
        (norm, map_brainmask, [("reverse_transforms", "transforms"),
                               ("reverse_invert_flags",
                                "invert_transform_flags")]),
        (map_brainmask, thr_brainmask, [("output_image", "in_file")]),
        # take a second pass of N4
        (map_brainmask, final_n4, [("output_image", "weight_image")]),
        (final_n4, final_mask, [("output_image", "in_file")]),
        (thr_brainmask, final_mask, [("out_mask", "in_mask")]),
        (final_n4, outputnode, [("output_image", "out_corrected")]),
        (thr_brainmask, outputnode, [("out_mask", "out_mask")]),
        (final_mask, outputnode, [("out_file", "out_brain")]),
    ])

    # wf.disconnect([
    #     (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
    #     (copy_xform, outputnode, [('out_mask', 'out_mask')]),
    # ])

    # wf.connect([
    #     (init_n4, atropos_wf, [
    #         ('output_image', 'inputnode.in_files')]),  # intensity image
    #     (thr_brainmask, atropos_wf, [
    #         ('out_mask', 'inputnode.in_mask')]),
    #     (atropos_wf, sel_wm, [('outputnode.out_tpms', 'inlist')]),
    #     (sel_wm, final_n4, [('out', 'weight_image')]),
    # ])
    # wf.connect([
    # (atropos_wf, outputnode, [
    #     ('outputnode.out_mask', 'out_mask'),
    #     ('outputnode.out_segm', 'out_segm'),
    #     ('outputnode.out_tpms', 'out_tpms')]),
    # ])

    if tpl_regmask_path:
        wf.connect([
            (hires_mask, norm, [("output_image", "fixed_image_masks")]),
            # (hires_mask, atropos_wf, [
            #     ("output_image", "inputnode.in_mask_dilated")]),
        ])

    if interim_checkpoints:
        final_apply = pe.Node(ApplyTransforms(interpolation="BSpline",
                                              float=True),
                              name="final_apply",
                              mem_gb=1)
        final_report = pe.Node(SimpleBeforeAfter(
            before_label=f"tpl-{in_template}",
            after_label="target",
            out_report="final_report.svg"),
                               name="final_report")
        wf.connect([
            (inputnode, final_apply, [(("in_files", _pop), "reference_image")
                                      ]),
            (res_tmpl, final_apply, [("out_file", "input_image")]),
            (norm, final_apply, [("reverse_transforms", "transforms"),
                                 ("reverse_invert_flags",
                                  "invert_transform_flags")]),
            (final_apply, final_report, [("output_image", "before")]),
            (outputnode, final_report, [("out_corrected", "after"),
                                        ("out_mask", "wm_seg")]),
        ])

    if output_dir:
        from nipype.interfaces.io import DataSink
        ds_final_inu = pe.Node(DataSink(base_directory=str(output_dir.parent)),
                               name="ds_final_inu")
        ds_final_msk = pe.Node(DataSink(base_directory=str(output_dir.parent)),
                               name="ds_final_msk")
        ds_report = pe.Node(DataSink(base_directory=str(output_dir.parent)),
                            name="ds_report")

        wf.connect([
            (outputnode, ds_final_inu,
             [("out_corrected", f"{output_dir.name}.@inu_corrected")]),
            (outputnode, ds_final_msk, [("out_mask",
                                         f"{output_dir.name}.@brainmask")]),
            (final_report, ds_report, [("out_report",
                                        f"{output_dir.name}.@report")]),
        ])

    if not ants_affine_init:
        return wf

    # Initialize transforms with antsAI
    lowres_tmpl = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS),
                          name="lowres_tmpl")
    lowres_target = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS),
                            name="lowres_target")

    init_aff = pe.Node(
        AI(
            metric=("Mattes", 32, "Regular", 0.25),
            transform=("Affine", 0.1),
            search_factor=(15, 0.1),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            search_grid=(40, (0, 40, 40)),
            verbose=True,
        ),
        name="init_aff",
        n_procs=omp_nthreads,
    )
    wf.connect([
        (gauss_tmpl, lowres_tmpl, [("out", "in_file")]),
        (lowres_tmpl, init_aff, [("out_file", "fixed_image")]),
        (gauss_target, lowres_target, [("out", "in_file")]),
        (lowres_target, init_aff, [("out_file", "moving_image")]),
        (init_aff, norm, [("output_transform", "initial_moving_transform")]),
    ])

    if tpl_regmask_path:
        lowres_mask = pe.Node(ApplyTransforms(
            input_image=_pop(tpl_regmask_path),
            transforms="identity",
            interpolation="MultiLabel",
            float=True),
                              name="lowres_mask",
                              mem_gb=1)
        wf.connect([
            (lowres_tmpl, lowres_mask, [("out_file", "reference_image")]),
            (lowres_mask, init_aff, [("output_image", "fixed_image_mask")]),
        ])

    if interim_checkpoints:
        init_apply = pe.Node(ApplyTransforms(interpolation="BSpline",
                                             float=True),
                             name="init_apply",
                             mem_gb=1)
        init_report = pe.Node(SimpleBeforeAfter(
            before_label=f"tpl-{in_template}",
            after_label="target",
            out_report="init_report.svg"),
                              name="init_report")
        wf.connect([
            (lowres_target, init_apply, [("out_file", "input_image")]),
            (res_tmpl, init_apply, [("out_file", "reference_image")]),
            (init_aff, init_apply, [("output_transform", "transforms")]),
            (init_apply, init_report, [("output_image", "after")]),
            (res_tmpl, init_report, [("out_file", "before")]),
        ])

        if output_dir:
            ds_init_report = pe.Node(
                DataSink(base_directory=str(output_dir.parent)),
                name="ds_init_report")
            wf.connect(init_report, "out_report", ds_init_report,
                       f"{output_dir.name}.@init_report")
    return wf
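A closing usage sketch for the single-modality variant above: the body dereferences ``output_dir.parent`` and ``output_dir.name``, so a ``pathlib.Path`` is assumed for ``output_dir``; input file names are hypothetical.

# Usage sketch for init_infant_brain_extraction_wf as defined above.
# output_dir is assumed to be a pathlib.Path (the body uses .parent/.name);
# file paths are placeholders.
from pathlib import Path

wf = init_infant_brain_extraction_wf(
    in_template="MNIInfant",
    template_specs={"cohort": 1},   # hypothetical cohort selection
    mri_scheme="T2w",
    ants_affine_init=True,
    omp_nthreads=4,
    output_dir=Path("derivatives/infant_brainextract"),
)
wf.base_dir = "work"
wf.get_node("inputnode").inputs.in_files = ["sub-01_T2w.nii.gz"]  # hypothetical
wf.run()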