Exemplo n.º 1
0
def spm_mrpet_preprocessing(wf_name="spm_mrpet_preproc"):
    """ Run the PET pre-processing workflow against the
    gunzip_pet.in_file files.
    It depends on the anat_preproc_workflow, so if this
    has not been run, this function will run it too.

    # TODO: organize the anat2pet hack/condition somehow:
    If anat2pet:
    - SPM12 Coregister T1 and tissues to PET
    - PETPVC the PET image in PET space
    - SPM12 Warp PET to MNI
    else:
    - SPM12 Coregister PET to T1
    - PETPVC the PET image in anatomical space
    - SPM12 Warp PET in anatomical space to MNI through the
    `anat_to_mni_warp`.

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pet_input.in_file: traits.File
        The raw NIFTI_GZ PET image file

    pet_input.anat: traits.File
        Path to the high-contrast anatomical image.
        Reference file of the warp_field, i.e., the
        anatomical image in its native space.

    pet_input.anat_to_mni_warp: traits.File
        The warp field from the transformation of the
        anatomical image to the standard MNI space.

    pet_input.atlas_anat: traits.File
        The atlas file in anatomical space.

    pet_input.tissues: list of traits.File
        List of tissues files from the New Segment process.
        At least the first 3 tissues must be present.

    Nipype outputs
    --------------
    pet_output.pvc_out: existing file
        The results of the PVC process

    pet_output.pvc_mask: existing file
        The mask file used by the PETPVC process.

    pet_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pet_output.coreg_ref: existing file
        The coregistered reference image to PET space.

    pet_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files

    pet_output.pvc_warped: existing file
        Results from PETPVC normalized to MNI.
        The result of every internal pre-processing step
        is normalized to MNI here.

    pet_output.warp_field: existing files
        Spatial normalization parameters .mat files

    pet_output.gm_norm: existing file
        The output of the grey matter intensity
        normalization process.
        This is the last step in the PET signal correction,
        before registration.

    pet_output.atlas_pet: existing file
        Atlas image warped to PET space.
        If the `atlas_file` option is an existing file and
        `normalize_atlas` is True.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = ["in_file", "anat", "anat_to_mni_warp", "tissues"]

    out_fields = [
        "brain_mask",
        "coreg_others",
        "coreg_ref",
        "pvc_warped",
        "pet_warped",  # 'pet_warped' is a dummy entry to keep the fields pattern.
        "warp_field",
        "pvc_out",
        "pvc_mask",
        "gm_norm"
    ]

    # optionally also warp an atlas to PET space
    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_anat"]
        out_fields += ["atlas_pet"]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="pet_input")

    # workflow to perform partial volume correction
    petpvc = petpvc_workflow(wf_name="petpvc")

    merge_list = setup_node(Merge(4), name='merge_for_unzip')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    warp_pet = setup_node(spm_normalize(), name="warp_pet")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # output
    pet_output = setup_node(IdentityInterface(fields=out_fields),
                            name="pet_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # check how to perform the registration, to decide how to build the pipeline
    anat2pet = get_config_setting('registration.anat2pet', False)
    if anat2pet:
        wf.connect([
            # inputs
            (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                                 ("anat", "pvc_input.reference_file"),
                                 ("tissues", "pvc_input.tissues")]),

            # gunzip some files for SPM Normalize
            (petpvc, merge_list, [("pvc_output.pvc_out", "in1"),
                                  ("pvc_output.brain_mask", "in2"),
                                  ("pvc_output.gm_norm", "in3")]),
            (pet_input, merge_list, [("in_file", "in4")]),
            (merge_list, gunzipper, [("out", "in_file")]),

            # warp the PET PVCed to MNI
            (petpvc, warp_pet, [("pvc_output.coreg_ref", "image_to_align")]),
            (gunzipper, warp_pet, [("out_file", "apply_to_files")]),
            (tpm_bbox, warp_pet, [("bbox", "write_bounding_box")]),

            # output
            # NOTE: the PVC mask connection was added here for consistency
            # with the PET->ANAT branch below, so `pet_output.pvc_mask` is
            # always populated (the datasink downstream consumes it).
            (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                                  ("pvc_output.brain_mask", "brain_mask"),
                                  ("pvc_output.petpvc_mask", "pvc_mask"),
                                  ("pvc_output.coreg_ref", "coreg_ref"),
                                  ("pvc_output.coreg_others", "coreg_others"),
                                  ("pvc_output.gm_norm", "gm_norm")]),

            # output
            (warp_pet, pet_output, [("normalized_files", "pvc_warped"),
                                    ("deformation_field", "warp_field")]),
        ])
    else:  # PET 2 ANAT
        collector = setup_node(Merge(2), name='merge_for_warp')
        apply_warp = setup_node(spm_apply_deformations(), name="warp_pet")

        wf.connect([
            # inputs
            (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                                 ("anat", "pvc_input.reference_file"),
                                 ("tissues", "pvc_input.tissues")]),

            # gunzip some files for SPM Normalize
            (petpvc, merge_list, [("pvc_output.pvc_out", "in1"),
                                  ("pvc_output.brain_mask", "in2"),
                                  ("pvc_output.gm_norm", "in3")]),
            (pet_input, merge_list, [("in_file", "in4")]),
            (merge_list, gunzipper, [("out", "in_file")]),

            # warp the PET PVCed to MNI
            (gunzipper, collector, [("out_file", "in1")]),
            (petpvc, collector, [("pvc_output.coreg_ref", "in2")]),
            (pet_input, apply_warp, [("anat_to_mni_warp", "deformation_file")
                                     ]),
            (collector, apply_warp, [("out", "apply_to_files")]),
            (tpm_bbox, apply_warp, [("bbox", "write_bounding_box")]),

            # output
            # FIX: the destination field declared in `out_fields` is
            # "pvc_mask", not "petpvc_mask"; connecting to an undeclared
            # IdentityInterface field raises an error in nipype, and the
            # attach function reads `pet_output.pvc_mask`.
            (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                                  ("pvc_output.brain_mask", "brain_mask"),
                                  ("pvc_output.petpvc_mask", "pvc_mask"),
                                  ("pvc_output.coreg_ref", "coreg_ref"),
                                  ("pvc_output.coreg_others", "coreg_others"),
                                  ("pvc_output.gm_norm", "gm_norm")]),

            # output
            (apply_warp, pet_output, [("normalized_files", "pvc_warped"),
                                      ("deformation_field", "warp_field")]),
        ])

    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"),
                                 name="coreg_atlas")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
            (pet_input, coreg_atlas, [("anat", "source")]),
            (petpvc, coreg_atlas, [("pvc_output.coreg_ref", "target")]),
            (pet_input, coreg_atlas, [("atlas_anat", "apply_to_files")]),
            (coreg_atlas, pet_output, [("coregistered_files", "atlas_pet")]),
        ])

    return wf
Exemplo n.º 2
0
def attach_spm_mrpet_preprocessing(main_wf,
                                   wf_name="spm_mrpet_preproc",
                                   do_group_template=False):
    """ Attach a PET pre-processing workflow that uses SPM12 to `main_wf`.
    This workflow needs MRI based workflow.

    This function is using the workflows defined in the function above:
    spm_mrpet_preprocessing or spm_mrpet_grouptemplate_preprocessing. Depending
    if group template is enabled.

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have an `input_files` and a
    `datasink` nodes.

    input_files.select.pet: input node

    datasink: nipype Node

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    do_group_template: bool
        If True will attach the group template creation and pre-processing pipeline.

    Nipype Workflow Dependencies
    ----------------------------
    This workflow depends on:
    - spm_anat_preproc

    Returns
    -------
    main_wf: nipype Workflow
    """
    # Dependency workflows: fetch the input selector and datasink nodes
    # that `main_wf` is documented to provide.
    in_files = get_input_node(main_wf)
    datasink = get_datasink(main_wf)

    # the anatomical preprocessing pipeline must already be attached;
    # its "anat_output" node feeds the PET pipeline below.
    anat_output = get_interface_node(main_wf, "anat_output")

    # The base name of the 'pet' file for the substitutions
    anat_fbasename = remove_ext(
        os.path.basename(get_input_file_name(in_files, 'anat')))
    pet_fbasename = remove_ext(
        os.path.basename(get_input_file_name(in_files, 'pet')))

    # get the PET preprocessing pipeline
    if do_group_template:
        pet_wf = spm_mrpet_grouptemplate_preprocessing(wf_name=wf_name)
        template_name = 'grptemplate'
        output_subfolder = 'group_template'
    else:
        pet_wf = spm_mrpet_preprocessing(wf_name=wf_name)
        template_name = 'stdtemplate'
        output_subfolder = 'std_template'

    # dataSink output substitutions: rename SPM's prefixed output files
    # ('w' = warped, 'r' = resliced/coregistered) to descriptive names.
    # '{pet}', '{anat}' and '{template}' are filled by format_pair_list below.
    regexp_subst = [
        (r"/{pet}_.*_pvc.nii.gz$", "/{pet}_pvc.nii.gz"),
        (r"/{pet}_.*_pvc_maths.nii.gz$", "/{pet}_pvc_norm.nii.gz"),
        (r"/{pet}_.*_pvc_intnormed.nii.gz$", "/{pet}_pvc_norm.nii.gz"),
        (r"/tissues_brain_mask.nii$", "/brain_mask_anat.nii"),
        (r"/w{pet}.nii", "/{pet}_{template}.nii"),
        (r"/w{pet}_.*_pvc.nii$", "/{pet}_pvc_{template}.nii"),
        (r"/w{pet}_.*_pvc_maths.nii$", "/{pet}_pvc_norm_{template}.nii"),
        (r"/w{pet}_.*_pvc_intnormed.nii$", "/{pet}_pvc_norm_{template}.nii"),
        (r"/wbrain_mask.nii", "/brain_mask_{template}.nii"),
        (r"/r{pet}.nii", "/{pet}_anat.nii"),
        (r"/r{pet}_.*_pvc.nii$", "/{pet}_pvc_anat.nii"),
        (r"/r{pet}_.*_pvc_maths.nii$", "/{pet}_pvc_norm_anat.nii"),
        (r"/r{pet}_.*_pvc_intnormed.nii$", "/{pet}_pvc_norm_anat.nii"),
        (r"/y_rm{anat}_corrected.nii", "/{anat}_{pet}_warpfield.nii"),
        (r"/rm{anat}_corrected.nii$", "/{anat}_{pet}.nii"),
        (r"/rc1{anat}_corrected.nii$", "/gm_{pet}.nii"),
        (r"/rc2{anat}_corrected.nii$", "/wm_{pet}.nii"),
        (r"/rc3{anat}_corrected.nii$", "/csf_{pet}.nii"),
    ]
    regexp_subst = format_pair_list(regexp_subst,
                                    pet=pet_fbasename,
                                    anat=anat_fbasename,
                                    template=template_name)

    # prepare substitution for atlas_file, if any
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        atlas_basename = remove_ext(os.path.basename(atlas_file))
        regexp_subst.extend([(r"/[\w]*{atlas}\.nii$", "/{atlas}_{pet}.nii")])
        regexp_subst = format_pair_list(regexp_subst,
                                        pet=pet_fbasename,
                                        atlas=atlas_basename)

    # also cover the .nii.gz variants of each pattern, then prefix the
    # datasink subfolder so the patterns only match PET outputs.
    regexp_subst += extension_duplicates(regexp_subst)
    regexp_subst = concat_to_pair_list(regexp_subst, prefix='/mrpet')

    datasink.inputs.regexp_substitutions = extend_trait_list(
        datasink.inputs.regexp_substitutions, regexp_subst)

    # Connect the nodes
    main_wf.connect([
        # pet file input
        (in_files, pet_wf, [("pet", "pet_input.in_file")]),

        # pet to anat registration
        (anat_output, pet_wf, [("anat_biascorr", "pet_input.anat"),
                               ("tissues_native", "pet_input.tissues")]),
        (
            pet_wf,
            datasink,
            [
                ("pet_output.gm_norm", "mrpet.@norm"),
                ("pet_output.coreg_others",
                 "mrpet.tissues"),  # careful changing this, look regexp_subst
                ("pet_output.coreg_ref", "mrpet.@anat"),
                ("pet_output.pvc_mask", "mrpet.@pvc_mask"),
                ("pet_output.pvc_out", "mrpet.@pvc"),
                ("pet_output.brain_mask", "mrpet.@brain_mask"),
                ("pet_output.pvc_warped",
                 "mrpet.{}.@pvc".format(output_subfolder)),
                ("pet_output.warp_field",
                 "mrpet.{}.@warp_field".format(output_subfolder)),
                ("pet_output.pet_warped",
                 "mrpet.{}.@pet_warped".format(output_subfolder)),
            ])
    ])

    # the anat->MNI warp is only needed when normalizing to the standard
    # template; the group-template pipeline registers to its own template.
    if not do_group_template:
        # Connect the nodes
        main_wf.connect([
            # pet to anat registration
            (anat_output, pet_wf, [("warp_forward",
                                    "pet_input.anat_to_mni_warp")]),
        ])

    if do_atlas:
        main_wf.connect([
            (anat_output, pet_wf, [("atlas_anat", "pet_input.atlas_anat")]),
            (pet_wf, datasink, [("pet_output.atlas_pet", "mrpet.@atlas")]),
        ])

    return main_wf
Exemplo n.º 3
0
def attach_spm_anat_preprocessing(main_wf, wf_name="spm_anat_preproc"):
    """ Attach the SPM12 anatomical MRI pre-processing workflow to
    the `main_wf`.

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have an
    `input_files` and a `datasink` nodes.

    input_files.anat: input node

    datasink: nipype Node

    Returns
    -------
    main_wf: nipype Workflow
    """
    # fetch the input selector and datasink nodes from `main_wf`
    in_files = get_input_node(main_wf)
    datasink = get_datasink(main_wf)

    # The workflow box
    anat_wf = spm_anat_preprocessing(wf_name=wf_name)

    # The base name of the 'anat' file for the substitutions
    anat_fbasename = remove_ext(
        os.path.basename(get_input_file_name(in_files, 'anat')))

    # dataSink output substitutions: map SPM's prefixed outputs
    # ('m' = bias corrected, 'w' = warped, 'c1..c5' = tissue classes,
    # 'mwc' = modulated warped class, 'y_'/'iy_' = forward/inverse
    # deformation fields) to descriptive file names.
    regexp_subst = [
        (r"/{anat}_.*corrected_seg8.mat$", "/{anat}_to_mni_affine.mat"),
        (r"/m{anat}.*_corrected.nii$", "/{anat}_biascorrected.nii"),
        (r"/wm{anat}.*_corrected.nii$", "/{anat}_mni.nii"),
        (r"/y_{anat}.*nii$", "/{anat}_to_mni_field.nii"),
        (r"/iy_{anat}.*nii$", "/{anat}_to_mni_inv_field.nii"),
        (r"/mwc1{anat}.*nii$", "/{anat}_gm_mod_mni.nii"),
        (r"/mwc2{anat}.*nii$", "/{anat}_wm_mod_mni.nii"),
        (r"/mwc3{anat}.*nii$", "/{anat}_csf_mod_mni.nii"),
        (r"/mwc4{anat}.*nii$", "/{anat}_nobrain_mod_mni.nii"),
        (r"/c1{anat}.*nii$", "/{anat}_gm.nii"),
        (r"/c2{anat}.*nii$", "/{anat}_wm.nii"),
        (r"/c3{anat}.*nii$", "/{anat}_csf.nii"),
        (r"/c4{anat}.*nii$", "/{anat}_nobrain.nii"),
        (r"/c5{anat}.*nii$", "/{anat}_nobrain_mask.nii"),
        (r"/direct_cortical_thickness.nii$",
         "/{anat}_gm_cortical_thickness.nii"),
        (r"/direct_warped_white_matter.nii$",
         "/{anat}_warped_white_matter.nii"),
    ]
    regexp_subst = format_pair_list(regexp_subst, anat=anat_fbasename)

    # prepare substitution for atlas_file, if any
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        atlas_basename = remove_ext(os.path.basename(atlas_file))
        regexp_subst.extend([
            (r"/w{atlas}\.nii$", "/{atlas}_anat_space.nii"),
        ])
        regexp_subst = format_pair_list(regexp_subst,
                                        anat=anat_fbasename,
                                        atlas=atlas_basename)

    # add nii.gz patterns
    regexp_subst += extension_duplicates(regexp_subst)

    # add parent folder to paths
    regexp_subst = concat_to_pair_list(regexp_subst, prefix='/anat')

    datasink.inputs.regexp_substitutions = extend_trait_list(
        datasink.inputs.regexp_substitutions, regexp_subst)

    # wire the anatomical pipeline between input selection and the datasink
    main_wf.connect([
        (in_files, anat_wf, [("anat", "anat_input.in_file")]),
        (anat_wf, datasink, [
            ("anat_output.anat_mni", "anat.@mni"),
            ("anat_output.tissues_warped", "anat.tissues.warped"),
            ("anat_output.tissues_native", "anat.tissues.native"),
            ("anat_output.affine_transform", "anat.transform.@linear"),
            ("anat_output.warp_forward", "anat.transform.@forward"),
            ("anat_output.warp_inverse", "anat.transform.@inverse"),
            ("anat_output.anat_biascorr", "anat.@biascor"),
            ("anat_output.brain_mask", "anat.@brain_mask"),
        ]),
    ])

    # check optional outputs
    if do_atlas:
        main_wf.connect([
            (anat_wf, datasink, [("anat_output.atlas_anat", "anat.@atlas")]),
        ])

    # only sink cortical thickness results if the SPM+DiReCT step is enabled
    do_cortical_thickness = get_config_setting(
        'anat_preproc.do_cortical_thickness', False)
    if do_cortical_thickness:
        main_wf.connect([
            (anat_wf, datasink, [
                ("anat_output.cortical_thickness", "anat.@cortical_thickness"),
                ("anat_output.warped_white_matter",
                 "anat.@warped_white_matter"),
            ]),
        ])

    return main_wf
Exemplo n.º 4
0
def spm_mrpet_grouptemplate_preprocessing(
        wf_name="spm_mrpet_grouptemplate_preproc"):
    """ Run the PET pre-processing workflow against the gunzip_pet.in_file files.
    It depends on the anat_preproc_workflow, so if this has not been run, this function
    will run it too.

    This is identical to the workflow defined in `spm_mrpet_preprocessing`,
    with the only difference that we now normalize all subjects agains a custom
    template using the spm Old Normalize interface.

    It does:
    - SPM12 Coregister T1 and tissues to PET
    - PVC the PET image in PET space
    - SPM12 Warp PET to the given template

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pet_input.in_file: traits.File
        The raw NIFTI_GZ PET image file.

    pet_input.atlas_anat: traits.File
        The atlas file in anatomical space.

    pet_input.anat: traits.File
        Path to the high-contrast anatomical image.
        Reference file of the warp_field, i.e., the anatomical image in its native space.

    pet_input.tissues: list of traits.File
        List of tissues files from the New Segment process. At least the first
        3 tissues must be present.

    pet_input.pet_template: traits.File
        The template file for inter-subject registration reference.

    Nipype outputs
    --------------
    pet_output.pvc_out: existing file
        The results of the PVC process.

    pet_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pet_output.coreg_ref: existing file
        The coregistered reference image to PET space.

    pet_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files.

    pet_output.pet_warped: existing file
        PET image normalized to the group template.

    pet_output.pvc_warped: existing file
        The outputs of the PETPVC workflow normalized to the group template.
        The result of every internal pre-processing step is normalized to the
        group template here.

    pet_output.warp_field: existing files
        Spatial normalization parameters .mat files.

    pet_output.gm_norm: existing file
        The output of the grey matter intensity normalization process.
        This is the last step in the PET signal correction, before registration.

    pet_output.atlas_pet: existing file
        Atlas image warped to PET space.
        If the `atlas_file` option is an existing file and `normalize_atlas` is True.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = ["in_file", "anat", "tissues", "pet_template"]

    # NOTE(review): "pvc_mask" is declared here but never connected in this
    # workflow — confirm whether it should be fed from the PETPVC sub-workflow.
    out_fields = [
        "brain_mask", "coreg_others", "coreg_ref", "pvc_warped", "pet_warped",
        "warp_field", "pvc_out", "pvc_mask", "gm_norm"
    ]

    # optionally also warp an atlas to PET space
    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_anat"]
        out_fields += ["atlas_pet"]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="pet_input")

    # workflow to perform partial volume correction
    petpvc = petpvc_workflow(wf_name="petpvc")

    unzip_mrg = setup_node(Merge(4), name='merge_for_unzip')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    # warp each subject to the group template
    gunzip_template = setup_node(Gunzip(), name="gunzip_template")
    gunzip_pet = setup_node(Gunzip(), name="gunzip_pet")

    # Old Normalize (estimate + write) against the custom group template;
    # the prefix distinguishes these outputs from MNI-normalized files.
    warp_mrg = setup_node(Merge(2), name='merge_for_warp')
    warp2template = setup_node(spm.Normalize(jobtype="estwrite",
                                             out_prefix="wgrptemplate_"),
                               name="warp2template")

    get_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="get_bbox")

    # output
    pet_output = setup_node(IdentityInterface(fields=out_fields),
                            name="pet_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    wf.connect([
        # inputs
        (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                             ("anat", "pvc_input.reference_file"),
                             ("tissues", "pvc_input.tissues")]),

        # get template bounding box to apply to results
        (pet_input, get_bbox, [("pet_template", "in_file")]),

        # gunzip some inputs
        (pet_input, gunzip_pet, [("in_file", "in_file")]),
        (pet_input, gunzip_template, [("pet_template", "in_file")]),

        # gunzip some files for SPM Normalize
        (petpvc, unzip_mrg, [("pvc_output.pvc_out", "in1"),
                             ("pvc_output.brain_mask", "in2"),
                             ("pvc_output.gm_norm", "in3")]),
        (pet_input, unzip_mrg, [("in_file", "in4")]),
        (unzip_mrg, gunzipper, [("out", "in_file")]),
        # `warp_mrg.in2` is fed by the atlas branch below (if enabled);
        # flatten_list collapses the nested file lists for apply_to_files.
        (gunzipper, warp_mrg, [("out_file", "in1")]),
        (warp_mrg, warp2template, [(("out", flatten_list), "apply_to_files")]),

        # prepare the target parameters of the warp to template
        (gunzip_pet, warp2template, [("out_file", "source")]),
        (gunzip_template, warp2template, [("out_file", "template")]),
        (get_bbox, warp2template, [("bbox", "write_bounding_box")]),

        # output
        (warp2template, pet_output, [
            ("normalization_parameters", "warp_field"),
            ("normalized_files", "pvc_warped"),
            ("normalized_source", "pet_warped"),
        ]),

        # output
        (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                              ("pvc_output.brain_mask", "brain_mask"),
                              ("pvc_output.coreg_ref", "coreg_ref"),
                              ("pvc_output.coreg_others", "coreg_others"),
                              ("pvc_output.gm_norm", "gm_norm")]),
    ])

    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"),
                                 name="coreg_atlas")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
            (pet_input, coreg_atlas, [("anat", "source")]),
            (petpvc, coreg_atlas, [("pvc_output.coreg_ref", "target")]),
            (pet_input, coreg_atlas, [("atlas_anat", "apply_to_files")]),
            (coreg_atlas, pet_output, [("coregistered_files", "atlas_pet")]),
            # warp the atlas to the template space as well
            (coreg_atlas, warp_mrg, [("coregistered_files", "in2")])
        ])

    return wf
Exemplo n.º 5
0
def spm_anat_preprocessing(wf_name="spm_anat_preproc"):
    """ Run the T1 pre-processing workflow against the anat_hc
    files in `data_dir`.

    It does:
    - N4BiasFieldCorrection
    - SPM12 New Segment
    - SPM12 Warp of MPRAGE to MNI

    [Optional: from config]
    - Atlas file warping to MPRAGE
    - Cortical thickness (SPM+DiReCT)

    Nipype Inputs
    -------------
    anat_input.in_file: traits.File
        Path to the anatomical image.

    anat_input.atlas_file: traits.File
        Path to an atlas file in MNI space to be
        warped to the anatomical space.
        Can also be set through the configuration
        setting `atlas_file`.

    Nipype Outputs
    --------------
    anat_output.anat_mni: traits.File
        The bias-field normalized to MNI anatomical image.

    anat_output.tissues_warped: traits.File
        The tissue segmentation in MNI space from SPM.

    anat_output.tissues_native: traits.File
        The tissue segmentation in native space from SPM.

    anat_output.affine_transform: traits.File
        The affine transformation file.

    anat_output.warp_forward: traits.File
        The forward (anat to MNI) warp field from SPM.

    anat_output.warp_inverse: traits.File
        The inverse (MNI to anat) warp field from SPM.

    anat_output.anat_biascorr: traits.File
        The bias-field corrected anatomical image.

    anat_output.brain_mask: traits.File
        A brain mask file in anatomical space.
        This is calculated by summing up the maps of
        segmented tissues (CSF, WM, GM) and then binarised.

    anat_output.atlas_anat: traits.File
        If `atlas_file` is an existing file in MNI space.
        The atlas file warped to anatomical space,
        if do_atlas and the atlas file is set in configuration.

    anat_output.cortical_thickness: traits.File
        If `anat_preproc.do_cortical_thickness` is True.
        The cortical thickness estimations calculated with the
        SPM+DiReCT method (KellyKapowski).

    anat_output.warped_white_matter: warped_white_matter
        If `anat_preproc.do_cortical_thickness` is True.
        The warped white matter image calculated with the
        SPM+DiReCT method (KellyKapowski).

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = ["in_file"]
    out_fields = [
        "anat_mni",
        "tissues_warped",
        "tissues_native",
        "affine_transform",
        "warp_forward",
        "warp_inverse",
        "anat_biascorr",
        "brain_mask",
    ]

    # check if we have to warp an atlas files too.
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_file"]
        out_fields += ["atlas_anat"]

    # check if we have to do cortical thickness (SPM+DiReCT) method.
    do_cortical_thickness = get_config_setting(
        'anat_preproc.do_cortical_thickness', False)
    if do_cortical_thickness:
        out_fields += [
            "cortical_thickness",
            "warped_white_matter",
        ]

    # input node
    anat_input = pe.Node(IdentityInterface(fields=in_fields,
                                           mandatory_inputs=True),
                         name="anat_input")

    # atlas registration: default the atlas path from configuration when
    # the caller has not set it on the input node already.
    if do_atlas and not isdefined(anat_input.inputs.atlas_file):
        anat_input.inputs.set(atlas_file=atlas_file)

    # T1 preprocessing nodes
    biascor = setup_node(biasfield_correct(), name="bias_correction")
    gunzip_anat = setup_node(Gunzip(), name="gunzip_anat")
    segment = setup_node(spm_segment(), name="new_segment")
    warp_anat = setup_node(spm_apply_deformations(), name="warp_anat")

    # bounding box of the SPM TPM priors, used to crop the warped outputs
    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # calculate brain mask from tissue maps
    tissues = setup_node(IdentityInterface(fields=["gm", "wm", "csf"],
                                           mandatory_inputs=True),
                         name="tissues")

    # binarise the union of the three tissue probability maps
    brain_mask = setup_node(Function(
        function=math_img,
        input_names=["formula", "out_file", "gm", "wm", "csf"],
        output_names=["out_file"],
        imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                            name='brain_mask')
    brain_mask.inputs.out_file = "tissues_brain_mask.nii.gz"
    brain_mask.inputs.formula = "np.abs(gm + wm + csf) > 0"

    # output node
    anat_output = pe.Node(IdentityInterface(fields=out_fields),
                          name="anat_output")

    # Connect the nodes
    wf.connect([
        # input to biasfieldcorrection
        (anat_input, biascor, [("in_file", "input_image")]),

        # new segment
        (biascor, gunzip_anat, [("output_image", "in_file")]),
        (gunzip_anat, segment, [("out_file", "channel_files")]),

        # Normalize12
        (segment, warp_anat, [("forward_deformation_field", "deformation_file")
                              ]),
        (segment, warp_anat, [("bias_corrected_images", "apply_to_files")]),
        (tpm_bbox, warp_anat, [("bbox", "write_bounding_box")]),

        # brain mask from tissues: c1/c2/c3 from New Segment are GM/WM/CSF
        (segment, tissues, [
            (("native_class_images", selectindex, 0), "gm"),
            (("native_class_images", selectindex, 1), "wm"),
            (("native_class_images", selectindex, 2), "csf"),
        ]),
        (tissues, brain_mask, [
            ("gm", "gm"),
            ("wm", "wm"),
            ("csf", "csf"),
        ]),

        # output
        (warp_anat, anat_output, [("normalized_files", "anat_mni")]),
        (segment, anat_output, [("modulated_class_images", "tissues_warped"),
                                ("native_class_images", "tissues_native"),
                                ("transformation_mat", "affine_transform"),
                                ("forward_deformation_field", "warp_forward"),
                                ("inverse_deformation_field", "warp_inverse"),
                                ("bias_corrected_images", "anat_biascorr")]),
        (brain_mask, anat_output, [("out_file", "brain_mask")]),
    ])

    # atlas warping nodes
    if do_atlas:
        gunzip_atlas = pe.Node(Gunzip(), name="gunzip_atlas")
        warp_atlas = setup_node(spm_apply_deformations(), name="warp_atlas")
        anat_bbox = setup_node(Function(function=get_bounding_box,
                                        input_names=["in_file"],
                                        output_names=["bbox"]),
                               name="anat_bbox")

        # set the warping interpolation to nearest neighbour.
        warp_atlas.inputs.write_interp = 0

        # connect the atlas registration nodes: warp the MNI atlas back to
        # native anatomical space through the inverse deformation field.
        wf.connect([
            (anat_input, gunzip_atlas, [("atlas_file", "in_file")]),
            (gunzip_anat, anat_bbox, [("out_file", "in_file")]),
            (gunzip_atlas, warp_atlas, [("out_file", "apply_to_files")]),
            (segment, warp_atlas, [("inverse_deformation_field",
                                    "deformation_file")]),
            (anat_bbox, warp_atlas, [("bbox", "write_bounding_box")]),
            (warp_atlas, anat_output, [("normalized_files", "atlas_anat")]),
        ])

    # cortical thickness (SPM+DiReCT) method
    if do_cortical_thickness:
        from ..interfaces.ants import KellyKapowski

        # build a labeled segmentation image: GM voxels -> 2, WM voxels -> 3
        segm_img = setup_node(Function(
            function=math_img,
            input_names=["formula", "out_file", "gm", "wm"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                              name='gm-wm_image')
        segm_img.inputs.out_file = "gm_wm.nii.gz"
        segm_img.inputs.formula = '((gm >= 0.5)*2 + (wm > 0.5)*3).astype(np.uint8)'

        # copy the header from the GM tissue image to the result from `gm-wm_image`.
        # this is necessary because the `gm-wm_image` operation sometimes modifies the
        # offset of the image, which will provoke an ANTs exception due to
        # ITK tolerance in ImageToImageFilter
        # https://github.com/stnava/ANTs/issues/74
        cp_hdr = setup_node(Function(
            function=copy_header,
            input_names=["in_file", "data_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                            name='copy_header')

        kk = setup_node(KellyKapowski(), name='direct')
        kk.inputs.cortical_thickness = 'direct_cortical_thickness.nii.gz'
        kk.inputs.warped_white_matter = 'direct_warped_white_matter.nii.gz'

        # connect the cortical thickness (SPM+DiReCT) method
        wf.connect([
            # create segmentation GM+WM file
            (tissues, segm_img, [("gm", "gm"), ("wm", "wm")]),
            (segm_img, cp_hdr, [("out_file", "data_file")]),
            (tissues, cp_hdr, [("gm", "in_file")]),

            # kellykapowski
            (cp_hdr, kk, [("out_file", "segmentation_image")]),
            (tissues, kk, [("gm", "gray_matter_prob_image"),
                           ("wm", "white_matter_prob_image")]),
            (kk, anat_output, [("cortical_thickness", "cortical_thickness"),
                               ("warped_white_matter", "warped_white_matter")
                               ]),
        ])
    return wf
Exemplo n.º 6
0
def attach_camino_tractography(main_wf, wf_name="camino_tract"):
    """ Attach the Camino-based tractography workflow to the `main_wf`.

    Parameters
    ----------
    main_wf: nipype Workflow

    atlas_file: str
        Path to the anatomical atlas.

    wf_name: str
        Name of the preprocessing workflow

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have an `input_files` and a `datasink` nodes.

    input_files.select.diff: input node

    datasink: nipype Node

    Nipype Workflow Dependencies
    ----------------------------
    This workflow depends on:
    - spm_anat_preproc
    - spm_fsl_dti_preprocessing

    Returns
    -------
    main_wf: nipype Workflow
    """
    # nodes of `main_wf` that this workflow plugs into
    input_node = get_input_node(main_wf)
    sink_node = get_datasink(main_wf)
    coreg_out = get_interface_node(main_wf, 'dti_co_output')
    artifact_out = get_interface_node(main_wf, 'dti_art_output')

    # the tractography workflow box
    tractography = camino_tractography(wf_name=wf_name)

    # wire the diffusion MRI inputs into the tractography workflow
    input_connections = [
        (input_node, tractography, [("bval", "tract_input.bval")]),
        (coreg_out, tractography, [("brain_mask_diff", "tract_input.mask")]),
        (artifact_out, tractography, [
            ("eddy_corr_file", "tract_input.diff"),
            ("bvec_rotated", "tract_input.bvec"),
        ]),
    ]

    # sink every tractography result
    output_connections = [
        (tractography, sink_node, [
            ("tract_output.tensor", "tract.@tensor"),
            ("tract_output.tracks", "tract.@tracks"),
            ("tract_output.connectivity", "tract.@connectivity"),
            ("tract_output.mean_fa", "tract.@mean_fa"),
            ("tract_output.fa", "tract.@fa"),
        ]),
    ]

    main_wf.connect(input_connections + output_connections)

    # forward the atlas in diffusion space only when an atlas is configured
    do_atlas, _ = check_atlas_file()
    if do_atlas:
        main_wf.connect([
            (coreg_out, tractography, [("atlas_diff", "tract_input.atlas")])
        ])

    return main_wf
Exemplo n.º 7
0
def attach_spm_fsl_dti_preprocessing(main_wf,
                                     wf_name="spm_fsl_dti_preprocessing"):
    """ Attach a set of pipelines to the `main_wf` for Diffusion MR (`diff`) image processing.
    - dti_artifact_correction
    - spm_anat_to_diff_coregistration
    - dti_tensor_fitting

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have an `input_files` and a `datasink` nodes.

    input_files.select.diff: input node

    datasink: nipype Node

    Returns
    -------
    main_wf: nipype Workflow
    """
    # nodes of `main_wf` that this pipeline plugs into
    input_node = get_input_node(main_wf)
    sink_node = get_datasink(main_wf)
    anat_out = get_interface_node(main_wf, 'anat_output')

    # the artifact detection/correction pipeline must be attached first,
    # it exposes the 'dti_art_output' interface node used below
    main_wf = attach_dti_artifact_correction(main_wf)
    artifact_out = get_interface_node(main_wf, 'dti_art_output')

    # the anatomical-to-diffusion coregistration workflow box
    coreg_wf = spm_anat_to_diff_coregistration(wf_name=wf_name)

    # dataSink output substitutions
    # base names of the 'diff' and 'anat' input files for the renames below
    diff_fbasename = remove_ext(
        os.path.basename(get_input_file_name(input_node, 'diff')))
    anat_fbasename = remove_ext(
        os.path.basename(get_input_file_name(input_node, 'anat')))

    substitutions = format_pair_list(
        [
            (r"/brain_mask_{diff}_space\.nii$", "/brain_mask.nii"),
            (r"/eddy_corrected\.nii$", "/{diff}_eddycor.nii"),
            (r"/rc1anat_hc_corrected\.nii$", "/gm_diff.nii"),
            (r"/rc2anat_hc_corrected\.nii$", "/wm_diff.nii"),
            (r"/rc3anat_hc_corrected\.nii$", "/csf_diff.nii"),
            (r"/rmanat_hc_corrected\.nii$", "/{anat}_diff.nii"),
        ],
        diff=diff_fbasename,
        anat=anat_fbasename,
    )

    # add a rename for the atlas in diffusion space, if an atlas is configured
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        atlas_basename = remove_ext(os.path.basename(atlas_file))
        substitutions.extend([
            (r"/[\w]*{atlas}.*\.nii$", "/{atlas}_{diff}_space.nii"),
        ])
        substitutions = format_pair_list(substitutions,
                                         atlas=atlas_basename,
                                         diff=diff_fbasename)

    # also cover .nii.gz variants, then prefix everything with the 'diff' folder
    substitutions += extension_duplicates(substitutions)
    substitutions = concat_to_pair_list(substitutions, prefix='/diff')
    sink_node.inputs.regexp_substitutions = extend_trait_list(
        sink_node.inputs.regexp_substitutions, substitutions)

    # wire inputs and outputs of the coregistration workflow into `main_wf`
    main_wf.connect([
        (artifact_out, coreg_wf, [("avg_b0", "dti_co_input.avg_b0")]),
        (anat_out, coreg_wf, [
            ("tissues_native", "dti_co_input.tissues"),
            ("anat_biascorr", "dti_co_input.anat"),
        ]),
        (coreg_wf, sink_node, [
            ("dti_co_output.anat_diff", "diff.@anat_diff"),
            ("dti_co_output.tissues_diff", "diff.tissues.@tissues_diff"),
            ("dti_co_output.brain_mask_diff", "diff.@brain_mask"),
        ]),
    ])

    # connect and sink the atlas warped to diffusion space too
    if do_atlas:
        main_wf.connect([
            (anat_out, coreg_wf, [("atlas_anat", "dti_co_input.atlas_anat")]),
            (coreg_wf, sink_node, [("dti_co_output.atlas_diff",
                                    "diff.@atlas")]),
        ])

    return main_wf
Exemplo n.º 8
0
def attach_spm_warp_fmri_wf(main_wf,
                            registration_wf_name="spm_warp_fmri",
                            do_group_template=False):
    """ Attach the fMRI inter-subject spatial normalization workflow to the `main_wf`.

    Parameters
    ----------
    main_wf: nipype Workflow

    registration_wf_name: str
        Name of the registration workflow.

    do_group_template: bool
        If True will attach the group template creation and pre-processing pipeline.

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have an `input_files` and a `datasink` nodes.

    input_files.select.anat: input node

    datasink: nipype Node

    Workflow Dependencies
    ---------------------
    fmri_cleanup, the cleanup and preprocessing of the fMRI data

    spm_anat_preproc, for the anatomical to MNI space transformation

    spm_fmri_template, if do_group_template is True

    Returns
    -------
    main_wf: nipype Workflow
    """
    # Dependency workflows
    anat_wf = get_subworkflow(main_wf, 'spm_anat_preproc')
    cleanup_wf = get_subworkflow(main_wf, 'fmri_cleanup')

    in_files = get_input_node(main_wf)
    datasink = get_datasink(main_wf)

    # the template name tags both the workflow name and the datasink subfolder,
    # so results for both registration targets can coexist
    if do_group_template:
        template_name = 'grptemplate'
    else:
        template_name = 'stdtemplate'

    warp_wf_name = "{}_{}".format(registration_wf_name, template_name)
    warp_fmri_wf = spm_warp_fmri_wf(warp_wf_name,
                                    register_to_grptemplate=do_group_template)

    # dataSink output substitutions
    # The base name of the 'rest' file for the substitutions
    rest_fbasename = remove_ext(
        os.path.basename(get_input_file_name(in_files, 'rest')))
    anat_fbasename = remove_ext(
        os.path.basename(get_input_file_name(in_files, 'anat')))

    # NOTE: the dots in '_sn\.mat' and '_mean\.nii' of the first and third
    # patterns are now escaped for consistency with the other patterns; an
    # unescaped '.' matched any character, so escaping only narrows the match
    # and the produced renames are unchanged.
    regexp_subst = [
        (r"/corr_stc{fmri}_trim_mean_sn\.mat$",
         "/{fmri}_grptemplate_params.mat"),
        (r"/y_corr_stc{fmri}_trim_mean\.nii$", "/{fmri}_to_mni_warpfield.nii"),
        (r"/rcorr_stc{fmri}_trim_mean\.nii$", "/avg_epi_anat.nii"),
        (r"/wgrptmpl_corr_stc{fmri}_trim_mean\.nii$",
         "/avg_epi_grptemplate.nii"),
        (r"/wgrptmpl_corr_stc{fmri}_trim\.nii$",
         "/{fmri}_trimmed_grptemplate.nii"),
        (r"/wgrptmpl_corr_stc{fmri}_trim_filtermotart[\w_]*_cleaned\.nii$",
         "/{fmri}_nuisance_corrected_grptemplate.nii"),
        (r"/wgrptmpl_corr_stc{fmri}_trim_filtermotart[\w_]*_gsr\.nii$",
         "/{fmri}_nuisance_corrected_grptemplate.nii"),
        (r"/wgrptmpl_corr_stc{fmri}_trim_filtermotart[\w_]*_bandpassed\.nii$",
         "/{fmri}_time_filtered_grptemplate.nii"),
        (r"/wgrptmpl_corr_stc{fmri}_trim_filtermotart[\w_]*_smooth\.nii$",
         "/{fmri}_smooth_grptemplate.nii"),
        (r"/w[r]?corr_stc{fmri}_trim_mean\.nii$", "/avg_epi_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim\.nii$", "/{fmri}_trimmed_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim_filtermotart[\w_]*_cleaned\.nii$",
         "/{fmri}_nuisance_corrected_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim_filtermotart[\w_]*_gsr\.nii$",
         "/{fmri}_nuisance_corrected_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim_filtermotart[\w_]*_bandpassed\.nii$",
         "/{fmri}_time_filtered_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim_filtermotart[\w_]*_smooth\.nii$",
         "/{fmri}_smooth_mni.nii"),
        # the '_nofilt' patterns below must come after the '_filtermotart'
        # ones: they are broader and would otherwise shadow them.
        # (a previously duplicated '_smooth' entry has been removed; the
        # duplicate was a no-op since the first occurrence already applied.)
        (r"/w[r]?corr_stc{fmri}_trim[\w_]*_smooth\.nii$",
         "/{fmri}_nofilt_smooth_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim[\w_]*_cleaned\.nii$",
         "/{fmri}_nofilt_nuisance_corrected_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim[\w_]*_gsr\.nii$",
         "/{fmri}_nofilt_nuisance_corrected_mni.nii"),
        (r"/w[r]?corr_stc{fmri}_trim[\w_]*_bandpassed\.nii$",
         "/{fmri}_nofilt_time_filtered_mni.nii"),
    ]
    regexp_subst = format_pair_list(regexp_subst,
                                    fmri=rest_fbasename,
                                    anat=anat_fbasename)

    # prepare substitution for atlas_file, if any
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        atlas_basename = remove_ext(os.path.basename(atlas_file))
        regexp_subst.extend([
            (r"/[\w]*{atlas}.*\.nii$", "/{atlas}_{fmri}_space.nii"),
        ])
        regexp_subst = format_pair_list(regexp_subst,
                                        atlas=atlas_basename,
                                        fmri=rest_fbasename)

    # also cover .nii.gz variants, then prefix everything with the 'rest' folder
    regexp_subst += extension_duplicates(regexp_subst)
    regexp_subst = concat_to_pair_list(regexp_subst, prefix='/rest')

    datasink.inputs.regexp_substitutions = extend_trait_list(
        datasink.inputs.regexp_substitutions, regexp_subst)

    # input and output anat workflow to main workflow connections
    main_wf.connect([
        # clean_up_wf to registration_wf
        (cleanup_wf, warp_fmri_wf, [
            ("rest_output.motion_corrected", "wfmri_input.in_file"),
            ("rest_output.anat", "wfmri_input.anat_fmri"),
            ("rest_output.time_filtered", "wfmri_input.time_filtered"),
            ("rest_output.avg_epi", "wfmri_input.avg_epi"),
            ("rest_output.tissues_brain_mask", "wfmri_input.brain_mask"),
        ]),
        # output
        (warp_fmri_wf, datasink, [
            ("wfmri_output.warped_fmri",
             "rest.{}.@warped_fmri".format(template_name)),
            ("wfmri_output.wtime_filtered",
             "rest.{}.@time_filtered".format(template_name)),
            ("wfmri_output.smooth", "rest.{}.@smooth".format(template_name)),
            ("wfmri_output.wavg_epi",
             "rest.{}.@avg_epi".format(template_name)),
            ("wfmri_output.warp_field",
             "rest.{}.@warp_field".format(template_name)),
        ]),
    ])

    # when warping to MNI the anatomical pipeline provides the reference
    # image and the anat-to-MNI warp field
    if not do_group_template:
        main_wf.connect([
            (anat_wf, warp_fmri_wf, [
                ("anat_output.anat_biascorr", "wfmri_input.reference_file"),
                ("anat_output.warp_forward", "wfmri_input.anat_to_mni_warp"),
            ]),
            # output
            (warp_fmri_wf, datasink, [
                ("wfmri_output.coreg_avg_epi", "rest.@coreg_fmri_anat"),
                ("wfmri_output.coreg_others", "rest.@coreg_others"),
            ]),
        ])

    # connect and sink the atlas warped to fMRI space, if configured
    if do_atlas:
        main_wf.connect([
            (anat_wf, warp_fmri_wf, [("anat_output.atlas_anat",
                                      "wfmri_input.atlas_anat")]),
            (warp_fmri_wf, datasink, [("wfmri_output.atlas_fmri",
                                       "rest.@atlas")]),
        ])
    return main_wf
Exemplo n.º 9
0
def spm_warp_fmri_wf(wf_name="spm_warp_fmri", register_to_grptemplate=False):
    """ Run SPM to warp resting-state fMRI pre-processed data to MNI or a given
    template.

    Tasks:
    - Warping the inputs to MNI or a template, if `do_group_template` is True

    Parameters
    ----------
    wf_name: str

    register_to_grptemplate: bool
        If True will expect the wfmri_input.epi_template input and use it as a group template
        for inter-subject registration.

    Nipype Inputs
    -------------
    wfmri_input.in_file: traits.File
        The slice time and motion corrected fMRI file.

    wfmri_input.reference_file: traits.File
        The anatomical image in its native space
        for registration reference.

    wfmri_input.anat_fmri: traits.File
        The anatomical image in fMRI space.

    wfmri_input.anat_to_mni_warp: traits.File
        The warp field from the transformation of the
        anatomical image to the standard MNI space.

    wfmri_input.time_filtered: traits.File
        The bandpass time filtered fMRI file.

    wfmri_input.avg_epi: traits.File
        The average EPI from the fMRI file.

    wfmri_input.epi_template: traits.File
        Reference EPI template file for inter subject registration.
        If `do_group_template` is True you must specify this input.

    wfmri_input.brain_mask: traits.File
        Brain mask in fMRI space.

    wfmri_input.atlas_anat: traits.File
        Atlas in subject anatomical space.

    Nipype Outputs
    --------------
    wfmri_output.warped_fmri: traits.File
        The slice time, motion, and nuisance corrected fMRI
        file registered to the template.

    wfmri_output.wtime_filtered: traits.File
        The bandpass time filtered fMRI file
        registered to the template.

    wfmri_output.smooth: traits.File
        The smooth bandpass time filtered fMRI file
        registered to the template.

    wfmri_output.wavg_epi: traits.File
        The average EPI from the fMRI file
        registered to the template.

    wfmri_output.warp_field: traits.File
        The fMRI to template warp field.

    wfmri_output.coreg_avg_epi: traits.File
        The average EPI image in anatomical space.

        Only if registration.fmri2mni is false.

    wfmri_output.coreg_others: traits.File
        Other mid-preprocessing fmri images registered to
        anatomical space:

        - wfmri_input.in_file,

        - wfmri_input.brain_mask,

        - wfmri_input.time_filtered.

        Only if registration.fmri2mni is false

    wfmri_output.wbrain_mask: traits.File
        Brain mask in fMRI space warped to MNI.

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
        "anat_fmri",
        "anat_to_mni_warp",
        "brain_mask",
        "reference_file",
        "time_filtered",
        "avg_epi",
    ]

    out_fields = [
        "warped_fmri", "wtime_filtered", "smooth", "wavg_epi", "wbrain_mask",
        "warp_field", "coreg_avg_epi", "coreg_others"
    ]

    # the group-template branch needs the template image as an extra input
    if register_to_grptemplate:
        in_fields += ['epi_template']

    # extend the interface only when an atlas file is configured
    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_anat"]
        out_fields += ["atlas_fmri"]

    # input identities
    wfmri_input = setup_node(IdentityInterface(fields=in_fields,
                                               mandatory_inputs=True),
                             name="wfmri_input")

    # in file unzipper
    # SPM interfaces need uncompressed NIfTI input files
    in_gunzip = pe.Node(Gunzip(), name="in_gunzip")

    # merge list for normalization input
    merge_list = pe.Node(Merge(2), name='merge_for_warp')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    # the template bounding box
    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")

    # smooth the final result
    smooth = setup_node(fsl.IsotropicSmooth(fwhm=8, output_type='NIFTI'),
                        name="smooth_fmri")

    # output identities
    rest_output = setup_node(IdentityInterface(fields=out_fields),
                             name="wfmri_output")

    # check how to perform the registration, to decide how to build the pipeline
    # registration.fmri2mni == True: warp fMRI directly to MNI;
    # False (default): coregister fMRI to anat, then warp through the
    # anat-to-MNI field.
    fmri2mni = get_config_setting('registration.fmri2mni', False)
    # register to group template
    if register_to_grptemplate:
        gunzip_template = pe.Node(
            Gunzip(),
            name="gunzip_template",
        )
        # SPM Normalize (estimate + write) against the group EPI template;
        # the warp_*_arg names abstract the differing SPM interface traits
        # so the shared connection block below works for both warp variants.
        warp = setup_node(spm.Normalize(jobtype="estwrite",
                                        out_prefix="wgrptmpl_"),
                          name="fmri_grptemplate_warp")
        warp_source_arg = "source"
        warp_outsource_arg = "normalized_source"
        warp_field_arg = "normalization_parameters"

    elif fmri2mni:
        # register to standard template
        warp = setup_node(spm_normalize(), name="fmri_warp")
        tpm_bbox.inputs.in_file = spm_tpm_priors_path()
        warp_source_arg = "image_to_align"
        warp_outsource_arg = "normalized_image"
        warp_field_arg = "deformation_field"

    else:  # fmri2mni is False
        # coregister the fMRI to the anatomical image, then apply the
        # precomputed anat-to-MNI deformation to the coregistered files
        coreg = setup_node(spm_coregister(cost_function="mi"),
                           name="coreg_fmri")
        warp = setup_node(spm_apply_deformations(), name="fmri_warp")
        coreg_files = pe.Node(Merge(3), name='merge_for_coreg')
        # NOTE(review): this node reuses the name 'merge_for_warp' already
        # given to `merge_list` above; only one of the two is ever connected
        # per branch, so they should never coexist in the workflow — confirm.
        warp_files = pe.Node(Merge(2), name='merge_for_warp')
        tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # make the connections
    if register_to_grptemplate:
        wf.connect([
            # get template bounding box to apply to results
            (wfmri_input, tpm_bbox, [("epi_template", "in_file")]),

            # unzip and forward the template file
            (wfmri_input, gunzip_template, [("epi_template", "in_file")]),
            (gunzip_template, warp, [("out_file", "template")]),

            # get template bounding box to apply to results
            # NOTE(review): this repeats the tpm_bbox connection made at the
            # top of this list — presumably redundant; verify it is harmless
            # with the nipype version in use.
            (wfmri_input, tpm_bbox, [("epi_template", "in_file")]),
        ])

    if fmri2mni or register_to_grptemplate:
        # prepare the inputs
        wf.connect([
            # unzip the in_file input file
            (wfmri_input, in_gunzip, [("avg_epi", "in_file")]),

            # warp source file
            (in_gunzip, warp, [("out_file", warp_source_arg)]),

            # bounding box
            (tpm_bbox, warp, [("bbox", "write_bounding_box")]),

            # merge the other input files into a list
            (wfmri_input, merge_list, [
                ("in_file", "in1"),
                ("time_filtered", "in2"),
            ]),

            # gunzip them for SPM
            (merge_list, gunzipper, [("out", "in_file")]),

            # apply to files
            (gunzipper, warp, [("out_file", "apply_to_files")]),

            # outputs
            (warp, rest_output, [
                (warp_field_arg, "warp_field"),
                (warp_outsource_arg, "wavg_epi"),
            ]),
        ])

    else:  # FMRI to ANAT
        wf.connect([
            (wfmri_input, coreg, [("reference_file", "target")]),

            # unzip the in_file input file
            (wfmri_input, in_gunzip, [("avg_epi", "in_file")]),
            (in_gunzip, coreg, [("out_file", "source")]),

            # merge the other input files into a list
            (wfmri_input, coreg_files, [
                ("in_file", "in1"),
                ("time_filtered", "in2"),
                ("brain_mask", "in3"),
            ]),

            # gunzip them for SPM
            (coreg_files, gunzipper, [("out", "in_file")]),

            # coregister fmri to anat
            (gunzipper, coreg, [("out_file", "apply_to_files")]),

            # anat to mni warp field
            (wfmri_input, warp, [("anat_to_mni_warp", "deformation_file")]),

            # bounding box
            (tpm_bbox, warp, [("bbox", "write_bounding_box")]),

            # apply to files
            (coreg, warp_files, [("coregistered_source", "in1")]),
            (coreg, warp_files, [("coregistered_files", "in2")]),
            (warp_files, warp, [("out", "apply_to_files")]),

            # outputs
            # NOTE(review): "warped_files" is not declared in `out_fields`;
            # IdentityInterface accepts it as a dynamic trait, but no visible
            # consumer reads it — presumably dead output; verify.
            (warp, rest_output, [
                ("normalized_files", "warped_files"),
            ]),
            (warp, rest_output, [
                (("normalized_files", selectindex, 0), "wavg_epi"),
            ]),
            (coreg, rest_output, [("coregistered_source", "coreg_avg_epi")]),
            (coreg, rest_output, [("coregistered_files", "coreg_others")]),
        ])

    # atlas file in fMRI space
    if fmri2mni:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"),
                                 name="coreg_atlas2fmri")

        # set the registration interpolation to nearest neighbour.
        # (labels must not be interpolated)
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
            (wfmri_input, coreg_atlas, [
                ("reference_file", "source"),
                ("atlas_anat", "apply_to_files"),
            ]),
            (in_gunzip, coreg_atlas, [("out_file", "target")]),
            (coreg_atlas, rest_output, [("coregistered_files", "atlas_fmri")]),
        ])

    # smooth and sink
    # indices into `normalized_files` follow the apply_to_files merge order:
    # presumably 0 = in_file (or coregistered source) and 1 = time_filtered —
    # TODO confirm against the Merge nodes above for each branch.
    wf.connect([
        # smooth the final bandpassed image
        (warp, smooth, [(("normalized_files", selectindex, 1), "in_file")]),

        # output
        (smooth, rest_output, [("out_file", "smooth")]),
        (warp, rest_output, [
            (("normalized_files", selectindex, 0), "warped_fmri"),
            (("normalized_files", selectindex, 1), "wtime_filtered"),
        ]),
    ])

    return wf