Example #1
def spm_tpm_priors_path(spm_dir=None):
    """ Return the path to the TPM.nii file from SPM.

    Parameters
    ----------
    spm_dir: str
        Path to SPM.
        If `None`, nipype will try to guess it.

    Returns
    -------
    tpm_path: str
        Path to the TPM.nii file.

    Raises
    ------
    RuntimeError
        If nipype cannot find a valid Matlab or SPM configuration.

    NotADirectoryError
        If the SPM path is not found or does not exist.

    FileNotFoundError
        If the TPM.nii file cannot be found under the SPM path.
    """
    spm_info = spm.Info()

    spm_version = spm_info.version()
    if spm_version is None:
        raise RuntimeError(
            "Nipype could not find a valid Matlab or SPM configuration.")

    if spm_dir is None:
        spm_dir = spm_info.path()

    if spm_dir is None:
        spm_dir = os.path.expanduser(get_config_setting('spm_dir'))

    if not spm_dir:
        raise NotADirectoryError('Could not find an SPM path.')

    if not os.path.exists(spm_dir):
        raise NotADirectoryError(
            'The specified SPM path ({}) does not exist.'.format(spm_dir))

    tpm_path = os.path.join(spm_dir, 'tpm', 'TPM.nii')
    if not os.path.exists(tpm_path):
        raise FileNotFoundError('Could not find TPM.nii file from SPM.')

    return tpm_path
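
A minimal usage sketch (the install path below is hypothetical; a working Matlab/SPM12 setup visible to nipype is assumed, since the helper checks `spm.Info().version()` first):

# Hypothetical SPM12 install location; Matlab/SPM must be configured for nipype.
tpm = spm_tpm_priors_path(spm_dir='/opt/spm12')
print(tpm)  # -> /opt/spm12/tpm/TPM.nii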
Example #2
def attach_spm_pet_grouptemplate(main_wf, wf_name="spm_pet_template"):
    """ Attach a PET pre-processing workflow that uses SPM12 to `main_wf`.
    This workflow picks all spm_pet_preproc outputs 'warp_output.warped_files' in `main_wf`
    to create a group template.

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have `input_files`, `infosrc` and `datasink` nodes.

    warp_output.warped_files: input node

    datasink: nipype Node

    spm_pet_preproc: nipype Workflow

    Nipype Outputs
    --------------
    group_template.pet_template: file
        The path to the PET group template.

    Nipype Workflow Dependencies
    ----------------------------
    This workflow depends on:
    - spm_pet_preproc
    - spm_anat_preproc if `spm_pet_template.do_petpvc` is True.

    Returns
    -------
    main_wf: nipype Workflow
    """
    # Dependency workflows
    pet_wf = get_subworkflow(main_wf, "spm_pet_preproc")

    in_files = get_input_node(main_wf)
    datasink = get_datasink(main_wf, name='datasink')

    # The base name of the 'pet' file for the substitutions
    pet_fbasename = remove_ext(os.path.basename(get_input_file_name(in_files, 'pet')))

    # the group template datasink
    base_outdir = datasink.inputs.base_directory
    grp_datasink = pe.Node(
        DataSink(parameterization=False, base_directory=base_outdir),
        name='{}_grouptemplate_datasink'.format(pet_fbasename)
    )
    grp_datasink.inputs.container = '{}_grouptemplate'.format(pet_fbasename)

    # the list of the raw pet subjects
    warped_pets = pe.JoinNode(
        interface=IdentityInterface(fields=["warped_pets"]),
        joinsource="infosrc",
        joinfield="warped_pets",
        name="warped_pets"
    )

    # the group template workflow
    template_wf = spm_create_group_template_wf(wf_name)

    # output node
    output = setup_node(IdentityInterface(fields=["pet_template"]), name="group_template")

    # group dataSink output substitutions
    regexp_subst = [
        (r"/wgrptemplate{pet}_merged_mean_smooth.nii$", "/{pet}_grouptemplate_mni.nii"),
        (r"/w{pet}_merged_mean_smooth.nii$",            "/{pet}_grouptemplate_mni.nii"),
    ]
    regexp_subst = format_pair_list(regexp_subst, pet=pet_fbasename)
    regexp_subst += extension_duplicates(regexp_subst)

    grp_datasink.inputs.regexp_substitutions = extend_trait_list(
        grp_datasink.inputs.regexp_substitutions,
        regexp_subst
    )

    # Connect the nodes
    main_wf.connect([
        # warped pets file list input
        (pet_wf, warped_pets, [("warp_output.warped_files", "warped_pets")]),

        # group template wf
        (warped_pets, template_wf, [(("warped_pets", flatten_list), "grptemplate_input.in_files")]),

        # output node
        (template_wf, output, [("grptemplate_output.template", "pet_template")]),

        # template output
        (output, grp_datasink, [("pet_template", "@pet_grouptemplate")]),
    ])

    # Now we start with the correction and registration of each subject to the group template
    do_petpvc = get_config_setting('spm_pet_template.do_petpvc')
    if do_petpvc:
        # check that the anatomical preprocessing workflow is already attached
        get_subworkflow(main_wf, 'spm_anat_preproc')

        preproc_wf_name = "spm_mrpet_grouptemplate_preproc"
        main_wf = attach_spm_mrpet_preprocessing(main_wf, wf_name=preproc_wf_name, do_group_template=True)
        preproc_wf = get_subworkflow(main_wf, preproc_wf_name)

        main_wf.connect([(output, preproc_wf, [
            ("pet_template", "pet_input.pet_template")]),
        ])
    else:
        # add the pet template to the preproc workflow
        reg_wf = spm_register_to_template_wf(wf_name="spm_pet_register_to_grouptemplate")
        main_wf.connect([
            (output,      reg_wf, [("pet_template", "reg_input.template")]),
            (in_files,    reg_wf, [("pet",          "reg_input.in_file")]),

            (reg_wf, datasink, [
                ("reg_output.warped",     "pet.group_template.@warped"),
                ("reg_output.warp_field", "pet.group_template.@warp_field"),
            ]),
        ])

    # per-subject datasink output substitutions
    regexp_subst = [
        (r"group_template/{pet}_sn.mat$",           "group_template/{pet}_grptemplate_params.mat"),
        (r"group_template/wgrptemplate_{pet}.nii$", "group_template/{pet}_grptemplate.nii"),
        (r"group_template/w{pet}.nii",              "group_template/{pet}_grptemplate.nii"),
    ]
    regexp_subst = format_pair_list(regexp_subst, pet=pet_fbasename)
    regexp_subst += extension_duplicates(regexp_subst)
    datasink.inputs.regexp_substitutions = extend_trait_list(
        datasink.inputs.regexp_substitutions,
        regexp_subst
    )

    return main_wf
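
A hedged sketch of a call site: this attach function assumes `main_wf` already provides the `input_files`, `infosrc` and `datasink` nodes plus the `spm_pet_preproc` sub-workflow; `build_main_workflow` below is a hypothetical stand-in for whatever builds that base workflow.

main_wf = build_main_workflow()  # hypothetical builder of the base workflow
main_wf = attach_spm_pet_grouptemplate(main_wf, wf_name="spm_pet_template")
main_wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})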
Example #3
def petpvc_workflow(wf_name="petpvc"):
    """ Run the PET pre-processing workflow against the gunzip_pet.in_file files.
    It coregisters the reference_file and tissues to PET space, then applies PVC and grey matter normalization.

    It does:
    - SPM12 Coregister T1 and tisues to PET
    - PVC the PET image in PET space

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pvc_input.in_file: traits.File
        The raw NIFTI_GZ PET image file

    pvc_input.reference_file: traits.File
        The anatomical image in its native space. For registration reference.

    pvc_input.tissues: list of traits.File
        List of tissues files from the New Segment process. At least the first
        3 tissues must be present.

    Nipype Outputs
    --------------
    pvc_output.coreg_ref: existing file
        The coregistered reference_file image in PET space.

    pvc_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files

    pvc_output.pvc_out: existing file
        The output of the PETPVC process.

    pvc_output.petpvc_mask: existing file
        The mask built for the PETPVC.

    pvc_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pvc_output.gm_norm: existing file
        The output of the grey matter intensity normalization process.
        This is the last step in the PET signal correction.

    Returns
    -------
    wf: nipype Workflow
    """
    # fixed parameters of the NUK mMR
    psf_fwhm = (4.3, 4.3, 4.3)

    # specify input and output fields
    in_fields = [
        "in_file",
        "reference_file",
        "tissues",
    ]

    out_fields = [
        "coreg_ref",
        "coreg_others",
        "pvc_out",
        "petpvc_mask",
        "brain_mask",
        "gm_norm",
    ]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="pvc_input")

    flat_list = pe.Node(Function(input_names=['list_of_lists'],
                                 output_names=['out'],
                                 function=flatten_list),
                        name='flatten_tissue_list')

    # coreg pet
    gunzip_pet = setup_node(Gunzip(), name="gunzip_pet")
    coreg_pet = setup_node(spm_coregister(cost_function="mi"),
                           name="coreg_pet")

    tissues_sel = setup_node(Select(index=[0, 1, 2]), name="tissues")
    select_gm = setup_node(Select(index=[0]), name="select_gm")
    pvc = setup_node(petpvc_cmd(fwhm_mm=psf_fwhm, pvc_method='RBV'),
                     name="pvc")

    # output
    pvc_output = setup_node(IdentityInterface(fields=out_fields),
                            name="pvc_output")

    # workflow to create the mask
    mask_wf = petpvc_mask(wf_name="petpvc_mask")

    # workflow for intensity normalization
    norm_wf = intensity_norm(wf_name="intensity_norm_gm")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    wf.connect([
        # inputs
        (pet_input, gunzip_pet, [("in_file", "in_file")]),
        (pet_input, tissues_sel, [("tissues", "inlist")]),
    ])

    # check how to perform the registration, to decide how to build the pipeline
    anat2pet = get_config_setting('registration.anat2pet', False)
    if anat2pet:
        wf.connect([
            # inputs
            (pet_input, coreg_pet, [("reference_file", "source")]),

            # unzip to coregister the reference file (anatomical image) to PET space.
            (gunzip_pet, coreg_pet, [("out_file", "target")]),
            (tissues_sel, flat_list, [("out", "list_of_lists")]),
            (flat_list, coreg_pet, [("out", "apply_to_files")]),

            # the list of tissues to the mask wf and the GM for PET intensity normalization
            (coreg_pet, select_gm, [("coregistered_files", "inlist")]),
            (coreg_pet, mask_wf, [("coregistered_files",
                                   "pvcmask_input.tissues")]),

            # the PET in native space to PVC correction
            (gunzip_pet, pvc, [("out_file", "in_file")]),

            # the merged file with 4 tissues to PVC correction
            (mask_wf, pvc, [("pvcmask_output.petpvc_mask", "mask_file")]),

            # intensity-normalize the PVC-corrected PET using the GM voxel values
            (pvc, norm_wf, [("out_file", "intnorm_input.source")]),
            (select_gm, norm_wf, [("out", "intnorm_input.mask")]),

            # output
            (coreg_pet, pvc_output, [("coregistered_source", "coreg_ref")]),
            (coreg_pet, pvc_output, [("coregistered_files", "coreg_others")]),
            (pvc, pvc_output, [("out_file", "pvc_out")]),
            (mask_wf, pvc_output, [("pvcmask_output.brain_mask", "brain_mask")
                                   ]),
            (mask_wf, pvc_output, [("pvcmask_output.petpvc_mask",
                                    "petpvc_mask")]),
            (norm_wf, pvc_output, [("intnorm_output.out_file", "gm_norm")]),
        ])
    else:  # PET to ANAT
        wf.connect([
            # inputs
            (pet_input, coreg_pet, [("reference_file", "target")]),

            # unzip PET image and set as a source to register it to anatomical space.
            (gunzip_pet, coreg_pet, [("out_file", "source")]),
            (tissues_sel, flat_list, [("out", "list_of_lists")]),
            (flat_list, coreg_pet, [("out", "apply_to_files")]),

            # the list of tissues to the mask wf and the GM for PET intensity normalization
            (tissues_sel, select_gm, [("out", "inlist")]),
            (flat_list, mask_wf, [("out", "pvcmask_input.tissues")]),

            # the PET in ANAT space to PVC correction
            (coreg_pet, pvc, [("coregistered_source", "in_file")]),

            # the merged file with 4 tissues to PVC correction
            (mask_wf, pvc, [("pvcmask_output.petpvc_mask", "mask_file")]),

            # intensity-normalize the PVC-corrected PET using the GM voxel values
            (pvc, norm_wf, [("out_file", "intnorm_input.source")]),
            (select_gm, norm_wf, [("out", "intnorm_input.mask")]),

            # output
            # TODO: coreg_ref should have a different name in this case
            (coreg_pet, pvc_output, [("coregistered_source", "coreg_ref")]),
            (coreg_pet, pvc_output, [("coregistered_files", "coreg_others")]),
            (pvc, pvc_output, [("out_file", "pvc_out")]),
            (mask_wf, pvc_output, [("pvcmask_output.brain_mask", "brain_mask")
                                   ]),
            (mask_wf, pvc_output, [("pvcmask_output.petpvc_mask",
                                    "petpvc_mask")]),
            (norm_wf, pvc_output, [("intnorm_output.out_file", "gm_norm")]),
        ])

    return wf
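
A minimal usage sketch (all file paths are hypothetical): the workflow is driven by setting the fields of its `pvc_input` identity node.

wf = petpvc_workflow(wf_name="petpvc")
wf.base_dir = "/tmp/petpvc_work"  # hypothetical working directory
pvc_in = wf.get_node("pvc_input")
pvc_in.inputs.in_file = "/data/sub01/pet.nii.gz"
pvc_in.inputs.reference_file = "/data/sub01/anat.nii.gz"
pvc_in.inputs.tissues = [
    "/data/sub01/c1anat.nii",  # GM
    "/data/sub01/c2anat.nii",  # WM
    "/data/sub01/c3anat.nii",  # CSF
]
wf.run()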
Example #4
def fmri_cleanup_wf(wf_name="fmri_cleanup"):
    """ Run the resting-state fMRI pre-processing workflow against the rest files in `data_dir`.

    Tasks:
    - Trim first 6 volumes of the rs-fMRI file.
    - Slice Timing correction.
    - Motion and nuisance correction.
    - Calculate brain mask in fMRI space.
    - Bandpass frequency filtering for resting-state fMRI.
    - Smoothing.
    - Tissue maps co-registration to fMRI space.

    Parameters
    ----------
    wf_name: str

    Nipype Inputs
    -------------
    rest_input.in_file: traits.File
        The resting-state fMRI file.

    rest_input.anat: traits.File
        Path to the high-contrast anatomical image.

    rest_input.tissues: list of traits.File
        Paths to the tissue segmentations in anatomical space.
        Expected to have this order: GM, WM and CSF.

    rest_input.highpass_freq: traits.Float
        Band-pass timeseries filter lower cutoff frequency, in Hz.

    rest_input.lowpass_freq: traits.Float
        Band-pass timeseries filter upper cutoff frequency, in Hz.

    Nipype Outputs
    --------------
    rest_output.smooth: traits.File
        The isotropically smoothed time filtered nuisance corrected image.

    rest_output.nuis_corrected: traits.File
        The nuisance corrected fMRI file.

    rest_output.motion_params: traits.File
        The motion parameters file estimated during realignment.

    rest_output.time_filtered: traits.File
        The bandpass time filtered fMRI file.

    rest_output.epi_brain_mask: traits.File
        An estimated brain mask from mean EPI volume.

    rest_output.tissues_brain_mask: traits.File
        A brain mask calculated from the addition of coregistered
        GM, WM and CSF segmentation volumes from the anatomical
        segmentation.

    rest_output.tissues: list of traits.File
        The tissues segmentation volume in fMRI space.
        Expected to have this order: GM, WM and CSF.

    rest_output.anat: traits.File
        The T1w image in fMRI space.

    rest_output.avg_epi: traits.File
        The average EPI image in fMRI space after slice-time and motion correction.

    rest_output.motion_corrected: traits.File
        The motion corrected fMRI file.

    rest_output.motion_regressors: traits.File

    rest_output.compcor_regressors: traits.File

    rest_output.gsr_regressors: traits.File

    rest_output.tsnr_file: traits.File

    rest_output.art_displacement_files: traits.File
        One image file containing the voxel-displacement timeseries.

    rest_output.art_intensity_files: traits.File
        One file containing the global intensity values determined from the brain mask.

    rest_output.art_norm_files: traits.File
        One file containing the composite norm.

    rest_output.art_outlier_files: traits.File
        One file containing a list of 0-based indices corresponding to outlier volumes.

    rest_output.art_plot_files: traits.File
        One image file containing the detected outliers.

    rest_output.art_statistic_files: traits.File
        One file containing information about the different types of artifacts and, if design info is
        provided, details of stimulus-correlated motion and a listing of artifacts by event type.

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
        "anat",
        "atlas_anat",
        "coreg_target",
        "tissues",
        "lowpass_freq",
        "highpass_freq",
    ]

    out_fields = [
        "motion_corrected",
        "motion_params",
        "tissues",
        "anat",
        "avg_epi",
        "time_filtered",
        "smooth",
        "tsnr_file",
        "epi_brain_mask",
        "tissues_brain_mask",
        "motion_regressors",
        "compcor_regressors",
        "gsr_regressors",
        "nuis_corrected",
        "art_displacement_files",
        "art_intensity_files",
        "art_norm_files",
        "art_outlier_files",
        "art_plot_files",
        "art_statistic_files",
    ]

    # input identities
    rest_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
                            name="rest_input")

    # rs-fMRI preprocessing nodes
    trim = setup_node(Trim(), name="trim")

    stc_wf = auto_spm_slicetime()
    realign = setup_node(nipy_motion_correction(), name='realign')

    # average
    average = setup_node(
        Function(
            function=mean_img,
            input_names=["in_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name='average_epi'
    )

    mean_gunzip = setup_node(Gunzip(), name="mean_gunzip")

    # co-registration nodes
    coreg = setup_node(spm_coregister(cost_function="mi"), name="coreg_fmri")
    brain_sel = setup_node(Select(index=[0, 1, 2]), name="brain_sel")

    # brain mask made with EPI
    epi_mask = setup_node(ComputeMask(), name='epi_mask')

    # brain mask made with the merge of the tissue segmentations
    tissue_mask = setup_node(fsl.MultiImageMaths(), name='tissue_mask')
    tissue_mask.inputs.op_string = "-add %s -add %s -abs -kernel gauss 4 -dilM -ero -kernel gauss 1 -dilM -bin"
    tissue_mask.inputs.out_file = "tissue_brain_mask.nii.gz"

    # select tissues
    gm_select = setup_node(Select(index=[0]), name="gm_sel")
    wmcsf_select = setup_node(Select(index=[1, 2]), name="wmcsf_sel")

    # noise filter
    noise_wf = rest_noise_filter_wf()
    wm_select = setup_node(Select(index=[1]), name="wm_sel")
    csf_select = setup_node(Select(index=[2]), name="csf_sel")

    # bandpass filtering
    bandpass = setup_node(
        Function(
            input_names=['files', 'lowpass_freq', 'highpass_freq', 'tr'],
            output_names=['out_files'],
            function=bandpass_filter
        ),
        name='bandpass'
    )

    # smooth
    smooth = setup_node(
        Function(
            function=smooth_img,
            input_names=["in_file", "fwhm"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name="smooth"
    )
    smooth.inputs.fwhm = get_config_setting('fmri_smooth.fwhm', default=8)
    smooth.inputs.out_file = "smooth_std_{}.nii.gz".format(wf_name)

    # output identities
    rest_output = setup_node(IdentityInterface(fields=out_fields), name="rest_output")

    # Connect the nodes
    wf.connect([
        # trim
        (rest_input, trim, [("in_file", "in_file")]),

        # slice time correction
        (trim, stc_wf, [("out_file", "stc_input.in_file")]),

        # motion correction
        (stc_wf, realign, [("stc_output.timecorrected_files", "in_file")]),

        # coregistration target
        (realign, average, [("out_file", "in_file")]),
        (average, mean_gunzip, [("out_file", "in_file")]),
        (mean_gunzip, coreg, [("out_file", "target")]),

        # epi brain mask
        (average, epi_mask, [("out_file", "mean_volume")]),

        # coregistration
        (rest_input, coreg, [("anat", "source")]),
        (rest_input, brain_sel, [("tissues", "inlist")]),
        (brain_sel, coreg, [(("out", flatten_list), "apply_to_files")]),

        # tissue brain mask
        (coreg, gm_select, [("coregistered_files", "inlist")]),
        (coreg, wmcsf_select, [("coregistered_files", "inlist")]),
        (gm_select, tissue_mask, [(("out", flatten_list), "in_file")]),
        (wmcsf_select, tissue_mask, [(("out", flatten_list), "operand_files")]),

        # nuisance correction
        (coreg, wm_select, [("coregistered_files", "inlist",)]),
        (coreg, csf_select, [("coregistered_files", "inlist",)]),
        (realign, noise_wf, [("out_file", "rest_noise_input.in_file",)]),
        (tissue_mask, noise_wf, [("out_file", "rest_noise_input.brain_mask")]),
        (wm_select, noise_wf, [(("out", flatten_list), "rest_noise_input.wm_mask")]),
        (csf_select, noise_wf, [(("out", flatten_list), "rest_noise_input.csf_mask")]),

        (realign, noise_wf, [("par_file", "rest_noise_input.motion_params",)]),

        # temporal filtering
        (noise_wf, bandpass, [("rest_noise_output.nuis_corrected", "files")]),
        # (realign,     bandpass,    [("out_file", "files")]),
        (stc_wf, bandpass, [("stc_output.time_repetition", "tr")]),
        (rest_input, bandpass, [
            ("lowpass_freq", "lowpass_freq"),
            ("highpass_freq", "highpass_freq"),
        ]),
        (bandpass, smooth, [("out_files", "in_file")]),

        # output
        (epi_mask, rest_output, [("brain_mask", "epi_brain_mask")]),
        (tissue_mask, rest_output, [("out_file", "tissues_brain_mask")]),
        (realign, rest_output, [
            ("out_file", "motion_corrected"),
            ("par_file", "motion_params"),
        ]),
        (coreg, rest_output, [
            ("coregistered_files", "tissues"),
            ("coregistered_source", "anat"),
        ]),
        (noise_wf, rest_output, [
            ("rest_noise_output.motion_regressors", "motion_regressors"),
            ("rest_noise_output.compcor_regressors", "compcor_regressors"),
            ("rest_noise_output.gsr_regressors", "gsr_regressors"),
            ("rest_noise_output.nuis_corrected", "nuis_corrected"),
            ("rest_noise_output.tsnr_file", "tsnr_file"),
            ("rest_noise_output.art_displacement_files", "art_displacement_files"),
            ("rest_noise_output.art_intensity_files", "art_intensity_files"),
            ("rest_noise_output.art_norm_files", "art_norm_files"),
            ("rest_noise_output.art_outlier_files", "art_outlier_files"),
            ("rest_noise_output.art_plot_files", "art_plot_files"),
            ("rest_noise_output.art_statistic_files", "art_statistic_files"),
        ]),
        (average, rest_output, [("out_file", "avg_epi")]),
        (bandpass, rest_output, [("out_files", "time_filtered")]),
        (smooth, rest_output, [("out_file", "smooth")]),
    ])

    return wf
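
A minimal usage sketch (hypothetical paths; since `rest_input` is built with `mandatory_inputs=True`, every declared field gets a value, including the two that the connections above do not use):

wf = fmri_cleanup_wf("fmri_cleanup")
wf.base_dir = "/tmp/fmri_work"  # hypothetical working directory
rest_in = wf.get_node("rest_input")
rest_in.inputs.in_file = "/data/sub01/rest.nii.gz"
rest_in.inputs.anat = "/data/sub01/anat.nii.gz"
rest_in.inputs.tissues = ["/data/sub01/c1.nii", "/data/sub01/c2.nii", "/data/sub01/c3.nii"]
rest_in.inputs.highpass_freq = 0.01  # lower cutoff of the pass band, in Hz
rest_in.inputs.lowpass_freq = 0.1    # upper cutoff of the pass band, in Hz
rest_in.inputs.atlas_anat = "/data/sub01/atlas_anat.nii.gz"  # not used by the connections above
rest_in.inputs.coreg_target = "/data/sub01/anat.nii.gz"      # not used by the connections above
wf.run()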
Example #5
def spm_mrpet_preprocessing(wf_name="spm_mrpet_preproc"):
    """ Run the PET pre-processing workflow against the
    gunzip_pet.in_file files.
    It depends on the anat_preproc_workflow, so if this
    has not been run, this function will run it too.

    # TODO: organize the anat2pet hack/condition somehow:
    If anat2pet:
    - SPM12 Coregister T1 and tissues to PET
    - PETPVC the PET image in PET space
    - SPM12 Warp PET to MNI
    else:
    - SPM12 Coregister PET to T1
    - PETPVC the PET image in anatomical space
    - SPM12 Warp PET in anatomical space to MNI through the
    `anat_to_mni_warp`.

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pet_input.in_file: traits.File
        The raw NIFTI_GZ PET image file

    pet_input.anat: traits.File
        Path to the high-contrast anatomical image.
        Reference file of the warp_field, i.e., the
        anatomical image in its native space.

    pet_input.anat_to_mni_warp: traits.File
        The warp field from the transformation of the
        anatomical image to the standard MNI space.

    pet_input.atlas_anat: traits.File
        The atlas file in anatomical space.

    pet_input.tissues: list of traits.File
        List of tissues files from the New Segment process.
        At least the first 3 tissues must be present.

    Nipype Outputs
    --------------
    pet_output.pvc_out: existing file
        The results of the PVC process

    pet_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pet_output.coreg_ref: existing file
        The coregistered reference image to PET space.

    pet_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files

    pet_output.pvc_warped: existing file
        Results from PETPVC normalized to MNI.
        The result of every internal pre-processing step
        is normalized to MNI here.

    pet_output.warp_field: existing file
        The spatial normalization deformation field file.

    pet_output.gm_norm: existing file
        The output of the grey matter intensity
        normalization process.
        This is the last step in the PET signal correction,
        before registration.

    pet_output.atlas_pet: existing file
        Atlas image warped to PET space.
        If the `atlas_file` option is an existing file and
        `normalize_atlas` is True.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = ["in_file", "anat", "anat_to_mni_warp", "tissues"]

    out_fields = [
        "brain_mask",
        "coreg_others",
        "coreg_ref",
        "pvc_warped",
        "pet_warped",  # 'pet_warped' is a dummy entry to keep the fields pattern.
        "warp_field",
        "pvc_out",
        "pvc_mask",
        "gm_norm"
    ]

    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_anat"]
        out_fields += ["atlas_pet"]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="pet_input")

    # workflow to perform partial volume correction
    petpvc = petpvc_workflow(wf_name="petpvc")

    merge_list = setup_node(Merge(4), name='merge_for_unzip')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    warp_pet = setup_node(spm_normalize(), name="warp_pet")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # output
    pet_output = setup_node(IdentityInterface(fields=out_fields),
                            name="pet_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # check how to perform the registration, to decide how to build the pipeline
    anat2pet = get_config_setting('registration.anat2pet', False)
    if anat2pet:
        wf.connect([
            # inputs
            (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                                 ("anat", "pvc_input.reference_file"),
                                 ("tissues", "pvc_input.tissues")]),

            # gunzip some files for SPM Normalize
            (petpvc, merge_list, [("pvc_output.pvc_out", "in1"),
                                  ("pvc_output.brain_mask", "in2"),
                                  ("pvc_output.gm_norm", "in3")]),
            (pet_input, merge_list, [("in_file", "in4")]),
            (merge_list, gunzipper, [("out", "in_file")]),

            # warp the PET PVCed to MNI
            (petpvc, warp_pet, [("pvc_output.coreg_ref", "image_to_align")]),
            (gunzipper, warp_pet, [("out_file", "apply_to_files")]),
            (tpm_bbox, warp_pet, [("bbox", "write_bounding_box")]),

            # output
            (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                                  ("pvc_output.brain_mask", "brain_mask"),
                                  ("pvc_output.coreg_ref", "coreg_ref"),
                                  ("pvc_output.coreg_others", "coreg_others"),
                                  ("pvc_output.gm_norm", "gm_norm")]),

            # output
            (warp_pet, pet_output, [("normalized_files", "pvc_warped"),
                                    ("deformation_field", "warp_field")]),
        ])
    else:  # PET 2 ANAT
        collector = setup_node(Merge(2), name='merge_for_warp')
        apply_warp = setup_node(spm_apply_deformations(), name="warp_pet")

        wf.connect([
            # inputs
            (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                                 ("anat", "pvc_input.reference_file"),
                                 ("tissues", "pvc_input.tissues")]),

            # gunzip some files for SPM Normalize
            (petpvc, merge_list, [("pvc_output.pvc_out", "in1"),
                                  ("pvc_output.brain_mask", "in2"),
                                  ("pvc_output.gm_norm", "in3")]),
            (pet_input, merge_list, [("in_file", "in4")]),
            (merge_list, gunzipper, [("out", "in_file")]),

            # warp the PET PVCed to MNI
            (gunzipper, collector, [("out_file", "in1")]),
            (petpvc, collector, [("pvc_output.coreg_ref", "in2")]),
            (pet_input, apply_warp, [("anat_to_mni_warp", "deformation_file")
                                     ]),
            (collector, apply_warp, [("out", "apply_to_files")]),
            (tpm_bbox, apply_warp, [("bbox", "write_bounding_box")]),

            # output
            (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                                  ("pvc_output.brain_mask", "brain_mask"),
                                  ("pvc_output.petpvc_mask", "petpvc_mask"),
                                  ("pvc_output.coreg_ref", "coreg_ref"),
                                  ("pvc_output.coreg_others", "coreg_others"),
                                  ("pvc_output.gm_norm", "gm_norm")]),

            # output
            (apply_warp, pet_output, [("normalized_files", "pvc_warped"),
                                      ("deformation_field", "warp_field")]),
        ])

    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"),
                                 name="coreg_atlas")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
            (pet_input, coreg_atlas, [("anat", "source")]),
            (petpvc, coreg_atlas, [("pvc_output.coreg_ref", "target")]),
            (pet_input, coreg_atlas, [("atlas_anat", "apply_to_files")]),
            (coreg_atlas, pet_output, [("coregistered_files", "atlas_pet")]),
        ])

    return wf
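
A minimal usage sketch (hypothetical paths; the forward deformation `y_anat.nii` comes from the anatomical preprocessing step, and `atlas_anat` must be set as well if an atlas file is configured):

wf = spm_mrpet_preprocessing(wf_name="spm_mrpet_preproc")
pet_in = wf.get_node("pet_input")
pet_in.inputs.in_file = "/data/sub01/pet.nii.gz"
pet_in.inputs.anat = "/data/sub01/anat.nii.gz"
pet_in.inputs.anat_to_mni_warp = "/data/sub01/y_anat.nii"  # forward deformation from SPM New Segment
pet_in.inputs.tissues = ["/data/sub01/c1anat.nii", "/data/sub01/c2anat.nii", "/data/sub01/c3anat.nii"]
wf.run()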
Example #6
def attach_spm_anat_preprocessing(main_wf, wf_name="spm_anat_preproc"):
    """ Attach the SPM12 anatomical MRI pre-processing workflow to
    the `main_wf`.

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have
    `input_files` and `datasink` nodes.

    input_files.anat: input node

    datasink: nipype Node

    Returns
    -------
    main_wf: nipype Workflow
    """
    in_files = get_input_node(main_wf)
    datasink = get_datasink(main_wf)

    # The workflow box
    anat_wf = spm_anat_preprocessing(wf_name=wf_name)

    # The base name of the 'anat' file for the substitutions
    anat_fbasename = remove_ext(
        os.path.basename(get_input_file_name(in_files, 'anat')))

    # dataSink output substitutions
    regexp_subst = [
        (r"/{anat}_.*corrected_seg8.mat$", "/{anat}_to_mni_affine.mat"),
        (r"/m{anat}.*_corrected.nii$", "/{anat}_biascorrected.nii"),
        (r"/wm{anat}.*_corrected.nii$", "/{anat}_mni.nii"),
        (r"/y_{anat}.*nii$", "/{anat}_to_mni_field.nii"),
        (r"/iy_{anat}.*nii$", "/{anat}_to_mni_inv_field.nii"),
        (r"/mwc1{anat}.*nii$", "/{anat}_gm_mod_mni.nii"),
        (r"/mwc2{anat}.*nii$", "/{anat}_wm_mod_mni.nii"),
        (r"/mwc3{anat}.*nii$", "/{anat}_csf_mod_mni.nii"),
        (r"/mwc4{anat}.*nii$", "/{anat}_nobrain_mod_mni.nii"),
        (r"/c1{anat}.*nii$", "/{anat}_gm.nii"),
        (r"/c2{anat}.*nii$", "/{anat}_wm.nii"),
        (r"/c3{anat}.*nii$", "/{anat}_csf.nii"),
        (r"/c4{anat}.*nii$", "/{anat}_nobrain.nii"),
        (r"/c5{anat}.*nii$", "/{anat}_nobrain_mask.nii"),
        (r"/direct_cortical_thickness.nii$",
         "/{anat}_gm_cortical_thickness.nii"),
        (r"/direct_warped_white_matter.nii$",
         "/{anat}_warped_white_matter.nii"),
    ]
    regexp_subst = format_pair_list(regexp_subst, anat=anat_fbasename)

    # prepare substitution for atlas_file, if any
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        atlas_basename = remove_ext(os.path.basename(atlas_file))
        regexp_subst.extend([
            (r"/w{atlas}\.nii$", "/{atlas}_anat_space.nii"),
        ])
        regexp_subst = format_pair_list(regexp_subst,
                                        anat=anat_fbasename,
                                        atlas=atlas_basename)

    # add nii.gz patterns
    regexp_subst += extension_duplicates(regexp_subst)

    # add parent folder to paths
    regexp_subst = concat_to_pair_list(regexp_subst, prefix='/anat')

    datasink.inputs.regexp_substitutions = extend_trait_list(
        datasink.inputs.regexp_substitutions, regexp_subst)

    main_wf.connect([
        (in_files, anat_wf, [("anat", "anat_input.in_file")]),
        (anat_wf, datasink, [
            ("anat_output.anat_mni", "anat.@mni"),
            ("anat_output.tissues_warped", "anat.tissues.warped"),
            ("anat_output.tissues_native", "anat.tissues.native"),
            ("anat_output.affine_transform", "anat.transform.@linear"),
            ("anat_output.warp_forward", "anat.transform.@forward"),
            ("anat_output.warp_inverse", "anat.transform.@inverse"),
            ("anat_output.anat_biascorr", "anat.@biascor"),
            ("anat_output.brain_mask", "anat.@brain_mask"),
        ]),
    ])

    # check optional outputs
    if do_atlas:
        main_wf.connect([
            (anat_wf, datasink, [("anat_output.atlas_anat", "anat.@atlas")]),
        ])

    do_cortical_thickness = get_config_setting(
        'anat_preproc.do_cortical_thickness', False)
    if do_cortical_thickness:
        main_wf.connect([
            (anat_wf, datasink, [
                ("anat_output.cortical_thickness", "anat.@cortical_thickness"),
                ("anat_output.warped_white_matter",
                 "anat.@warped_white_matter"),
            ]),
        ])

    return main_wf
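
The datasink renaming above is plain regular-expression substitution; a self-contained sketch of what one pair from the list does after `format_pair_list(..., anat='anat_hc')` (the 'anat_hc' basename is hypothetical, and the '/anat' prefix from `concat_to_pair_list` is omitted here):

import re

# The pair (r"/c1{anat}.*nii$", "/{anat}_gm.nii") formatted for anat='anat_hc':
pattern, repl = r"/c1anat_hc.*nii$", "/anat_hc_gm.nii"
print(re.sub(pattern, repl, "/base_dir/anat/c1anat_hc_corrected.nii"))
# -> /base_dir/anat/anat_hc_gm.nii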
Example #7
def spm_anat_preprocessing(wf_name="spm_anat_preproc"):
    """ Run the T1 pre-processing workflow against the anat_hc
    files in `data_dir`.

    It does:
    - N4BiasFieldCorrection
    - SPM12 New Segment
    - SPM12 Warp of MPRAGE to MNI

    [Optional: from config]
    - Atlas file warping to MPRAGE
    - Cortical thickness (SPM+DiReCT)

    Nipype Inputs
    -------------
    anat_input.in_file: traits.File
        Path to the anatomical image.

    anat_input.atlas_file: traits.File
        Path to an atlas file in MNI space to be
        warped to the anatomical space.
        Can also be set through the configuration
        setting `atlas_file`.

    Nipype Outputs
    --------------
    anat_output.anat_mni: traits.File
        The bias-field normalized to MNI anatomical image.

    anat_output.tissues_warped: traits.File
        The tissue segmentation in MNI space from SPM.

    anat_output.tissues_native: traits.File
        The tissue segmentation in native space from SPM.

    anat_output.affine_transform: traits.File
        The affine transformation file.

    anat_output.warp_forward: traits.File
        The forward (anat to MNI) warp field from SPM.

    anat_output.warp_inverse: traits.File
        The inverse (MNI to anat) warp field from SPM.

    anat_output.anat_biascorr: traits.File
        The bias-field corrected anatomical image.

    anat_output.brain_mask: traits.File
        A brain mask file in anatomical space.
        This is calculated by summing up the maps of
        segmented tissues (CSF, WM, GM) and then binarised.

    anat_output.atlas_anat: traits.File
        If `atlas_file` is an existing file in MNI space.
        The atlas file warped to anatomical space,
        if do_atlas and the atlas file is set in configuration.

    anat_output.cortical_thickness: traits.File
        If `anat_preproc.do_cortical_thickness` is True.
        The cortical thickness estimations calculated with the
        SPM+DiReCT method (KellyKapowski).

    anat_output.warped_white_matter: traits.File
        If `anat_preproc.do_cortical_thickness` is True.
        The warped white matter image calculated with the
        SPM+DiReCT method (KellyKapowski).

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = ["in_file"]
    out_fields = [
        "anat_mni",
        "tissues_warped",
        "tissues_native",
        "affine_transform",
        "warp_forward",
        "warp_inverse",
        "anat_biascorr",
        "brain_mask",
    ]

    # check if we have to warp an atlas files too.
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_file"]
        out_fields += ["atlas_anat"]

    # check if we have to do cortical thickness (SPM+DiReCT) method.
    do_cortical_thickness = get_config_setting(
        'anat_preproc.do_cortical_thickness', False)
    if do_cortical_thickness:
        out_fields += [
            "cortical_thickness",
            "warped_white_matter",
        ]

    # input node
    anat_input = pe.Node(IdentityInterface(fields=in_fields,
                                           mandatory_inputs=True),
                         name="anat_input")

    # atlas registration
    if do_atlas and not isdefined(anat_input.inputs.atlas_file):
        anat_input.inputs.set(atlas_file=atlas_file)

    # T1 preprocessing nodes
    biascor = setup_node(biasfield_correct(), name="bias_correction")
    gunzip_anat = setup_node(Gunzip(), name="gunzip_anat")
    segment = setup_node(spm_segment(), name="new_segment")
    warp_anat = setup_node(spm_apply_deformations(), name="warp_anat")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # calculate brain mask from tissue maps
    tissues = setup_node(IdentityInterface(fields=["gm", "wm", "csf"],
                                           mandatory_inputs=True),
                         name="tissues")

    brain_mask = setup_node(Function(
        function=math_img,
        input_names=["formula", "out_file", "gm", "wm", "csf"],
        output_names=["out_file"],
        imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                            name='brain_mask')
    brain_mask.inputs.out_file = "tissues_brain_mask.nii.gz"
    brain_mask.inputs.formula = "np.abs(gm + wm + csf) > 0"

    # output node
    anat_output = pe.Node(IdentityInterface(fields=out_fields),
                          name="anat_output")

    # Connect the nodes
    wf.connect([
        # input to biasfieldcorrection
        (anat_input, biascor, [("in_file", "input_image")]),

        # new segment
        (biascor, gunzip_anat, [("output_image", "in_file")]),
        (gunzip_anat, segment, [("out_file", "channel_files")]),

        # Normalize12
        (segment, warp_anat, [("forward_deformation_field", "deformation_file")
                              ]),
        (segment, warp_anat, [("bias_corrected_images", "apply_to_files")]),
        (tpm_bbox, warp_anat, [("bbox", "write_bounding_box")]),

        # brain mask from tissues
        (segment, tissues, [
            (("native_class_images", selectindex, 0), "gm"),
            (("native_class_images", selectindex, 1), "wm"),
            (("native_class_images", selectindex, 2), "csf"),
        ]),
        (tissues, brain_mask, [
            ("gm", "gm"),
            ("wm", "wm"),
            ("csf", "csf"),
        ]),

        # output
        (warp_anat, anat_output, [("normalized_files", "anat_mni")]),
        (segment, anat_output, [("modulated_class_images", "tissues_warped"),
                                ("native_class_images", "tissues_native"),
                                ("transformation_mat", "affine_transform"),
                                ("forward_deformation_field", "warp_forward"),
                                ("inverse_deformation_field", "warp_inverse"),
                                ("bias_corrected_images", "anat_biascorr")]),
        (brain_mask, anat_output, [("out_file", "brain_mask")]),
    ])

    # atlas warping nodes
    if do_atlas:
        gunzip_atlas = pe.Node(Gunzip(), name="gunzip_atlas")
        warp_atlas = setup_node(spm_apply_deformations(), name="warp_atlas")
        anat_bbox = setup_node(Function(function=get_bounding_box,
                                        input_names=["in_file"],
                                        output_names=["bbox"]),
                               name="anat_bbox")

        # set the warping interpolation to nearest neighbour.
        warp_atlas.inputs.write_interp = 0

        # connect the atlas registration nodes
        wf.connect([
            (anat_input, gunzip_atlas, [("atlas_file", "in_file")]),
            (gunzip_anat, anat_bbox, [("out_file", "in_file")]),
            (gunzip_atlas, warp_atlas, [("out_file", "apply_to_files")]),
            (segment, warp_atlas, [("inverse_deformation_field",
                                    "deformation_file")]),
            (anat_bbox, warp_atlas, [("bbox", "write_bounding_box")]),
            (warp_atlas, anat_output, [("normalized_files", "atlas_anat")]),
        ])

    # cortical thickness (SPM+DiReCT) method
    if do_cortical_thickness:
        from ..interfaces.ants import KellyKapowski

        segm_img = setup_node(Function(
            function=math_img,
            input_names=["formula", "out_file", "gm", "wm"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                              name='gm-wm_image')
        segm_img.inputs.out_file = "gm_wm.nii.gz"
        segm_img.inputs.formula = '((gm >= 0.5)*2 + (wm > 0.5)*3).astype(np.uint8)'

        # copy the header from the GM tissue image to the result from `gm-wm_image`.
        # this is necessary because the `gm-wm_image` operation sometimes modifies the
        # offset of the image, which will provoke an ANTs exception due to
        # ITK tolerance in ImageToImageFilter
        # https://github.com/stnava/ANTs/issues/74
        cp_hdr = setup_node(Function(
            function=copy_header,
            input_names=["in_file", "data_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                            name='copy_header')

        kk = setup_node(KellyKapowski(), name='direct')
        kk.inputs.cortical_thickness = 'direct_cortical_thickness.nii.gz'
        kk.inputs.warped_white_matter = 'direct_warped_white_matter.nii.gz'

        # connect the cortical thickness (SPM+DiReCT) method
        wf.connect([
            # create segmentation GM+WM file
            (tissues, segm_img, [("gm", "gm"), ("wm", "wm")]),
            (segm_img, cp_hdr, [("out_file", "data_file")]),
            (tissues, cp_hdr, [("gm", "in_file")]),

            # kellykapowski
            (cp_hdr, kk, [("out_file", "segmentation_image")]),
            (tissues, kk, [("gm", "gray_matter_prob_image"),
                           ("wm", "white_matter_prob_image")]),
            (kk, anat_output, [("cortical_thickness", "cortical_thickness"),
                               ("warped_white_matter", "warped_white_matter")
                               ]),
        ])
    return wf
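
A minimal usage sketch (hypothetical path):

wf = spm_anat_preprocessing(wf_name="spm_anat_preproc")
wf.base_dir = "/tmp/anat_work"  # hypothetical working directory
wf.get_node("anat_input").inputs.in_file = "/data/sub01/anat.nii.gz"
wf.run()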
Example #8
def attach_concat_canica(main_wf, wf_name="canica", **kwargs):
    """ Attach a Concat and a nilearn CanICA interface to `main_wf`.

    The Concat node will merge all the files together in one 4D volume before delivering it to CanICA.

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    kwargs: dict of str to str
        input_node: str
            Name of the input node from where to connect the source `input_connect`.

        input_connection: str
            Name of the connection to obtain the source files.

    Nipype Inputs for `main_wf`
    ---------------------------
    datasink: nipype Node

    Returns
    -------
    main_wf: nipype Workflow
    """
    # Dependency workflows
    srcwf_name = kwargs['input_node']
    srcconn_name = kwargs['input_connection']

    src_wf = get_subworkflow(main_wf, srcwf_name)
    datasink = get_datasink(main_wf, name='datasink')

    base_outdir = datasink.inputs.base_directory
    ica_datasink = pe.Node(DataSink(
        parameterization=False,
        base_directory=base_outdir,
    ),
                           name="ica_datasink".format(wf_name))
    ica_datasink.inputs.container = 'ica_{}'.format(wf_name)

    # the list of the ICA input files from all subjects
    ica_subjs = pe.JoinNode(interface=IdentityInterface(fields=["ica_subjs"]),
                            joinsource="infosrc",
                            joinfield="ica_subjs",
                            name="ica_subjs")

    # concat images
    concat = setup_node(Function(
        function=concat_3D_imgs,
        input_names=["in_files"],
        output_names=["out_file"],
        imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                        name="concat")

    # the CanICA node that will run on the concatenated images
    ica = setup_node(
        CanICAInterface(),
        name="{}_ica".format(wf_name),
    )
    algorithm = get_config_setting("{}_ica.algorithm".format(wf_name),
                                   default=get_config_setting(
                                       'canica.algorithm', default=''))

    if algorithm:
        ica.inputs.algorithm = algorithm

    # Connect the nodes
    main_wf.connect([
        # file list input
        (src_wf, ica_subjs, [(srcconn_name, "ica_subjs")]),
        # concat images
        (ica_subjs, concat, [("ica_subjs", "in_files")]),
        # canica
        (concat, ica, [(("out_file", _check_list), "in_files")]),

        # canica output
        (ica, ica_datasink, [
            ("components", "@components"),
            ("loadings", "@loadings"),
            ("score", "@score"),
        ]),
    ])

    # plot the ICA results?
    do_plot = get_config_setting('canica_extra.plot', default=True)
    if not do_plot:
        return main_wf

    # get the plot threshold from the ICA node or the config file (in that order).
    plot_thr = get_config_setting('canica_extra.plot_thr', default=0)
    plot_thr = get_trait_value(ica.inputs, 'threshold', default=plot_thr)

    # plot the ICA result images
    plot_ica = setup_node(Function(
        function=plot_ica_results,
        input_names=[
            "ica_result", "application", "mask_file", "zscore", "bg_img"
        ],
        output_names=["all_icc_plot", "iccs_plot", "sliced_ic_plots"],
    ),
                          name="plot_ica")
    plot_ica.inputs.zscore = plot_thr
    plot_ica.inputs.mask_file = get_trait_value(ica.inputs, 'mask')
    plot_ica.inputs.application = 'nilearn'

    # Connect the plotting nodes
    main_wf.connect([
        # canica
        (ica, plot_ica, [("components", "ica_result")]),

        # canica output
        (plot_ica, ica_datasink, [
            ("all_icc_plot", "@all_icc_plot"),
            ("iccs_plot", "@iccs_plot"),
            ("sliced_ic_plots", "@sliced_ic_plots"),
        ]),
    ])
    return main_wf
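
A hedged sketch of a call site: `input_node` names the source sub-workflow and `input_connection` the output to collect; the values below are examples, not names fixed by this function.

main_wf = attach_concat_canica(
    main_wf,  # an existing workflow with infosrc and datasink nodes
    wf_name="rest",
    input_node="spm_warp_fmri",                   # hypothetical source sub-workflow
    input_connection="wfmri_output.warped_fmri",  # hypothetical output connection
)
main_wf.run()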
Example #9
def spm_create_group_template_wf(wf_name="spm_create_group_template"):
    """ Pick all subject files in `grptemplate_input.in_files`, calculate an average
    image and smooth it with `"{}_smooth".format(wf_name)` node (you can configure the smooth `fwhm` from
    a config file.).

    It does:
    - calculate a mean image (across subjects) and
    - smooth it with a 8x8x8mm^3 gaussian kernel -> the result of this is the template.
    The size of the isometric smoothing gaussian kernel is given by one integer for the
    "{}_smooth.fwhm".format(wf_name) setting.

    You can also avoid calculating the mean image across subjects and setting a specific group template file by
    setting the configuration "{}.template_file".format(wf_name) to the path of the file you want.
    This image will be smoothed and used as a common template for the further pipeline steps.

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    grptemplate_input.in_files: list of traits.File
        The raw NIFTI_GZ PET image files

    Nipype Outputs
    --------------
    grptemplate_output.template: existing file
        The common custom PET template file.

    Returns
    -------
    wf: nipype Workflow
    """
    # input
    input = setup_node(
        IdentityInterface(fields=["in_files"]),
        name="grptemplate_input",
    )

    # checking if a template file has been set already
    template_file = get_config_setting("{}.template_file".format(wf_name))

    use_common_template = path.exists(template_file)
    if not use_common_template:
        # merge
        concat = setup_node(Function(
            function=concat_imgs,
            input_names=["in_files"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                            name='merge_time')

        # average
        average = setup_node(Function(
            function=mean_img,
            input_names=["in_file", "out_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                             name='group_average')
        average.inputs.out_file = 'group_average.nii.gz'

    # TODO: check what is the difference between nilearn.image.smooth_img and FSL IsotropicSmooth
    # smooth
    # smooth = setup_node(Function(function=smooth_img,
    #                             input_names=["in_file", "fwhm"],
    #                             output_names=["out_file"],
    #                             imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
    #                     name="{}_smooth".format(wf_name))
    smooth = setup_node(fsl.IsotropicSmooth(fwhm=8),
                        name="{}_smooth".format(wf_name))

    # output
    output = setup_node(
        IdentityInterface(fields=["template"]),
        name="grptemplate_output",
    )

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # if I have to create the group template
    if not use_common_template:
        wf.connect([
            # input
            (input, concat, [("in_files", "in_files")]),

            # merge, average and smooth
            (concat, average, [("out_file", "in_file")]),
            (average, smooth, [("out_file", "in_file")]),

            # output
            (smooth, output, [("out_file", "template")]),
        ])
    else:  # if the template has been specified in the configuration file
        wf.add_nodes([input])

        smooth.inputs.in_file = template_file

        wf.connect([
            # output
            (smooth, output, [("out_file", "template")]),
        ])

    return wf
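
A minimal usage sketch (hypothetical paths): feed the per-subject normalized images and collect the smoothed template from `grptemplate_output.template`.

wf = spm_create_group_template_wf("spm_create_group_template")
wf.get_node("grptemplate_input").inputs.in_files = [
    "/data/sub01/wpet.nii.gz",  # hypothetical warped subject images
    "/data/sub02/wpet.nii.gz",
]
wf.run()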
Example #10
def attach_dti_artifact_correction(main_wf, wf_name="dti_artifact_correction"):
    """ Attach the FSL-based diffusion MRI artifact detection and correction
    workflow to the `main_wf`.

    Parameters
    ----------
    main_wf: nipype Workflow

    wf_name: str
        Name of the preprocessing workflow

    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have `input_files` and `datasink` nodes.

    input_files.select.diff: input node

    datasink: nipype Node

    Returns
    -------
    main_wf: nipype Workflow
    """
    in_files = get_input_node(main_wf)
    datasink = get_datasink(main_wf)

    # The workflow box
    art_dti_wf = dti_artifact_correction(wf_name=wf_name)

    # dataSink output substitutions
    ## The base name of the 'diff' file for the substitutions
    diff_fbasename = remove_ext(
        os.path.basename(get_input_file_name(in_files, 'diff')))

    regexp_subst = [
        (r"/brain_mask_{diff}_space\.nii$", "/brain_mask.nii"),
        (r"/eddy_corrected\.nii$", "/{diff}_eddycor.nii"),
    ]
    regexp_subst = format_pair_list(regexp_subst, diff=diff_fbasename)

    regexp_subst += extension_duplicates(regexp_subst)
    regexp_subst = concat_to_pair_list(regexp_subst, prefix='/diff')
    datasink.inputs.regexp_substitutions = extend_trait_list(
        datasink.inputs.regexp_substitutions, regexp_subst)

    # input and output diffusion MRI workflow to main workflow connections
    main_wf.connect([
        (in_files, art_dti_wf, [
            ("diff", "dti_art_input.diff"),
            ("bval", "dti_art_input.bval"),
            ("bvec", "dti_art_input.bvec"),
        ]),
        (art_dti_wf, datasink, [
            ("dti_art_output.eddy_corr_file", "diff.@eddy_corr_file"),
            ("dti_art_output.bvec_rotated", "diff.@bvec_rotated"),
            ("dti_art_output.brain_mask_1", "diff.@brain_mask_1"),
            ("dti_art_output.brain_mask_2", "diff.@brain_mask_2"),
            ("dti_art_output.acqp", "diff.@acquisition_pars"),
            ("dti_art_output.index", "diff.@acquisition_idx"),
            ("dti_art_output.avg_b0", "diff.@avg_b0"),
        ]),
    ])

    do_rapidart = get_config_setting("dmri.artifact_detect", True)
    if do_rapidart:
        main_wf.connect([
            (art_dti_wf, datasink, [
                ("dti_art_output.hmc_corr_file",
                 "diff.artifact_stats.@hmc_corr_file"),
                ("dti_art_output.hmc_corr_bvec",
                 "diff.artifact_stats.@hmc_rot_bvec"),
                ("dti_art_output.hmc_corr_xfms",
                 "diff.artifact_stats.@hmc_corr_xfms"),
                ("dti_art_output.art_displacement_files",
                 "diff.artifact_stats.@art_disp_files"),
                ("dti_art_output.art_intensity_files",
                 "diff.artifact_stats.@art_ints_files"),
                ("dti_art_output.art_norm_files",
                 "diff.artifact_stats.@art_norm_files"),
                ("dti_art_output.art_outlier_files",
                 "diff.artifact_stats.@art_outliers"),
                ("dti_art_output.art_plot_files",
                 "diff.artifact_stats.@art_plots"),
                ("dti_art_output.art_statistic_files",
                 "diff.artifact_stats.@art_stats"),
            ]),
        ])

    return main_wf
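
A hedged sketch of a call site (the base workflow must expose 'diff', 'bval' and 'bvec' through its `input_files` node; `build_main_workflow` is a hypothetical stand-in for whatever builds it):

main_wf = build_main_workflow()  # hypothetical builder of the base workflow
main_wf = attach_dti_artifact_correction(main_wf, wf_name="dti_artifact_correction")
main_wf.run()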
Example #11
def dti_artifact_correction(wf_name="dti_artifact_correction"):
    """ Run the diffusion MRI pre-processing workflow against the diff files in `data_dir`.

    It will resample/regrid the diffusion image to have isometric voxels.
    Corrects for head motion correction and Eddy currents.
    Estimates motion outliers and exports motion reports using nipype.algorithms.RapidArt.

    Nipype Inputs
    -------------
    dti_art_input.diff: traits.File
        path to the diffusion MRI image

    dti_art_input.bval: traits.File
        path to the bvals file

    dti_art_input.bvec: traits.File
        path to the bvecs file


    Nipype Outputs
    --------------
    dti_art_output.eddy_corr_file: traits.File
        Eddy currents corrected DTI image.

    dti_art_output.bvec_rotated: traits.File
        Rotated bvecs file

    dti_art_output.brain_mask_1: traits.File
        Brain mask extracted using BET on the first B0 image.

    dti_art_output.brain_mask_2: traits.File
        Brain mask extracted using BET on the average B0 image,
        after motion correction.

    dti_art_output.acqp: traits.File
        Text file with acquisition parameters calculated for Eddy.

    dti_art_output.index: traits.File
        Text file with acquisition indices calculated for Eddy.

    dti_art_output.avg_b0: traits.File
        The average b=0 image extracted from the motion and eddy
        currents corrected diffusion MRI.

    dti_art_output.hmc_corr_file: traits.File
        Head-motion corrected diffusion MRI image.

    dti_art_output.hmc_corr_bvec: traits.File
        b-vectors rotated according to the head-motion correction transforms.

    dti_art_output.hmc_corr_xfms: traits.File
        Transform files produced by the head-motion correction.

    dti_art_output.art_displacement_files: traits.File
        RapidArt scan-to-scan displacement time series.

    dti_art_output.art_intensity_files: traits.File
        RapidArt global intensity time series.

    dti_art_output.art_norm_files: traits.File
        RapidArt composite motion norm time series.

    dti_art_output.art_outlier_files: traits.File
        RapidArt indices of the detected outlier volumes.

    dti_art_output.art_plot_files: traits.File
        RapidArt diagnostic plot files.

    dti_art_output.art_statistic_files: traits.File
        RapidArt artifact statistics files.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = ["diff", "bval", "bvec"]
    out_fields = [
        "eddy_corr_file",
        "bvec_rotated",
        "brain_mask_1",
        "brain_mask_2",
        "acqp",
        "index",
        "avg_b0",
    ]

    do_rapidart = get_config_setting("dmri.artifact_detect", True)
    if do_rapidart:
        out_fields += [
            "hmc_corr_file",
            "hmc_corr_bvec",
            "hmc_corr_xfms",
            "art_displacement_files",
            "art_intensity_files",
            "art_norm_files",
            "art_outlier_files",
            "art_plot_files",
            "art_statistic_files",
        ]

    # input interface
    dti_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="dti_art_input")

    # resample
    resample = setup_node(Function(
        function=reslice,
        input_names=['in_file', 'new_zooms', 'order', 'out_file'],
        output_names=['out_file']),
                          name='dti_reslice')
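    # A hedged sketch of what an isometric `reslice` helper can look like,
    # assuming dipy's `dipy.align.reslice.reslice` API; illustration only,
    # the actual helper wired above may differ.
    def _reslice_sketch(in_file, new_zooms=(2., 2., 2.), order=1):
        import nibabel as nib
        from dipy.align.reslice import reslice as dipy_reslice

        img = nib.load(in_file)
        data, affine = dipy_reslice(img.get_fdata(), img.affine,
                                    img.header.get_zooms()[:3],
                                    new_zooms, order=order)
        return nib.Nifti1Image(data, affine)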

    ## extract first b0 for Eddy and HMC brain mask
    list_b0 = pe.Node(Function(
        function=b0_indices,
        input_names=['in_bval'],
        output_names=['out_idx'],
    ),
                      name='b0_indices')

    extract_b0 = pe.Node(ExtractROI(t_size=1), name="extract_first_b0")

    # For Eddy, the mask is only used to select voxels for the estimation
    # of the hyperparameters, so it is not very critical.
    # Note also that a too-conservative (small) mask is better than a
    # too-big one.
    bet_dwi0 = setup_node(BET(frac=0.3, mask=True, robust=True),
                          name='bet_dwi_pre')

    pick_first = lambda lst: lst[0]

    # motion artifacts detection, requires linear co-registration for motion estimation.
    if do_rapidart:
        # head motion correction
        hmc = hmc_pipeline()

        art = setup_node(rapidart_dti_artifact_detection(),
                         name="detect_artifacts")

    # Eddy
    eddy = setup_node(Eddy(method='jac'), name="eddy")

    ## acquisition parameters for Eddy
    write_acqp = setup_node(Function(
        function=dti_acquisition_parameters,
        input_names=["in_file"],
        output_names=["out_acqp", "out_index"],
    ),
                            name="write_acqp")

    ## rotate b-vecs
    rot_bvec = setup_node(Function(
        function=eddy_rotate_bvecs,
        input_names=["in_bvec", "eddy_params"],
        output_names=["out_file"],
    ),
                          name="rot_bvec")

    ## extract all b0s and average them after Eddy correction
    avg_b0_post = pe.Node(Function(
        function=b0_average,
        input_names=['in_dwi', 'in_bval'],
        output_names=['out_file'],
    ),
                          name='b0_avg_post')
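    # A hedged sketch of a `b0_average` helper: average the volumes whose
    # b-value is (near) zero. The b <= 50 threshold is an assumption for
    # illustration, not taken from this package.
    def _b0_average_sketch(in_dwi, in_bval, max_b=50):
        import nibabel as nib
        import numpy as np

        img = nib.load(in_dwi)
        bvals = np.loadtxt(in_bval)
        b0s = img.get_fdata()[..., bvals <= max_b]
        return nib.Nifti1Image(b0s.mean(axis=-1), img.affine, img.header)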

    bet_dwi1 = setup_node(BET(frac=0.3, mask=True, robust=True),
                          name='bet_dwi_post')

    # nlmeans denoise
    apply_nlmeans = get_config_setting("dmri.apply_nlmeans", True)
    if apply_nlmeans:
        nlmeans = setup_node(Function(
            function=nlmeans_denoise,
            input_names=['in_file', 'mask_file', 'out_file', 'N'],
            output_names=['out_file']),
                             name='nlmeans_denoise')
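        # A hedged sketch of an `nlmeans_denoise` helper, assuming dipy's
        # `nlmeans` and `estimate_sigma` APIs (N = receiver coil count);
        # illustration only, the actual helper wired above may differ.
        def _nlmeans_sketch(in_file, mask_file, N=12):
            import nibabel as nib
            from dipy.denoise.nlmeans import nlmeans
            from dipy.denoise.noise_estimate import estimate_sigma

            img = nib.load(in_file)
            data = img.get_fdata()
            mask = nib.load(mask_file).get_fdata() > 0
            sigma = estimate_sigma(data, N=N)
            return nib.Nifti1Image(nlmeans(data, sigma=sigma, mask=mask),
                                   img.affine, img.header)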

    # output interface
    dti_output = setup_node(IdentityInterface(fields=out_fields),
                            name="dti_art_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # Connect the nodes
    wf.connect([
        # resample to iso-voxel
        (dti_input, resample, [
            ("diff", "in_file"),
        ]),

        # read from input file the acquisition parameters for eddy
        (dti_input, write_acqp, [("diff", "in_file")]),

        # reference mask for hmc and eddy
        (dti_input, list_b0, [("bval", "in_bval")]),
        (resample, extract_b0, [("out_file", "in_file")]),
        (list_b0, extract_b0, [(("out_idx", pick_first), "t_min")]),
        (extract_b0, bet_dwi0, [("roi_file", "in_file")]),

        # Eddy
        (resample, eddy, [("out_file", "in_file")]),
        (bet_dwi0, eddy, [("mask_file", "in_mask")]),
        (dti_input, eddy, [("bval", "in_bval"), ("bvec", "in_bvec")]),
        (write_acqp, eddy, [("out_acqp", "in_acqp"),
                            ("out_index", "in_index")]),

        # rotate bvecs
        (dti_input, rot_bvec, [("bvec", "in_bvec")]),
        (eddy, rot_bvec, [("out_parameter", "eddy_params")]),

        # final avg b0
        (dti_input, avg_b0_post, [("bval", "in_bval")]),
        (eddy, avg_b0_post, [("out_corrected", "in_dwi")]),
        (avg_b0_post, bet_dwi1, [("out_file", "in_file")]),

        # output
        (write_acqp, dti_output, [("out_acqp", "acqp"),
                                  ("out_index", "index")]),
        (bet_dwi0, dti_output, [("mask_file", "brain_mask_1")]),
        (bet_dwi1, dti_output, [("mask_file", "brain_mask_2")]),
        (rot_bvec, dti_output, [("out_file", "bvec_rotated")]),
        (avg_b0_post, dti_output, [("out_file", "avg_b0")]),
    ])

    if apply_nlmeans:
        wf.connect([
            # non-local means
            (eddy, nlmeans, [("out_corrected", "in_file")]),
            (bet_dwi1, nlmeans, [("mask_file", "mask_file")]),

            # output
            (nlmeans, dti_output, [("out_file", "eddy_corr_file")]),
        ])
    else:
        wf.connect([
            # output
            (eddy, dti_output, [("out_corrected", "eddy_corr_file")]),
        ])

    if do_rapidart:
        wf.connect([
            # head motion correction
            (dti_input, hmc, [
                ("bval", "inputnode.in_bval"),
                ("bvec", "inputnode.in_bvec"),
            ]),
            (resample, hmc, [("out_file", "inputnode.in_file")]),
            (bet_dwi0, hmc, [("mask_file", "inputnode.in_mask")]),
            (list_b0, hmc, [
                (("out_idx", pick_first), "inputnode.ref_num"),
            ]),

            # artifact detection
            (hmc, art, [
                ("outputnode.out_file", "realigned_files"),
                ("outputnode.out_xfms", "realignment_parameters"),
            ]),
            (bet_dwi1, art, [
                ("mask_file", "mask_file"),
            ]),

            # output
            (hmc, dti_output, [
                ("outputnode.out_file", "hmc_corr_file"),
                ("outputnode.out_bvec", "hmc_corr_bvec"),
                ("outputnode.out_xfms", "hmc_corr_xfms"),
            ]),
            (art, dti_output, [
                ("displacement_files", "art_displacement_files"),
                ("intensity_files", "art_intensity_files"),
                ("norm_files", "art_norm_files"),
                ("outlier_files", "art_outlier_files"),
                ("plot_files", "art_plot_files"),
                ("statistic_files", "art_statistic_files"),
            ]),
        ])

    return wf
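# A minimal usage sketch (hypothetical paths; assumes FSL is available and
# that `setup_node` keeps the node names used above):
#
#     wf = dti_artifact_correction()
#     wf.base_dir = "/tmp/dti_example"
#     inputs = wf.get_node("dti_art_input").inputs
#     inputs.diff = "/data/sub01/dwi.nii.gz"
#     inputs.bval = "/data/sub01/dwi.bval"
#     inputs.bvec = "/data/sub01/dwi.bvec"
#     wf.run()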
Beispiel #12
0
def spm_warp_fmri_wf(wf_name="spm_warp_fmri", register_to_grptemplate=False):
    """ Run SPM to warp resting-state fMRI pre-processed data to MNI or a given
    template.

    Tasks:
    - Warping the inputs to MNI, or to a group template if
      `register_to_grptemplate` is True

    Parameters
    ----------
    wf_name: str

    register_to_grptemplate: bool
        If True, the workflow will expect the `wfmri_input.epi_template`
        input and use it as the group template for inter-subject registration.

    Nipype Inputs
    -------------
    wfmri_input.in_file: traits.File
        The slice time and motion corrected fMRI file.

    wfmri_input.reference_file: traits.File
        The anatomical image in its native space
        for registration reference.

    wfmri_input.anat_fmri: traits.File
        The anatomical image in fMRI space.

    wfmri_input.anat_to_mni_warp: traits.File
        The warp field from the transformation of the
        anatomical image to the standard MNI space.

    wfmri_input.time_filtered: traits.File
        The bandpass time filtered fMRI file.

    wfmri_input.avg_epi: traits.File
        The average EPI from the fMRI file.

    wfmri_input.epi_template: traits.File
        Reference EPI template file for inter subject registration.
        If `register_to_grptemplate` is True you must specify this input.

    wfmri_input.brain_mask: traits.File
        Brain mask in fMRI space.

    wfmri_input.atlas_anat: traits.File
        Atlas in subject anatomical space.

    Nipype Outputs
    --------------
    wfmri_output.warped_fmri: traits.File
        The slice time, motion, and nuisance corrected fMRI
        file registered to the template.

    wfmri_output.wtime_filtered: traits.File
        The bandpass time filtered fMRI file
        registered to the template.

    wfmri_output.smooth: traits.File
        The smooth bandpass time filtered fMRI file
        registered to the template.

    wfmri_output.wavg_epi: traits.File
        The average EPI from the fMRI file
        registered to the template.

    wfmri_output.warp_field: traits.File
        The fMRI to template warp field.

    wfmri_output.coreg_avg_epi: traits.File
        The average EPI image in anatomical space.

        Only if registration.fmri2mni is false.

    wfmri_output.coreg_others: traits.File
        Other mid-preprocessing fmri images registered to
        anatomical space:

        - wfmri_input.in_file,

        - wfmri_input.brain_mask,

        - wfmri_input.time_filtered.

        Only if registration.fmri2mni is false.

    wfmri_output.wbrain_mask: traits.File
        Brain mask in fMRI space warped to MNI.

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
        "anat_fmri",
        "anat_to_mni_warp",
        "brain_mask",
        "reference_file",
        "time_filtered",
        "avg_epi",
    ]

    out_fields = [
        "warped_fmri", "wtime_filtered", "smooth", "wavg_epi", "wbrain_mask",
        "warp_field", "coreg_avg_epi", "coreg_others"
    ]

    if register_to_grptemplate:
        in_fields += ['epi_template']

    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_anat"]
        out_fields += ["atlas_fmri"]

    # input identities
    wfmri_input = setup_node(IdentityInterface(fields=in_fields,
                                               mandatory_inputs=True),
                             name="wfmri_input")

    # in file unzipper
    in_gunzip = pe.Node(Gunzip(), name="in_gunzip")

    # merge list for normalization input
    merge_list = pe.Node(Merge(2), name='merge_for_warp')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    # the template bounding box
    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")

    # smooth the final result
    smooth = setup_node(fsl.IsotropicSmooth(fwhm=8, output_type='NIFTI'),
                        name="smooth_fmri")

    # output identities
    rest_output = setup_node(IdentityInterface(fields=out_fields),
                             name="wfmri_output")

    # check how to perform the registration, to decide how to build the pipeline
    fmri2mni = get_config_setting('registration.fmri2mni', False)
    # register to group template
    if register_to_grptemplate:
        gunzip_template = pe.Node(
            Gunzip(),
            name="gunzip_template",
        )
        warp = setup_node(spm.Normalize(jobtype="estwrite",
                                        out_prefix="wgrptmpl_"),
                          name="fmri_grptemplate_warp")
        warp_source_arg = "source"
        warp_outsource_arg = "normalized_source"
        warp_field_arg = "normalization_parameters"

    elif fmri2mni:
        # register to standard template
        warp = setup_node(spm_normalize(), name="fmri_warp")
        tpm_bbox.inputs.in_file = spm_tpm_priors_path()
        warp_source_arg = "image_to_align"
        warp_outsource_arg = "normalized_image"
        warp_field_arg = "deformation_field"

    else:  # fmri2mni is False
        coreg = setup_node(spm_coregister(cost_function="mi"),
                           name="coreg_fmri")
        warp = setup_node(spm_apply_deformations(), name="fmri_warp")
        coreg_files = pe.Node(Merge(3), name='merge_for_coreg')
        # use a name distinct from the `merge_list` node above, which
        # already takes the name 'merge_for_warp'
        warp_files = pe.Node(Merge(2), name='merge_for_warp_files')
        tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # make the connections
    if register_to_grptemplate:
        wf.connect([
            # get the template bounding box to apply to the results
            (wfmri_input, tpm_bbox, [("epi_template", "in_file")]),

            # unzip and forward the template file
            (wfmri_input, gunzip_template, [("epi_template", "in_file")]),
            (gunzip_template, warp, [("out_file", "template")]),
        ])

    if fmri2mni or register_to_grptemplate:
        # prepare the inputs
        wf.connect([
            # unzip the in_file input file
            (wfmri_input, in_gunzip, [("avg_epi", "in_file")]),

            # warp source file
            (in_gunzip, warp, [("out_file", warp_source_arg)]),

            # bounding box
            (tpm_bbox, warp, [("bbox", "write_bounding_box")]),

            # merge the other input files into a list
            (wfmri_input, merge_list, [
                ("in_file", "in1"),
                ("time_filtered", "in2"),
            ]),

            # gunzip them for SPM
            (merge_list, gunzipper, [("out", "in_file")]),

            # apply to files
            (gunzipper, warp, [("out_file", "apply_to_files")]),

            # outputs
            (warp, rest_output, [
                (warp_field_arg, "warp_field"),
                (warp_outsource_arg, "wavg_epi"),
            ]),
        ])

    else:  # FMRI to ANAT
        wf.connect([
            (wfmri_input, coreg, [("reference_file", "target")]),

            # unzip the in_file input file
            (wfmri_input, in_gunzip, [("avg_epi", "in_file")]),
            (in_gunzip, coreg, [("out_file", "source")]),

            # merge the other input files into a list
            (wfmri_input, coreg_files, [
                ("in_file", "in1"),
                ("time_filtered", "in2"),
                ("brain_mask", "in3"),
            ]),

            # gunzip them for SPM
            (coreg_files, gunzipper, [("out", "in_file")]),

            # coregister fmri to anat
            (gunzipper, coreg, [("out_file", "apply_to_files")]),

            # anat to mni warp field
            (wfmri_input, warp, [("anat_to_mni_warp", "deformation_file")]),

            # bounding box
            (tpm_bbox, warp, [("bbox", "write_bounding_box")]),

            # apply to files
            (coreg, warp_files, [("coregistered_source", "in1")]),
            (coreg, warp_files, [("coregistered_files", "in2")]),
            (warp_files, warp, [("out", "apply_to_files")]),

            # outputs: "warped_fmri" and "wtime_filtered" are picked from
            # "normalized_files" in the final connection block below
            # (`rest_output` has no "warped_files" field).
            (warp, rest_output, [
                (("normalized_files", selectindex, 0), "wavg_epi"),
            ]),
            (coreg, rest_output, [("coregistered_source", "coreg_avg_epi")]),
            (coreg, rest_output, [("coregistered_files", "coreg_others")]),
        ])

    # atlas file in fMRI space
    # gate on `do_atlas`: the "atlas_anat" input and "atlas_fmri" output
    # fields only exist when an atlas file is configured
    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"),
                                 name="coreg_atlas2fmri")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
            (wfmri_input, coreg_atlas, [
                ("reference_file", "source"),
                ("atlas_anat", "apply_to_files"),
            ]),
            (in_gunzip, coreg_atlas, [("out_file", "target")]),
            (coreg_atlas, rest_output, [("coregistered_files", "atlas_fmri")]),
        ])

    # smooth and sink
    # Index into `normalized_files`: in the warp-to-template branches
    # `apply_to_files` is [in_file, time_filtered]; in the coregistration
    # branch the warped average EPI is prepended, shifting both by one.
    if fmri2mni or register_to_grptemplate:
        fmri_idx, filt_idx = 0, 1
    else:
        fmri_idx, filt_idx = 1, 2

    wf.connect([
        # smooth the final bandpassed image
        (warp, smooth, [(("normalized_files", selectindex, filt_idx),
                         "in_file")]),

        # output
        (smooth, rest_output, [("out_file", "smooth")]),
        (warp, rest_output, [
            (("normalized_files", selectindex, fmri_idx), "warped_fmri"),
            (("normalized_files", selectindex, filt_idx), "wtime_filtered"),
        ]),
    ])

    return wf
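# A minimal usage sketch (hypothetical paths; assumes SPM12 is reachable by
# nipype and that `setup_node` keeps the node names used above):
#
#     wf = spm_warp_fmri_wf("spm_warp_fmri", register_to_grptemplate=False)
#     wf.base_dir = "/tmp/warp_fmri_example"
#     inputs = wf.get_node("wfmri_input").inputs
#     inputs.in_file = "/data/sub01/rest_stc_mc.nii.gz"
#     inputs.avg_epi = "/data/sub01/rest_avg_epi.nii.gz"
#     inputs.time_filtered = "/data/sub01/rest_filtered.nii.gz"
#     inputs.brain_mask = "/data/sub01/rest_brain_mask.nii.gz"
#     inputs.reference_file = "/data/sub01/anat.nii.gz"
#     inputs.anat_fmri = "/data/sub01/anat_in_fmri.nii.gz"
#     inputs.anat_to_mni_warp = "/data/sub01/y_anat.nii"
#     wf.run()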