Example #1
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.filemanip import unzip_nii
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        if spm_standalone_is_available():
            use_spm_standalone()

        # Unzipping
        # =========
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node',
                                 iterfield=['in_file'])

        # DARTEL template
        # ===============
        dartel_template = npe.Node(spm.DARTEL(), name='dartel_template')

        # Connection
        # ==========
        self.connect([(self.input_node, unzip_node, [
            ('dartel_input_images', 'in_file')
        ]), (unzip_node, dartel_template, [('out_file', 'image_files')]),
                      (dartel_template, self.output_node,
                       [('dartel_flow_fields', 'dartel_flow_fields'),
                        ('final_template_file', 'final_template_file'),
                        ('template_files', 'template_files')])])
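
A note for readers unfamiliar with the pattern above: unzip_nii is a plain Python function wrapped in Nipype's Function interface, and MapNode with iterfield applies it element-wise to a list input. Below is a minimal, self-contained sketch of the same mechanism with a toy function; it is not Clinica code and all names are made up.

import nipype.interfaces.utility as nutil
import nipype.pipeline.engine as npe


def add_suffix(in_file):
    """Toy stand-in for unzip_nii: derive an output name from the input path."""
    out_file = in_file + ".out"
    return out_file


# MapNode + iterfield runs the wrapped function once per element of 'in_file'.
toy_unzip = npe.MapNode(
    nutil.Function(input_names=["in_file"],
                   output_names=["out_file"],
                   function=add_suffix),
    name="toy_unzip",
    iterfield=["in_file"],
)
toy_unzip.inputs.in_file = ["a.nii.gz", "b.nii.gz"]

wf = npe.Workflow(name="toy_workflow")
wf.add_nodes([toy_unzip])
# wf.run()  # would produce one 'out_file' per input element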
Example #2
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import unzip_nii
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        if spm_standalone_is_available():
            use_spm_standalone()

        # Unzipping
        # =========
        unzip_node = npe.MapNode(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_node",
            iterfield=["in_file"],
        )

        # DARTEL template
        # ===============
        dartel_template = npe.Node(spm.DARTEL(), name="dartel_template")

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, unzip_node, [("dartel_input_images", "in_file")
                                           ]),
            (unzip_node, dartel_template, [("out_file", "image_files")]),
            (dartel_template, self.output_node,
             [("dartel_flow_fields", "dartel_flow_fields"),
              ("final_template_file", "final_template_file"),
              ("template_files", "template_files")]),
        ])
Example #3
def run_m_script(m_file):
    """
        Runs a matlab m file for SPM, determining automatically if it must be launched with SPM or SPM Standalone
        If launch with spm standalone, the line 'spm_jobman('run', matlabbatch)' must be removed because unnecessary

    Args:
        m_file: (str) path to Matlab m file

    Returns:
        output_mat_file: (str) path to the SPM.mat file needed in SPM analysis
    """
    from os.path import isfile, dirname, basename, abspath, join
    from os import system
    from clinica.utils.spm import use_spm_standalone
    import clinica.pipelines.statistics_volume.statistics_volume_utils as utls
    from nipype.interfaces.matlab import MatlabCommand, get_matlab_command
    import platform

    assert isinstance(m_file, str), '[Error] Argument must be a string'
    if not isfile(m_file):
        raise FileNotFoundError('[Error] File ' + m_file + ' does not exist')
    assert m_file[-2:] == '.m', '[Error] ' + m_file + ' is not a Matlab file (extension must be .m)'

    # Generate command line to run
    if use_spm_standalone():
        utls.delete_last_line(m_file)
        # SPM standalone must be run directly from its root folder
        if platform.system().lower().startswith('darwin'):
            # Mac OS
            cmdline = 'cd $SPMSTANDALONE_HOME && ./run_spm12.sh $MCR_HOME batch ' + m_file
        elif platform.system().lower().startswith('linux'):
            # Linux OS
            cmdline = '$SPMSTANDALONE_HOME/run_spm12.sh $MCR_HOME batch ' + m_file
        else:
            raise SystemError('Clinica only supports macOS and Linux')
        system(cmdline)
    else:
        MatlabCommand.set_default_matlab_cmd(get_matlab_command())
        matlab = MatlabCommand()
        if platform.system().lower().startswith('linux'):
            matlab.inputs.args = '-nosoftwareopengl'
        matlab.inputs.paths = dirname(m_file)
        matlab.inputs.script = basename(m_file)[:-2]
        matlab.inputs.single_comp_thread = False
        matlab.inputs.logfile = abspath('./matlab_output.log')
        matlab.run()
    output_mat_file = abspath(join(dirname(m_file), '..', '2_sample_t_test', 'SPM.mat'))
    if not isfile(output_mat_file):
        raise RuntimeError('Output matrix ' + output_mat_file + ' was not produced')
    return output_mat_file
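
A hedged usage sketch of run_m_script; the batch-script path below is hypothetical, and calling the function actually launches Matlab/SPM, so this is illustration only.

# Hypothetical call site: the .m path is made up for illustration.
# run_m_script expects an existing Matlab batch script ending in '.m' and
# returns the path of the SPM.mat written under '../2_sample_t_test'.
spm_mat_file = run_m_script('/tmp/statistics_volume/group_comparison/model.m')
print(spm_mat_file)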
Example #4
    def build_core_nodes(self):
        """Build and connect an output node to the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.interfaces.spm.utils as spmutils
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from nipype.interfaces.petpvc import PETPVC

        from clinica.utils.filemanip import unzip_nii
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        from .pet_volume_utils import (
            apply_binary_mask,
            atlas_statistics,
            create_binary_mask,
            create_pvc_mask,
            get_from_list,
            init_input_node,
            normalize_to_reference,
            pet_pvc_name,
        )

        if spm_standalone_is_available():
            use_spm_standalone()

        # Initialize pipeline
        # ===================
        init_node = npe.Node(
            interface=nutil.Function(
                input_names=["pet_nii"],
                output_names=["pet_nii"],
                function=init_input_node,
            ),
            name="init_pipeline",
        )

        # Unzipping
        # =========
        unzip_pet_image = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_pet_image",
        )

        unzip_t1_image_native = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_t1_image_native",
        )

        unzip_flow_fields = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_flow_fields",
        )

        unzip_dartel_template = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_dartel_template",
        )

        unzip_reference_mask = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_reference_mask",
        )

        unzip_mask_tissues = npe.MapNode(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_mask_tissues",
            iterfield=["in_file"],
        )

        # Coregister PET into T1 native space
        # ===================================
        coreg_pet_t1 = npe.Node(spm.Coregister(), name="coreg_pet_t1")

        # Spatially normalize PET into MNI
        # ================================
        dartel_mni_reg = npe.Node(spm.DARTELNorm2MNI(), name="dartel_mni_reg")
        dartel_mni_reg.inputs.modulate = False
        dartel_mni_reg.inputs.fwhm = 0

        # Reslice reference region mask into PET
        # ======================================
        reslice = npe.Node(spmutils.Reslice(), name="reslice")

        # Normalize PET values according to reference region
        # ==================================================
        norm_to_ref = npe.Node(
            nutil.Function(
                input_names=["pet_image", "region_mask"],
                output_names=["suvr_pet_path"],
                function=normalize_to_reference,
            ),
            name="norm_to_ref",
        )

        # Create binary mask from segmented tissues
        # =========================================
        binary_mask = npe.Node(
            nutil.Function(
                input_names=["tissues", "threshold"],
                output_names=["out_mask"],
                function=create_binary_mask,
            ),
            name="binary_mask",
        )
        binary_mask.inputs.threshold = self.parameters["mask_threshold"]

        # Mask PET image
        # ==============
        apply_mask = npe.Node(
            nutil.Function(
                input_names=["image", "binary_mask"],
                output_names=["masked_image_path"],
                function=apply_binary_mask,
            ),
            name="apply_mask",
        )

        # Smoothing
        # =========
        if self.parameters["smooth"] is not None and len(
                self.parameters["smooth"]) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name="smoothing_node",
                                         iterfield=["fwhm", "out_prefix"])
            smoothing_node.inputs.fwhm = [[x, x, x]
                                          for x in self.parameters["smooth"]]
            smoothing_node.inputs.out_prefix = [
                "fwhm-" + str(x) + "mm_" for x in self.parameters["smooth"]
            ]
            # fmt: off
            self.connect([
                (apply_mask, smoothing_node, [("masked_image_path", "in_files")
                                              ]),
                (smoothing_node, self.output_node,
                 [("smoothed_files", "pet_suvr_masked_smoothed")]),
            ])
            # fmt: on
        else:
            self.output_node.inputs.pet_suvr_masked_smoothed = [[]]

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(
            nutil.Function(
                input_names=["in_image", "in_atlas_list"],
                output_names=["atlas_statistics"],
                function=atlas_statistics,
            ),
            name="atlas_stats_node",
            iterfield=["in_image"],
        )
        atlas_stats_node.inputs.in_atlas_list = self.parameters["atlases"]

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, init_node, [("pet_image", "pet_nii")]),
            (init_node, unzip_pet_image, [("pet_nii", "in_file")]),
            (self.input_node, unzip_t1_image_native, [("t1_image_native",
                                                       "in_file")]),
            (self.input_node, unzip_flow_fields, [("flow_fields", "in_file")]),
            (self.input_node, unzip_dartel_template, [("dartel_template",
                                                       "in_file")]),
            (self.input_node, unzip_reference_mask, [("reference_mask",
                                                      "in_file")]),
            (self.input_node, unzip_mask_tissues, [("mask_tissues", "in_file")
                                                   ]),
            (unzip_pet_image, coreg_pet_t1, [("out_file", "source")]),
            (unzip_t1_image_native, coreg_pet_t1, [("out_file", "target")]),
            (unzip_flow_fields, dartel_mni_reg, [("out_file",
                                                  "flowfield_files")]),
            (unzip_dartel_template, dartel_mni_reg, [("out_file",
                                                      "template_file")]),
            (unzip_reference_mask, reslice, [("out_file", "in_file")]),
            (unzip_mask_tissues, binary_mask, [("out_file", "tissues")]),
            (coreg_pet_t1, dartel_mni_reg, [("coregistered_source",
                                             "apply_to_files")]),
            (dartel_mni_reg, reslice, [("normalized_files", "space_defining")
                                       ]),
            (dartel_mni_reg, norm_to_ref, [("normalized_files", "pet_image")]),
            (reslice, norm_to_ref, [("out_file", "region_mask")]),
            (norm_to_ref, apply_mask, [("suvr_pet_path", "image")]),
            (binary_mask, apply_mask, [("out_mask", "binary_mask")]),
            (norm_to_ref, atlas_stats_node, [("suvr_pet_path", "in_image")]),
            (coreg_pet_t1, self.output_node, [("coregistered_source",
                                               "pet_t1_native")]),
            (dartel_mni_reg, self.output_node, [("normalized_files", "pet_mni")
                                                ]),
            (norm_to_ref, self.output_node, [("suvr_pet_path", "pet_suvr")]),
            (binary_mask, self.output_node, [("out_mask", "binary_mask")]),
            (apply_mask, self.output_node, [("masked_image_path",
                                             "pet_suvr_masked")]),
            (atlas_stats_node, self.output_node, [("atlas_statistics",
                                                   "atlas_statistics")]),
        ])
        # fmt: on

        # PVC
        # ==========
        if self.parameters["apply_pvc"]:
            # Unzipping
            # =========
            unzip_pvc_mask_tissues = npe.MapNode(
                nutil.Function(
                    input_names=["in_file"],
                    output_names=["out_file"],
                    function=unzip_nii,
                ),
                name="unzip_pvc_mask_tissues",
                iterfield=["in_file"],
            )

            # Creating Mask to use in PVC
            # ===========================
            pvc_mask = npe.Node(
                nutil.Function(
                    input_names=["tissues"],
                    output_names=["out_mask"],
                    function=create_pvc_mask,
                ),
                name="pvc_mask",
            )
            # PET PVC
            # =======
            petpvc = npe.Node(PETPVC(), name="pvc")
            petpvc.inputs.pvc = "RBV"
            petpvc.inputs.out_file = "pvc.nii"

            # Spatially normalize PET into MNI
            # ================================
            dartel_mni_reg_pvc = npe.Node(spm.DARTELNorm2MNI(),
                                          name="dartel_mni_reg_pvc")
            dartel_mni_reg_pvc.inputs.modulate = False
            dartel_mni_reg_pvc.inputs.fwhm = 0

            # Reslice reference region mask into PET
            # ======================================
            reslice_pvc = npe.Node(spmutils.Reslice(), name="reslice_pvc")

            # Normalize PET values according to reference region
            # ==================================================
            norm_to_ref_pvc = npe.Node(
                nutil.Function(
                    input_names=["pet_image", "region_mask"],
                    output_names=["suvr_pet_path"],
                    function=normalize_to_reference,
                ),
                name="norm_to_ref_pvc",
            )

            # Mask PET image
            # ==============
            apply_mask_pvc = npe.Node(
                nutil.Function(
                    input_names=["image", "binary_mask"],
                    output_names=["masked_image_path"],
                    function=apply_binary_mask,
                ),
                name="apply_mask_pvc",
            )
            # Smoothing
            # =========
            if (self.parameters["smooth"] is not None
                    and len(self.parameters["smooth"]) > 0):
                smoothing_pvc = npe.MapNode(spm.Smooth(),
                                            name="smoothing_pvc",
                                            iterfield=["fwhm", "out_prefix"])
                smoothing_pvc.inputs.fwhm = [[x, x, x]
                                             for x in self.parameters["smooth"]
                                             ]
                smoothing_pvc.inputs.out_prefix = [
                    "fwhm-" + str(x) + "mm_" for x in self.parameters["smooth"]
                ]
                # fmt: off
                self.connect([
                    (apply_mask_pvc, smoothing_pvc, [("masked_image_path",
                                                      "in_files")]),
                    (smoothing_pvc, self.output_node,
                     [("smoothed_files", "pet_pvc_suvr_masked_smoothed")]),
                ])
                # fmt: on
            else:
                self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
            # Atlas Statistics
            # ================
            atlas_stats_pvc = npe.MapNode(
                nutil.Function(
                    input_names=["in_image", "in_atlas_list"],
                    output_names=["atlas_statistics"],
                    function=atlas_statistics,
                ),
                name="atlas_stats_pvc",
                iterfield=["in_image"],
            )
            atlas_stats_pvc.inputs.in_atlas_list = self.parameters["atlases"]

            # Connection
            # ==========
            # fmt: off
            self.connect([
                (self.input_node, unzip_pvc_mask_tissues, [("pvc_mask_tissues",
                                                            "in_file")]),
                (unzip_pvc_mask_tissues, pvc_mask, [("out_file", "tissues")]),
                (unzip_flow_fields, dartel_mni_reg_pvc, [("out_file",
                                                          "flowfield_files")]),
                (unzip_dartel_template, dartel_mni_reg_pvc,
                 [("out_file", "template_file")]),
                (unzip_reference_mask, reslice_pvc, [("out_file", "in_file")]),
                (coreg_pet_t1, petpvc, [("coregistered_source", "in_file"),
                                        (("coregistered_source", pet_pvc_name,
                                          "RBV"), "out_file")]),
                (pvc_mask, petpvc, [("out_mask", "mask_file")]),
                (self.input_node, petpvc,
                 [(("psf", get_from_list, 0), "fwhm_x"),
                  (("psf", get_from_list, 1), "fwhm_y"),
                  (("psf", get_from_list, 2), "fwhm_z")]),
                (petpvc, dartel_mni_reg_pvc, [("out_file", "apply_to_files")]),
                (dartel_mni_reg_pvc, reslice_pvc, [("normalized_files",
                                                    "space_defining")]),
                (dartel_mni_reg_pvc, norm_to_ref_pvc, [("normalized_files",
                                                        "pet_image")]),
                (reslice_pvc, norm_to_ref_pvc, [("out_file", "region_mask")]),
                (norm_to_ref_pvc, apply_mask_pvc, [("suvr_pet_path", "image")
                                                   ]),
                (binary_mask, apply_mask_pvc, [("out_mask", "binary_mask")]),
                (norm_to_ref_pvc, atlas_stats_pvc, [("suvr_pet_path",
                                                     "in_image")]),
                (petpvc, self.output_node, [("out_file", "pet_pvc")]),
                (dartel_mni_reg_pvc, self.output_node, [("normalized_files",
                                                         "pet_pvc_mni")]),
                (norm_to_ref_pvc, self.output_node, [("suvr_pet_path",
                                                      "pet_pvc_suvr")]),
                (apply_mask_pvc, self.output_node, [("masked_image_path",
                                                     "pet_pvc_suvr_masked")]),
                (atlas_stats_pvc, self.output_node,
                 [("atlas_statistics", "pvc_atlas_statistics")]),
            ])
            # fmt: on
        else:
            self.output_node.inputs.pet_pvc = [[]]
            self.output_node.inputs.pet_pvc_mni = [[]]
            self.output_node.inputs.pet_pvc_suvr = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked = [[]]
            self.output_node.inputs.pvc_atlas_statistics = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
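
The PVC connections above rely on Nipype's three-element source tuple, e.g. (("psf", get_from_list, 0), "fwhm_x"): the named function is applied to the source output (with the extra arguments) before the value reaches the destination input. Below is a minimal standalone sketch of that mechanism with toy nodes and functions; it is not Clinica code.

import nipype.interfaces.utility as niu
import nipype.pipeline.engine as npe


def make_list():
    """Toy source node: emits a 3-element list, like the 'psf' parameter above."""
    return [8.0, 8.0, 8.0]


def pick(in_list, index):
    """Helper applied on the connection itself, in the role of get_from_list."""
    return in_list[index]


def show(value):
    """Toy destination node: just report what it received."""
    return "received: %s" % value


src = npe.Node(niu.Function(output_names=["out_list"], function=make_list),
               name="src")
dst = npe.Node(niu.Function(input_names=["value"], output_names=["report"],
                            function=show),
               name="dst")

wf = npe.Workflow(name="connect_with_function")
# The 3-tuple ("out_list", pick, 0) means: take src.out_list, call
# pick(out_list, 0), and feed the result into dst.value.
wf.connect([(src, dst, [(("out_list", pick, 0), "value")])])
# wf.run()  # dst would receive 8.0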
Example #5
    def build_core_nodes(self):
        """Build and connect an output node to the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.interfaces.spm.utils as spmutils
        from nipype.interfaces.petpvc import PETPVC
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import unzip_nii
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone
        import clinica.pipelines.pet_volume.pet_volume_utils as utils

        if spm_standalone_is_available():
            use_spm_standalone()

        # Initialize pipeline
        # ===================
        init_node = npe.Node(interface=nutil.Function(
            input_names=['pet_nii'],
            output_names=['pet_nii'],
            function=utils.init_input_node),
                             name='init_pipeline')

        # Unzipping
        # =========
        unzip_pet_image = npe.Node(nutil.Function(input_names=['in_file'],
                                                  output_names=['out_file'],
                                                  function=unzip_nii),
                                   name='unzip_pet_image')

        unzip_t1_image_native = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_t1_image_native')

        unzip_flow_fields = npe.Node(nutil.Function(input_names=['in_file'],
                                                    output_names=['out_file'],
                                                    function=unzip_nii),
                                     name='unzip_flow_fields')

        unzip_dartel_template = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_dartel_template')

        unzip_reference_mask = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                        name='unzip_reference_mask')

        unzip_mask_tissues = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_mask_tissues',
                                         iterfield=['in_file'])

        # Coregister PET into T1 native space
        # ===================================
        coreg_pet_t1 = npe.Node(spm.Coregister(), name='coreg_pet_t1')

        # Spatially normalize PET into MNI
        # ================================
        dartel_mni_reg = npe.Node(spm.DARTELNorm2MNI(), name='dartel_mni_reg')
        dartel_mni_reg.inputs.modulate = False
        dartel_mni_reg.inputs.fwhm = 0

        # Reslice reference region mask into PET
        # ======================================
        reslice = npe.Node(spmutils.Reslice(), name='reslice')

        # Normalize PET values according to reference region
        # ==================================================
        norm_to_ref = npe.Node(nutil.Function(
            input_names=['pet_image', 'region_mask'],
            output_names=['suvr_pet_path'],
            function=utils.normalize_to_reference),
                               name='norm_to_ref')

        # Create binary mask from segmented tissues
        # =========================================
        binary_mask = npe.Node(nutil.Function(
            input_names=['tissues', 'threshold'],
            output_names=['out_mask'],
            function=utils.create_binary_mask),
                               name='binary_mask')
        binary_mask.inputs.threshold = self.parameters['mask_threshold']

        # Mask PET image
        # ==============
        apply_mask = npe.Node(nutil.Function(
            input_names=['image', 'binary_mask'],
            output_names=['masked_image_path'],
            function=utils.apply_binary_mask),
                              name='apply_mask')

        # Smoothing
        # =========
        if self.parameters['smooth'] is not None and len(
                self.parameters['smooth']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['fwhm', 'out_prefix'])
            smoothing_node.inputs.fwhm = [[x, x, x]
                                          for x in self.parameters['smooth']]
            smoothing_node.inputs.out_prefix = [
                'fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']
            ]
            self.connect([(apply_mask, smoothing_node, [('masked_image_path',
                                                         'in_files')]),
                          (smoothing_node, self.output_node,
                           [('smoothed_files', 'pet_suvr_masked_smoothed')])])
        else:
            self.output_node.inputs.pet_suvr_masked_smoothed = [[]]

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(nutil.Function(
            input_names=['in_image', 'in_atlas_list'],
            output_names=['atlas_statistics'],
            function=utils.atlas_statistics),
                                       name='atlas_stats_node',
                                       iterfield=['in_image'])
        atlas_stats_node.inputs.in_atlas_list = self.parameters['atlases']

        # Connection
        # ==========
        self.connect([
            (self.input_node, init_node, [('pet_image', 'pet_nii')]),
            (init_node, unzip_pet_image, [('pet_nii', 'in_file')]),
            (self.input_node, unzip_t1_image_native, [('t1_image_native',
                                                       'in_file')]),
            (self.input_node, unzip_flow_fields, [('flow_fields', 'in_file')]),
            (self.input_node, unzip_dartel_template, [('dartel_template',
                                                       'in_file')]),
            (self.input_node, unzip_reference_mask, [('reference_mask',
                                                      'in_file')]),
            (self.input_node, unzip_mask_tissues, [('mask_tissues', 'in_file')
                                                   ]),
            (unzip_pet_image, coreg_pet_t1, [('out_file', 'source')]),
            (unzip_t1_image_native, coreg_pet_t1, [('out_file', 'target')]),
            (unzip_flow_fields, dartel_mni_reg, [('out_file',
                                                  'flowfield_files')]),
            (unzip_dartel_template, dartel_mni_reg, [('out_file',
                                                      'template_file')]),
            (unzip_reference_mask, reslice, [('out_file', 'in_file')]),
            (unzip_mask_tissues, binary_mask, [('out_file', 'tissues')]),
            (coreg_pet_t1, dartel_mni_reg, [('coregistered_source',
                                             'apply_to_files')]),
            (dartel_mni_reg, reslice, [('normalized_files', 'space_defining')
                                       ]),
            (dartel_mni_reg, norm_to_ref, [('normalized_files', 'pet_image')]),
            (reslice, norm_to_ref, [('out_file', 'region_mask')]),
            (norm_to_ref, apply_mask, [('suvr_pet_path', 'image')]),
            (binary_mask, apply_mask, [('out_mask', 'binary_mask')]),
            (norm_to_ref, atlas_stats_node, [('suvr_pet_path', 'in_image')]),
            (coreg_pet_t1, self.output_node, [('coregistered_source',
                                               'pet_t1_native')]),
            (dartel_mni_reg, self.output_node, [('normalized_files', 'pet_mni')
                                                ]),
            (norm_to_ref, self.output_node, [('suvr_pet_path', 'pet_suvr')]),
            (binary_mask, self.output_node, [('out_mask', 'binary_mask')]),
            (apply_mask, self.output_node, [('masked_image_path',
                                             'pet_suvr_masked')]),
            (atlas_stats_node, self.output_node, [('atlas_statistics',
                                                   'atlas_statistics')])
        ])

        # PVC
        # ==========
        if self.parameters['apply_pvc']:
            # Unzipping
            # =========
            unzip_pvc_mask_tissues = npe.MapNode(nutil.Function(
                input_names=['in_file'],
                output_names=['out_file'],
                function=unzip_nii),
                                                 name='unzip_pvc_mask_tissues',
                                                 iterfield=['in_file'])

            # Creating Mask to use in PVC
            # ===========================
            pvc_mask = npe.Node(nutil.Function(input_names=['tissues'],
                                               output_names=['out_mask'],
                                               function=utils.create_pvc_mask),
                                name='pvc_mask')
            # PET PVC
            # =======
            petpvc = npe.Node(PETPVC(), name='pvc')
            petpvc.inputs.pvc = 'RBV'
            petpvc.inputs.out_file = 'pvc.nii'

            # Spatially normalize PET into MNI
            # ================================
            dartel_mni_reg_pvc = npe.Node(spm.DARTELNorm2MNI(),
                                          name='dartel_mni_reg_pvc')
            dartel_mni_reg_pvc.inputs.modulate = False
            dartel_mni_reg_pvc.inputs.fwhm = 0

            # Reslice reference region mask into PET
            # ======================================
            reslice_pvc = npe.Node(spmutils.Reslice(), name='reslice_pvc')

            # Normalize PET values according to reference region
            # ==================================================
            norm_to_ref_pvc = npe.Node(nutil.Function(
                input_names=['pet_image', 'region_mask'],
                output_names=['suvr_pet_path'],
                function=utils.normalize_to_reference),
                                       name='norm_to_ref_pvc')

            # Mask PET image
            # ==============
            apply_mask_pvc = npe.Node(nutil.Function(
                input_names=['image', 'binary_mask'],
                output_names=['masked_image_path'],
                function=utils.apply_binary_mask),
                                      name='apply_mask_pvc')
            # Smoothing
            # =========
            if self.parameters['smooth'] is not None and len(
                    self.parameters['smooth']) > 0:
                smoothing_pvc = npe.MapNode(spm.Smooth(),
                                            name='smoothing_pvc',
                                            iterfield=['fwhm', 'out_prefix'])
                smoothing_pvc.inputs.fwhm = [[x, x, x]
                                             for x in self.parameters['smooth']
                                             ]
                smoothing_pvc.inputs.out_prefix = [
                    'fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']
                ]
                self.connect([(apply_mask_pvc, smoothing_pvc,
                               [('masked_image_path', 'in_files')]),
                              (smoothing_pvc, self.output_node,
                               [('smoothed_files',
                                 'pet_pvc_suvr_masked_smoothed')])])
            else:
                self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
            # Atlas Statistics
            # ================
            atlas_stats_pvc = npe.MapNode(nutil.Function(
                input_names=['in_image', 'in_atlas_list'],
                output_names=['atlas_statistics'],
                function=utils.atlas_statistics),
                                          name='atlas_stats_pvc',
                                          iterfield=['in_image'])
            atlas_stats_pvc.inputs.in_atlas_list = self.parameters['atlases']

            # Connection
            # ==========
            self.connect([
                (self.input_node, unzip_pvc_mask_tissues, [('pvc_mask_tissues',
                                                            'in_file')]),
                (unzip_pvc_mask_tissues, pvc_mask, [('out_file', 'tissues')]),
                (unzip_flow_fields, dartel_mni_reg_pvc, [('out_file',
                                                          'flowfield_files')]),
                (unzip_dartel_template, dartel_mni_reg_pvc,
                 [('out_file', 'template_file')]),
                (unzip_reference_mask, reslice_pvc, [('out_file', 'in_file')]),
                (coreg_pet_t1, petpvc, [('coregistered_source', 'in_file'),
                                        (('coregistered_source',
                                          utils.pet_pvc_name, 'RBV'),
                                         'out_file')]),
                (pvc_mask, petpvc, [('out_mask', 'mask_file')]),
                (self.input_node, petpvc,
                 [(('psf', utils.get_from_list, 0), 'fwhm_x'),
                  (('psf', utils.get_from_list, 1), 'fwhm_y'),
                  (('psf', utils.get_from_list, 2), 'fwhm_z')]),
                (petpvc, dartel_mni_reg_pvc, [('out_file', 'apply_to_files')]),
                (dartel_mni_reg_pvc, reslice_pvc, [('normalized_files',
                                                    'space_defining')]),
                (dartel_mni_reg_pvc, norm_to_ref_pvc, [('normalized_files',
                                                        'pet_image')]),
                (reslice_pvc, norm_to_ref_pvc, [('out_file', 'region_mask')]),
                (norm_to_ref_pvc, apply_mask_pvc, [('suvr_pet_path', 'image')
                                                   ]),
                (binary_mask, apply_mask_pvc, [('out_mask', 'binary_mask')]),
                (norm_to_ref_pvc, atlas_stats_pvc, [('suvr_pet_path',
                                                     'in_image')]),
                (petpvc, self.output_node, [('out_file', 'pet_pvc')]),
                (dartel_mni_reg_pvc, self.output_node, [('normalized_files',
                                                         'pet_pvc_mni')]),
                (norm_to_ref_pvc, self.output_node, [('suvr_pet_path',
                                                      'pet_pvc_suvr')]),
                (apply_mask_pvc, self.output_node, [('masked_image_path',
                                                     'pet_pvc_suvr_masked')]),
                (atlas_stats_pvc, self.output_node, [('atlas_statistics',
                                                      'pvc_atlas_statistics')])
            ])
        else:
            self.output_node.inputs.pet_pvc = [[]]
            self.output_node.inputs.pet_pvc_mni = [[]]
            self.output_node.inputs.pet_pvc_suvr = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked = [[]]
            self.output_node.inputs.pvc_atlas_statistics = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
Example #6
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.

        The function get_wf (in pet_surface_utils.py) constructs a pipeline for one subject and runs it.
        We use iterables to provide the node with all the files and information it needs.
        """
        # TODO(@arnaud.marcoux): Convert it to a Node with iterables + MapNodes.
        #   I'm experimenting something to avoid the "MapNode of MapNode" case
        #   with iterables. I'll try to apply it on the tractography pipeline.
        #   Check it out to get inspiration from it when it's ready.

        import os

        import nipype.interfaces.utility as niu
        import nipype.pipeline.engine as npe

        import clinica.pipelines.pet_surface.pet_surface_utils as utils
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        full_pipe = npe.MapNode(
            niu.Function(
                input_names=[
                    "subject_id",
                    "session_id",
                    "caps_dir",
                    "pvc_psf_tsv",
                    "pet",
                    "orig_nu",
                    "white_surface_left",
                    "white_surface_right",
                    "working_directory_subjects",
                    "acq_label",
                    "csv_segmentation",
                    "suvr_reference_region",
                    "matscript_folder_inverse_deformation",
                    "desikan_left",
                    "desikan_right",
                    "destrieux_left",
                    "destrieux_right",
                    "spm_standalone_is_available",
                    "is_longitudinal",
                ],
                output_names=[],
                function=utils.get_wf,
            ),
            name="full_pipeline_mapnode",
            iterfield=[
                "subject_id",
                "session_id",
                "pet",
                "orig_nu",
                "white_surface_left",
                "white_surface_right",
                "desikan_left",
                "desikan_right",
                "destrieux_left",
                "destrieux_right",
            ],
        )

        full_pipe.inputs.subject_id = self.subjects
        full_pipe.inputs.session_id = self.sessions
        full_pipe.inputs.caps_dir = self.caps_directory
        full_pipe.inputs.pvc_psf_tsv = self.parameters["pvc_psf_tsv"]
        full_pipe.inputs.working_directory_subjects = self.base_dir
        full_pipe.inputs.acq_label = self.parameters["acq_label"]
        full_pipe.inputs.suvr_reference_region = self.parameters[
            "suvr_reference_region"
        ]
        full_pipe.inputs.csv_segmentation = os.path.abspath(
            os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                "..",
                "..",
                "resources",
                "label_conversion_gtmsegmentation.csv",
            )
        )
        full_pipe.inputs.matscript_folder_inverse_deformation = os.path.abspath(
            os.path.dirname(os.path.realpath(__file__))
        )
        full_pipe.inputs.is_longitudinal = self.parameters["longitudinal"]

        # This section of code determines whether to use SPM standalone or not
        if spm_standalone_is_available():
            use_spm_standalone()
            full_pipe.inputs.spm_standalone_is_available = True
        else:
            full_pipe.inputs.spm_standalone_is_available = False

        # Connection
        # ==========
        # fmt: off
        self.connect(
            [
                (self.input_node, full_pipe, [("pet", "pet"),
                                              ("white_surface_left", "white_surface_left"),
                                              ("white_surface_right", "white_surface_right"),
                                              ("orig_nu", "orig_nu"),
                                              ("destrieux_left", "destrieux_left"),
                                              ("destrieux_right", "destrieux_right"),
                                              ("desikan_left", "desikan_left"),
                                              ("desikan_right", "desikan_right")])
            ]
        )
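
A note on the MapNode above: every field listed in iterfield is iterated element-wise (the i-th subject_id is paired with the i-th session_id, pet, and so on), while the remaining inputs such as caps_dir are broadcast unchanged to every iteration. A toy sketch of that pairing follows; it is not Clinica code.

import nipype.interfaces.utility as niu
import nipype.pipeline.engine as npe


def describe(subject_id, session_id, caps_dir):
    """Toy stand-in for get_wf: report which inputs each iteration received."""
    return "%s %s (caps: %s)" % (subject_id, session_id, caps_dir)


toy_pipe = npe.MapNode(
    niu.Function(input_names=["subject_id", "session_id", "caps_dir"],
                 output_names=["summary"],
                 function=describe),
    name="toy_pipeline_mapnode",
    iterfield=["subject_id", "session_id"],
)
toy_pipe.inputs.subject_id = ["sub-01", "sub-02"]
toy_pipe.inputs.session_id = ["ses-M00", "ses-M00"]
toy_pipe.inputs.caps_dir = "/path/to/caps"  # broadcast to both iterations
# toy_pipe.run()  # iteration 0 gets (sub-01, ses-M00), iteration 1 gets (sub-02, ses-M00)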
Example #7
# coding: utf8

import os

from nipype.interfaces.base import File, TraitedSpec, traits
from nipype.interfaces.spm.base import SPMCommand, SPMCommandInputSpec, scans_for_fnames
from nipype.utils.filemanip import split_filename

from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

if spm_standalone_is_available():
    use_spm_standalone()


class DARTELExistingTemplateInputSpec(SPMCommandInputSpec):
    image_files = traits.List(
        traits.List(File(exists=True)),
        desc="A list of files to be segmented",
        field="warp1.images",
        copyfile=False,
        mandatory=True,
    )
    regularization_form = traits.Enum(
        "Linear",
        "Membrane",
        "Bending",
        field="warp1.settings.rform",
        desc="Form of regularization energy term",
    )
    iteration_parameters = traits.List(
        traits.Tuple(
Example #8
    def build_core_nodes(self):
        """The function get_wf constructs a pipeline for one subject (in pet_surface_utils.py) and runs it.
        We use iterables to give to the node all the files and information needed.
        """
        # TODO(@arnaud.marcoux): Convert it to a Node with iterables + MapNodes.
        #   I'm experimenting something to avoid the "MapNode of MapNode" case
        #   with iterables. I'll try to apply it on the tractography pipeline.
        #   Check it out to get inspiration from it when it's ready.

        import os
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as niu
        import clinica.pipelines.pet_surface.pet_surface_utils as utils
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        full_pipe = npe.MapNode(niu.Function(input_names=[
            'subject_id', 'session_id', 'caps_dir', 'pvc_psf_tsv', 'pet',
            'orig_nu', 'white_surface_left', 'white_surface_right',
            'working_directory_subjects', 'acq_label', 'csv_segmentation',
            'suvr_reference_region', 'matscript_folder_inverse_deformation',
            'desikan_left', 'desikan_right', 'destrieux_left',
            'destrieux_right', 'spm_standalone_is_available', 'is_longitudinal'
        ],
                                             output_names=[],
                                             function=utils.get_wf),
                                name='full_pipeline_mapnode',
                                iterfield=[
                                    'subject_id', 'session_id', 'pet',
                                    'orig_nu', 'white_surface_left',
                                    'white_surface_right', 'desikan_left',
                                    'desikan_right', 'destrieux_left',
                                    'destrieux_right'
                                ])

        full_pipe.inputs.subject_id = self.subjects
        full_pipe.inputs.session_id = self.sessions
        full_pipe.inputs.caps_dir = self.caps_directory
        full_pipe.inputs.pvc_psf_tsv = self.parameters['pvc_psf_tsv']
        full_pipe.inputs.working_directory_subjects = self.base_dir
        full_pipe.inputs.acq_label = self.parameters['acq_label']
        full_pipe.inputs.suvr_reference_region = self.parameters[
            'suvr_reference_region']
        full_pipe.inputs.csv_segmentation = os.path.abspath(
            os.path.join(os.path.dirname(os.path.realpath(__file__)), '..',
                         '..', 'resources',
                         'label_conversion_gtmsegmentation.csv'))
        full_pipe.inputs.matscript_folder_inverse_deformation = os.path.abspath(
            os.path.dirname(os.path.realpath(__file__)))
        full_pipe.inputs.is_longitudinal = self.parameters['longitudinal']

        # This section of code determines whether to use SPM standalone or not
        if spm_standalone_is_available():
            use_spm_standalone()
            full_pipe.inputs.spm_standalone_is_available = True
        else:
            full_pipe.inputs.spm_standalone_is_available = False

        # Connection
        # ==========
        self.connect([(self.input_node, full_pipe,
                       [('pet', 'pet'),
                        ('white_surface_left', 'white_surface_left'),
                        ('white_surface_right', 'white_surface_right'),
                        ('orig_nu', 'orig_nu'),
                        ('destrieux_left', 'destrieux_left'),
                        ('destrieux_right', 'destrieux_right'),
                        ('desikan_left', 'desikan_left'),
                        ('desikan_right', 'desikan_right')])])
Example #9
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.filemanip import unzip_nii
        from ..t1_volume_dartel2mni import t1_volume_dartel2mni_utils as dartel2mni_utils
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        if spm_standalone_is_available():
            use_spm_standalone()

        # Unzipping
        # =========
        unzip_tissues_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                        output_names=['out_file'],
                                                        function=unzip_nii),
                                         name='unzip_tissues_node',
                                         iterfield=['in_file'])
        unzip_flowfields_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                           output_names=['out_file'],
                                                           function=unzip_nii),
                                            name='unzip_flowfields_node',
                                            iterfield=['in_file'])
        unzip_template_node = npe.Node(nutil.Function(input_names=['in_file'],
                                                      output_names=['out_file'],
                                                      function=unzip_nii),
                                       name='unzip_template_node')

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(spm.DARTELNorm2MNI(),
                                      name='dartel2MNI',
                                      iterfield=['apply_to_files', 'flowfield_files'])
        if self.parameters['voxel_size'] is not None:
            dartel2mni_node.inputs.voxel_size = tuple(self.parameters['voxel_size'])
        dartel2mni_node.inputs.modulate = self.parameters['modulate']
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters['smooth'] is not None and len(self.parameters['smooth']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['in_files'])

            smoothing_node.iterables = [('fwhm', [[x, x, x] for x in self.parameters['smooth']]),
                                        ('out_prefix', ['fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']])]
            smoothing_node.synchronize = True

            join_smoothing_node = npe.JoinNode(interface=nutil.Function(input_names=['smoothed_normalized_files'],
                                                                        output_names=['smoothed_normalized_files'],
                                                                        function=dartel2mni_utils.join_smoothed_files),
                                               joinsource='smoothing_node',
                                               joinfield='smoothed_normalized_files',
                                               name='join_smoothing_node')
            self.connect([
                (dartel2mni_node, smoothing_node, [('normalized_files', 'in_files')]),
                (smoothing_node, join_smoothing_node, [('smoothed_files', 'smoothed_normalized_files')]),
                (join_smoothing_node, self.output_node, [('smoothed_normalized_files', 'smoothed_normalized_files')])
            ])
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_tissues_node, [('native_segmentations', 'in_file')]),
            (self.input_node, unzip_flowfields_node, [('flowfield_files', 'in_file')]),
            (self.input_node, unzip_template_node, [('template_file', 'in_file')]),
            (unzip_tissues_node, dartel2mni_node, [('out_file', 'apply_to_files')]),
            (unzip_flowfields_node, dartel2mni_node, [
                (('out_file', dartel2mni_utils.prepare_flowfields, self.parameters['tissues']), 'flowfield_files')
            ]),
            (unzip_template_node, dartel2mni_node, [('out_file', 'template_file')]),
            (dartel2mni_node, self.output_node, [('normalized_files', 'normalized_files')])
        ])
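
In the smoothing branch above, iterables expands smoothing_node into one copy per FWHM value (synchronize=True pairs 'fwhm' and 'out_prefix' element-wise instead of taking their Cartesian product), and the JoinNode gathers the expanded outputs back into a single list. Below is a minimal standalone sketch of iterables plus JoinNode with toy functions; it is not Clinica code.

import nipype.interfaces.utility as niu
import nipype.pipeline.engine as npe


def double(x):
    """Toy processing step run once per iterable value."""
    return 2 * x


def collect(values):
    """Join target: receives the list of results from all iterable branches."""
    return values


doubler = npe.Node(niu.Function(input_names=["x"],
                                output_names=["doubled"],
                                function=double),
                   name="doubler")
# iterables expands 'doubler' into one copy per listed value of 'x'.
doubler.iterables = [("x", [2, 4, 8])]

joiner = npe.JoinNode(niu.Function(input_names=["values"],
                                   output_names=["values"],
                                   function=collect),
                      joinsource="doubler",
                      joinfield="values",
                      name="joiner")

wf = npe.Workflow(name="iterables_join_demo")
wf.connect([(doubler, joiner, [("doubled", "values")])])
# wf.run()  # 'joiner' would gather [4, 8, 16]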
Example #10
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.io as nio
        import nipype.interfaces.spm as spm
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import unzip_nii, zip_nii
        from clinica.utils.nipype import container_from_filename, fix_join
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        from .t1_volume_tissue_segmentation_utils import (
            ApplySegmentationDeformation,
            get_tissue_tuples,
            init_input_node,
            print_end_pipeline,
            zip_list_files,
        )

        if spm_standalone_is_available():
            use_spm_standalone()

        # Get <subject_id> (e.g. sub-CLNC01_ses-M00) from input_node
        # and print begin message
        # =======================
        init_node = npe.Node(
            interface=nutil.Function(
                input_names=self.get_input_fields(),
                output_names=["subject_id"] + self.get_input_fields(),
                function=init_input_node,
            ),
            name="0-InitNode",
        )

        # Unzipping
        # =========
        unzip_node = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="1-UnzipT1w",
        )

        # Unified Segmentation
        # ====================
        new_segment = npe.Node(spm.NewSegment(), name="2-SpmSegmentation")
        new_segment.inputs.write_deformation_fields = [True, True]
        new_segment.inputs.tissues = get_tissue_tuples(
            self.parameters["tissue_probability_maps"],
            self.parameters["tissue_classes"],
            self.parameters["dartel_tissues"],
            self.parameters["save_warped_unmodulated"],
            self.parameters["save_warped_modulated"],
        )

        # Apply segmentation deformation to T1 (into MNI space)
        # =====================================================
        t1_to_mni = npe.Node(ApplySegmentationDeformation(), name="3-T1wToMni")

        # Print end message
        # =================
        print_end_message = npe.Node(
            interface=nutil.Function(
                input_names=["subject_id", "final_file"],
                function=print_end_pipeline,
            ),
            name="WriteEndMessage",
        )

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, init_node, [("t1w", "t1w")]),
            (init_node, unzip_node, [("t1w", "in_file")]),
            (unzip_node, new_segment, [("out_file", "channel_files")]),
            (init_node, print_end_message, [("subject_id", "subject_id")]),
            (unzip_node, t1_to_mni, [("out_file", "in_files")]),
            (new_segment, t1_to_mni, [("forward_deformation_field",
                                       "deformation_field")]),
            (new_segment, self.output_node,
             [("bias_corrected_images", "bias_corrected_images"),
              ("bias_field_images", "bias_field_images"),
              ("dartel_input_images", "dartel_input_images"),
              ("forward_deformation_field", "forward_deformation_field"),
              ("inverse_deformation_field", "inverse_deformation_field"),
              ("modulated_class_images", "modulated_class_images"),
              ("native_class_images", "native_class_images"),
              ("normalized_class_images", "normalized_class_images"),
              ("transformation_mat", "transformation_mat")]),
            (t1_to_mni, self.output_node, [("out_files", "t1_mni")]),
            (self.output_node, print_end_message, [("t1_mni", "final_file")]),
        ])
        # fmt: on

        # Find container path from t1w filename
        # =====================================
        container_path = npe.Node(
            nutil.Function(
                input_names=["bids_or_caps_filename"],
                output_names=["container"],
                function=container_from_filename,
            ),
            name="ContainerPath",
        )

        # Writing CAPS
        # ============
        write_node = npe.Node(name="WriteCAPS", interface=nio.DataSink())
        write_node.inputs.base_directory = self.caps_directory
        write_node.inputs.parameterization = False
        write_node.inputs.regexp_substitutions = [
            (r"(.*)c1(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-graymatter\3"),
            (r"(.*)c2(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-whitematter\3"),
            (r"(.*)c3(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-csf\3"),
            (r"(.*)c4(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-bone\3"),
            (r"(.*)c5(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-softtissue\3"),
            (r"(.*)c6(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-background\3"),
            (r"(.*)(/native_space/sub-.*)(\.nii(\.gz)?)$",
             r"\1\2_probability\3"),
            (
                r"(.*)(/([a-z]+)_deformation_field/)i?y_(sub-.*)(\.nii(\.gz)?)$",
                r"\1/normalized_space/\4_target-Ixi549Space_transformation-\3_deformation\5",
            ),
            (
                r"(.*)(/t1_mni/)w(sub-.*)_T1w(\.nii(\.gz)?)$",
                r"\1/normalized_space/\3_space-Ixi549Space_T1w\4",
            ),
            (
                r"(.*)(/modulated_normalized/)mw(sub-.*)(\.nii(\.gz)?)$",
                r"\1/normalized_space/\3_space-Ixi549Space_modulated-on_probability\4",
            ),
            (
                r"(.*)(/normalized/)w(sub-.*)(\.nii(\.gz)?)$",
                r"\1/normalized_space/\3_space-Ixi549Space_modulated-off_probability\4",
            ),
            (r"(.*/dartel_input/)r(sub-.*)(\.nii(\.gz)?)$",
             r"\1\2_dartelinput\3"),
            # Will remove trait_added empty folder
            (r"trait_added", r""),
        ]

        # fmt: off
        self.connect([
            (self.input_node, container_path, [("t1w", "bids_or_caps_filename")
                                               ]),
            (container_path, write_node, [(("container", fix_join, "t1", "spm",
                                            "segmentation"), "container")]),
            (self.output_node, write_node,
             [(("native_class_images", zip_list_files, True), "native_space"),
              (("dartel_input_images", zip_list_files, True), "dartel_input")
              ]),
            (self.output_node, write_node, [(("inverse_deformation_field",
                                              zip_nii, True),
                                             "inverse_deformation_field")]),
            (self.output_node, write_node, [(("forward_deformation_field",
                                              zip_nii, True),
                                             "forward_deformation_field")]),
            (self.output_node, write_node, [(("t1_mni", zip_nii, True),
                                             "t1_mni")]),
        ])
        if self.parameters["save_warped_unmodulated"]:
            self.connect([
                (self.output_node, write_node, [(("normalized_class_images",
                                                  zip_list_files, True),
                                                 "normalized")]),
            ])
        if self.parameters["save_warped_modulated"]:
            self.connect([
                (self.output_node, write_node, [(("modulated_class_images",
                                                  zip_list_files, True),
                                                 "modulated_normalized")]),
            ])
Example #11
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import unzip_nii
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        from ..t1_volume_dartel2mni import (
            t1_volume_dartel2mni_utils as dartel2mni_utils,
        )

        if spm_standalone_is_available():
            use_spm_standalone()

        # Unzipping
        # =========
        unzip_tissues_node = npe.MapNode(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_tissues_node",
            iterfield=["in_file"],
        )
        unzip_flowfields_node = npe.MapNode(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_flowfields_node",
            iterfield=["in_file"],
        )
        unzip_template_node = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_template_node",
        )

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(
            spm.DARTELNorm2MNI(),
            name="dartel2MNI",
            iterfield=["apply_to_files", "flowfield_files"],
        )
        if self.parameters["voxel_size"] is not None:
            dartel2mni_node.inputs.voxel_size = tuple(
                self.parameters["voxel_size"])
        dartel2mni_node.inputs.modulate = self.parameters["modulate"]
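        # fwhm=0: no smoothing at normalization time; smoothing, when requested,
        # is handled by the dedicated node below.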
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters["smooth"] is not None and len(
                self.parameters["smooth"]) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name="smoothing_node",
                                         iterfield=["in_files"])

            smoothing_node.iterables = [
                ("fwhm", [[x, x, x] for x in self.parameters["smooth"]]),
                (
                    "out_prefix",
                    [
                        "fwhm-" + str(x) + "mm_"
                        for x in self.parameters["smooth"]
                    ],
                ),
            ]
            smoothing_node.synchronize = True
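            # With synchronize=True the two iterables are zipped rather than crossed:
            # e.g. smooth=[4, 8] yields (fwhm=[4, 4, 4], out_prefix="fwhm-4mm_") and
            # (fwhm=[8, 8, 8], out_prefix="fwhm-8mm_"), i.e. one smoothing run per FWHM.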

            join_smoothing_node = npe.JoinNode(
                interface=nutil.Function(
                    input_names=["smoothed_normalized_files"],
                    output_names=["smoothed_normalized_files"],
                    function=dartel2mni_utils.join_smoothed_files,
                ),
                joinsource="smoothing_node",
                joinfield="smoothed_normalized_files",
                name="join_smoothing_node",
            )
            # fmt: off
            self.connect([
                (dartel2mni_node, smoothing_node, [("normalized_files",
                                                    "in_files")]),
                (smoothing_node, join_smoothing_node,
                 [("smoothed_files", "smoothed_normalized_files")]),
                (join_smoothing_node, self.output_node,
                 [("smoothed_normalized_files", "smoothed_normalized_files")]),
            ])
            # fmt: on
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, unzip_tissues_node, [("native_segmentations",
                                                    "in_file")]),
            (self.input_node, unzip_flowfields_node, [("flowfield_files",
                                                       "in_file")]),
            (self.input_node, unzip_template_node, [("template_file",
                                                     "in_file")]),
            (unzip_tissues_node, dartel2mni_node, [("out_file",
                                                    "apply_to_files")]),
            (unzip_flowfields_node, dartel2mni_node,
             [(("out_file", dartel2mni_utils.prepare_flowfields,
                self.parameters["tissues"]), "flowfield_files")]),
            (unzip_template_node, dartel2mni_node, [("out_file",
                                                     "template_file")]),
            (dartel2mni_node, self.output_node, [("normalized_files",
                                                  "normalized_files")]),
        ])
Example #12
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import nipype.interfaces.io as nio
        import nipype.interfaces.spm as spm
        from ..t1_volume_tissue_segmentation import t1_volume_tissue_segmentation_utils as seg_utils
        from clinica.utils.filemanip import unzip_nii, zip_nii
        from clinica.utils.nipype import fix_join
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        if spm_standalone_is_available():
            use_spm_standalone()

        # Get <subject_id> (e.g. sub-CLNC01_ses-M00) from input_node
        # and print begin message
        # =======================
        init_node = npe.Node(interface=nutil.Function(
            input_names=self.get_input_fields(),
            output_names=['subject_id'] + self.get_input_fields(),
            function=seg_utils.init_input_node),
                             name='0-InitNode')

        # Unzipping
        # =========
        unzip_node = npe.Node(nutil.Function(input_names=['in_file'],
                                             output_names=['out_file'],
                                             function=unzip_nii),
                              name='1-UnzipT1w')

        # Unified Segmentation
        # ====================
        new_segment = npe.Node(spm.NewSegment(), name='2-SpmSegmentation')
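        # [True, True] requests both deformation fields from SPM NewSegment,
        # ordered [inverse (iy_*), forward (y_*)].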
        new_segment.inputs.write_deformation_fields = [True, True]
        new_segment.inputs.tissues = seg_utils.get_tissue_tuples(
            self.parameters['tissue_probability_maps'],
            self.parameters['tissue_classes'],
            self.parameters['dartel_tissues'],
            self.parameters['save_warped_unmodulated'],
            self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # =====================================================
        t1_to_mni = npe.Node(seg_utils.ApplySegmentationDeformation(),
                             name='3-T1wToMni')

        # Print end message
        # =================
        print_end_message = npe.Node(interface=nutil.Function(
            input_names=['subject_id', 'final_file'],
            function=seg_utils.print_end_pipeline),
                                     name='WriteEndMessage')

        # Connection
        # ==========
        self.connect([
            (self.input_node, init_node, [('t1w', 't1w')]),
            (init_node, unzip_node, [('t1w', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (init_node, print_end_message, [('subject_id', 'subject_id')]),
            (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
            (new_segment, t1_to_mni, [('forward_deformation_field',
                                       'deformation_field')]),
            (new_segment, self.output_node,
             [('bias_corrected_images', 'bias_corrected_images'),
              ('bias_field_images', 'bias_field_images'),
              ('dartel_input_images', 'dartel_input_images'),
              ('forward_deformation_field', 'forward_deformation_field'),
              ('inverse_deformation_field', 'inverse_deformation_field'),
              ('modulated_class_images', 'modulated_class_images'),
              ('native_class_images', 'native_class_images'),
              ('normalized_class_images', 'normalized_class_images'),
              ('transformation_mat', 'transformation_mat')]),
            (t1_to_mni, self.output_node, [('out_files', 't1_mni')]),
            (self.output_node, print_end_message, [('t1_mni', 'final_file')]),
        ])

        # Find container path from t1w filename
        # =====================================
        container_path = npe.Node(nutil.Function(
            input_names=['t1w_filename'],
            output_names=['container'],
            function=seg_utils.t1w_container_from_filename),
                                  name='ContainerPath')

        # Writing CAPS
        # ============
        write_node = npe.Node(name='WriteCAPS', interface=nio.DataSink())
        write_node.inputs.base_directory = self.caps_directory
        write_node.inputs.parameterization = False
        write_node.inputs.regexp_substitutions = [
            (r'(.*)c1(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-graymatter\3'),
            (r'(.*)c2(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-whitematter\3'),
            (r'(.*)c3(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-csf\3'),
            (r'(.*)c4(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-bone\3'),
            (r'(.*)c5(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-softtissue\3'),
            (r'(.*)c6(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-background\3'),
            (r'(.*)(/native_space/sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_probability\3'),
            (r'(.*)(/([a-z]+)_deformation_field/)i?y_(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\4_target-Ixi549Space_transformation-\3_deformation\5'
             ),
            (r'(.*)(/t1_mni/)w(sub-.*)_T1w(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_T1w\4'),
            (r'(.*)(/modulated_normalized/)mw(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_modulated-on_probability\4'
             ),
            (r'(.*)(/normalized/)w(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_modulated-off_probability\4'
             ),
            (r'(.*/dartel_input/)r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_dartelinput\3'),
            # Will remove trait_added empty folder
            (r'trait_added', r'')
        ]

        self.connect([
            (self.input_node, container_path, [('t1w', 't1w_filename')]),
            (container_path, write_node, [(('container', fix_join, ''),
                                           'container')]),
            (self.output_node, write_node,
             [(('native_class_images', seg_utils.zip_list_files, True),
               'native_space'),
              (('dartel_input_images', seg_utils.zip_list_files, True),
               'dartel_input')]),
            (self.output_node, write_node, [(('inverse_deformation_field',
                                              zip_nii, True),
                                             'inverse_deformation_field')]),
            (self.output_node, write_node, [(('forward_deformation_field',
                                              zip_nii, True),
                                             'forward_deformation_field')]),
            (self.output_node, write_node, [(('t1_mni', zip_nii, True),
                                             't1_mni')]),
        ])
        if self.parameters['save_warped_unmodulated']:
            self.connect([
                (self.output_node, write_node,
                 [(('normalized_class_images', seg_utils.zip_list_files, True),
                   'normalized')]),
            ])
        if self.parameters['save_warped_modulated']:
            self.connect([
                (self.output_node, write_node,
                 [(('modulated_class_images', seg_utils.zip_list_files, True),
                   'modulated_normalized')]),
            ])