Example No. 1
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os

        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        if self.parameters["longitudinal"]:
            self.build_input_node_longitudinal()
        else:
            self.build_input_node_cross_sectional()

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(
            self.subjects, self.sessions, folder_participants_tsv
        )

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                "List available in %s"
                % os.path.join(folder_participants_tsv, "participants.tsv")
            )
            cprint("The pipeline will last approximately a few hours per image.")
Example No. 2
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.exceptions import ClinicaException, ClinicaCAPSError
        from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
        from clinica.utils.input_files import t1_volume_i_th_iteration_group_template, t1_volume_dartel_input_tissue
        from clinica.utils.ux import print_images_to_process

        read_input_node = npe.Node(name="LoadingCLIArguments",
                                   interface=nutil.IdentityInterface(
                                       fields=self.get_input_fields(),
                                       mandatory_inputs=True))

        all_errors = []

        # Dartel Input Tissues
        # ====================
        d_input = []
        for tissue_number in self.parameters['tissues']:
            try:
                current_file = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory,
                    t1_volume_dartel_input_tissue(tissue_number))
                d_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)

        # Dartel Templates
        # ================
        dartel_iter_templates = []
        for i in range(1, 7):
            try:
                current_iter = clinica_group_reader(
                    self.caps_directory,
                    t1_volume_i_th_iteration_group_template(
                        self.parameters['group_id'], i))

                dartel_iter_templates.append(current_iter)
            except ClinicaException as e:
                all_errors.append(e)

        if len(all_errors) > 0:
            error_message = 'Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n'
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaCAPSError(error_message)

        read_input_node.inputs.dartel_input_images = d_input
        read_input_node.inputs.dartel_iteration_templates = dartel_iter_templates

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)

        self.connect([(read_input_node, self.input_node,
                       [('dartel_input_images', 'dartel_input_images')]),
                      (read_input_node, self.input_node,
                       [('dartel_iteration_templates',
                         'dartel_iteration_templates')])])
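The example above illustrates a pattern that recurs in these snippets: each input is read inside its own try/except, the exceptions are accumulated, and a single error listing everything that is missing is raised at the end. A dependency-free sketch of the same idea (read_one_input and InputReadError are hypothetical placeholders, not Clinica functions):

class InputReadError(Exception):
    """Raised when one or more inputs could not be read."""

def read_one_input(label):
    # Placeholder for clinica_file_reader / clinica_group_reader:
    # pretend only the "t1w" input can be found.
    if label != "t1w":
        raise FileNotFoundError(f"missing files for {label}")
    return f"/path/to/{label}.nii.gz"

def read_all_inputs(labels):
    collected, errors = [], []
    for label in labels:
        try:
            collected.append(read_one_input(label))
        except FileNotFoundError as error:
            errors.append(error)  # keep going so the user sees every problem at once
    if errors:
        raise InputReadError(
            "Errors were found while reading inputs:\n"
            + "\n".join(str(error) for error in errors)
        )
    return collected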
Example No. 3
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.input_files import (
            DWI_BVAL,
            DWI_BVEC,
            DWI_JSON,
            DWI_NII,
            T1W_NII,
        )
        from clinica.utils.inputs import clinica_list_of_files_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        list_bids_files = clinica_list_of_files_reader(
            self.subjects,
            self.sessions,
            self.bids_directory,
            [T1W_NII, DWI_JSON, DWI_NII, DWI_BVEC, DWI_BVAL],
            raise_exception=True,
        )

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                f"List available in {os.path.join(folder_participants_tsv, 'participants.tsv')}"
            )
            cprint(
                "Computational time will depend of the number of volumes in your DWI dataset and the use of CUDA."
            )

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ("t1w", list_bids_files[0]),
                ("dwi_json", list_bids_files[1]),
                ("dwi", list_bids_files[2]),
                ("bvec", list_bids_files[3]),
                ("bval", list_bids_files[4]),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        # fmt: off
        self.connect([
            (read_node, self.input_node, [("t1w", "t1w"), ("dwi", "dwi"),
                                          ("dwi_json", "dwi_json"),
                                          ("bvec", "bvec"), ("bval", "bval")]),
        ])
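The reading node above relies on nipype's synchronized iterables: with synchronize=True, the i-th elements of all iterables are consumed together (one iteration per subject/session) instead of expanding a Cartesian product. A minimal sketch with made-up file names:

import nipype.interfaces.utility as nutil
import nipype.pipeline.engine as npe

t1w_files = ["sub-01_T1w.nii.gz", "sub-02_T1w.nii.gz"]
dwi_files = ["sub-01_dwi.nii.gz", "sub-02_dwi.nii.gz"]

read_node = npe.Node(
    name="ReadingFiles",
    interface=nutil.IdentityInterface(fields=["t1w", "dwi"]),
    iterables=[("t1w", t1w_files), ("dwi", dwi_files)],
    synchronize=True,  # 2 paired iterations rather than 4 combinations
)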
Example No. 4
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from colorama import Fore

        from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
        from clinica.utils.input_files import t1_volume_template_tpm_in_mni
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import (
            print_groups_in_caps_directory,
            print_images_to_process,
        )

        # Check that group already exists
        if not os.path.exists(
                os.path.join(self.caps_directory, "groups",
                             f"group-{self.parameters['group_label']}")):
            print_groups_in_caps_directory(self.caps_directory)
            raise ClinicaException(
                f"%{Fore.RED}Group {self.parameters['group_label']} does not exist. "
                f"Did you run t1-volume or t1-volume-create-dartel pipeline?{Fore.RESET}"
            )

        try:
            gm_mni = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.caps_directory,
                t1_volume_template_tpm_in_mni(self.parameters["group_label"],
                                              1, True),
            )
        except ClinicaException as e:
            final_error_str = "Clinica faced error(s) while trying to read files in your CAPS directory.\n"
            final_error_str += str(e)
            raise ClinicaCAPSError(final_error_str)

        read_parameters_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                              mandatory_inputs=True),
        )
        read_parameters_node.inputs.file_list = gm_mni
        read_parameters_node.inputs.atlas_list = self.parameters["atlases"]

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("The pipeline will last a few seconds per image.")

        self.connect([
            (read_parameters_node, self.input_node, [("file_list", "file_list")
                                                     ]),
            (read_parameters_node, self.input_node, [("atlas_list",
                                                      "atlas_list")]),
        ])
Example No. 5
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.stream import cprint
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.input_files import T1W_LINEAR
        from clinica.utils.input_files import T1W_LINEAR_CROPPED
        from clinica.utils.ux import print_images_to_process

        if self.parameters.get('use_uncropped_image'):
            FILE_TYPE = T1W_LINEAR
        else:
            FILE_TYPE = T1W_LINEAR_CROPPED

        # T1w_Linear file:
        try:
            t1w_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.caps_directory, FILE_TYPE)
        except ClinicaException as e:
            err = 'Clinica faced error(s) while trying to read files in your CAPS directory.\n' + str(
                e)
            raise ClinicaBIDSError(err)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint('The pipeline will last approximately 30 seconds per image.'
                   )  # Replace by adequate computational time.

        if self.parameters.get('extract_method') == 'slice':
            self.slice_direction = self.parameters.get('slice_direction')
            self.slice_mode = self.parameters.get('slice_mode')
        else:
            self.slice_direction = 'axial'
            self.slice_mode = 'rgb'

        if self.parameters.get('extract_method') == 'patch':
            self.patch_size = self.parameters.get('patch_size')
            self.stride_size = self.parameters.get('stride_size')
        else:
            self.patch_size = 50
            self.stride_size = 50

        # The reading node
        # -------------------------
        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ('input_nifti', t1w_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()))

        self.connect([
            (read_node, self.input_node, [('input_nifti', 'input_nifti')]),
        ])
Example No. 6
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        all_errors = []
        t1w_in_ixi549space = {
            "pattern": os.path.join(
                "t1",
                "spm",
                "segmentation",
                "normalized_space",
                "*_*_space-Ixi549Space_T1w.nii*",
            ),
            "description": "Tissue probability map in native space",
            "needed_pipeline": "t1-volume-tissue-segmentation",
        }
        try:
            t1w_files = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.caps_directory,
                t1w_in_ixi549space,
            )
        except ClinicaException as e:
            all_errors.append(e)

        # Raise all errors if some happened
        if len(all_errors) > 0:
            error_message = "Clinica faced errors while trying to read files in your CAPS directory.\n"
            for msg in all_errors:
                error_message += str(msg)
            raise RuntimeError(error_message)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("The pipeline will last a few seconds per image.")

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ("norm_t1w", t1w_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        self.connect([
            (read_node, self.input_node, [("norm_t1w", "norm_t1w")]),
        ])
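The t1w_in_ixi549space dictionary above is a query handed to clinica_file_reader: a glob pattern plus a human-readable description and the pipeline that should have produced the files, both used in error messages. A rough standalone approximation of how such a query can be resolved for one subject/session (the CAPS layout assumed below is an illustration, not Clinica's actual reader):

from glob import glob
from os.path import join

def find_one_file(caps_directory, subject, session, query):
    """Return the single file matching the query for this subject/session, or raise."""
    matches = glob(join(caps_directory, "subjects", subject, session, query["pattern"]))
    if len(matches) != 1:
        raise FileNotFoundError(
            f"Expected exactly one file for '{query['description']}' "
            f"({subject} | {session}, needed pipeline: {query['needed_pipeline']}), "
            f"found {len(matches)}."
        )
    return matches[0]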
Example No. 7
    def build_input_node(self):
        """Build and connect an input node to the pipeline.

        Raise:
            ClinicaBIDSError: If there are duplicated files or missing files for any subject
        """
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.iotools.utils.data_handling import (
            check_volume_location_in_world_coordinate_system, )
        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.input_files import T1W_NII
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Inputs from anat/ folder
        # ========================
        # T1w file:
        try:
            t1w_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.bids_directory, T1W_NII)
        except ClinicaException as e:
            err = f"Clinica faced error(s) while trying to read files in your BIDS directory.\n{str(e)}"
            raise ClinicaBIDSError(err)

        check_volume_location_in_world_coordinate_system(
            t1w_files,
            self.bids_directory,
            skip_question=self.parameters["skip_question"],
        )

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                "The pipeline will last approximately 10 minutes per image.")

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ("t1w", t1w_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        self.connect([
            (read_node, self.input_node, [("t1w", "t1w")]),
        ])
Example No. 8
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        from colorama import Fore
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.input_files import t1_volume_template_tpm_in_mni
        from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process

        # Check that group already exists
        if not os.path.exists(
                os.path.join(self.caps_directory, 'groups',
                             'group-' + self.parameters['group_id'])):
            print_groups_in_caps_directory(self.caps_directory)
            raise ClinicaException(
                '%sGroup %s does not exist. Did you run t1-volume or t1-volume-create-dartel pipeline?%s'
                % (Fore.RED, self.parameters['group_id'], Fore.RESET))

        try:
            gm_mni = clinica_file_reader(
                self.subjects, self.sessions, self.caps_directory,
                t1_volume_template_tpm_in_mni(self.parameters['group_id'], 1,
                                              True))
        except ClinicaException as e:
            final_error_str = 'Clinica faced error(s) while trying to read files in your CAPS directory.\n'
            final_error_str += str(e)
            raise ClinicaCAPSError(final_error_str)

        read_parameters_node = npe.Node(name="LoadingCLIArguments",
                                        interface=nutil.IdentityInterface(
                                            fields=self.get_input_fields(),
                                            mandatory_inputs=True))
        read_parameters_node.inputs.file_list = gm_mni
        read_parameters_node.inputs.atlas_list = self.parameters['atlases']

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint('The pipeline will last a few seconds per image.')

        self.connect([(read_parameters_node, self.input_node, [('file_list',
                                                                'file_list')]),
                      (read_parameters_node, self.input_node,
                       [('atlas_list', 'atlas_list')])])
Example No. 9
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        from colorama import Fore
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
        from clinica.utils.input_files import (
            t1_volume_final_group_template, t1_volume_native_tpm,
            t1_volume_deformation_to_template)
        from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process

        # Check that group already exists
        if not os.path.exists(
                os.path.join(self.caps_directory, 'groups',
                             'group-' + self.parameters['group_id'])):
            print_groups_in_caps_directory(self.caps_directory)
            raise ClinicaException(
                '%sGroup %s does not exist. Did you run t1-volume or t1-volume-create-dartel pipeline?%s'
                % (Fore.RED, self.parameters['group_id'], Fore.RESET))

        all_errors = []
        read_input_node = npe.Node(name="LoadingCLIArguments",
                                   interface=nutil.IdentityInterface(
                                       fields=self.get_input_fields(),
                                       mandatory_inputs=True))

        # Segmented Tissues
        # =================
        tissues_input = []
        for tissue_number in self.parameters['tissues']:
            try:
                native_space_tpm = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory,
                    t1_volume_native_tpm(tissue_number))
                tissues_input.append(native_space_tpm)
            except ClinicaException as e:
                all_errors.append(e)
        # tissues_input has a length of len(self.parameters['tissues']). Each of these elements has a size of
        # len(self.subjects). We want the opposite: a list of size len(self.subjects) whose elements have a size of
        # len(self.parameters['tissues']). The trick is to iterate on elements with zip(*my_list).
        tissues_input_rearranged = []
        for subject_tissue_list in zip(*tissues_input):
            tissues_input_rearranged.append(subject_tissue_list)

        read_input_node.inputs.native_segmentations = tissues_input_rearranged

        # Flow Fields
        # ===========
        try:
            read_input_node.inputs.flowfield_files = clinica_file_reader(
                self.subjects, self.sessions, self.caps_directory,
                t1_volume_deformation_to_template(self.parameters['group_id']))
        except ClinicaException as e:
            all_errors.append(e)

        # Dartel Template
        # ================
        try:
            read_input_node.inputs.template_file = clinica_group_reader(
                self.caps_directory,
                t1_volume_final_group_template(self.parameters['group_id']))
        except ClinicaException as e:
            all_errors.append(e)

        if len(all_errors) > 0:
            error_message = 'Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n'
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaCAPSError(error_message)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint('The pipeline will last a few minutes per image.')

        self.connect([(read_input_node, self.input_node,
                       [('native_segmentations', 'native_segmentations')]),
                      (read_input_node, self.input_node,
                       [('flowfield_files', 'flowfield_files')]),
                      (read_input_node, self.input_node, [('template_file',
                                                           'template_file')])])
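The "Segmented Tissues" block above transposes a per-tissue list of per-subject files into a per-subject list of per-tissue files with zip(*...). A standalone illustration with made-up file names:

tissues_input = [
    ["sub-01_tissue-1.nii", "sub-02_tissue-1.nii"],  # tissue 1, all subjects
    ["sub-01_tissue-2.nii", "sub-02_tissue-2.nii"],  # tissue 2, all subjects
]
per_subject = list(zip(*tissues_input))
assert per_subject == [
    ("sub-01_tissue-1.nii", "sub-01_tissue-2.nii"),  # subject 1, all tissues
    ("sub-02_tissue-1.nii", "sub-02_tissue-2.nii"),  # subject 2, all tissues
]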
Example No. 10
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        from os import pardir
        from os.path import abspath, dirname, exists, join

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.exceptions import (
            ClinicaBIDSError,
            ClinicaCAPSError,
            ClinicaException,
        )
        from clinica.utils.filemanip import extract_subjects_sessions_from_filename
        from clinica.utils.input_files import (
            T1W_NII,
            T1W_TO_MNI_TRANSFROM,
            bids_pet_nii,
        )
        from clinica.utils.inputs import (
            RemoteFileStructure,
            clinica_file_reader,
            fetch_file,
        )
        from clinica.utils.pet import get_suvr_mask
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # from clinica.iotools.utils.data_handling import check_volume_location_in_world_coordinate_system
        # Import references files
        root = dirname(abspath(join(abspath(__file__), pardir, pardir)))
        path_to_mask = join(root, "resources", "masks")
        url_aramis = "https://aramislab.paris.inria.fr/files/data/img_t1_linear/"
        FILE1 = RemoteFileStructure(
            filename="mni_icbm152_t1_tal_nlin_sym_09c.nii",
            url=url_aramis,
            checksum=
            "93359ab97c1c027376397612a9b6c30e95406c15bf8695bd4a8efcb2064eaa34",
        )
        FILE2 = RemoteFileStructure(
            filename="ref_cropped_template.nii.gz",
            url=url_aramis,
            checksum=
            "67e1e7861805a8fd35f7fcf2bdf9d2a39d7bcb2fd5a201016c4d2acdd715f5b3",
        )

        self.ref_template = join(path_to_mask, FILE1.filename)
        self.ref_crop = join(path_to_mask, FILE2.filename)
        self.ref_mask = get_suvr_mask(self.parameters["suvr_reference_region"])

        if not (exists(self.ref_template)):
            try:
                fetch_file(FILE1, path_to_mask)
            except IOError as err:
                cprint(
                    msg=
                    f"Unable to download required template (mni_icbm152) for processing: {err}",
                    lvl="error",
                )
        if not (exists(self.ref_crop)):
            try:
                fetch_file(FILE2, path_to_mask)
            except IOError as err:
                cprint(
                    msg=
                    f"Unable to download required template (ref_crop) for processing: {err}",
                    lvl="error",
                )

        # Inputs from BIDS directory
        # pet file:
        PET_NII = bids_pet_nii(self.parameters["acq_label"])
        try:
            pet_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.bids_directory, PET_NII)
        except ClinicaException as e:
            err = (
                "Clinica faced error(s) while trying to read pet files in your BIDS directory.\n"
                + str(e))
            raise ClinicaBIDSError(err)

        # T1w file:
        try:
            t1w_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.bids_directory, T1W_NII)
        except ClinicaException as e:
            err = (
                "Clinica faced error(s) while trying to read t1w files in your BIDS directory.\n"
                + str(e))
            raise ClinicaBIDSError(err)

        # Inputs from t1-linear pipeline
        # Transformation files from T1w files to MNI:
        try:
            t1w_to_mni_transformation_files = clinica_file_reader(
                self.subjects, self.sessions, self.caps_directory,
                T1W_TO_MNI_TRANSFROM)
        except ClinicaException as e:
            err = (
                "Clinica faced error(s) while trying to read transformation files in your CAPS directory.\n"
                + str(e))
            raise ClinicaCAPSError(err)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("The pipeline will last approximately 3 minutes per image.")

        read_input_node = npe.Node(
            name="LoadingCLIArguments",
            iterables=[
                ("t1w", t1w_files),
                ("pet", pet_files),
                ("t1w_to_mni", t1w_to_mni_transformation_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        # fmt: off
        self.connect([
            (read_input_node, self.input_node, [("t1w", "t1w")]),
            (read_input_node, self.input_node, [("pet", "pet")]),
            (read_input_node, self.input_node, [("t1w_to_mni", "t1w_to_mni")]),
        ])
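Examples that need reference templates, like the one above, download them only when the local copy is missing and check an expected checksum. A hedged sketch of that pattern (an illustration of the idea, not Clinica's fetch_file implementation):

import hashlib
import urllib.request
from pathlib import Path

def fetch_if_missing(url: str, destination: Path, expected_sha256: str) -> Path:
    """Download url to destination unless it already exists; verify its SHA-256."""
    if destination.exists():
        return destination
    destination.parent.mkdir(parents=True, exist_ok=True)
    urllib.request.urlretrieve(url, destination)
    digest = hashlib.sha256(destination.read_bytes()).hexdigest()
    if digest != expected_sha256:
        destination.unlink()
        raise IOError(f"Checksum mismatch for {destination.name}")
    return destination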
Example No. 11
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        import sys
        from colorama import Fore
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.input_files import t1_volume_dartel_input_tissue
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process, print_begin_image

        representative_output = os.path.join(
            self.caps_directory, 'groups',
            'group-' + self.parameters['group_label'], 't1',
            'group-' + self.parameters['group_label'] + '_template.nii.gz')
        if os.path.exists(representative_output):
            cprint(
                "%sDARTEL template for %s already exists. Currently, Clinica does not propose to overwrite outputs "
                "for this pipeline.%s" %
                (Fore.YELLOW, self.parameters['group_label'], Fore.RESET))
            print_groups_in_caps_directory(self.caps_directory)
            sys.exit(0)

        # Check that there is at least 2 subjects
        if len(self.subjects) <= 1:
            raise ClinicaException(
                '%sThis pipeline needs at least 2 images to create DARTEL template but '
                'Clinica only found %s.%s' %
                (Fore.RED, len(self.subjects), Fore.RESET))

        read_parameters_node = npe.Node(name="LoadingCLIArguments",
                                        interface=nutil.IdentityInterface(
                                            fields=self.get_input_fields(),
                                            mandatory_inputs=True))
        all_errors = []
        d_input = []
        for tissue_number in self.parameters['dartel_tissues']:
            try:
                current_file = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory,
                    t1_volume_dartel_input_tissue(tissue_number))
                d_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)

        # Raise all errors if some happened
        if len(all_errors) > 0:
            error_message = 'Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n'
            for msg in all_errors:
                error_message += str(msg)
            raise RuntimeError(error_message)

        # d_input is a list of size len(self.parameters['dartel_tissues'])
        #     Each element of this list is a list of size len(self.subjects)
        read_parameters_node.inputs.dartel_inputs = d_input

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                'Computational time for DARTEL creation will depend on the number of images.'
            )
            print_begin_image('group-' + self.parameters['group_label'])

        self.connect([(read_parameters_node, self.input_node,
                       [('dartel_inputs', 'dartel_input_images')])])
Example No. 12
    def build_input_node(self):
        """Build and connect an input node to the pipeline.
        """
        from os import pardir
        from os.path import dirname, join, abspath, exists
        from colorama import Fore
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.filemanip import extract_subjects_sessions_from_filename
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.input_files import T1W_NII
        from clinica.utils.inputs import fetch_file, RemoteFileStructure
        from clinica.utils.ux import print_images_to_process
        from clinica.utils.stream import cprint

        root = dirname(abspath(join(abspath(__file__), pardir, pardir)))
        path_to_mask = join(root, 'resources', 'masks')
        url_aramis = 'https://aramislab.paris.inria.fr/files/data/img_t1_linear/'
        FILE1 = RemoteFileStructure(
            filename='ref_cropped_template.nii.gz',
            url=url_aramis,
            checksum=
            '67e1e7861805a8fd35f7fcf2bdf9d2a39d7bcb2fd5a201016c4d2acdd715f5b3')
        FILE2 = RemoteFileStructure(
            filename='mni_icbm152_t1_tal_nlin_sym_09c.nii',
            url=url_aramis,
            checksum=
            '93359ab97c1c027376397612a9b6c30e95406c15bf8695bd4a8efcb2064eaa34')

        self.ref_template = join(path_to_mask, FILE2.filename)
        self.ref_crop = join(path_to_mask, FILE1.filename)

        if not (exists(self.ref_template)):
            try:
                fetch_file(FILE2, path_to_mask)
            except IOError as err:
                cprint(
                    f'Unable to download required template (mni_icbm152) for processing: {err}'
                )

        if not (exists(self.ref_crop)):
            try:
                fetch_file(FILE1, path_to_mask)
            except IOError as err:
                cprint(
                    f'Unable to download required template (ref_crop) for processing: {err}'
                )

        # Display image(s) already present in CAPS folder
        # ===============================================
        processed_ids = self.get_processed_images(self.caps_directory,
                                                  self.subjects, self.sessions)
        if len(processed_ids) > 0:
            cprint(
                "%sClinica found %s image(s) already processed in CAPS directory:%s"
                % (Fore.YELLOW, len(processed_ids), Fore.RESET))
            for image_id in processed_ids:
                cprint("%s\t%s%s" %
                       (Fore.YELLOW, image_id.replace('_', ' | '), Fore.RESET))
            cprint("%s\nImage(s) will be ignored by Clinica.\n%s" %
                   (Fore.YELLOW, Fore.RESET))
            input_ids = [
                p_id + '_' + s_id
                for p_id, s_id in zip(self.subjects, self.sessions)
            ]
            to_process_ids = list(set(input_ids) - set(processed_ids))
            self.subjects, self.sessions = extract_subjects_sessions_from_filename(
                to_process_ids)

        # Inputs from anat/ folder
        # ========================
        # T1w file:
        try:
            t1w_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.bids_directory, T1W_NII)
        except ClinicaException as e:
            err = 'Clinica faced error(s) while trying to read files in your BIDS directory.\n' + str(
                e)
            raise ClinicaBIDSError(err)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint('The pipeline will last approximately 6 minutes per image.')

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ('t1w', t1w_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()))
        self.connect([
            (read_node, self.input_node, [('t1w', 't1w')]),
        ])
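The block above that filters out images already present in CAPS boils down to a set difference on "<participant>_<session>" identifiers, followed by splitting the survivors back into subjects and sessions. A standalone sketch with made-up identifiers (note that a plain set difference, as in the example, does not preserve order; the list comprehension below does):

subjects = ["sub-01", "sub-02", "sub-03"]
sessions = ["ses-M00", "ses-M00", "ses-M00"]
processed_ids = {"sub-02_ses-M00"}

input_ids = [f"{p}_{s}" for p, s in zip(subjects, sessions)]
to_process = [i for i in input_ids if i not in processed_ids]
subjects, sessions = zip(*(i.split("_", 1) for i in to_process)) if to_process else ((), ())
print(subjects)  # ('sub-01', 'sub-03')
print(sessions)  # ('ses-M00', 'ses-M00')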
Example No. 13
    def build_input_node(self):
        """Build and connect an input node to the pipeline.

        Raise:
            ClinicaBIDSError: If there are duplicated files or missing files for any subject
        """
        import os

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from colorama import Fore

        from clinica.iotools.utils.data_handling import (
            check_volume_location_in_world_coordinate_system, )
        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.filemanip import (
            extract_subjects_sessions_from_filename,
            save_participants_sessions,
        )
        from clinica.utils.input_files import T1W_NII
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Display image(s) already present in CAPS folder
        # ===============================================
        processed_ids = self.get_processed_images(self.caps_directory,
                                                  self.subjects, self.sessions)
        if len(processed_ids) > 0:
            cprint(f"{Fore.YELLOW}Clinica found {len(processed_ids)} image(s) "
                   f"already processed in CAPS directory:{Fore.RESET}")
            for image_id in processed_ids:
                cprint(
                    f"{Fore.YELLOW}\t{image_id.replace('_', ' | ')}{Fore.RESET}"
                )
            if self.overwrite_caps:
                output_folder = "<CAPS>/subjects/<participant_id>/<session_id>/t1/freesurfer_cross_sectional"
                cprint(
                    f"{Fore.YELLOW}\nOutput folders in {output_folder} will be recreated.\n{Fore.RESET}"
                )
            else:
                cprint(
                    f"{Fore.YELLOW}\nImage(s) will be ignored by Clinica.\n{Fore.RESET}"
                )
                input_ids = [
                    p_id + "_" + s_id
                    for p_id, s_id in zip(self.subjects, self.sessions)
                ]
                to_process_ids = list(set(input_ids) - set(processed_ids))
                self.subjects, self.sessions = extract_subjects_sessions_from_filename(
                    to_process_ids)

        # Inputs from anat/ folder
        # ========================
        # T1w file:
        try:
            t1w_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.bids_directory, T1W_NII)
        except ClinicaException as e:
            err_msg = (
                "Clinica faced error(s) while trying to read files in your BIDS directory.\n"
                + str(e))
            raise ClinicaBIDSError(err_msg)

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("List available in %s" %
                   os.path.join(folder_participants_tsv, "participants.tsv"))
            cprint("The pipeline will last approximately 10 hours per image.")

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ("t1w", t1w_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        check_volume_location_in_world_coordinate_system(
            t1w_files, self.bids_directory)
        self.connect([
            (read_node, self.input_node, [("t1w", "t1w")]),
        ])
Example No. 14
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        from colorama import Fore
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.input_files import (t1_volume_template_tpm_in_mni,
                                               pet_volume_normalized_suvr_pet)
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process, print_begin_image

        all_errors = []
        if self.parameters['orig_input_data'] == 'pet-volume':
            if not (self.parameters["acq_label"]
                    and self.parameters["suvr_reference_region"]):
                raise ValueError(
                    f"Missing value(s) in parameters from pet-volume pipeline. Given values:\n"
                    f"- acq_label: {self.parameters['acq_label']}\n"
                    f"- suvr_reference_region: {self.parameters['suvr_reference_region']}\n"
                    f"- use_pvc_data: {self.parameters['use_pvc_data']}\n")

            self.parameters['measure_label'] = self.parameters['acq_label']
            information_dict = pet_volume_normalized_suvr_pet(
                acq_label=self.parameters["acq_label"],
                group_label=self.parameters["group_label_dartel"],
                suvr_reference_region=self.parameters["suvr_reference_region"],
                use_brainmasked_image=True,
                use_pvc_data=self.parameters["use_pvc_data"],
                fwhm=self.parameters['full_width_at_half_maximum'])
        elif self.parameters['orig_input_data'] == 't1-volume':
            self.parameters['measure_label'] = 'graymatter'
            information_dict = t1_volume_template_tpm_in_mni(
                self.parameters['group_label_dartel'], 0, True)

        elif self.parameters['orig_input_data'] == 'custom-pipeline':
            if self.parameters['custom_file'] is None:
                raise ClinicaException(
                    f"{Fore.RED}Custom pipeline was selected but no 'custom_file' was specified.{Fore.RESET}"
                )
            # If custom files are grabbed, the fwhm information is irrelevant and should not appear in the final filenames
            self.parameters['full_width_at_half_maximum'] = None
            information_dict = {
                'pattern': self.parameters['custom_file'],
                'description': 'custom file provided by user'
            }
        else:
            raise ValueError(
                f"Input data {self.parameters['orig_input_data']} unknown.")

        try:
            input_files = clinica_file_reader(self.subjects, self.sessions,
                                              self.caps_directory,
                                              information_dict)
        except ClinicaException as e:
            all_errors.append(e)

        if len(all_errors) > 0:
            error_message = 'Clinica faced errors while trying to read files in your CAPS directories.\n'
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaException(error_message)

        read_parameters_node = npe.Node(name="LoadingCLIArguments",
                                        interface=nutil.IdentityInterface(
                                            fields=self.get_input_fields(),
                                            mandatory_inputs=True),
                                        synchronize=True)
        read_parameters_node.inputs.input_files = input_files

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                'The pipeline will last a few minutes. Images generated by SPM will popup during the pipeline.'
            )
            print_begin_image(f"group-{self.parameters['group_label']}")

        self.connect([(read_parameters_node, self.input_node,
                       [('input_files', 'input_files')])])
Example No. 15
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        from os import pardir
        from os.path import abspath, dirname, exists, join

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.filemanip import extract_subjects_sessions_from_filename
        from clinica.utils.input_files import T1W_NII
        from clinica.utils.inputs import (
            RemoteFileStructure,
            clinica_file_reader,
            fetch_file,
        )
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        root = dirname(abspath(join(abspath(__file__), pardir, pardir)))
        path_to_mask = join(root, "resources", "masks")
        url_aramis = "https://aramislab.paris.inria.fr/files/data/img_t1_linear/"
        FILE1 = RemoteFileStructure(
            filename="ref_cropped_template.nii.gz",
            url=url_aramis,
            checksum=
            "67e1e7861805a8fd35f7fcf2bdf9d2a39d7bcb2fd5a201016c4d2acdd715f5b3",
        )
        FILE2 = RemoteFileStructure(
            filename="mni_icbm152_t1_tal_nlin_sym_09c.nii",
            url=url_aramis,
            checksum=
            "93359ab97c1c027376397612a9b6c30e95406c15bf8695bd4a8efcb2064eaa34",
        )

        self.ref_template = join(path_to_mask, FILE2.filename)
        self.ref_crop = join(path_to_mask, FILE1.filename)

        if not (exists(self.ref_template)):
            try:
                fetch_file(FILE2, path_to_mask)
            except IOError as err:
                cprint(
                    msg=
                    f"Unable to download required template (mni_icbm152) for processing: {err}",
                    lvl="error",
                )

        if not (exists(self.ref_crop)):
            try:
                fetch_file(FILE1, path_to_mask)
            except IOError as err:
                cprint(
                    msg=
                    f"Unable to download required template (ref_crop) for processing: {err}",
                    lvl="error",
                )

        # Display image(s) already present in CAPS folder
        # ===============================================
        processed_ids = self.get_processed_images(self.caps_directory,
                                                  self.subjects, self.sessions)
        if len(processed_ids) > 0:
            cprint(
                msg=
                f"Clinica found {len(processed_ids)} image(s) already processed in CAPS directory:",
                lvl="warning",
            )
            for image_id in processed_ids:
                cprint(msg=f"{image_id.replace('_', ' | ')}", lvl="warning")
            cprint(msg=f"Image(s) will be ignored by Clinica.", lvl="warning")
            input_ids = [
                p_id + "_" + s_id
                for p_id, s_id in zip(self.subjects, self.sessions)
            ]
            to_process_ids = list(set(input_ids) - set(processed_ids))
            self.subjects, self.sessions = extract_subjects_sessions_from_filename(
                to_process_ids)

        # Inputs from anat/ folder
        # ========================
        # T1w file:
        try:
            t1w_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.bids_directory, T1W_NII)
        except ClinicaException as e:
            err = (
                "Clinica faced error(s) while trying to read files in your BIDS directory.\n"
                + str(e))
            raise ClinicaBIDSError(err)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("The pipeline will last approximately 6 minutes per image.")

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ("t1w", t1w_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        self.connect([
            (read_node, self.input_node, [("t1w", "t1w")]),
        ])
Example No. 16
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        from os.path import join, exists
        from colorama import Fore
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
        from clinica.utils.input_files import (
            t1_volume_final_group_template, t1_volume_native_tpm,
            t1_volume_native_tpm_in_mni, t1_volume_deformation_to_template,
            bids_pet_nii, T1W_NII)
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process
        from clinica.iotools.utils.data_handling import check_relative_volume_location_in_world_coordinate_system
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.pet import read_psf_information, get_suvr_mask
        from clinica.utils.stream import cprint

        # Check that group already exists
        if not exists(
                join(self.caps_directory, 'groups',
                     'group-' + self.parameters['group_label'])):
            print_groups_in_caps_directory(self.caps_directory)
            raise ClinicaException(
                '%sGroup %s does not exist. Did you run t1-volume or t1-volume-create-dartel pipeline?%s'
                % (Fore.RED, self.parameters['group_label'], Fore.RESET))

        # Tissues DataGrabber
        # ====================
        all_errors = []

        # Grab reference mask
        reference_mask_file = get_suvr_mask(
            self.parameters['suvr_reference_region'])

        # PET from BIDS directory
        try:
            pet_bids = clinica_file_reader(
                self.subjects, self.sessions, self.bids_directory,
                bids_pet_nii(self.parameters['acq_label']))
        except ClinicaException as e:
            all_errors.append(e)

        # Native T1w-MRI
        try:
            t1w_bids = clinica_file_reader(self.subjects, self.sessions,
                                           self.bids_directory, T1W_NII)
        except ClinicaException as e:
            all_errors.append(e)

        # mask_tissues
        tissues_input = []
        for tissue_number in self.parameters['mask_tissues']:
            try:
                current_file = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory,
                    t1_volume_native_tpm_in_mni(tissue_number, False))
                tissues_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)
        # tissues_input has a length of len(self.parameters['mask_tissues']). Each of these elements has a size of
        # len(self.subjects). We want the opposite: a list of size len(self.subjects) whose elements have a size of
        # len(self.parameters['mask_tissues']). The trick is to iterate on elements with zip(*my_list).
        tissues_input_final = []
        for subject_tissue_list in zip(*tissues_input):
            tissues_input_final.append(subject_tissue_list)
        tissues_input = tissues_input_final

        # Flowfields
        try:
            flowfields_caps = clinica_file_reader(
                self.subjects, self.sessions, self.caps_directory,
                t1_volume_deformation_to_template(
                    self.parameters['group_label']))
        except ClinicaException as e:
            all_errors.append(e)

        # Dartel Template
        try:
            final_template = clinica_group_reader(
                self.caps_directory,
                t1_volume_final_group_template(self.parameters['group_label']))
        except ClinicaException as e:
            all_errors.append(e)

        if self.parameters['pvc_psf_tsv'] is not None:
            iterables_psf = read_psf_information(
                self.parameters['pvc_psf_tsv'], self.subjects, self.sessions)
            self.parameters['apply_pvc'] = True
        else:
            iterables_psf = [[]] * len(self.subjects)
            self.parameters['apply_pvc'] = False

        if self.parameters['apply_pvc']:
            # pvc tissues input
            pvc_tissues_input = []
            for tissue_number in self.parameters['pvc_mask_tissues']:
                try:
                    current_file = clinica_file_reader(
                        self.subjects, self.sessions, self.caps_directory,
                        t1_volume_native_tpm(tissue_number))
                    pvc_tissues_input.append(current_file)
                except ClinicaException as e:
                    all_errors.append(e)

            if len(all_errors) == 0:
                pvc_tissues_input_final = []
                for subject_tissue_list in zip(*pvc_tissues_input):
                    pvc_tissues_input_final.append(subject_tissue_list)
                pvc_tissues_input = pvc_tissues_input_final
        else:
            pvc_tissues_input = []

        if len(all_errors) > 0:
            error_message = 'Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n'
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaException(error_message)

        check_relative_volume_location_in_world_coordinate_system(
            'T1w-MRI', t1w_bids, self.parameters['acq_label'] + ' PET',
            pet_bids, self.bids_directory, self.parameters['acq_label'])

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint('List available in %s' %
                   os.path.join(folder_participants_tsv, 'participants.tsv'))
            cprint(
                'The pipeline will last approximately 10 minutes per image.')

        read_input_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                              mandatory_inputs=True),
            iterables=[('pet_image', pet_bids), ('t1_image_native', t1w_bids),
                       ('mask_tissues', tissues_input), ('psf', iterables_psf),
                       ('flow_fields', flowfields_caps),
                       ('pvc_mask_tissues', pvc_tissues_input)],
            synchronize=True)

        read_input_node.inputs.reference_mask = reference_mask_file
        read_input_node.inputs.dartel_template = final_template

        self.connect([(read_input_node, self.input_node,
                       [('pet_image', 'pet_image'),
                        ('t1_image_native', 't1_image_native'),
                        ('mask_tissues', 'mask_tissues'),
                        ('flow_fields', 'flow_fields'),
                        ('dartel_template', 'dartel_template'),
                        ('reference_mask', 'reference_mask'), ('psf', 'psf'),
                        ('pvc_mask_tissues', 'pvc_mask_tissues')])])
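When no PSF file is given, the example above still needs one entry per subject so that the synchronized iterables stay the same length; empty lists are used as placeholders. A small sketch of that trick (values are illustrative):

subjects = ["sub-01", "sub-02"]
psf_rows = None  # e.g. no pvc_psf_tsv file was provided

iterables_psf = psf_rows if psf_rows is not None else [[]] * len(subjects)
assert len(iterables_psf) == len(subjects)
assert iterables_psf == [[], []]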
Example No. 17
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        import sys

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.input_files import t1_volume_dartel_input_tissue
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import (
            print_begin_image,
            print_groups_in_caps_directory,
            print_images_to_process,
        )

        representative_output = os.path.join(
            self.caps_directory,
            "groups",
            f"group-{self.parameters['group_label']}",
            "t1",
            f"group-{self.parameters['group_label']}_template.nii.gz",
        )
        if os.path.exists(representative_output):
            cprint(
                msg=
                (f"DARTEL template for {self.parameters['group_label']} already exists. "
                 "Currently, Clinica does not propose to overwrite outputs for this pipeline."
                 ),
                lvl="warning",
            )
            print_groups_in_caps_directory(self.caps_directory)
            sys.exit(0)

        # Check that there is at least 2 subjects
        if len(self.subjects) <= 1:
            raise ClinicaException(
                "This pipeline needs at least 2 images to create DARTEL "
                f"template but Clinica only found {len(self.subjects)}.")

        read_parameters_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                              mandatory_inputs=True),
        )
        all_errors = []
        d_input = []
        for tissue_number in self.parameters["dartel_tissues"]:
            try:
                current_file = clinica_file_reader(
                    self.subjects,
                    self.sessions,
                    self.caps_directory,
                    t1_volume_dartel_input_tissue(tissue_number),
                )
                d_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)

        # Raise all errors if some happened
        if len(all_errors) > 0:
            error_message = "Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n"
            for msg in all_errors:
                error_message += str(msg)
            raise RuntimeError(error_message)

        # d_input is a list of size len(self.parameters['dartel_tissues'])
        #     Each element of this list is a list of size len(self.subjects)
        read_parameters_node.inputs.dartel_inputs = d_input

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                "Computational time for DARTEL creation will depend on the number of images."
            )
            print_begin_image(f"group-{self.parameters['group_label']}")

        # fmt: off
        self.connect([(read_parameters_node, self.input_node,
                       [("dartel_inputs", "dartel_input_images")])])
Example No. 18
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        import re

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        import clinica.utils.input_files as input_files
        from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.inputs import clinica_list_of_files_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Read CAPS files
        list_caps_files = clinica_list_of_files_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            [
                # Inputs from t1-freesurfer pipeline
                input_files.T1_FS_WM,  # list_caps_files[0]
                input_files.T1_FS_DESIKAN,  # list_caps_files[1]
                input_files.T1_FS_DESTRIEUX,  # list_caps_files[2]
                input_files.T1_FS_BRAIN,  # list_caps_files[3]
                # Inputs from dwi-preprocessing pipeline
                input_files.DWI_PREPROC_NII,  # list_caps_files[4]
                input_files.DWI_PREPROC_BRAINMASK,  # list_caps_files[5]
                input_files.DWI_PREPROC_BVEC,  # list_caps_files[6]
                input_files.DWI_PREPROC_BVAL,  # list_caps_files[7]
            ],
            raise_exception=True,
        )

        # Check space of DWI dataset
        dwi_file_spaces = [
            re.search(".*_space-(.*)_preproc.nii.*", file, re.IGNORECASE).group(1)
            for file in list_caps_files[4]
        ]

        # Raise an error if the DWI files are not all in the same space
        if any(a != dwi_file_spaces[0] for a in dwi_file_spaces):
            raise ClinicaCAPSError(
                "Preprocessed DWI files are not all in the same space. "
                "Please process them separately using the appropriate subjects/sessions `.tsv` file (-tsv option)."
            )
        list_atlas_files = [
            [aparc_aseg, aparc_aseg_a2009]
            for aparc_aseg, aparc_aseg_a2009 in zip(
                list_caps_files[1], list_caps_files[2]
            )
        ]

        list_grad_fsl = [
            (bvec, bval) for bvec, bval in zip(list_caps_files[6], list_caps_files[7])
        ]
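        # Illustrative (hypothetical) shape: list_grad_fsl pairs each subject's bvec with
        # its bval, e.g. [("sub-01_....bvec", "sub-01_....bval"), ("sub-02_....bvec", "sub-02_....bval")].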

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(
            self.subjects, self.sessions, folder_participants_tsv
        )

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                "Computational time will depend on the number of volumes in your DWI dataset and "
                "the number of streamlines you selected."
            )

        if dwi_file_spaces[0] == "b0":
            self.parameters["dwi_space"] = "b0"
            read_node = npe.Node(
                name="ReadingFiles",
                iterables=[
                    ("wm_mask_file", list_caps_files[0]),
                    ("t1_brain_file", list_caps_files[3]),
                    ("dwi_file", list_caps_files[4]),
                    ("dwi_brainmask_file", list_caps_files[5]),
                    ("grad_fsl", list_grad_fsl),
                    ("atlas_files", list_atlas_files),
                ],
                synchronize=True,
                interface=nutil.IdentityInterface(fields=self.get_input_fields()),
            )
            # fmt: off
            self.connect(
                [
                    (read_node, self.input_node, [("t1_brain_file", "t1_brain_file")]),
                    (read_node, self.input_node, [("wm_mask_file", "wm_mask_file")]),
                    (read_node, self.input_node, [("dwi_file", "dwi_file")]),
                    (read_node, self.input_node, [("dwi_brainmask_file", "dwi_brainmask_file")]),
                    (read_node, self.input_node, [("grad_fsl", "grad_fsl")]),
                    (read_node, self.input_node, [("atlas_files", "atlas_files")]),
                ]
            )
            # fmt: on
        elif dwi_file_spaces[0] == "T1w":
            self.parameters["dwi_space"] = "T1w"
            read_node = npe.Node(
                name="ReadingFiles",
                iterables=[
                    ("wm_mask_file", list_caps_files[0]),
                    ("dwi_file", list_caps_files[4]),
                    ("dwi_brainmask_file", list_caps_files[5]),
                    ("grad_fsl", list_grad_fsl),
                    ("atlas_files", list_atlas_files),
                ],
                synchronize=True,
                interface=nutil.IdentityInterface(fields=self.get_input_fields()),
            )
            # fmt: off
            self.connect(
                [
                    (read_node, self.input_node, [("wm_mask_file", "wm_mask_file")]),
                    (read_node, self.input_node, [("dwi_file", "dwi_file")]),
                    (read_node, self.input_node, [("dwi_brainmask_file", "dwi_brainmask_file")]),
                    (read_node, self.input_node, [("grad_fsl", "grad_fsl")]),
                    (read_node, self.input_node, [("atlas_files", "atlas_files")]),
                ]
            )
            # fmt: on
        else:
            raise ClinicaCAPSError(
                "Bad preprocessed DWI space. Please check your CAPS folder."
            )
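A small self-contained check of the space-detection step above, run on hypothetical preprocessed DWI filenames; it shows how the regular expression pulls "T1w" (or "b0") out of the _space-<X>_preproc entity.

import re

# Hypothetical CAPS filenames for preprocessed DWI volumes.
files = [
    "sub-01_ses-M00_space-T1w_preproc.nii.gz",
    "sub-02_ses-M00_space-T1w_preproc.nii.gz",
]

spaces = [
    re.search(".*_space-(.*)_preproc.nii.*", f, re.IGNORECASE).group(1) for f in files
]
print(spaces)                               # ['T1w', 'T1w']
print(any(s != spaces[0] for s in spaces))  # False: all files share the same space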
Example No. 19
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        import clinica.utils.input_files as input_files
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.inputs import clinica_list_of_files_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        list_caps_files = clinica_list_of_files_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            [
                input_files.DWI_PREPROC_NII,
                input_files.DWI_PREPROC_BVEC,
                input_files.DWI_PREPROC_BVAL,
                input_files.DWI_PREPROC_BRAINMASK,
            ],
            raise_exception=True,
        )

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                f"List available in {os.path.join(folder_participants_tsv, 'participants.tsv')}"
            )
            cprint(
                "The pipeline will last approximately 20 minutes per image.")

        read_input_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                              mandatory_inputs=True),
            iterables=[
                ("preproc_dwi", list_caps_files[0]),
                ("preproc_bvec", list_caps_files[1]),
                ("preproc_bval", list_caps_files[2]),
                ("b0_mask", list_caps_files[3]),
            ],
            synchronize=True,
        )

        self.connect([
            (read_input_node, self.input_node, [("b0_mask", "b0_mask")]),
            (read_input_node, self.input_node, [("preproc_dwi", "preproc_dwi")]),
            (read_input_node, self.input_node, [("preproc_bval", "preproc_bval")]),
            (read_input_node, self.input_node, [("preproc_bvec", "preproc_bvec")]),
        ])
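Since the read node above pairs its four lists element-wise (synchronize=True), a quick sanity check along the following lines (a sketch with hypothetical filenames) can catch length mismatches before the node is built.

# Hypothetical per-subject file lists, in the same order as the iterables above.
list_caps_files = [
    ["sub-01_preproc.nii.gz", "sub-02_preproc.nii.gz"],      # preproc_dwi
    ["sub-01_preproc.bvec", "sub-02_preproc.bvec"],          # preproc_bvec
    ["sub-01_preproc.bval", "sub-02_preproc.bval"],          # preproc_bval
    ["sub-01_brainmask.nii.gz", "sub-02_brainmask.nii.gz"],  # b0_mask
]

lengths = {len(files) for files in list_caps_files}
assert len(lengths) == 1, f"Synchronized iterables need equal lengths, got {lengths}"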
Example No. 20
    def build_input_node(self):
        """Build and connect an input node to the pipeline.
        """
        from os import pardir
        from os.path import dirname, join, abspath, split, exists
        import sys
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.utils.inputs import check_bids_folder
        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.input_files import T1W_NII
        from clinica.utils.inputs import fetch_file
        from clinica.utils.ux import print_images_to_process
        from clinica.utils.stream import cprint

        root = dirname(abspath(join(abspath(__file__), pardir, pardir)))
        path_to_mask = join(root, 'resources', 'masks')
        self.ref_template = join(
                path_to_mask, 'mni_icbm152_t1_tal_nlin_sym_09c.nii')
        self.ref_crop = join(path_to_mask, 'ref_cropped_template.nii.gz')
        url1 = "https://aramislab.paris.inria.fr/files/data/img_t1_linear/ref_cropped_template.nii.gz"
        url2 = "https://aramislab.paris.inria.fr/files/data/img_t1_linear/mni_icbm152_t1_tal_nlin_sym_09c.nii"

        if not exists(self.ref_template):
            try:
                fetch_file(url2, self.ref_template)
            except IOError as err:
                cprint(f'Unable to download required template (mni_icbm152) for processing: {err}')

        if not exists(self.ref_crop):
            try:
                fetch_file(url1, self.ref_crop)
            except IOError as err:
                cprint(f'Unable to download required template (ref_crop) for processing: {err}')

        # Inputs from anat/ folder
        # ========================
        # T1w file:
        try:
            t1w_files = clinica_file_reader(
                    self.subjects,
                    self.sessions,
                    self.bids_directory,
                    T1W_NII)
        except ClinicaException as e:
            err = 'Clinica faced error(s) while trying to read files in your BIDS directory.\n' + str(e)
            raise ClinicaBIDSError(err)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint('The pipeline will last approximately 6 minutes per image.')

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[('t1w', t1w_files)],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        self.connect([
            (read_node, self.input_node, [('t1w', 't1w')]),
        ])
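A generic, hedged sketch of the download-if-missing pattern used above, written against the standard library rather than Clinica's fetch_file; the helper name and destination path are placeholders.

import os
import urllib.request

def fetch_if_missing(url, destination):
    """Download url to destination only when the file is absent (illustrative helper)."""
    if os.path.exists(destination):
        return destination
    try:
        urllib.request.urlretrieve(url, destination)
    except IOError as err:
        print(f"Unable to download {url}: {err}")
    return destination

fetch_if_missing(
    "https://aramislab.paris.inria.fr/files/data/img_t1_linear/ref_cropped_template.nii.gz",
    "/tmp/ref_cropped_template.nii.gz",  # placeholder destination
)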
Example No. 21
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        from os.path import exists, join

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.iotools.utils.data_handling import (
            check_relative_volume_location_in_world_coordinate_system,
        )
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.input_files import (
            T1W_NII,
            bids_pet_nii,
            t1_volume_deformation_to_template,
            t1_volume_final_group_template,
            t1_volume_native_tpm,
            t1_volume_native_tpm_in_mni,
        )
        from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
        from clinica.utils.pet import get_suvr_mask, read_psf_information
        from clinica.utils.stream import cprint
        from clinica.utils.ux import (
            print_groups_in_caps_directory,
            print_images_to_process,
        )

        # Check that group already exists
        if not exists(
                join(self.caps_directory, "groups",
                     f"group-{self.parameters['group_label']}")):
            print_groups_in_caps_directory(self.caps_directory)
            raise ClinicaException(
                f"Group {self.parameters['group_label']} does not exist. "
                "Did you run t1-volume or t1-volume-create-dartel pipeline?")

        # Tissues DataGrabber
        # ====================
        all_errors = []

        # Grab reference mask
        reference_mask_file = get_suvr_mask(
            self.parameters["suvr_reference_region"])

        # PET from BIDS directory
        try:
            pet_bids = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.bids_directory,
                bids_pet_nii(self.parameters["acq_label"]),
            )
        except ClinicaException as e:
            all_errors.append(e)

        # Native T1w-MRI
        try:
            t1w_bids = clinica_file_reader(self.subjects, self.sessions,
                                           self.bids_directory, T1W_NII)
        except ClinicaException as e:
            all_errors.append(e)

        # mask_tissues
        tissues_input = []
        for tissue_number in self.parameters["mask_tissues"]:
            try:
                current_file = clinica_file_reader(
                    self.subjects,
                    self.sessions,
                    self.caps_directory,
                    t1_volume_native_tpm_in_mni(tissue_number, False),
                )
                tissues_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)
        # tissues_input has a length of len(self.parameters['mask_tissues']). Each of its elements has a size of
        # len(self.subjects). We want the opposite: a list of size len(self.subjects) whose elements have a size of
        # len(self.parameters['mask_tissues']). The trick is to iterate over the elements with zip(*my_list).
        tissues_input_final = []
        for subject_tissue_list in zip(*tissues_input):
            tissues_input_final.append(subject_tissue_list)
        tissues_input = tissues_input_final
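        # Illustrative (hypothetical) shapes for two tissues and two subjects:
        #   before: [[tissue1_sub1, tissue1_sub2], [tissue2_sub1, tissue2_sub2]]
        #   after:  [(tissue1_sub1, tissue2_sub1), (tissue1_sub2, tissue2_sub2)]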

        # Flowfields
        try:
            flowfields_caps = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.caps_directory,
                t1_volume_deformation_to_template(
                    self.parameters["group_label"]),
            )
        except ClinicaException as e:
            all_errors.append(e)

        # Dartel Template
        try:
            final_template = clinica_group_reader(
                self.caps_directory,
                t1_volume_final_group_template(self.parameters["group_label"]),
            )
        except ClinicaException as e:
            all_errors.append(e)

        if self.parameters["pvc_psf_tsv"] is not None:
            iterables_psf = read_psf_information(
                self.parameters["pvc_psf_tsv"],
                self.subjects,
                self.sessions,
                self.parameters["acq_label"],
            )
            self.parameters["apply_pvc"] = True
        else:
            iterables_psf = [[]] * len(self.subjects)
            self.parameters["apply_pvc"] = False

        if self.parameters["apply_pvc"]:
            # pvc tissues input
            pvc_tissues_input = []
            for tissue_number in self.parameters["pvc_mask_tissues"]:
                try:
                    current_file = clinica_file_reader(
                        self.subjects,
                        self.sessions,
                        self.caps_directory,
                        t1_volume_native_tpm(tissue_number),
                    )
                    pvc_tissues_input.append(current_file)
                except ClinicaException as e:
                    all_errors.append(e)

            if len(all_errors) == 0:
                pvc_tissues_input_final = []
                for subject_tissue_list in zip(*pvc_tissues_input):
                    pvc_tissues_input_final.append(subject_tissue_list)
                pvc_tissues_input = pvc_tissues_input_final
        else:
            pvc_tissues_input = []

        if len(all_errors) > 0:
            error_message = "Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n"
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaException(error_message)

        check_relative_volume_location_in_world_coordinate_system(
            "T1w-MRI",
            t1w_bids,
            self.parameters["acq_label"] + " PET",
            pet_bids,
            self.bids_directory,
            self.parameters["acq_label"],
            skip_question=self.parameters["skip_question"],
        )

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("List available in %s" %
                   os.path.join(folder_participants_tsv, "participants.tsv"))
            cprint(
                "The pipeline will last approximately 10 minutes per image.")

        read_input_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                              mandatory_inputs=True),
            iterables=[
                ("pet_image", pet_bids),
                ("t1_image_native", t1w_bids),
                ("mask_tissues", tissues_input),
                ("psf", iterables_psf),
                ("flow_fields", flowfields_caps),
                ("pvc_mask_tissues", pvc_tissues_input),
            ],
            synchronize=True,
        )

        read_input_node.inputs.reference_mask = reference_mask_file
        read_input_node.inputs.dartel_template = final_template

        # fmt: off
        self.connect([(read_input_node, self.input_node,
                       [("pet_image", "pet_image"),
                        ("t1_image_native", "t1_image_native"),
                        ("mask_tissues", "mask_tissues"),
                        ("flow_fields", "flow_fields"),
                        ("dartel_template", "dartel_template"),
                        ("reference_mask", "reference_mask"),
                        ("psf", "psf"),
                        ("pvc_mask_tissues", "pvc_mask_tissues")])])
        # fmt: on
Example No. 22
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os
        from colorama import Fore
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.iotools.utils.data_handling import check_volume_location_in_world_coordinate_system
        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.filemanip import extract_subjects_sessions_from_filename, save_participants_sessions
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.input_files import T1W_NII
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process, print_begin_image

        gic = '*'
        if self.parameters['group_id_caps'] is not None:
            gic = self.parameters['group_id_caps']

        all_errors = []
        if self.parameters['feature_type'] == 'fdg':
            try:
                input_files = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory, {
                        'pattern':
                        '*_pet_space-Ixi549Space_suvr-pons_mask-brain_fwhm-' +
                        str(self.parameters['full_width_at_half_maximum']) +
                        'mm_pet.nii*',
                        'description':
                        'pons normalized FDG PET image in MNI space (brain masked)',
                        'needed_pipeline':
                        'pet-volume'
                    })
            except ClinicaException as e:
                all_errors.append(e)
        elif self.parameters['feature_type'] == 'graymatter':
            try:
                input_files = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory, {
                        'pattern':
                        't1/spm/dartel/group-' + gic +
                        '/*_T1w_segm-graymatter_space-Ixi549Space_modulated-on_fwhm-'
                        + str(self.parameters['full_width_at_half_maximum']) +
                        'mm_probability.nii.*',
                        'description':
                        'probability map of gray matter segmentation based on T1w image in MNI space',
                        'needed_pipeline':
                        't1-volume or t1-volume-existing-template'
                    })
            except ClinicaException as e:
                all_errors.append(e)

        else:
            if not self.parameters['custom_files']:
                raise ClinicaException(
                    Fore.RED +
                    '[Error] You did not specify the --custom_files flag in the command line for the feature type '
                    + Fore.BLUE + self.parameters['feature_type'] + Fore.RED +
                    '! Clinica cannot know which file to use in your analysis. Type:\n\t' +
                    Fore.BLUE + 'clinica run statistics-volume\n' + Fore.RED +
                    'to get help on how to use the command line.' +
                    Fore.RESET)
            try:
                # If custom files are grabbed, the FWHM information is irrelevant and should not appear in the final filenames
                self.parameters['full_width_at_half_maximum'] = None
                input_files = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory, {
                        'pattern': self.parameters['custom_files'],
                        'description': 'custom file provided by user'
                    })
            except ClinicaException as e:
                all_errors.append(e)

        if len(all_errors) > 0:
            error_message = 'Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n'
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaException(error_message)

        read_parameters_node = npe.Node(name="LoadingCLIArguments",
                                        interface=nutil.IdentityInterface(
                                            fields=self.get_input_fields(),
                                            mandatory_inputs=True),
                                        synchronize=True)
        read_parameters_node.inputs.input_files = input_files

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                'The pipeline will last a few minutes. Images generated by SPM will pop up during the pipeline.'
            )
            print_begin_image('group-' + self.parameters['group_id'])

        self.connect([(read_parameters_node, self.input_node,
                       [('input_files', 'input_files')])])
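For reference, a hedged sketch of the query dictionary consumed by clinica_file_reader in this example, rebuilt with f-strings; the group label and FWHM value are placeholders.

fwhm = 8      # placeholder full width at half maximum, in mm
gic = "AD25"  # placeholder group label from the CAPS directory

graymatter_query = {
    "pattern": (
        f"t1/spm/dartel/group-{gic}/"
        f"*_T1w_segm-graymatter_space-Ixi549Space_modulated-on_fwhm-{fwhm}mm_probability.nii.*"
    ),
    "description": "probability map of gray matter segmentation based on T1w image in MNI space",
    "needed_pipeline": "t1-volume or t1-volume-existing-template",
}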