Exemplo n.º 1
0
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os

        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Delegate to the appropriate builder depending on study design.
        if self.parameters["longitudinal"]:
            self.build_input_node_longitudinal()
        else:
            self.build_input_node_cross_sectional()

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        participants_dir = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions, participants_dir)

        if self.subjects:
            print_images_to_process(self.subjects, self.sessions)
            tsv_path = os.path.join(participants_dir, "participants.tsv")
            cprint("List available in %s" % tsv_path)
            cprint("The pipeline will last approximately a few hours per image.")
Exemplo n.º 2
0
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.input_files import (
            DWI_BVAL,
            DWI_BVEC,
            DWI_JSON,
            DWI_NII,
            T1W_NII,
        )
        from clinica.utils.inputs import clinica_list_of_files_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Grab, per subject/session: T1w, DWI JSON sidecar, DWI image,
        # bvec and bval files (in that order).
        bids_files = clinica_list_of_files_reader(
            self.subjects,
            self.sessions,
            self.bids_directory,
            [T1W_NII, DWI_JSON, DWI_NII, DWI_BVEC, DWI_BVAL],
            raise_exception=True,
        )

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        participants_dir = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions, participants_dir)

        if self.subjects:
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                f"List available in {os.path.join(participants_dir, 'participants.tsv')}"
            )
            cprint(
                "Computational time will depend of the number of volumes in your DWI dataset and the use of CUDA."
            )

        # Synchronized iterables keep the five file lists aligned per image.
        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ("t1w", bids_files[0]),
                ("dwi_json", bids_files[1]),
                ("dwi", bids_files[2]),
                ("bvec", bids_files[3]),
                ("bval", bids_files[4]),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        self.connect([
            (read_node, self.input_node, [
                ("t1w", "t1w"),
                ("dwi", "dwi"),
                ("dwi_json", "dwi_json"),
                ("bvec", "bvec"),
                ("bval", "bval"),
            ]),
        ])
Exemplo n.º 3
0
    def run_command(self, args):
        """Run the pipeline with defined args.

        Chains the four t1-volume sub-pipelines (tissue segmentation,
        DARTEL template creation, DARTEL-to-MNI, parcellation) on the
        same arguments, in order.
        """
        import os
        import datetime
        from colorama import Fore
        from ..t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_cli import T1VolumeTissueSegmentationCLI
        from ..t1_volume_create_dartel.t1_volume_create_dartel_cli import T1VolumeCreateDartelCLI
        from ..t1_volume_dartel2mni.t1_volume_dartel2mni_cli import T1VolumeDartel2MNICLI
        from ..t1_volume_parcellation.t1_volume_parcellation_cli import T1VolumeParcellationCLI
        from clinica.utils.check_dependency import verify_cat12_atlases
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.participant import get_subject_session_list
        from clinica.utils.stream import cprint

        # If the user wants to use any of the atlases of CAT12 and has not installed it, we just remove it from the list
        # of the computed atlases
        args.atlases = verify_cat12_atlases(args.atlases)

        highlight = (Fore.BLUE, Fore.RESET, Fore.BLUE, Fore.RESET, Fore.BLUE,
                     Fore.RESET, Fore.BLUE, Fore.RESET)
        cprint(
            'The t1-volume pipeline is divided into 4 parts:\n'
            '\t%st1-volume-tissue-segmentation pipeline%s: Tissue segmentation, bias correction and spatial normalization to MNI space\n'
            '\t%st1-volume-create-dartel pipeline%s: Inter-subject registration with the creation of a new DARTEL template\n'
            '\t%st1-volume-dartel2mni pipeline%s: DARTEL template to MNI\n'
            '\t%st1-volume-parcellation pipeline%s: Atlas statistics' % highlight)

        # Build a timestamped default subjects/sessions TSV when none was given.
        if not self.absolute_path(args.subjects_sessions_tsv):
            session_ids, participant_ids = get_subject_session_list(
                self.absolute_path(args.bids_directory), None, True, False)
            timestamp = datetime.datetime.now().strftime('%H%M%S')
            args.subjects_sessions_tsv = timestamp + '_participants.tsv'
            save_participants_sessions(participant_ids, session_ids,
                                       os.getcwd(), args.subjects_sessions_tsv)

        cprint('%s\nPart 1/4: Running t1-volume-segmentation pipeline%s' %
               (Fore.BLUE, Fore.RESET))
        T1VolumeTissueSegmentationCLI().run_command(args)

        cprint('%s\nPart 2/4: Running t1-volume-create-dartel pipeline%s' %
               (Fore.BLUE, Fore.RESET))
        T1VolumeCreateDartelCLI().run_command(args)

        cprint('%s\nPart 3/4: Running t1-volume-dartel2mni pipeline%s' %
               (Fore.BLUE, Fore.RESET))
        T1VolumeDartel2MNICLI().run_command(args)

        cprint('%s\nPart 4/4: Running t1-volume-parcellation pipeline%s' %
               (Fore.BLUE, Fore.RESET))
        T1VolumeParcellationCLI().run_command(args)
Exemplo n.º 4
0
    def build_input_node(self) -> None:
        """Build and connect an input node to the pipeline.

        Gathers, for every subject/session pair:
          - the PET image and the native T1w-MRI from the BIDS directory,
          - the requested tissue probability maps (MNI space), the DARTEL
            flow fields and the final group template from the CAPS directory,
          - optionally, PSF information and native tissue maps when partial
            volume correction (PVC) is requested via ``pvc_psf_tsv``.

        File-reading errors are accumulated and raised together as a single
        ClinicaException so that every missing file is reported at once.

        Raises:
            ClinicaException: If the group folder does not exist in CAPS, or
                if any required BIDS/CAPS file could not be read.
        """
        import os
        from os.path import exists, join

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.iotools.utils.data_handling import (
            check_relative_volume_location_in_world_coordinate_system, )
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.input_files import (
            T1W_NII,
            bids_pet_nii,
            t1_volume_deformation_to_template,
            t1_volume_final_group_template,
            t1_volume_native_tpm,
            t1_volume_native_tpm_in_mni,
        )
        from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
        from clinica.utils.pet import get_suvr_mask, read_psf_information
        from clinica.utils.stream import cprint
        from clinica.utils.ux import (
            print_groups_in_caps_directory,
            print_images_to_process,
        )

        # Check that group already exists
        # (the group folder is produced by t1-volume / t1-volume-create-dartel).
        if not exists(
                join(self.caps_directory, "groups",
                     f"group-{self.parameters['group_label']}")):
            print_groups_in_caps_directory(self.caps_directory)
            raise ClinicaException(
                f"Group {self.parameters['group_label']} does not exist. "
                "Did you run t1-volume or t1-volume-create-dartel pipeline?")

        # Tissues DataGrabber
        # ====================
        # Read errors are collected here and reported all together below.
        all_errors = []

        # Grab reference mask
        reference_mask_file = get_suvr_mask(
            self.parameters["suvr_reference_region"])

        # PET from BIDS directory
        try:
            pet_bids = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.bids_directory,
                bids_pet_nii(self.parameters["acq_label"]),
            )
        except ClinicaException as e:
            all_errors.append(e)

        # Native T1w-MRI
        try:
            t1w_bids = clinica_file_reader(self.subjects, self.sessions,
                                           self.bids_directory, T1W_NII)
        except ClinicaException as e:
            all_errors.append(e)

        # mask_tissues: one CAPS read per requested tissue class.
        tissues_input = []
        for tissue_number in self.parameters["mask_tissues"]:
            try:
                current_file = clinica_file_reader(
                    self.subjects,
                    self.sessions,
                    self.caps_directory,
                    t1_volume_native_tpm_in_mni(tissue_number, False),
                )
                tissues_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)
        # Tissues_input has a length of len(self.parameters['mask_tissues']). Each of these elements has a size of
        # len(self.subjects). We want the opposite: a list of size len(self.subjects) whose elements have a size of
        # len(self.parameters['mask_tissues']. The trick is to iter on elements with zip(*my_list)
        tissues_input_final = []
        for subject_tissue_list in zip(*tissues_input):
            tissues_input_final.append(subject_tissue_list)
        tissues_input = tissues_input_final

        # Flowfields: per-subject DARTEL deformations to the group template.
        try:
            flowfields_caps = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.caps_directory,
                t1_volume_deformation_to_template(
                    self.parameters["group_label"]),
            )
        except ClinicaException as e:
            all_errors.append(e)

        # Dartel Template: a single group-level file.
        try:
            final_template = clinica_group_reader(
                self.caps_directory,
                t1_volume_final_group_template(self.parameters["group_label"]),
            )
        except ClinicaException as e:
            all_errors.append(e)

        # PVC is enabled iff a PSF TSV was supplied; otherwise the PSF
        # iterable is padded with empty entries to stay in sync per subject.
        if self.parameters["pvc_psf_tsv"] is not None:
            iterables_psf = read_psf_information(
                self.parameters["pvc_psf_tsv"],
                self.subjects,
                self.sessions,
                self.parameters["acq_label"],
            )
            self.parameters["apply_pvc"] = True
        else:
            iterables_psf = [[]] * len(self.subjects)
            self.parameters["apply_pvc"] = False

        if self.parameters["apply_pvc"]:
            # pvc tissues input: native-space tissue maps for PVC.
            pvc_tissues_input = []
            for tissue_number in self.parameters["pvc_mask_tissues"]:
                try:
                    current_file = clinica_file_reader(
                        self.subjects,
                        self.sessions,
                        self.caps_directory,
                        t1_volume_native_tpm(tissue_number),
                    )
                    pvc_tissues_input.append(current_file)
                except ClinicaException as e:
                    all_errors.append(e)

            # Transpose (tissue-major -> subject-major) only if every read
            # succeeded; the errors are reported below.
            if len(all_errors) == 0:
                pvc_tissues_input_final = []
                for subject_tissue_list in zip(*pvc_tissues_input):
                    pvc_tissues_input_final.append(subject_tissue_list)
                pvc_tissues_input = pvc_tissues_input_final
        else:
            pvc_tissues_input = []

        # Report every accumulated read failure in one exception.
        if len(all_errors) > 0:
            error_message = "Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n"
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaException(error_message)

        # Sanity check: PET and T1w volumes should share a coherent world
        # coordinate system before registration.
        check_relative_volume_location_in_world_coordinate_system(
            "T1w-MRI",
            t1w_bids,
            self.parameters["acq_label"] + " PET",
            pet_bids,
            self.bids_directory,
            self.parameters["acq_label"],
            skip_question=self.parameters["skip_question"],
        )

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("List available in %s" %
                   os.path.join(folder_participants_tsv, "participants.tsv"))
            cprint(
                "The pipeline will last approximately 10 minutes per image.")

        # Synchronized iterables keep all per-subject lists aligned.
        read_input_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                              mandatory_inputs=True),
            iterables=[
                ("pet_image", pet_bids),
                ("t1_image_native", t1w_bids),
                ("mask_tissues", tissues_input),
                ("psf", iterables_psf),
                ("flow_fields", flowfields_caps),
                ("pvc_mask_tissues", pvc_tissues_input),
            ],
            synchronize=True,
        )

        # Group-level inputs are constant across iterations, set directly.
        read_input_node.inputs.reference_mask = reference_mask_file
        read_input_node.inputs.dartel_template = final_template

        # fmt: off
        self.connect([(read_input_node, self.input_node,
                       [("pet_image", "pet_image"),
                        ("t1_image_native", "t1_image_native"),
                        ("mask_tissues", "mask_tissues"),
                        ("flow_fields", "flow_fields"),
                        ("dartel_template", "dartel_template"),
                        ("reference_mask", "reference_mask"), ("psf", "psf"),
                        ("pvc_mask_tissues", "pvc_mask_tissues")])])
Exemplo n.º 5
0
    def build_input_node(self):
        """Build and connect an input node to the pipeline."""
        import os

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        import clinica.utils.input_files as input_files
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.inputs import clinica_list_of_files_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Read the preprocessed DWI dataset from CAPS: image, gradients
        # (bvec/bval) and brain mask, one list per file type.
        dwi_nii, dwi_bvec, dwi_bval, brain_mask = clinica_list_of_files_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            [
                input_files.DWI_PREPROC_NII,
                input_files.DWI_PREPROC_BVEC,
                input_files.DWI_PREPROC_BVAL,
                input_files.DWI_PREPROC_BRAINMASK,
            ],
            raise_exception=True,
        )

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        participants_dir = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions, participants_dir)

        if self.subjects:
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                f"List available in {os.path.join(participants_dir, 'participants.tsv')}"
            )
            cprint(
                "The pipeline will last approximately 20 minutes per image.")

        read_input_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(
                fields=self.get_input_fields(), mandatory_inputs=True
            ),
            iterables=[
                ("preproc_dwi", dwi_nii),
                ("preproc_bvec", dwi_bvec),
                ("preproc_bval", dwi_bval),
                ("b0_mask", brain_mask),
            ],
            synchronize=True,
        )

        # Forward each field to the pipeline's input node unchanged.
        self.connect([
            (read_input_node, self.input_node, [("b0_mask", "b0_mask")]),
            (read_input_node, self.input_node, [("preproc_dwi", "preproc_dwi")]),
            (read_input_node, self.input_node, [("preproc_bval", "preproc_bval")]),
            (read_input_node, self.input_node, [("preproc_bvec", "preproc_bvec")]),
        ])
Exemplo n.º 6
0
    def build_input_node(self) -> None:
        """Build and connect an input node to the pipeline.

        Collects, for every subject/session pair, the PET image and native
        T1w-MRI (BIDS), the tissue maps in MNI space, DARTEL flow fields and
        the final group template (CAPS), plus optional PSF/PVC inputs, then
        exposes them through an IdentityInterface node wired into the
        pipeline's input node.

        File-reading errors are accumulated and raised together as a single
        ClinicaException so that every missing file is reported at once.

        Raises:
            ClinicaException: If the group folder does not exist in CAPS, or
                if any required BIDS/CAPS file could not be read.
        """
        import os
        from os.path import join, exists
        from colorama import Fore
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
        from clinica.utils.input_files import (
            t1_volume_final_group_template, t1_volume_native_tpm,
            t1_volume_native_tpm_in_mni, t1_volume_deformation_to_template,
            bids_pet_nii, T1W_NII)
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process
        from clinica.iotools.utils.data_handling import check_relative_volume_location_in_world_coordinate_system
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.pet import read_psf_information, get_suvr_mask
        from clinica.utils.stream import cprint

        # Check that group already exists
        # (the group folder is produced by t1-volume / t1-volume-create-dartel).
        if not exists(
                join(self.caps_directory, 'groups',
                     'group-' + self.parameters['group_label'])):
            print_groups_in_caps_directory(self.caps_directory)
            raise ClinicaException(
                '%sGroup %s does not exist. Did you run t1-volume or t1-volume-create-dartel pipeline?%s'
                % (Fore.RED, self.parameters['group_label'], Fore.RESET))

        # Tissues DataGrabber
        # ====================
        # Read errors are collected here and reported all together below.
        all_errors = []

        # Grab reference mask
        reference_mask_file = get_suvr_mask(
            self.parameters['suvr_reference_region'])

        # PET from BIDS directory
        try:
            pet_bids = clinica_file_reader(
                self.subjects, self.sessions, self.bids_directory,
                bids_pet_nii(self.parameters['acq_label']))
        except ClinicaException as e:
            all_errors.append(e)

        # Native T1w-MRI
        try:
            t1w_bids = clinica_file_reader(self.subjects, self.sessions,
                                           self.bids_directory, T1W_NII)
        except ClinicaException as e:
            all_errors.append(e)

        # mask_tissues: one CAPS read per requested tissue class.
        tissues_input = []
        for tissue_number in self.parameters['mask_tissues']:
            try:
                current_file = clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory,
                    t1_volume_native_tpm_in_mni(tissue_number, False))
                tissues_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)
        # Tissues_input has a length of len(self.parameters['mask_tissues']). Each of these elements has a size of
        # len(self.subjects). We want the opposite: a list of size len(self.subjects) whose elements have a size of
        # len(self.parameters['mask_tissues']. The trick is to iter on elements with zip(*my_list)
        tissues_input_final = []
        for subject_tissue_list in zip(*tissues_input):
            tissues_input_final.append(subject_tissue_list)
        tissues_input = tissues_input_final

        # Flowfields: per-subject DARTEL deformations to the group template.
        try:
            flowfields_caps = clinica_file_reader(
                self.subjects, self.sessions, self.caps_directory,
                t1_volume_deformation_to_template(
                    self.parameters['group_label']))
        except ClinicaException as e:
            all_errors.append(e)

        # Dartel Template: a single group-level file.
        try:
            final_template = clinica_group_reader(
                self.caps_directory,
                t1_volume_final_group_template(self.parameters['group_label']))
        except ClinicaException as e:
            all_errors.append(e)

        # PVC is enabled iff a PSF TSV was supplied; otherwise the PSF
        # iterable is padded with empty entries to stay in sync per subject.
        if self.parameters['pvc_psf_tsv'] is not None:
            iterables_psf = read_psf_information(
                self.parameters['pvc_psf_tsv'], self.subjects, self.sessions)
            self.parameters['apply_pvc'] = True
        else:
            iterables_psf = [[]] * len(self.subjects)
            self.parameters['apply_pvc'] = False

        if self.parameters['apply_pvc']:
            # pvc tissues input: native-space tissue maps for PVC.
            pvc_tissues_input = []
            for tissue_number in self.parameters['pvc_mask_tissues']:
                try:
                    current_file = clinica_file_reader(
                        self.subjects, self.sessions, self.caps_directory,
                        t1_volume_native_tpm(tissue_number))
                    pvc_tissues_input.append(current_file)
                except ClinicaException as e:
                    all_errors.append(e)

            # Transpose (tissue-major -> subject-major) only if every read
            # succeeded; the errors are reported below.
            if len(all_errors) == 0:
                pvc_tissues_input_final = []
                for subject_tissue_list in zip(*pvc_tissues_input):
                    pvc_tissues_input_final.append(subject_tissue_list)
                pvc_tissues_input = pvc_tissues_input_final
        else:
            pvc_tissues_input = []

        # Report every accumulated read failure in one exception.
        if len(all_errors) > 0:
            error_message = 'Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n'
            for msg in all_errors:
                error_message += str(msg)
            raise ClinicaException(error_message)

        # Sanity check: PET and T1w volumes should share a coherent world
        # coordinate system before registration.
        check_relative_volume_location_in_world_coordinate_system(
            'T1w-MRI', t1w_bids, self.parameters['acq_label'] + ' PET',
            pet_bids, self.bids_directory, self.parameters['acq_label'])

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint('List available in %s' %
                   os.path.join(folder_participants_tsv, 'participants.tsv'))
            cprint(
                'The pipeline will last approximately 10 minutes per image.')

        # Synchronized iterables keep all per-subject lists aligned.
        read_input_node = npe.Node(
            name="LoadingCLIArguments",
            interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                              mandatory_inputs=True),
            iterables=[('pet_image', pet_bids), ('t1_image_native', t1w_bids),
                       ('mask_tissues', tissues_input), ('psf', iterables_psf),
                       ('flow_fields', flowfields_caps),
                       ('pvc_mask_tissues', pvc_tissues_input)],
            synchronize=True)

        # Group-level inputs are constant across iterations, set directly.
        read_input_node.inputs.reference_mask = reference_mask_file
        read_input_node.inputs.dartel_template = final_template

        self.connect([(read_input_node, self.input_node,
                       [('pet_image', 'pet_image'),
                        ('t1_image_native', 't1_image_native'),
                        ('mask_tissues', 'mask_tissues'),
                        ('flow_fields', 'flow_fields'),
                        ('dartel_template', 'dartel_template'),
                        ('reference_mask', 'reference_mask'), ('psf', 'psf'),
                        ('pvc_mask_tissues', 'pvc_mask_tissues')])])
Exemplo n.º 7
0
    def build_input_node(self) -> None:
        """Build and connect an input node to the pipeline.

        Reads t1-freesurfer outputs (white-matter mask, Desikan/Destrieux
        parcellations, brain image) and dwi-preprocessing outputs (DWI image,
        brain mask, bvec/bval) from CAPS, verifies that all DWI files share
        the same space ("b0" or "T1w"), and wires the matching file lists
        into the pipeline's input node.

        Raises:
            ClinicaCAPSError: If the DWI files are not all in the same space,
                or if the detected space is neither "b0" nor "T1w".
        """
        import os
        import re

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        import clinica.utils.input_files as input_files
        from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.inputs import clinica_list_of_files_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Read CAPS files
        list_caps_files = clinica_list_of_files_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            [
                # Inputs from t1-freesurfer pipeline
                input_files.T1_FS_WM,  # list_caps_files[0]
                input_files.T1_FS_DESIKAN,  # list_caps_files[1]
                input_files.T1_FS_DESTRIEUX,  # list_caps_files[2]
                input_files.T1_FS_BRAIN,  # list_caps_files[3]
                # Inputs from dwi-preprocessing pipeline
                input_files.DWI_PREPROC_NII,  # list_caps_files[4]
                input_files.DWI_PREPROC_BRAINMASK,  # list_caps_files[5]
                input_files.DWI_PREPROC_BVEC,  # list_caps_files[6]
                input_files.DWI_PREPROC_BVAL,  # list_caps_files[7]
            ],
            raise_exception=True,
        )

        # Check space of DWI dataset
        # Extract the BIDS "space-<X>" entity from each preprocessed DWI
        # filename (e.g. "..._space-b0_preproc.nii.gz" -> "b0").
        dwi_file_spaces = [
            re.search(".*_space-(.*)_preproc.nii.*", file, re.IGNORECASE).group(1)
            for file in list_caps_files[4]
        ]

        # Return an error if all the DWI files are not in the same space
        if any(a != dwi_file_spaces[0] for a in dwi_file_spaces):
            raise ClinicaCAPSError(
                "Preprocessed DWI files are not all in the same space. "
                "Please process them separately using the appropriate subjects/sessions `.tsv` file (-tsv option)."
            )
        # Pair Desikan and Destrieux parcellations per subject.
        list_atlas_files = [
            [aparc_aseg, aparc_aseg_a2009]
            for aparc_aseg, aparc_aseg_a2009 in zip(
                list_caps_files[1], list_caps_files[2]
            )
        ]

        # Pair bvec and bval files per subject (FSL gradient convention).
        list_grad_fsl = [
            (bvec, bval) for bvec, bval in zip(list_caps_files[6], list_caps_files[7])
        ]

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(
            self.subjects, self.sessions, folder_participants_tsv
        )

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint(
                "Computational time will depend of the number of volumes in your DWI dataset and "
                "the number of streamlines you selected."
            )

        # Wire inputs according to the detected DWI space: the "b0" branch
        # additionally feeds the T1 brain image; the "T1w" branch does not.
        if dwi_file_spaces[0] == "b0":
            self.parameters["dwi_space"] = "b0"
            read_node = npe.Node(
                name="ReadingFiles",
                iterables=[
                    ("wm_mask_file", list_caps_files[0]),
                    ("t1_brain_file", list_caps_files[3]),
                    ("dwi_file", list_caps_files[4]),
                    ("dwi_brainmask_file", list_caps_files[5]),
                    ("grad_fsl", list_grad_fsl),
                    ("atlas_files", list_atlas_files),
                ],
                synchronize=True,
                interface=nutil.IdentityInterface(fields=self.get_input_fields()),
            )
            # fmt: off
            self.connect(
                [
                    (read_node, self.input_node, [("t1_brain_file", "t1_brain_file")]),
                    (read_node, self.input_node, [("wm_mask_file", "wm_mask_file")]),
                    (read_node, self.input_node, [("dwi_file", "dwi_file")]),
                    (read_node, self.input_node, [("dwi_brainmask_file", "dwi_brainmask_file")]),
                    (read_node, self.input_node, [("grad_fsl", "grad_fsl")]),
                    (read_node, self.input_node, [("atlas_files", "atlas_files")]),
                ]
            )
            # fmt: on
        elif dwi_file_spaces[0] == "T1w":
            self.parameters["dwi_space"] = "T1w"
            read_node = npe.Node(
                name="ReadingFiles",
                iterables=[
                    ("wm_mask_file", list_caps_files[0]),
                    ("dwi_file", list_caps_files[4]),
                    ("dwi_brainmask_file", list_caps_files[5]),
                    ("grad_fsl", list_grad_fsl),
                    ("atlas_files", list_atlas_files),
                ],
                synchronize=True,
                interface=nutil.IdentityInterface(fields=self.get_input_fields()),
            )
            # fmt: off
            self.connect(
                [
                    (read_node, self.input_node, [("wm_mask_file", "wm_mask_file")]),
                    (read_node, self.input_node, [("dwi_file", "dwi_file")]),
                    (read_node, self.input_node, [("dwi_brainmask_file", "dwi_brainmask_file")]),
                    (read_node, self.input_node, [("grad_fsl", "grad_fsl")]),
                    (read_node, self.input_node, [("atlas_files", "atlas_files")]),
                ]
            )
            # fmt: on
        else:
            raise ClinicaCAPSError(
                "Bad preprocessed DWI space. Please check your CAPS folder."
            )
Exemplo n.º 8
0
    def build_input_node(self) -> None:
        """Build and connect an input node to the pipeline.

        Detects images already processed in the CAPS directory and, unless
        ``overwrite_caps`` is set, removes them from ``self.subjects`` /
        ``self.sessions`` so they are skipped. Then reads the T1w images from
        BIDS and feeds them into the pipeline's input node.

        Raise:
            ClinicaBIDSError: If there are duplicated files or missing files for any subject
        """
        import os

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from colorama import Fore

        from clinica.iotools.utils.data_handling import (
            check_volume_location_in_world_coordinate_system, )
        from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
        from clinica.utils.filemanip import (
            extract_subjects_sessions_from_filename,
            save_participants_sessions,
        )
        from clinica.utils.input_files import T1W_NII
        from clinica.utils.inputs import clinica_file_reader
        from clinica.utils.stream import cprint
        from clinica.utils.ux import print_images_to_process

        # Display image(s) already present in CAPS folder
        # ===============================================
        processed_ids = self.get_processed_images(self.caps_directory,
                                                  self.subjects, self.sessions)
        if len(processed_ids) > 0:
            cprint(f"{Fore.YELLOW}Clinica found {len(processed_ids)} image(s) "
                   f"already processed in CAPS directory:{Fore.RESET}")
            for image_id in processed_ids:
                cprint(
                    f"{Fore.YELLOW}\t{image_id.replace('_', ' | ')}{Fore.RESET}"
                )
            if self.overwrite_caps:
                # Overwrite mode: keep all subjects, existing outputs will be
                # recreated.
                output_folder = "<CAPS>/subjects/<participant_id>/<session_id>/t1/freesurfer_cross_sectional"
                cprint(
                    f"{Fore.YELLOW}\nOutput folders in {output_folder} will be recreated.\n{Fore.RESET}"
                )
            else:
                # Skip mode: drop already-processed "<participant>_<session>"
                # ids from the work list via a set difference.
                # NOTE(review): set() does not preserve order — the resulting
                # subject/session order may differ from the input TSV; confirm
                # downstream code does not rely on ordering.
                cprint(
                    f"{Fore.YELLOW}\nImage(s) will be ignored by Clinica.\n{Fore.RESET}"
                )
                input_ids = [
                    p_id + "_" + s_id
                    for p_id, s_id in zip(self.subjects, self.sessions)
                ]
                to_process_ids = list(set(input_ids) - set(processed_ids))
                self.subjects, self.sessions = extract_subjects_sessions_from_filename(
                    to_process_ids)

        # Inputs from anat/ folder
        # ========================
        # T1w file:
        try:
            t1w_files = clinica_file_reader(self.subjects, self.sessions,
                                            self.bids_directory, T1W_NII)
        except ClinicaException as e:
            err_msg = (
                "Clinica faced error(s) while trying to read files in your BIDS directory.\n"
                + str(e))
            raise ClinicaBIDSError(err_msg)

        # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
        folder_participants_tsv = os.path.join(self.base_dir, self.name)
        save_participants_sessions(self.subjects, self.sessions,
                                   folder_participants_tsv)

        if len(self.subjects):
            print_images_to_process(self.subjects, self.sessions)
            cprint("List available in %s" %
                   os.path.join(folder_participants_tsv, "participants.tsv"))
            cprint("The pipeline will last approximately 10 hours per image.")

        read_node = npe.Node(
            name="ReadingFiles",
            iterables=[
                ("t1w", t1w_files),
            ],
            synchronize=True,
            interface=nutil.IdentityInterface(fields=self.get_input_fields()),
        )
        # Warn about T1w volumes whose world coordinates look off-center.
        check_volume_location_in_world_coordinate_system(
            t1w_files, self.bids_directory)
        self.connect([
            (read_node, self.input_node, [("t1w", "t1w")]),
        ])
Exemplo n.º 9
0
def cli(
    ctx: click.Context,
    bids_directory: str,
    caps_directory: str,
    group_label: str,
    smooth: List[int] = (8,),
    tissue_classes: List[int] = (1, 2, 3),
    tissue_probability_maps: Optional[str] = None,
    dont_save_warped_unmodulated: bool = False,
    save_warped_modulated: bool = False,
    dartel_tissues: List[int] = (1, 2, 3),
    tissues: List[int] = (1, 2, 3),
    modulate: bool = True,
    voxel_size: Tuple[float, float, float] = (1.5, 1.5, 1.5),
    subjects_sessions_tsv: Optional[str] = None,
    working_directory: Optional[str] = None,
    n_procs: Optional[int] = None,
    yes: bool = False,
) -> None:
    """Volume-based processing of T1-weighted MR images.

       GROUP_LABEL is a user-defined identifier to target a specific group of subjects.

    https://aramislab.paris.inria.fr/clinica/docs/public/latest/Pipelines/T1_Volume/
    """
    import datetime
    import os

    from clinica.utils.filemanip import save_participants_sessions
    from clinica.utils.participant import get_subject_session_list
    from clinica.utils.stream import cprint

    from ..t1_volume_create_dartel import t1_volume_create_dartel_cli
    from ..t1_volume_dartel2mni import t1_volume_dartel2mni_cli
    from ..t1_volume_parcellation import t1_volume_parcellation_cli
    from ..t1_volume_tissue_segmentation import t1_volume_tissue_segmentation_cli

    cprint(
        "The t1-volume pipeline is divided into 4 parts:\n"
        "\tt1-volume-tissue-segmentation pipeline: "
        "Tissue segmentation, bias correction and spatial normalization to MNI space\n"
        "\tt1-volume-create-dartel pipeline: "
        "Inter-subject registration with the creation of a new DARTEL template\n"
        "\tt1-volume-dartel2mni pipeline: "
        "DARTEL template to MNI\n"
        "\tt1-volume-parcellation pipeline: "
        "Atlas statistics"
    )

    # Without an explicit TSV, snapshot every subject/session found in the
    # BIDS directory into a timestamped file so all four parts share it.
    if not subjects_sessions_tsv:
        session_ids, participant_ids = get_subject_session_list(
            bids_directory, None, True, False
        )
        timestamp = datetime.datetime.now().strftime("%H%M%S")
        subjects_sessions_tsv = f"{timestamp}_participants.tsv"
        save_participants_sessions(
            participant_ids, session_ids, os.getcwd(), subjects_sessions_tsv
        )

    # Options forwarded to every sub-pipeline invocation.
    shared_options = {
        "subjects_sessions_tsv": subjects_sessions_tsv,
        "working_directory": working_directory,
        "n_procs": n_procs,
    }

    # (progress message, click command, command-specific options)
    steps = [
        (
            "Part 1/4: Running t1-volume-segmentation pipeline.",
            t1_volume_tissue_segmentation_cli.cli,
            {
                "bids_directory": bids_directory,
                "caps_directory": caps_directory,
                "tissue_classes": tissue_classes,
                "dartel_tissues": dartel_tissues,
                "tissue_probability_maps": tissue_probability_maps,
                "dont_save_warped_unmodulated": dont_save_warped_unmodulated,
                "save_warped_modulated": save_warped_modulated,
                "yes": yes,
            },
        ),
        (
            "Part 2/4: Running t1-volume-create-dartel pipeline.",
            t1_volume_create_dartel_cli.cli,
            {
                "bids_directory": bids_directory,
                "caps_directory": caps_directory,
                "group_label": group_label,
                "dartel_tissues": dartel_tissues,
            },
        ),
        (
            "Part 3/4: Running t1-volume-dartel2mni pipeline.",
            t1_volume_dartel2mni_cli.cli,
            {
                "bids_directory": bids_directory,
                "caps_directory": caps_directory,
                "group_label": group_label,
                "smooth": smooth,
                "tissues": tissues,
                "modulate": modulate,
                "voxel_size": voxel_size,
            },
        ),
        (
            "Part 4/4: Running t1-volume-parcellation pipeline.",
            t1_volume_parcellation_cli.cli,
            {
                "caps_directory": caps_directory,
                "group_label": group_label,
            },
        ),
    ]
    for message, command, options in steps:
        cprint(message)
        ctx.invoke(command, **shared_options, **options)
Exemplo n.º 10
0
    def run_command(self, args):
        """Execute the four t1-volume sub-pipelines in sequence with *args*."""
        import datetime
        import os

        from colorama import Fore

        from clinica.utils.filemanip import save_participants_sessions
        from clinica.utils.participant import get_subject_session_list
        from clinica.utils.stream import cprint

        from ..t1_volume_create_dartel.t1_volume_create_dartel_cli import (
            T1VolumeCreateDartelCLI, )
        from ..t1_volume_dartel2mni.t1_volume_dartel2mni_cli import (
            T1VolumeDartel2MNICLI, )
        from ..t1_volume_parcellation.t1_volume_parcellation_cli import (
            T1VolumeParcellationCLI, )
        from ..t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_cli import (
            T1VolumeTissueSegmentationCLI, )

        cprint(
            f"The t1-volume pipeline is divided into 4 parts:\n"
            f"\t{Fore.BLUE}t1-volume-tissue-segmentation pipeline{Fore.RESET}: "
            f"Tissue segmentation, bias correction and spatial normalization to MNI space\n"
            f"\t{Fore.BLUE}t1-volume-create-dartel pipeline{Fore.RESET}: "
            f"Inter-subject registration with the creation of a new DARTEL template\n"
            f"\t{Fore.BLUE}t1-volume-dartel2mni pipeline{Fore.RESET}: "
            f"DARTEL template to MNI\n"
            f"\t{Fore.BLUE}t1-volume-parcellation pipeline{Fore.RESET}: "
            f"Atlas statistics")

        # When no TSV was supplied, list every subject/session found in the
        # BIDS directory and persist it in the current working directory so
        # all four sub-pipelines consume the same cohort.
        if not self.absolute_path(args.subjects_sessions_tsv):
            session_ids, participant_ids = get_subject_session_list(
                self.absolute_path(args.bids_directory), None, True, False)
            timestamp = datetime.datetime.now().strftime("%H%M%S")
            args.subjects_sessions_tsv = timestamp + "_participants.tsv"
            save_participants_sessions(
                participant_ids, session_ids, os.getcwd(),
                args.subjects_sessions_tsv)

        # Run the sub-pipelines in order; each receives the same args object.
        sub_pipelines = (
            ("t1-volume-segmentation", T1VolumeTissueSegmentationCLI),
            ("t1-volume-create-dartel", T1VolumeCreateDartelCLI),
            ("t1-volume-dartel2mni", T1VolumeDartel2MNICLI),
            ("t1-volume-parcellation", T1VolumeParcellationCLI),
        )
        for part, (label, sub_cli_class) in enumerate(sub_pipelines, start=1):
            cprint(
                f"{Fore.BLUE}\nPart {part}/4: Running {label} pipeline{Fore.RESET}"
            )
            sub_cli_class().run_command(args)