def read_participant_tsv(tsv_file):
    """Extract participant IDs and session IDs from TSV file.

    Args:
        tsv_file: Path to a TSV file with participant_id and session_id columns.

    Returns:
        Tuple (participants, sessions) of lists of IDs, whitespace-stripped.

    Raise:
        ClinicaException if tsv_file is not a file
        ClinicaException if participant_id or session_id column is missing from TSV file
    """
    import os

    import pandas as pd
    from colorama import Fore

    from clinica.utils.exceptions import ClinicaException

    if not os.path.isfile(tsv_file):
        raise ClinicaException(
            "\n%s[Error] The TSV file you gave is not a file.%s\n"
            "\n%sError explanations:%s\n"
            " - Clinica expected the following path to be a file: %s%s%s\n"
            " - If you gave relative path, did you run Clinica on the good folder?"
            % (Fore.RED, Fore.RESET, Fore.YELLOW, Fore.RESET, Fore.BLUE, tsv_file, Fore.RESET))

    # Fixed: pd.io.parsers.read_csv is a deprecated private alias; use the
    # public pd.read_csv API instead.
    ss_df = pd.read_csv(tsv_file, sep='\t')
    if 'participant_id' not in list(ss_df.columns.values):
        raise ClinicaException(
            "\n%s[Error] The TSV file does not contain participant_id column (path: %s)%s"
            % (Fore.RED, tsv_file, Fore.RESET))
    if 'session_id' not in list(ss_df.columns.values):
        raise ClinicaException(
            "\n%s[Error] The TSV file does not contain session_id column (path: %s)%s"
            % (Fore.RED, tsv_file, Fore.RESET))

    participants = list(ss_df.participant_id)
    sessions = list(ss_df.session_id)

    # Remove potential whitespace in participant_id or session_id
    return [sub.strip(' ') for sub in participants], [ses.strip(' ') for ses in sessions]
def read_sessions(caps_dir, participant_id, long_id):
    """Extract sessions IDs from `caps_dir`/subjects/`participant_id`/`long_id`/`long_id`_sessions.tsv."""
    import os

    import pandas

    from clinica.utils.exceptions import ClinicaException

    # Build the expected path of the per-subject longitudinal sessions file.
    sessions_file = os.path.join(
        os.path.expanduser(caps_dir),
        "subjects",
        participant_id,
        long_id,
        f"{long_id}_sessions.tsv",
    )

    if not os.path.isfile(sessions_file):
        raise ClinicaException(
            "The TSV file with sessions associated "
            f"to {participant_id} for longitudinal ID {long_id} is missing "
            f"(expected path: {sessions_file})."
        )

    sessions_df = pandas.read_csv(sessions_file, sep="\t")
    if "session_id" not in sessions_df.columns:
        raise ClinicaException(
            "The TSV file does not contain session_id column "
            f"(path: {sessions_file})."
        )

    return list(sessions_df.session_id)
def read_participant_tsv(tsv_file):
    """Extract participant IDs and session IDs from TSV file.

    Args:
        tsv_file: Path to a TSV file with participant_id and session_id columns.

    Returns:
        Tuple (participants, sessions) of lists of IDs, whitespace-stripped.

    Raise:
        ClinicaException if tsv_file is not a file
        ClinicaException if participant_id or session_id column is missing from TSV file
    """
    import os

    import pandas as pd

    from clinica.utils.exceptions import ClinicaException

    if not os.path.isfile(tsv_file):
        raise ClinicaException(
            "The TSV file you gave is not a file.\n"
            "Error explanations:\n"
            f"\t- Clinica expected the following path to be a file: {tsv_file}\n"
            "\t- If you gave relative path, did you run Clinica on the good folder?"
        )

    # Fixed: pd.io.parsers.read_csv is a deprecated private alias; use the
    # public pd.read_csv API instead.
    ss_df = pd.read_csv(tsv_file, sep="\t")
    if "participant_id" not in list(ss_df.columns.values):
        raise ClinicaException(
            f"The TSV file does not contain participant_id column (path: {tsv_file})"
        )
    if "session_id" not in list(ss_df.columns.values):
        raise ClinicaException(
            f"The TSV file does not contain session_id column (path: {tsv_file})"
        )

    participants = list(ss_df.participant_id)
    sessions = list(ss_df.session_id)

    # Remove potential whitespace in participant_id or session_id
    return [sub.strip(" ") for sub in participants], [ses.strip(" ") for ses in sessions]
def check_pipeline_parameters(self):
    """Check pipeline parameters.

    Fill in missing optional parameters with their defaults, then validate
    group label, glm_type, FWHM and the three statistical thresholds.

    Raises:
        ClinicaException: if glm_type, FWHM or any threshold is invalid.
    """
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.group import check_group_label

    from .statistics_surface_utils import get_t1_freesurfer_custom_file

    # Default values.
    # Fixed: the original assignments for feature_label,
    # threshold_corrected_pvalue and cluster_threshold ended with a stray
    # trailing comma, which stored 1-element tuples instead of scalars and
    # broke the range comparisons below.
    if 'custom_file' not in self.parameters.keys():
        self.parameters['custom_file'] = get_t1_freesurfer_custom_file()
    if 'feature_label' not in self.parameters.keys():
        self.parameters['feature_label'] = 'ct'
    if 'full_width_at_half_maximum' not in self.parameters.keys():
        self.parameters['full_width_at_half_maximum'] = 20
    if 'threshold_uncorrected_pvalue' not in self.parameters.keys():
        self.parameters['threshold_uncorrected_pvalue'] = 0.001
    if 'threshold_corrected_pvalue' not in self.parameters.keys():
        self.parameters['threshold_corrected_pvalue'] = 0.05
    if 'cluster_threshold' not in self.parameters.keys():
        self.parameters['cluster_threshold'] = 0.001

    check_group_label(self.parameters['group_label'])

    if self.parameters['glm_type'] not in ['group_comparison', 'correlation']:
        raise ClinicaException(
            "The glm_type you specified is wrong: it should be group_comparison or "
            "correlation (given value: %s)." % self.parameters['glm_type'])
    if self.parameters['full_width_at_half_maximum'] not in [0, 5, 10, 15, 20]:
        raise ClinicaException(
            "FWHM for the surface smoothing you specified is wrong: it should be 0, 5, 10, 15 or 20 "
            "(given value: %s)." % self.parameters['full_width_at_half_maximum'])
    if self.parameters['threshold_uncorrected_pvalue'] < 0 or self.parameters['threshold_uncorrected_pvalue'] > 1:
        raise ClinicaException(
            "Uncorrected p-value threshold should be a lower than 1 "
            "(given value: %s)." % self.parameters['threshold_uncorrected_pvalue'])
    if self.parameters['threshold_corrected_pvalue'] < 0 or self.parameters['threshold_corrected_pvalue'] > 1:
        raise ClinicaException(
            "Corrected p-value threshold should be between 0 and 1 "
            "(given value: %s)." % self.parameters['threshold_corrected_pvalue'])
    if self.parameters['cluster_threshold'] < 0 or self.parameters['cluster_threshold'] > 1:
        raise ClinicaException(
            "Cluster threshold should be between 0 and 1 "
            "(given value: %s)." % self.parameters['cluster_threshold'])
def run_command(self, args):
    """Run the pipeline with defined args."""
    from colorama import Fore
    from networkx import Graph

    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.ux import print_crash_files_and_exit, print_end_pipeline

    from .spatial_svm_pipeline import SpatialSVM

    # pet-volume inputs require both the tracer and the SUVR reference region.
    if args.orig_input_data == "pet-volume":
        required_flags = (
            (args.acq_label, "--acq_label"),
            (args.suvr_reference_region, "--suvr_reference_region"),
        )
        for flag_value, flag_name in required_flags:
            if flag_value is None:
                raise ClinicaException(
                    f"{Fore.RED}You selected pet-volume pipeline without setting {flag_name} flag. "
                    f"Clinica will now exit.{Fore.RESET}")

    pipeline_parameters = {
        # Clinica compulsory arguments
        "group_label": args.group_label,
        "orig_input_data": args.orig_input_data,
        # Optional arguments for inputs from pet-volume pipeline
        "acq_label": args.acq_label,
        "use_pvc_data": args.use_pvc_data,
        "suvr_reference_region": args.suvr_reference_region,
        # Advanced arguments
        "fwhm": args.full_width_half_maximum,
    }

    pipeline = SpatialSVM(
        caps_directory=self.absolute_path(args.caps_directory),
        tsv_file=self.absolute_path(args.subjects_sessions_tsv),
        base_dir=self.absolute_path(args.working_directory),
        parameters=pipeline_parameters,
        name=self.name,
    )

    # Run in parallel when a number of processes was requested.
    if args.n_procs:
        exec_pipeline = pipeline.run(
            plugin="MultiProc", plugin_args={"n_procs": args.n_procs})
    else:
        exec_pipeline = pipeline.run()

    # A Graph result means the run completed; anything else means a crash.
    if isinstance(exec_pipeline, Graph):
        print_end_pipeline(
            self.name, pipeline.base_dir, pipeline.base_dir_was_specified)
    else:
        print_crash_files_and_exit(args.logname, pipeline.base_dir)
def check_pipeline_parameters(self):
    """Check pipeline parameters."""
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.group import check_group_label

    from .statistics_surface_utils import get_t1_freesurfer_custom_file

    # Clinica compulsory parameters
    self.parameters.setdefault("group_label", None)
    check_group_label(self.parameters["group_label"])

    if "orig_input_data" not in self.parameters:
        raise KeyError(
            "Missing compulsory orig_input_data key in pipeline parameter."
        )

    self.parameters.setdefault("glm_type", None)
    glm_type = self.parameters["glm_type"]
    if glm_type not in ("group_comparison", "correlation"):
        raise ClinicaException(
            f"The glm_type you specified is wrong: it should be group_comparison or "
            f"correlation (given value: {glm_type})."
        )

    if "contrast" not in self.parameters:
        raise KeyError("Missing compulsory contrast key in pipeline parameter.")

    # Optional parameters, applied as (key, default) pairs:
    # generic, pet-surface inputs, custom pipeline inputs, then advanced.
    for key, default in (
        ("covariates", None),
        ("full_width_at_half_maximum", 20),
        ("acq_label", None),
        ("suvr_reference_region", None),
        ("custom_file", get_t1_freesurfer_custom_file()),
        ("measure_label", "ct"),
        ("cluster_threshold", 0.001),
    ):
        self.parameters.setdefault(key, default)

    cluster_threshold = self.parameters["cluster_threshold"]
    if not 0 <= cluster_threshold <= 1:
        raise ClinicaException(
            f"Cluster threshold should be between 0 and 1 "
            f"(given value: {cluster_threshold})."
        )
def get_conversion_luts():
    """Return MRtrix3 label-conversion LUT paths for FreeSurfer parcellations."""
    import os

    from clinica.utils.exceptions import ClinicaException

    try:
        mrtrix_home = os.environ["MRTRIX_HOME"]
    except KeyError:
        raise ClinicaException(
            "Could not find MRTRIX_HOME environment variable.")

    lut_dir = os.path.join(mrtrix_home, "share", "mrtrix3", "labelconvert")
    # For aparc+aseg.mgz file:
    default = os.path.join(lut_dir, "fs_default.txt")
    # For aparc.a2009s+aseg.mgz file:
    a2009s = os.path.join(lut_dir, "fs_a2009s.txt")
    # TODO: Add custom Lausanne2008 conversion LUTs here.

    return [default, a2009s]
def extract_metadata_from_json(json_file, list_keys):
    """Extract fields from JSON file.

    Args:
        json_file: Path to the JSON file to read.
        list_keys: Keys whose values should be extracted, in order.

    Returns:
        List of values, one per key in `list_keys`.

    Raises:
        EnvironmentError: If the JSON file cannot be opened.
        ClinicaException: If one of the requested keys is missing.
    """
    import datetime
    import json

    from clinica.utils.exceptions import ClinicaException

    list_values = []
    try:
        # `with` closes the file in all cases; the original also had a
        # `finally: file.close()` which raised NameError when open() failed.
        with open(json_file, "r") as file:
            data = json.load(file)
        for key in list_keys:
            list_values.append(data[key])
    except EnvironmentError:
        raise EnvironmentError(
            f"[Error] Clinica could not open the following JSON file: {json_file}"
        )
    except KeyError as e:
        now = datetime.datetime.now().strftime("%H:%M:%S")
        # Fixed: the original message printed the literal text "the e key"
        # instead of interpolating the missing key.
        error_message = (
            f"[{now}] Error: Clinica could not find the {e} key in the following JSON file: {json_file}"
        )
        raise ClinicaException(error_message)
    return list_values
def build_input_node(self):
    """Build and connect an input node to the pipeline.

    Checks that the requested group exists in the CAPS directory, reads the
    grey-matter maps in MNI space for every subject/session, and feeds them
    (plus the atlas list) into the pipeline's input node.

    Raises:
        ClinicaException: If the group folder does not exist in CAPS.
        ClinicaCAPSError: If expected files are missing from CAPS.
    """
    import os

    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from colorama import Fore

    from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
    from clinica.utils.input_files import t1_volume_template_tpm_in_mni
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.stream import cprint
    from clinica.utils.ux import (
        print_groups_in_caps_directory,
        print_images_to_process,
    )

    # Check that group already exists
    if not os.path.exists(
            os.path.join(self.caps_directory, "groups",
                         f"group-{self.parameters['group_label']}")):
        print_groups_in_caps_directory(self.caps_directory)
        # Fixed: the original f-string started with a stray literal '%'
        # ("f\"%{Fore.RED}...\"") that was printed in the error message.
        raise ClinicaException(
            f"{Fore.RED}Group {self.parameters['group_label']} does not exist. "
            f"Did you run t1-volume or t1-volume-create-dartel pipeline?{Fore.RESET}"
        )

    try:
        gm_mni = clinica_file_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            t1_volume_template_tpm_in_mni(self.parameters["group_label"], 1, True),
        )
    except ClinicaException as e:
        final_error_str = "Clinica faced error(s) while trying to read files in your CAPS directory.\n"
        final_error_str += str(e)
        raise ClinicaCAPSError(final_error_str)

    read_parameters_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                          mandatory_inputs=True),
    )
    read_parameters_node.inputs.file_list = gm_mni
    read_parameters_node.inputs.atlas_list = self.parameters["atlases"]

    if len(self.subjects):
        print_images_to_process(self.subjects, self.sessions)
        cprint("The pipeline will last a few seconds per image.")

    self.connect([
        (read_parameters_node, self.input_node, [("file_list", "file_list")]),
        (read_parameters_node, self.input_node, [("atlas_list", "atlas_list")]),
    ])
def get_group_1_and_2(tsv, contrast):
    """
    Based on the TSV file given in parameter, compute indexes of each group

    Args:
        tsv: (str) path to the tsv file containing information on subjects/sessions with all covariates
        contrast: (str) name of a column of the tsv

    Returns:
        first_group_idx: (list of int) list of indexes of first group
        second_group_idx: (list of int) list of indexes of second group
        class_names: (list of str of len 2) list of the class names read in the column contrast of the tsv
    """
    import pandas as pds

    from clinica.utils.exceptions import ClinicaException

    # StatisticsVolume pipeline has been instantiated with tsv_file=tsv,
    # so check of existence and integrity have succeeded
    # No further check are done when trying to read it
    # Fixed: keep the path in `tsv` and read contents into a separate
    # DataFrame; the original rebound `tsv` to the DataFrame, so the
    # "not present" error below crashed with a TypeError (str + DataFrame).
    df = pds.read_csv(tsv, sep="\t")
    columns = list(df.columns)

    # An error is raised if the contrast column is not found in the tsv
    if contrast not in columns:
        raise ClinicaException(contrast + " is not present in " + tsv)

    # list(set(my_list)) gives unique values of my_list
    class_names = list(set(df[contrast]))

    # This is a 2-sample t-test: we can only allow 2 classes
    if len(class_names) != 2:
        raise ClinicaException(
            "It must exist only 2 classes in the column "
            + contrast
            + " to perform 2-sample t-tests. Here Clinica found: "
            + str(class_names))

    first_group_idx = [
        i for i, label in enumerate(list(df[contrast]))
        if label == class_names[0]
    ]
    second_group_idx = [
        i for i, label in enumerate(list(df[contrast]))
        if label == class_names[1]
    ]

    return first_group_idx, second_group_idx, class_names
def check_pipeline_parameters(self):
    """Check pipeline parameters."""
    from clinica.utils.exceptions import ClinicaException

    # The cluster threshold is a probability, hence must lie in [0, 1].
    threshold = self.parameters['cluster_threshold']
    if not 0 <= threshold <= 1:
        raise ClinicaException(
            "Cluster threshold should be between 0 and 1 "
            "(given value: %s)." % threshold)
def check_pipeline_parameters(self):
    """Check pipeline parameters."""
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.group import check_group_label

    from .statistics_surface_utils import get_t1_freesurfer_custom_file

    # Compulsory parameters.
    self.parameters.setdefault('group_label', None)
    check_group_label(self.parameters['group_label'])

    if 'orig_input_data' not in self.parameters:
        raise KeyError('Missing compulsory orig_input_data key in pipeline parameter.')

    self.parameters.setdefault('glm_type', None)
    if self.parameters['glm_type'] not in ('group_comparison', 'correlation'):
        raise ClinicaException(
            f"The glm_type you specified is wrong: it should be group_comparison or "
            f"correlation (given value: {self.parameters['glm_type']})."
        )

    if 'contrast' not in self.parameters:
        raise KeyError('Missing compulsory contrast key in pipeline parameter.')

    # Optional parameters, as (key, default) pairs: generic ones first,
    # then pet-surface inputs, custom-pipeline inputs and advanced settings.
    optional_defaults = [
        ('covariates', None),
        ('full_width_at_half_maximum', 20),
        ('acq_label', None),
        ('suvr_reference_region', None),
        ('custom_file', get_t1_freesurfer_custom_file()),
        ('measure_label', 'ct'),
        ('cluster_threshold', 0.001),
    ]
    for key, default in optional_defaults:
        if key not in self.parameters:
            self.parameters[key] = default

    threshold = self.parameters['cluster_threshold']
    if threshold < 0 or threshold > 1:
        raise ClinicaException(
            f"Cluster threshold should be between 0 and 1 "
            f"(given value: {threshold})."
        )
def read_sessions(caps_dir, participant_id, long_id):
    """Extract sessions IDs from `caps_dir`/subjects/`participant_id`/`long_id`/`long_id`_sessions.tsv."""
    import os

    import pandas
    from colorama import Fore

    from clinica.utils.exceptions import ClinicaException

    # Expected location of the longitudinal sessions file for this subject.
    sessions_file = os.path.join(
        os.path.expanduser(caps_dir),
        'subjects',
        participant_id,
        long_id,
        long_id + '_sessions.tsv',
    )

    if not os.path.isfile(sessions_file):
        raise ClinicaException(
            '\n%s[Error] The TSV file with sessions associated to %s for longitudinal ID %s is missing '
            '(expected path: %s).%s'
            % (Fore.RED, participant_id, long_id, sessions_file, Fore.RESET))

    sessions_df = pandas.read_csv(sessions_file, sep='\t')
    if 'session_id' not in sessions_df.columns:
        raise ClinicaException(
            '\n%s[Error] The TSV file does not contain session_id column (path: %s)%s'
            % (Fore.RED, sessions_file, Fore.RESET))

    return list(sessions_df.session_id)
def get_processed_images(caps_directory, subjects, sessions):
    """Extract processed image IDs in `caps_directory` based on `subjects`_`sessions`.

    Todo:
        [ ] Implement this static method in all pipelines
        [ ] Make it abstract to force overload in future pipelines
    """
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.stream import cprint

    # Not implemented yet: report the failure and abort.
    for message in (
        "Pipeline finished with errors.",
        "CAPS outputs were not found for some image(s):",
    ):
        cprint(msg=message, lvl="error")
    raise ClinicaException(
        "Implementation on which image(s) failed will appear soon.")
def build_input_node(self):
    """Build and connect an input node to the pipeline."""
    import os

    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from colorama import Fore

    from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
    from clinica.utils.input_files import t1_volume_template_tpm_in_mni
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.stream import cprint
    from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process

    group_id = self.parameters['group_id']

    # The group must have been created by a previous t1-volume run.
    if not os.path.exists(
            os.path.join(self.caps_directory, 'groups', 'group-' + group_id)):
        print_groups_in_caps_directory(self.caps_directory)
        raise ClinicaException(
            '%sGroup %s does not exist. Did you run t1-volume or t1-volume-create-dartel pipeline?%s'
            % (Fore.RED, group_id, Fore.RESET))

    try:
        gm_mni = clinica_file_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            t1_volume_template_tpm_in_mni(group_id, 1, True))
    except ClinicaException as e:
        raise ClinicaCAPSError(
            'Clinica faced error(s) while trying to read files in your CAPS directory.\n'
            + str(e))

    read_parameters_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(
            fields=self.get_input_fields(),
            mandatory_inputs=True))
    read_parameters_node.inputs.file_list = gm_mni
    read_parameters_node.inputs.atlas_list = self.parameters['atlases']

    if len(self.subjects):
        print_images_to_process(self.subjects, self.sessions)
        cprint('The pipeline will last a few seconds per image.')

    self.connect([
        (read_parameters_node, self.input_node, [('file_list', 'file_list')]),
        (read_parameters_node, self.input_node, [('atlas_list', 'atlas_list')]),
    ])
def get_processed_images(caps_directory, subjects, sessions):
    """Extract processed image IDs in `caps_directory` based on `subjects`_`sessions`.

    Todo:
        [ ] Implement this static method in all pipelines
        [ ] Make it abstract to force overload in future pipelines
    """
    import datetime

    from colorama import Fore

    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.stream import cprint

    # Not implemented yet: report the failure and abort.
    timestamp = datetime.datetime.now().strftime('%H:%M:%S')
    cprint('\n%s[%s] Pipeline finished with errors.%s\n'
           % (Fore.RED, timestamp, Fore.RESET))
    cprint('%sCAPS outputs were not found for some image(s):%s'
           % (Fore.RED, Fore.RESET))
    raise ClinicaException(
        'Implementation on which image(s) failed will appear soon.')
def get_luts():
    """Return FreeSurfer color LUT paths for the supported parcellations."""
    import os

    from clinica.utils.exceptions import ClinicaException

    try:
        freesurfer_home = os.environ["FREESURFER_HOME"]
    except KeyError:
        raise ClinicaException(
            "Could not find FREESURFER_HOME environment variable.")

    # FreeSurfer ships a single color LUT used for both the aparc+aseg.mgz
    # and aparc.a2009s+aseg.mgz parcellations.
    lut = os.path.join(freesurfer_home, "FreeSurferColorLUT.txt")
    # TODO: Add custom Lausanne2008 LUTs here.

    return [lut, lut]
def get_processed_images(caps_directory, subjects, sessions):
    """Extract processed image IDs in `caps_directory` based on `subjects`_`sessions`.

    Todo:
        [ ] Implement this static method in all pipelines
        [ ] Make it abstract to force overload in future pipelines
    """
    import datetime

    from colorama import Fore

    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.stream import cprint

    # Not implemented yet: report the failure and abort.
    timestamp = datetime.datetime.now().strftime("%H:%M:%S")
    cprint(f"\n{Fore.RED}[{timestamp}] Pipeline finished with errors.{Fore.RESET}\n")
    cprint(f"{Fore.RED}CAPS outputs were not found for some image(s):{Fore.RESET}")
    raise ClinicaException(
        "Implementation on which image(s) failed will appear soon."
    )
def read_part_sess_long_ids_from_tsv(tsv_file):
    """Extract participant, session and longitudinal from TSV file.

    TODO: Find a way to merge with utils/filemanip.py::read_participant_tsv into one util
    """
    import os

    import pandas
    from colorama import Fore

    from clinica.utils.exceptions import ClinicaException

    if not os.path.isfile(tsv_file):
        raise ClinicaException(
            f"\n{Fore.RED}[Error] The TSV file you gave is not a file.{Fore.RESET}\n"
            f"\n{Fore.YELLOW}Error explanations:{Fore.RESET}\n"
            f" - Clinica expected the following path to be a file: {Fore.BLUE}{tsv_file}{Fore.RESET}\n"
            f" - If you gave relative path, did you run Clinica on the good folder?"
        )

    df = pandas.read_csv(tsv_file, sep="\t")

    # Each of these columns must be present in the TSV file.
    for key in ("participant_id", "session_id", "long_id"):
        if key not in df.columns:
            raise ClinicaException(
                f"\n{Fore.RED}[Error] The TSV file does not contain {key} column (path: {tsv_file}){Fore.RESET}"
            )

    # Remove potential whitespace in participant, session or longitudinal ID
    return (
        [participant.strip(" ") for participant in df.participant_id],
        [session.strip(" ") for session in df.session_id],
        [longitudinal.strip(" ") for longitudinal in df.long_id],
    )
def check_pipeline_parameters(self):
    """Check pipeline parameters."""
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.group import check_group_label

    # Clinica compulsory parameters
    self.parameters.setdefault("group_label", None)
    check_group_label(self.parameters["group_label"])
    for compulsory in ("orig_input_data_volume", "contrast"):
        if compulsory not in self.parameters:
            raise KeyError(
                f"Missing compulsory {compulsory} key in pipeline parameter.")

    # Optional parameters, as (key, default) pairs: generic ones, pet-volume
    # inputs, custom-pipeline inputs, then advanced settings.
    for key, default in (
        ("group_label_dartel", "*"),
        ("full_width_at_half_maximum", 8),
        ("acq_label", None),
        ("suvr_reference_region", None),
        ("use_pvc_data", False),
        ("measure_label", None),
        ("custom_file", None),
        ("cluster_threshold", 0.001),
    ):
        self.parameters.setdefault(key, default)

    threshold = self.parameters["cluster_threshold"]
    if threshold < 0 or threshold > 1:
        raise ClinicaException(
            "Cluster threshold should be between 0 and 1 "
            "(given value: %s)." % threshold)
def check_pipeline_parameters(self):
    """Check pipeline parameters."""
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.group import check_group_label

    # Clinica compulsory parameters
    self.parameters.setdefault('group_label', None)
    check_group_label(self.parameters['group_label'])
    if 'orig_input_data' not in self.parameters:
        raise KeyError(
            'Missing compulsory orig_input_data key in pipeline parameter.'
        )
    if 'contrast' not in self.parameters:
        raise KeyError(
            'Missing compulsory contrast key in pipeline parameter.')

    # Optional parameters: generic, pet-volume inputs, custom-pipeline
    # inputs, then advanced settings.
    optional_defaults = {
        'group_label_dartel': '*',
        'full_width_at_half_maximum': 8,
        'acq_label': None,
        'suvr_reference_region': None,
        'use_pvc_data': False,
        'measure_label': None,
        'custom_file': None,
        'cluster_threshold': 0.001,
    }
    for key, default in optional_defaults.items():
        self.parameters.setdefault(key, default)

    cluster_threshold = self.parameters['cluster_threshold']
    if not (0 <= cluster_threshold <= 1):
        raise ClinicaException(
            "Cluster threshold should be between 0 and 1 "
            "(given value: %s)." % cluster_threshold)
def build_input_node(self):
    """Build and connect an input node to the pipeline."""
    import os
    import sys

    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from colorama import Fore

    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.input_files import t1_volume_dartel_input_tissue
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.stream import cprint
    from clinica.utils.ux import (
        print_begin_image,
        print_groups_in_caps_directory,
        print_images_to_process,
    )

    group_label = self.parameters['group_label']

    # If the template already exists, this pipeline refuses to overwrite it.
    representative_output = os.path.join(
        self.caps_directory, 'groups', 'group-' + group_label, 't1',
        'group-' + group_label + '_template.nii.gz')
    if os.path.exists(representative_output):
        cprint(
            "%sDARTEL template for %s already exists. Currently, Clinica does not propose to overwrite outputs "
            "for this pipeline.%s" % (Fore.YELLOW, group_label, Fore.RESET))
        print_groups_in_caps_directory(self.caps_directory)
        sys.exit(0)

    # At least two images are required to build a DARTEL template.
    if len(self.subjects) <= 1:
        raise ClinicaException(
            '%sThis pipeline needs at least 2 images to create DARTEL template but '
            'Clinica only found %s.%s'
            % (Fore.RED, len(self.subjects), Fore.RESET))

    read_parameters_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(
            fields=self.get_input_fields(),
            mandatory_inputs=True))

    # Read one file list per requested tissue; collect errors to report
    # all of them at once.
    all_errors = []
    d_input = []
    for tissue_number in self.parameters['dartel_tissues']:
        try:
            d_input.append(
                clinica_file_reader(
                    self.subjects, self.sessions, self.caps_directory,
                    t1_volume_dartel_input_tissue(tissue_number)))
        except ClinicaException as e:
            all_errors.append(e)
    if all_errors:
        error_message = 'Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n'
        for msg in all_errors:
            error_message += str(msg)
        raise RuntimeError(error_message)

    # d_input is a list of size len(self.parameters['dartel_tissues']);
    # each element of this list is a list of size len(self.subjects).
    read_parameters_node.inputs.dartel_inputs = d_input

    if len(self.subjects):
        print_images_to_process(self.subjects, self.sessions)
        cprint(
            'Computational time for DARTEL creation will depend on the number of images.'
        )
        print_begin_image('group-' + group_label)

    self.connect([
        (read_parameters_node, self.input_node,
         [('dartel_inputs', 'dartel_input_images')]),
    ])
def build_input_node_cross_sectional(self):
    """Build and connect an input node (cross-sectional case) to the pipeline.

    Reads the PET image from BIDS and the FreeSurfer anatomical inputs
    (orig_nu, white surfaces, Destrieux and Desikan parcellations) from CAPS
    for every subject/session, checks PET/T1 relative volume location, and
    connects everything to the pipeline's input node.

    Raises:
        ClinicaException: If any expected file is missing from BIDS or CAPS
            (all missing-file errors are aggregated into one exception).
    """
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe

    import clinica.utils.input_files as input_files
    from clinica.iotools.utils.data_handling import check_relative_volume_location_in_world_coordinate_system
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.inputs import clinica_file_reader

    read_parameters_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(
            fields=self.get_input_fields(),
            mandatory_inputs=True),
        synchronize=True)

    # (input field, directory to read from, file pattern).
    # The original repeated one try/except block per field; reading them in
    # a loop keeps the same lookup order and error aggregation.
    queries = [
        ("pet", self.bids_directory,
         input_files.bids_pet_nii(self.parameters['acq_label'])),
        ("orig_nu", self.caps_directory, input_files.T1_FS_ORIG_NU),
        ("white_surface_right", self.caps_directory, input_files.T1_FS_WM_SURF_R),
        ("white_surface_left", self.caps_directory, input_files.T1_FS_WM_SURF_L),
        ("destrieux_left", self.caps_directory, input_files.T1_FS_DESTRIEUX_PARC_L),
        ("destrieux_right", self.caps_directory, input_files.T1_FS_DESTRIEUX_PARC_R),
        ("desikan_left", self.caps_directory, input_files.T1_FS_DESIKAN_PARC_L),
        ("desikan_right", self.caps_directory, input_files.T1_FS_DESIKAN_PARC_R),
    ]
    all_errors = []
    for field, directory, pattern in queries:
        try:
            setattr(
                read_parameters_node.inputs, field,
                clinica_file_reader(
                    self.subjects, self.sessions, directory, pattern))
        except ClinicaException as e:
            all_errors.append(e)

    # Report every missing file at once instead of failing on the first one.
    if len(all_errors) > 0:
        error_message = 'Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n'
        for msg in all_errors:
            error_message += str(msg)
        raise ClinicaException(error_message)

    check_relative_volume_location_in_world_coordinate_system(
        'T1w-MRI (orig_nu.mgz)', read_parameters_node.inputs.orig_nu,
        self.parameters['acq_label'] + ' PET', read_parameters_node.inputs.pet,
        self.bids_directory, self.parameters['acq_label'])

    self.connect([
        (read_parameters_node, self.input_node, [
            ('pet', 'pet'),
            ('orig_nu', 'orig_nu'),
            ('white_surface_left', 'white_surface_left'),
            ('white_surface_right', 'white_surface_right'),
            ('destrieux_left', 'destrieux_left'),
            ('destrieux_right', 'destrieux_right'),
            ('desikan_left', 'desikan_left'),
            ('desikan_right', 'desikan_right'),
        ]),
    ])
def run_command(self, args):
    """Validate CLI arguments, build and run the StatisticsVolume pipeline."""
    from networkx import Graph
    from colorama import Fore
    from .statistics_volume_pipeline import StatisticsVolume
    from clinica.utils.ux import print_end_pipeline, print_crash_files_and_exit
    from clinica.utils.exceptions import ClinicaException

    # PET-Volume inputs require both PET-specific flags.
    if args.orig_input_data == 'pet-volume':
        for flag_value, flag_name in (
                (args.acq_label, '--acq_label'),
                (args.suvr_reference_region, '--suvr_reference_region')):
            if flag_value is None:
                raise ClinicaException(
                    f"{Fore.RED}You selected pet-volume pipeline without setting {flag_name} flag. "
                    f"Clinica will now exit.{Fore.RESET}")

    # Custom pipeline requires both a file pattern and a measure label.
    if args.orig_input_data == 'custom-pipeline':
        if any(v is None for v in (args.custom_file, args.measure_label)):
            raise ClinicaException(
                'You must set --measure_label and --custom_file flags.')

    pipeline_parameters = {
        # Clinica compulsory arguments
        'group_label': args.group_label,
        'orig_input_data': args.orig_input_data,
        'contrast': args.contrast,
        # Optional arguments
        'group_label_dartel': args.group_label_dartel,
        'full_width_at_half_maximum': args.full_width_at_half_maximum,
        # Optional arguments for inputs from pet-volume pipeline
        'acq_label': args.acq_label,
        'use_pvc_data': args.use_pvc_data,
        'suvr_reference_region': args.suvr_reference_region,
        # Optional arguments for custom pipeline
        'measure_label': args.measure_label,
        'custom_file': args.custom_file,
        # Advanced arguments
        'cluster_threshold': args.cluster_threshold,
    }

    pipeline = StatisticsVolume(
        caps_directory=self.absolute_path(args.caps_directory),
        tsv_file=self.absolute_path(args.subject_visits_with_covariates_tsv),
        base_dir=self.absolute_path(args.working_directory),
        parameters=pipeline_parameters,
        name=self.name,
    )

    # Run with the MultiProc plugin only when a worker count was requested.
    if args.n_procs:
        exec_pipeline = pipeline.run(plugin='MultiProc',
                                     plugin_args={'n_procs': args.n_procs})
    else:
        exec_pipeline = pipeline.run()

    # A successful nipype run returns the execution graph; anything else
    # means the pipeline crashed.
    if isinstance(exec_pipeline, Graph):
        print_end_pipeline(self.name, pipeline.base_dir,
                           pipeline.base_dir_was_specified)
    else:
        print_crash_files_and_exit(args.logname, pipeline.base_dir)
def build_input_node(self):
    """Build and connect an input node to the pipeline."""
    import os
    import sys

    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe

    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.input_files import t1_volume_dartel_input_tissue
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.stream import cprint
    from clinica.utils.ux import (
        print_begin_image,
        print_groups_in_caps_directory,
        print_images_to_process,
    )

    group_label = self.parameters["group_label"]
    template_path = os.path.join(
        self.caps_directory,
        "groups",
        f"group-{group_label}",
        "t1",
        f"group-{group_label}_template.nii.gz",
    )

    # Abort early: this pipeline never overwrites an existing template.
    if os.path.exists(template_path):
        cprint(
            msg=(
                f"DARTEL template for {group_label} already exists. "
                "Currently, Clinica does not propose to overwrite outputs for this pipeline."
            ),
            lvl="warning",
        )
        print_groups_in_caps_directory(self.caps_directory)
        sys.exit(0)

    # A DARTEL template cannot be estimated from a single image.
    if len(self.subjects) <= 1:
        raise ClinicaException(
            "This pipeline needs at least 2 images to create DARTEL "
            f"template but Clinica only found {len(self.subjects)}."
        )

    read_parameters_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(
            fields=self.get_input_fields(), mandatory_inputs=True
        ),
    )

    encountered_errors = []
    tissue_images = []
    for tissue in self.parameters["dartel_tissues"]:
        try:
            tissue_images.append(
                clinica_file_reader(
                    self.subjects,
                    self.sessions,
                    self.caps_directory,
                    t1_volume_dartel_input_tissue(tissue),
                )
            )
        except ClinicaException as e:
            encountered_errors.append(e)

    # Report every missing file at once instead of failing on the first one.
    if encountered_errors:
        raise RuntimeError(
            "Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n"
            + "".join(str(e) for e in encountered_errors)
        )

    # tissue_images holds one list of size len(self.subjects) per requested
    # DARTEL tissue class.
    read_parameters_node.inputs.dartel_inputs = tissue_images

    if len(self.subjects):
        print_images_to_process(self.subjects, self.sessions)
        cprint(
            "Computational time for DARTEL creation will depend on the number of images."
        )
        print_begin_image(f"group-{group_label}")

    # fmt: off
    self.connect([(read_parameters_node, self.input_node, [("dartel_inputs", "dartel_input_images")])])
    # fmt: on
def build_input_node(self):
    """Build and connect an input node to the pipeline.

    Gathers PET and T1w images from the BIDS directory, tissue probability
    maps, DARTEL flow fields and the group DARTEL template from the CAPS
    directory, then wires everything into the pipeline input node. Errors are
    accumulated so the user sees every missing file in a single exception.

    Raise:
        ClinicaException: if the group folder does not exist or if any
            required file could not be found.
    """
    import os
    from os.path import join, exists
    from colorama import Fore
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
    from clinica.utils.input_files import (
        t1_volume_final_group_template,
        t1_volume_native_tpm,
        t1_volume_native_tpm_in_mni,
        t1_volume_deformation_to_template,
        bids_pet_nii,
        T1W_NII)
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process
    from clinica.iotools.utils.data_handling import check_relative_volume_location_in_world_coordinate_system
    from clinica.utils.filemanip import save_participants_sessions
    from clinica.utils.pet import read_psf_information, get_suvr_mask
    from clinica.utils.stream import cprint

    # Check that group already exists (created by a previous t1-volume run)
    if not exists(
            join(self.caps_directory, 'groups',
                 'group-' + self.parameters['group_label'])):
        print_groups_in_caps_directory(self.caps_directory)
        raise ClinicaException(
            '%sGroup %s does not exist. Did you run t1-volume or t1-volume-create-dartel pipeline?%s' %
            (Fore.RED, self.parameters['group_label'], Fore.RESET))

    # Tissues DataGrabber
    # ====================
    # Every read failure is appended here and reported once at the end.
    all_errors = []

    # Grab reference mask used for SUVR intensity normalization
    reference_mask_file = get_suvr_mask(
        self.parameters['suvr_reference_region'])

    # PET from BIDS directory
    try:
        pet_bids = clinica_file_reader(
            self.subjects,
            self.sessions,
            self.bids_directory,
            bids_pet_nii(self.parameters['acq_label']))
    except ClinicaException as e:
        all_errors.append(e)

    # Native T1w-MRI
    try:
        t1w_bids = clinica_file_reader(self.subjects,
                                       self.sessions,
                                       self.bids_directory,
                                       T1W_NII)
    except ClinicaException as e:
        all_errors.append(e)

    # mask_tissues: tissue probability maps in MNI space, one list per tissue
    tissues_input = []
    for tissue_number in self.parameters['mask_tissues']:
        try:
            current_file = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.caps_directory,
                t1_volume_native_tpm_in_mni(tissue_number, False))
            tissues_input.append(current_file)
        except ClinicaException as e:
            all_errors.append(e)
    # Tissues_input has a length of len(self.parameters['mask_tissues']). Each of these elements has a size of
    # len(self.subjects). We want the opposite: a list of size len(self.subjects) whose elements have a size of
    # len(self.parameters['mask_tissues']. The trick is to iter on elements with zip(*my_list)
    tissues_input_final = []
    for subject_tissue_list in zip(*tissues_input):
        tissues_input_final.append(subject_tissue_list)
    tissues_input = tissues_input_final

    # Flowfields (DARTEL deformations to the group template)
    try:
        flowfields_caps = clinica_file_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            t1_volume_deformation_to_template(
                self.parameters['group_label']))
    except ClinicaException as e:
        all_errors.append(e)

    # Dartel Template (single file shared by the whole group)
    try:
        final_template = clinica_group_reader(
            self.caps_directory,
            t1_volume_final_group_template(self.parameters['group_label']))
    except ClinicaException as e:
        all_errors.append(e)

    # Partial volume correction (PVC) is enabled iff a PSF TSV was provided.
    # NOTE: this mutates self.parameters['apply_pvc'] as a side effect.
    if self.parameters['pvc_psf_tsv'] is not None:
        iterables_psf = read_psf_information(
            self.parameters['pvc_psf_tsv'],
            self.subjects,
            self.sessions)
        self.parameters['apply_pvc'] = True
    else:
        # One empty PSF entry per subject so the iterables stay synchronized.
        iterables_psf = [[]] * len(self.subjects)
        self.parameters['apply_pvc'] = False

    if self.parameters['apply_pvc']:
        # pvc tissues input: native-space tissue maps used by the PVC step
        pvc_tissues_input = []
        for tissue_number in self.parameters['pvc_mask_tissues']:
            try:
                current_file = clinica_file_reader(
                    self.subjects,
                    self.sessions,
                    self.caps_directory,
                    t1_volume_native_tpm(tissue_number))
                pvc_tissues_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)

        # Same per-subject transposition as for tissues_input above;
        # skipped when errors occurred since the lists would be ragged.
        if len(all_errors) == 0:
            pvc_tissues_input_final = []
            for subject_tissue_list in zip(*pvc_tissues_input):
                pvc_tissues_input_final.append(subject_tissue_list)
            pvc_tissues_input = pvc_tissues_input_final
    else:
        pvc_tissues_input = []

    # Raise a single exception gathering all the file-reading errors
    if len(all_errors) > 0:
        error_message = 'Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n'
        for msg in all_errors:
            error_message += str(msg)
        raise ClinicaException(error_message)

    # Warn if PET and T1 volumes are far apart in world coordinates
    check_relative_volume_location_in_world_coordinate_system(
        'T1w-MRI', t1w_bids,
        self.parameters['acq_label'] + ' PET', pet_bids,
        self.bids_directory,
        self.parameters['acq_label'])

    # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
    folder_participants_tsv = os.path.join(self.base_dir, self.name)
    save_participants_sessions(self.subjects, self.sessions,
                               folder_participants_tsv)

    if len(self.subjects):
        print_images_to_process(self.subjects, self.sessions)
        cprint('List available in %s' %
               os.path.join(folder_participants_tsv, 'participants.tsv'))
        cprint(
            'The pipeline will last approximately 10 minutes per image.')

    # Synchronized iterables: the node iterates over all per-subject inputs
    # in lockstep (one tuple of inputs per subject/session).
    read_input_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                          mandatory_inputs=True),
        iterables=[('pet_image', pet_bids),
                   ('t1_image_native', t1w_bids),
                   ('mask_tissues', tissues_input),
                   ('psf', iterables_psf),
                   ('flow_fields', flowfields_caps),
                   ('pvc_mask_tissues', pvc_tissues_input)],
        synchronize=True)
    # Group-level inputs are plain attributes, not iterables.
    read_input_node.inputs.reference_mask = reference_mask_file
    read_input_node.inputs.dartel_template = final_template
    self.connect([(read_input_node, self.input_node,
                   [('pet_image', 'pet_image'),
                    ('t1_image_native', 't1_image_native'),
                    ('mask_tissues', 'mask_tissues'),
                    ('flow_fields', 'flow_fields'),
                    ('dartel_template', 'dartel_template'),
                    ('reference_mask', 'reference_mask'),
                    ('psf', 'psf'),
                    ('pvc_mask_tissues', 'pvc_mask_tissues')])])
def build_input_node(self):
    """Build and connect an input node to the pipeline."""
    import os

    from colorama import Fore
    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
    from clinica.utils.input_files import (
        t1_volume_final_group_template,
        t1_volume_native_tpm,
        t1_volume_deformation_to_template)
    from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
    from clinica.utils.stream import cprint
    from clinica.utils.ux import print_groups_in_caps_directory, print_images_to_process

    group_id = self.parameters['group_id']

    # The group folder must have been created by a previous t1-volume run.
    group_path = os.path.join(self.caps_directory, 'groups',
                              'group-' + group_id)
    if not os.path.exists(group_path):
        print_groups_in_caps_directory(self.caps_directory)
        raise ClinicaException(
            '%sGroup %s does not exist. Did you run t1-volume or t1-volume-create-dartel pipeline?%s' %
            (Fore.RED, group_id, Fore.RESET))

    # Every read failure is collected and reported once at the end.
    errors = []
    read_input_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                          mandatory_inputs=True))

    # Segmented tissues in native space, one list of files per tissue class.
    per_tissue_files = []
    for tissue in self.parameters['tissues']:
        try:
            per_tissue_files.append(
                clinica_file_reader(self.subjects,
                                    self.sessions,
                                    self.caps_directory,
                                    t1_volume_native_tpm(tissue)))
        except ClinicaException as e:
            errors.append(e)

    # Transpose: from one list per tissue class to one tuple of tissue maps
    # per subject (what the downstream node expects).
    read_input_node.inputs.native_segmentations = list(zip(*per_tissue_files))

    # Flow fields computed by t1-volume-create-dartel.
    try:
        read_input_node.inputs.flowfield_files = clinica_file_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            t1_volume_deformation_to_template(group_id))
    except ClinicaException as e:
        errors.append(e)

    # Group-level DARTEL template (single file).
    try:
        read_input_node.inputs.template_file = clinica_group_reader(
            self.caps_directory,
            t1_volume_final_group_template(group_id))
    except ClinicaException as e:
        errors.append(e)

    # Report every missing input at once.
    if errors:
        raise ClinicaCAPSError(
            'Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n'
            + ''.join(str(e) for e in errors))

    if len(self.subjects):
        print_images_to_process(self.subjects, self.sessions)
        cprint('The pipeline will last a few minutes per image.')

    self.connect([
        (read_input_node, self.input_node,
         [('native_segmentations', 'native_segmentations'),
          ('flowfield_files', 'flowfield_files'),
          ('template_file', 'template_file')]),
    ])
def build_input_node_longitudinal(self):
    """Build and connect the input node for the longitudinal variant of the pipeline.

    Reads PET images from the BIDS directory and FreeSurfer longitudinal
    outputs (orig_nu volume, white surfaces, Destrieux/Desikan parcellations)
    from the CAPS directory, then connects them to the pipeline input node.

    Raise:
        ClinicaException: if any of the required files could not be found.
    """
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe

    import clinica.utils.input_files as input_files
    from clinica.iotools.utils.data_handling import (
        check_relative_volume_location_in_world_coordinate_system,
    )
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.inputs import clinica_file_reader

    read_parameters_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(
            fields=self.get_input_fields(), mandatory_inputs=True
        ),
        synchronize=True,
    )
    all_errors = []

    # (input field, source directory, file pattern) for every required input.
    # A single table + loop replaces eight copies of the same try/except stanza.
    queries = [
        ("pet", self.bids_directory,
         input_files.bids_pet_nii(self.parameters["acq_label"])),
        ("orig_nu", self.caps_directory, input_files.T1_FS_LONG_ORIG_NU),
        ("white_surface_right", self.caps_directory, input_files.T1_FS_LONG_SURF_R),
        ("white_surface_left", self.caps_directory, input_files.T1_FS_LONG_SURF_L),
        ("destrieux_left", self.caps_directory, input_files.T1_FS_LONG_DESTRIEUX_PARC_L),
        ("destrieux_right", self.caps_directory, input_files.T1_FS_LONG_DESTRIEUX_PARC_R),
        ("desikan_left", self.caps_directory, input_files.T1_FS_LONG_DESIKAN_PARC_L),
        ("desikan_right", self.caps_directory, input_files.T1_FS_LONG_DESIKAN_PARC_R),
    ]
    for field, directory, pattern in queries:
        try:
            setattr(
                read_parameters_node.inputs,
                field,
                clinica_file_reader(self.subjects, self.sessions, directory, pattern),
            )
        except ClinicaException as e:
            all_errors.append(e)

    # Report every missing file at once instead of failing on the first one.
    if len(all_errors) > 0:
        error_message = "Clinica faced errors while trying to read files in your BIDS or CAPS directories.\n"
        for msg in all_errors:
            error_message += str(msg)
        raise ClinicaException(error_message)

    # Warn if PET and T1 volumes are far apart in world coordinates
    # (likely sign of a registration/header problem).
    check_relative_volume_location_in_world_coordinate_system(
        "T1w-MRI (orig_nu.mgz)",
        read_parameters_node.inputs.orig_nu,
        self.parameters["acq_label"] + " PET",
        read_parameters_node.inputs.pet,
        self.bids_directory,
        self.parameters["acq_label"],
    )

    # fmt: off
    self.connect(
        [
            (read_parameters_node, self.input_node,
             [("pet", "pet"),
              ("orig_nu", "orig_nu"),
              ("white_surface_left", "white_surface_left"),
              ("white_surface_right", "white_surface_right"),
              ("destrieux_left", "destrieux_left"),
              ("destrieux_right", "destrieux_right"),
              ("desikan_left", "desikan_left"),
              ("desikan_right", "desikan_right")])
        ]
    )
    # fmt: on
def build_input_node(self):
    """Build and connect an input node to the pipeline.

    Gathers PET and T1w images from the BIDS directory, tissue probability
    maps, DARTEL flow fields and the group DARTEL template from the CAPS
    directory, then wires everything into the pipeline input node. Errors are
    accumulated so the user sees every missing file in a single exception.

    Raise:
        ClinicaException: if the group folder does not exist or if any
            required file could not be found.
    """
    import os
    from os.path import exists, join

    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe

    from clinica.iotools.utils.data_handling import (
        check_relative_volume_location_in_world_coordinate_system,
    )
    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.filemanip import save_participants_sessions
    from clinica.utils.input_files import (
        T1W_NII,
        bids_pet_nii,
        t1_volume_deformation_to_template,
        t1_volume_final_group_template,
        t1_volume_native_tpm,
        t1_volume_native_tpm_in_mni,
    )
    from clinica.utils.inputs import clinica_file_reader, clinica_group_reader
    from clinica.utils.pet import get_suvr_mask, read_psf_information
    from clinica.utils.stream import cprint
    from clinica.utils.ux import (
        print_groups_in_caps_directory,
        print_images_to_process,
    )

    # Check that group already exists (created by a previous t1-volume run)
    if not exists(
            join(self.caps_directory,
                 "groups",
                 f"group-{self.parameters['group_label']}")):
        print_groups_in_caps_directory(self.caps_directory)
        raise ClinicaException(
            f"Group {self.parameters['group_label']} does not exist. "
            "Did you run t1-volume or t1-volume-create-dartel pipeline?")

    # Tissues DataGrabber
    # ====================
    # Every read failure is appended here and reported once at the end.
    all_errors = []

    # Grab reference mask used for SUVR intensity normalization
    reference_mask_file = get_suvr_mask(
        self.parameters["suvr_reference_region"])

    # PET from BIDS directory
    try:
        pet_bids = clinica_file_reader(
            self.subjects,
            self.sessions,
            self.bids_directory,
            bids_pet_nii(self.parameters["acq_label"]),
        )
    except ClinicaException as e:
        all_errors.append(e)

    # Native T1w-MRI
    try:
        t1w_bids = clinica_file_reader(self.subjects,
                                       self.sessions,
                                       self.bids_directory,
                                       T1W_NII)
    except ClinicaException as e:
        all_errors.append(e)

    # mask_tissues: tissue probability maps in MNI space, one list per tissue
    tissues_input = []
    for tissue_number in self.parameters["mask_tissues"]:
        try:
            current_file = clinica_file_reader(
                self.subjects,
                self.sessions,
                self.caps_directory,
                t1_volume_native_tpm_in_mni(tissue_number, False),
            )
            tissues_input.append(current_file)
        except ClinicaException as e:
            all_errors.append(e)
    # Tissues_input has a length of len(self.parameters['mask_tissues']). Each of these elements has a size of
    # len(self.subjects). We want the opposite: a list of size len(self.subjects) whose elements have a size of
    # len(self.parameters['mask_tissues']. The trick is to iter on elements with zip(*my_list)
    tissues_input_final = []
    for subject_tissue_list in zip(*tissues_input):
        tissues_input_final.append(subject_tissue_list)
    tissues_input = tissues_input_final

    # Flowfields (DARTEL deformations to the group template)
    try:
        flowfields_caps = clinica_file_reader(
            self.subjects,
            self.sessions,
            self.caps_directory,
            t1_volume_deformation_to_template(
                self.parameters["group_label"]),
        )
    except ClinicaException as e:
        all_errors.append(e)

    # Dartel Template (single file shared by the whole group)
    try:
        final_template = clinica_group_reader(
            self.caps_directory,
            t1_volume_final_group_template(self.parameters["group_label"]),
        )
    except ClinicaException as e:
        all_errors.append(e)

    # Partial volume correction (PVC) is enabled iff a PSF TSV was provided.
    # NOTE: this mutates self.parameters['apply_pvc'] as a side effect.
    if self.parameters["pvc_psf_tsv"] is not None:
        iterables_psf = read_psf_information(
            self.parameters["pvc_psf_tsv"],
            self.subjects,
            self.sessions,
            self.parameters["acq_label"],
        )
        self.parameters["apply_pvc"] = True
    else:
        # One empty PSF entry per subject so the iterables stay synchronized.
        iterables_psf = [[]] * len(self.subjects)
        self.parameters["apply_pvc"] = False

    if self.parameters["apply_pvc"]:
        # pvc tissues input: native-space tissue maps used by the PVC step
        pvc_tissues_input = []
        for tissue_number in self.parameters["pvc_mask_tissues"]:
            try:
                current_file = clinica_file_reader(
                    self.subjects,
                    self.sessions,
                    self.caps_directory,
                    t1_volume_native_tpm(tissue_number),
                )
                pvc_tissues_input.append(current_file)
            except ClinicaException as e:
                all_errors.append(e)

        # Same per-subject transposition as for tissues_input above;
        # skipped when errors occurred since the lists would be ragged.
        if len(all_errors) == 0:
            pvc_tissues_input_final = []
            for subject_tissue_list in zip(*pvc_tissues_input):
                pvc_tissues_input_final.append(subject_tissue_list)
            pvc_tissues_input = pvc_tissues_input_final
    else:
        pvc_tissues_input = []

    # Raise a single exception gathering all the file-reading errors
    if len(all_errors) > 0:
        error_message = "Clinica faced error(s) while trying to read files in your CAPS/BIDS directories.\n"
        for msg in all_errors:
            error_message += str(msg)
        raise ClinicaException(error_message)

    # Warn if PET and T1 volumes are far apart in world coordinates
    check_relative_volume_location_in_world_coordinate_system(
        "T1w-MRI",
        t1w_bids,
        self.parameters["acq_label"] + " PET",
        pet_bids,
        self.bids_directory,
        self.parameters["acq_label"],
        skip_question=self.parameters["skip_question"],
    )

    # Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
    folder_participants_tsv = os.path.join(self.base_dir, self.name)
    save_participants_sessions(self.subjects, self.sessions,
                               folder_participants_tsv)

    if len(self.subjects):
        print_images_to_process(self.subjects, self.sessions)
        cprint("List available in %s" %
               os.path.join(folder_participants_tsv, "participants.tsv"))
        cprint(
            "The pipeline will last approximately 10 minutes per image.")

    # Synchronized iterables: the node iterates over all per-subject inputs
    # in lockstep (one tuple of inputs per subject/session).
    read_input_node = npe.Node(
        name="LoadingCLIArguments",
        interface=nutil.IdentityInterface(fields=self.get_input_fields(),
                                          mandatory_inputs=True),
        iterables=[
            ("pet_image", pet_bids),
            ("t1_image_native", t1w_bids),
            ("mask_tissues", tissues_input),
            ("psf", iterables_psf),
            ("flow_fields", flowfields_caps),
            ("pvc_mask_tissues", pvc_tissues_input),
        ],
        synchronize=True,
    )
    # Group-level inputs are plain attributes, not iterables.
    read_input_node.inputs.reference_mask = reference_mask_file
    read_input_node.inputs.dartel_template = final_template
    # fmt: off
    self.connect([(read_input_node, self.input_node,
                   [("pet_image", "pet_image"),
                    ("t1_image_native", "t1_image_native"),
                    ("mask_tissues", "mask_tissues"),
                    ("flow_fields", "flow_fields"),
                    ("dartel_template", "dartel_template"),
                    ("reference_mask", "reference_mask"),
                    ("psf", "psf"),
                    ("pvc_mask_tissues", "pvc_mask_tissues")])])
def build_input_node(self):
    """Build and connect an input node to the pipeline."""
    import os

    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.stream import cprint

    # Refuse to run when the group label is already present in the CAPS
    # folder: outputs would clash with the previous analysis.
    # Note(AR): as a consequence, the user cannot compare e.g. Cortical
    # Thickness with a PET measure under the same group_id.
    # TODO: Modify this behaviour
    group_folder = os.path.join(
        self.caps_directory, "groups", f"group-{self.parameters['group_label']}"
    )
    if os.path.exists(group_folder):
        raise ClinicaException(
            f"Group label {self.parameters['group_label']} already exists, "
            f"please choose another one or delete the existing folder and "
            f"also the working directory and rerun the pipeline"
        )

    # TODO: before reusing a group, the provided subjects/visits TSV should be
    # compared against <CAPS>/groups/<group_id>/statistics/participant.tsv
    # (and copied there when absent) so results always match the analysed
    # subjects.

    # Check input files before calling SurfStat with Matlab
    # =====================================================
    collected_errors = []

    # clinica_file_reader expects the regexp to start at subjects/, so the
    # sub-*/ses-*/ prefix is stripped after placeholder substitution.
    pattern_hemisphere = (
        self.parameters["custom_file"]
        .replace("@subject", "sub-*")
        .replace("@session", "ses-*")
        .replace("@fwhm", str(self.parameters["full_width_at_half_maximum"]))
        .replace("sub-*/ses-*/", "")
    )

    # Probe both hemispheres, collecting every error before reporting.
    for hemi, side in (("lh", "left"), ("rh", "right")):
        surface_based_info = {
            "pattern": pattern_hemisphere.replace("@hemi", hemi),
            "description": f"surface-based features on {side} hemisphere at FWHM = {self.parameters['full_width_at_half_maximum']}",
        }
        try:
            clinica_file_reader(
                self.subjects, self.sessions, self.caps_directory, surface_based_info
            )
        except ClinicaException as e:
            collected_errors.append(e)

    # Report every missing file at once.
    if collected_errors:
        raise RuntimeError(
            "Clinica faced errors while trying to read files in your CAPS directory.\n"
            + "".join(str(e) for e in collected_errors)
        )

    # Give pipeline info
    # ==================
    cprint(
        "The pipeline will last a few minutes. Images generated by Matlab will popup during the pipeline."
    )