def convert_config_ini_2_json(config_ini_path):
    """Convert a configuration file in old INI format to new JSON format.

    Parameters
    ----------
    config_ini_path : string
        Path to configuration file in old INI format

    Returns
    -------
    config_json_path : string
        Path to converted configuration file in new JSON format
    """
    print('>> Load config file : {}'.format(config_ini_path))
    config = configparser.ConfigParser()
    try:
        config.read(config_ini_path)
    except configparser.MissingSectionHeaderError:
        print_error(
            '  .. ERROR : file is a datalad git annex but it has not been retrieved yet.'
            ' Please do datalad get ... and reload the dataset (File > Load BIDS Dataset...)'
        )

    config_json_path = '.'.join([os.path.splitext(config_ini_path)[0], 'json'])
    save_configparser_as_json(config, config_json_path, ini_mode=True)
    print(f'  .. Config file converted to JSON and saved as {config_json_path}')

    return config_json_path
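# Hedged usage sketch (hypothetical path): converting a legacy INI
# configuration into the JSON format consumed by the newer pipeline code.
#
#   config_json = convert_config_ini_2_json('/bids_dir/code/ref_anatomical_config.ini')
#   # -> '/bids_dir/code/ref_anatomical_config.json'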
def dep_check():
    """Check if dependencies are installed.

    This includes for the moment:

        * FSL
        * FreeSurfer
    """
    nul = open(os.devnull, 'w')
    error = ""

    # Check for FSL
    if subprocess.call("fslorient", stdout=nul, stderr=nul, shell=True) != 255:
        error = """  .. ERROR: FSL not installed or not working correctly.
            Check that the FSL_DIR variable is exported and the fsl.sh setup script is sourced."""

    # Check for FreeSurfer
    if subprocess.call("mri_info", stdout=nul, stderr=nul, shell=True) != 1:
        error = """  .. ERROR: FREESURFER not installed or not working correctly.
            Check that the FREESURFER_HOME variable is exported and the SetUpFreeSurfer.sh setup script is sourced."""

    # Check for MRtrix
    # if subprocess.call("mrconvert", stdout=nul, stderr=nul, shell=True) != 255:
    #     error = """MRtrix3 not installed or not working correctly.
    #         Check that PATH variable is updated with MRtrix3 binary (bin) directory."""

    # Check for DTK
    # if subprocess.call("dti_recon", stdout=nul, stderr=nul, shell=True) != 0 or "DSI_PATH" not in os.environ:
    #     error = """Diffusion Toolkit not installed or not working correctly.
    #         Check that the DSI_PATH variable is exported and that the dtk binaries
    #         (e.g. dti_recon) are in your path."""

    # Check for DTB
    # if subprocess.call("DTB_dtk2dir", stdout=nul, stderr=nul, shell=True) != 1:
    #     error = """DTB binaries not installed or not working correctly.
    #         Check that the DTB binaries (e.g. DTB_dtk2dir) are in your path and don't give any error."""

    if error != "":
        print_error(error)
        sys.exit(2)
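# Note on the checks above: each bare invocation is expected to exit with a
# known status code (`fslorient` exits 255 without arguments, `mri_info`
# exits 1). A minimal, generic probe under the same assumption (hypothetical
# helper, not part of the original module):
#
#   def tool_on_path(command, expected_status):
#       with open(os.devnull, 'w') as nul:
#           return subprocess.call(command, stdout=nul, stderr=nul, shell=True) == expected_status
#
#   assert tool_on_path("fslorient", 255), "FSL not found"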
def init_dmri_project(project_info, bids_layout, is_new_project, gui=True):
    """Create and initialize a :class:`DiffusionPipelineUI` instance.

    Parameters
    ----------
    project_info : ProjectInfoUI
        Instance of :class:`ProjectInfoUI` class

    bids_layout : bids.BIDSLayout
        PyBIDS BIDS Layout object describing the BIDS dataset

    is_new_project : bool
        If True, this is a new project which has never been processed

    gui : bool
        If True, display messages in GUI

    Returns
    -------
    dmri_inputs_checked : bool
        True if the inputs required by the diffusion pipeline were found

    dmri_pipeline : DiffusionPipelineUI
        The initialized pipeline instance
    """
    dmri_pipeline = diffusion_pipeline.DiffusionPipelineUI(project_info)

    derivatives_directory = os.path.join(project_info.base_directory, "derivatives")

    if (project_info.subject_session != "") and (project_info.subject_session is not None):
        refresh_folder(
            derivatives_directory,
            project_info.subject,
            dmri_pipeline.input_folders,
            session=project_info.subject_session,
        )
    else:
        refresh_folder(derivatives_directory, project_info.subject, dmri_pipeline.input_folders)

    dmri_inputs_checked = dmri_pipeline.check_input(layout=bids_layout, gui=gui)

    if dmri_inputs_checked:
        if is_new_project and dmri_pipeline is not None:
            print("> Initialize dMRI project")
            if not os.path.exists(derivatives_directory):
                try:
                    os.makedirs(derivatives_directory)
                except os.error:
                    print_warning("  .. INFO: %s already existed" % derivatives_directory)
                else:
                    print("  .. INFO: Created directory %s" % derivatives_directory)

            if (project_info.subject_session != "") and (project_info.subject_session is not None):
                project_info.dmri_config_file = os.path.join(
                    derivatives_directory,
                    "%s_%s_diffusion_config.json"
                    % (project_info.subject, project_info.subject_session),
                )
            else:
                project_info.dmri_config_file = os.path.join(
                    derivatives_directory,
                    "%s_diffusion_config.json" % project_info.subject,
                )

            if os.path.exists(project_info.dmri_config_file):
                warn_res = project_info.configure_traits(view="dmri_warning_view")
                if warn_res:
                    print("  .. INFO: Read diffusion config file (%s)" % project_info.dmri_config_file)
                    dmri_save_config(dmri_pipeline, project_info.dmri_config_file)
                else:
                    return None
            else:
                print("  .. INFO: Create diffusion config file (%s)" % project_info.dmri_config_file)
                dmri_save_config(dmri_pipeline, project_info.dmri_config_file)
        else:
            print("> Load dMRI project")
            dmri_conf_loaded = dmri_load_config_json(dmri_pipeline, project_info.dmri_config_file)
            if not dmri_conf_loaded:
                return None
        dmri_pipeline.config_file = project_info.dmri_config_file
    else:
        print_error("  .. ERROR: Missing diffusion inputs")

    return dmri_inputs_checked, dmri_pipeline
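# Hedged usage sketch (hypothetical objects): re-initializing the diffusion
# pipeline of an existing project without GUI dialogs.
#
#   dmri_inputs_checked, dmri_pipeline = init_dmri_project(
#       project_info, bids_layout, is_new_project=False, gui=False)
#   if dmri_inputs_checked:
#       dmri_pipeline.process()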
def create_cmp_command(project, run_anat, run_dmri, run_fmri, number_of_threads=1):
    """Create the command to run the `connectomemapper3` python script.

    Parameters
    ----------
    project : cmp.project.CMP_Project_Info
        Instance of `cmp.project.CMP_Project_Info`

    run_anat : bool
        If True, append the anatomical configuration file to the command

    run_dmri : bool
        If True, append the diffusion configuration file to the command

    run_fmri : bool
        If True, append the fMRI configuration file to the command

    number_of_threads : int
        Number of threads used by OpenMP-parallelized tools (Default: 1)

    Returns
    -------
    command : string
        The command to execute the `connectomemapper3` python script
    """
    cmd = [
        "connectomemapper3",
        "--bids_dir", project.base_directory,
        "--output_dir", project.output_directory,
        "--participant_label", project.subject,
    ]

    if project.subject_session != '':
        cmd.append("--session_label")
        cmd.append(project.subject_session)

    # TODO: review how to handle anatomical pipeline processing
    if run_anat:
        cmd.append("--anat_pipeline_config")
        cmd.append(project.anat_config_file)
    else:
        print_error("  .. ERROR: anatomical pipeline is mandatory")

    if run_dmri:
        cmd.append("--dwi_pipeline_config")
        cmd.append(project.dmri_config_file)
    else:
        print("  .. INFO: diffusion pipeline not performed")

    if run_fmri:
        cmd.append("--func_pipeline_config")
        cmd.append(project.fmri_config_file)
    else:
        print("  .. INFO: functional pipeline not performed")

    cmd.append('--number_of_threads')
    cmd.append(str(number_of_threads))

    return ' '.join(cmd)
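# Illustration (hypothetical project values): for subject 'sub-01' with no
# session and run_anat=run_dmri=True, the returned string would resemble
# the following (a single line, wrapped here for readability):
#
#   connectomemapper3 --bids_dir /bids_dir --output_dir /output_dir
#       --participant_label sub-01
#       --anat_pipeline_config /output_dir/cmp/sub-01/sub-01_anatomical_config.json
#       --dwi_pipeline_config /output_dir/cmp/sub-01/sub-01_diffusion_config.json
#       --number_of_threads 1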
# Core-allocation logic consumed by ``check_and_return_valid_nb_of_cores()``
# (called from ``main()`` below).

# Get the number of available cores and keep one for light processes if possible
max_number_of_cores = multiprocessing.cpu_count() - 1

# Handle the case where only one CPU is available
if max_number_of_cores < 1:
    max_number_of_cores = 1

# Set up the number of subjects to be processed in parallel
if args.number_of_participants_processed_in_parallel is not None:
    parallel_number_of_subjects = int(args.number_of_participants_processed_in_parallel)
def main():
    """Main function of the BIDS App entrypoint script."""
    # Parse script arguments
    cmp_parser = parser.get()
    args = cmp_parser.parse_args()

    print('> BIDS dataset: {}'.format(args.bids_dir))

    # if not args.skip_bids_validator:
    #     run('bids-validator %s' % args.bids_dir)

    if args.participant_label:
        # Only for a subset of subjects
        subjects_to_analyze = args.participant_label
    else:
        # For all subjects
        subject_dirs = glob(os.path.join(args.bids_dir, "sub-*"))
        subjects_to_analyze = [
            subject_dir.split("-")[-1] for subject_dir in subject_dirs
        ]

    print("> Subjects to analyze : {}".format(subjects_to_analyze))

    # Create the derivatives directory if it does not exist
    derivatives_dir = os.path.abspath(args.output_dir)
    if not os.path.isdir(derivatives_dir):
        os.makedirs(derivatives_dir)

    tools = [__cmp_directory__, __freesurfer_directory__, __nipype_directory__]
    for tool in tools:
        tool_dir = os.path.join(args.output_dir, tool)
        if not os.path.isdir(tool_dir):
            os.makedirs(tool_dir)

    # Make sure FreeSurfer is happy with the license
    print('> Set $FS_LICENSE which points to FreeSurfer license location (BIDS App)')
    if os.access(os.path.join('/bids_dir', 'code', 'license.txt'), os.F_OK):
        os.environ['FS_LICENSE'] = os.path.join('/bids_dir', 'code', 'license.txt')
    elif args.fs_license:
        os.environ['FS_LICENSE'] = os.path.abspath(args.fs_license)
    else:
        print_error(
            "  .. ERROR: Missing license.txt in code/ directory OR unspecified "
            "FreeSurfer license with the option --fs_license"
        )
        return 1
    print('  .. INFO: $FS_LICENSE set to {}'.format(os.environ['FS_LICENSE']))

    parallel_number_of_subjects, number_of_threads = check_and_return_valid_nb_of_cores(args)

    # Set the number of threads used by programs based on the OpenMP multi-threading library.
    # This includes AFNI, Dipy, FreeSurfer, FSL and MRtrix3.
    # os.environ.update(OMP_NUM_THREADS=f'{number_of_threads}')
    # print(' * OMP_NUM_THREADS set to {} (total of cores: {})'.format(os.environ['OMP_NUM_THREADS'], max_number_of_cores))

    # Set the number of threads used by ANTs if specified.
    # Otherwise, use the same number as the OpenMP threads.
    if args.ants_number_of_threads is not None:
        os.environ['ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS'] = f'{args.ants_number_of_threads}'
        print(f' * ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS set to {os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"]}')

    # Initialize the random generator for enhanced reproducibility.
    # Numpy needs to be imported after setting the different multi-threading environment variables.
    # See https://stackoverflow.com/questions/30791550/limit-number-of-threads-in-numpy for more details.
    # noinspection PyPep8
    numpy.random.seed(1234)

    # Set the random generator seed of MRtrix if specified
    if args.mrtrix_random_seed is not None:
        os.environ['MRTRIX_RNG_SEED'] = f'{args.mrtrix_random_seed}'
        print(f' * MRTRIX_RNG_SEED set to {os.environ["MRTRIX_RNG_SEED"]}')

    # Set the random generator seed of ANTs if specified
    if args.ants_random_seed is not None:
        os.environ['ANTS_RANDOM_SEED'] = f'{args.ants_random_seed}'
        print(f' * ANTS_RANDOM_SEED set to {os.environ["ANTS_RANDOM_SEED"]}')

    # Run the participant level
    if args.analysis_level == "participant":

        # report_app_run_to_google_analytics()
        if args.notrack is not True:
            report_usage('BIDS App', 'Run', __version__)

        maxprocs = parallel_number_of_subjects
        processes = []

        # Find all T1s and skullstrip them
        for subject_label in subjects_to_analyze:

            project = ProjectInfo()
            project.base_directory = args.bids_dir
            project.output_directory = args.output_dir
            project.subjects = ['sub-{}'.format(label) for label in subjects_to_analyze]
            project.subject = 'sub-{}'.format(subject_label)
            print('> Process subject {}'.format(project.subject))

            if args.session_label is not None:
                print("> Sessions specified by input args : {}".format(args.session_label))
                subject_session_labels = args.session_label
                project.subject_sessions = [
                    'ses-{}'.format(subject_session_label)
                    for subject_session_label in subject_session_labels
                ]
                # Check that each session exists
                for session in project.subject_sessions:
                    session_path = os.path.join(args.bids_dir, project.subject, session)
                    if not os.path.exists(session_path):
                        print_error(
                            f'  .. ERROR: The directory {session_path} corresponding '
                            f'to the session {session.split("-")[-1]} '
                            'specified by the --session_label input flag DOES NOT exist.'
                        )
                        return 1
                    else:
                        print(
                            f'  .. INFO: The directory {session_path} corresponding '
                            f'to the session {session.split("-")[-1]} '
                            'specified by the --session_label input flag DOES exist.'
                        )
            else:
                # Check if there are multiple sessions
                # (sub-XX/ses-YY/anat/... structure or sub-XX/anat/... structure?)
                subject_session_dirs = glob(os.path.join(args.bids_dir, project.subject, "ses-*"))
                project.subject_sessions = [
                    'ses-{}'.format(subject_session_dir.split("-")[-1])
                    for subject_session_dir in subject_session_dirs
                ]
                if len(project.subject_sessions) > 0:
                    # Session structure
                    print("> Sessions to analyze : {}".format(project.subject_sessions))
                else:
                    project.subject_sessions = ['']

            for session in project.subject_sessions:

                if not args.coverage:
                    while len(processes) == maxprocs:
                        manage_processes(processes)

                if session != "":
                    print('> Process session {}'.format(session))
                project.subject_session = session

                # Derivatives folder creation
                for tool in tools:
                    if project.subject_session == "":
                        derivatives_dir = os.path.join(args.output_dir, tool, project.subject)
                    elif project.subject_session != "" and tool == __freesurfer_directory__:
                        derivatives_dir = os.path.join(
                            args.output_dir, tool,
                            f'{project.subject}_{project.subject_session}')
                    elif project.subject_session != "" and tool != __freesurfer_directory__:
                        derivatives_dir = os.path.join(args.output_dir, tool,
                                                       project.subject,
                                                       project.subject_session)
                    if not os.path.isdir(derivatives_dir):
                        os.makedirs(derivatives_dir)

                run_anat = False
                run_dmri = False
                run_fmri = False

                if args.anat_pipeline_config is not None:
                    if check_configuration_format(args.anat_pipeline_config) == '.ini':
                        anat_pipeline_config = convert_config_ini_2_json(args.anat_pipeline_config)
                    else:
                        anat_pipeline_config = args.anat_pipeline_config
                    project.anat_config_file = create_subject_configuration_from_ref(
                        project, anat_pipeline_config, 'anatomical')
                    run_anat = True
                    print(f"\t ... Anatomical config created : {project.anat_config_file}")

                if args.dwi_pipeline_config is not None:
                    if check_configuration_format(args.dwi_pipeline_config) == '.ini':
                        dwi_pipeline_config = convert_config_ini_2_json(args.dwi_pipeline_config)
                    else:
                        dwi_pipeline_config = args.dwi_pipeline_config
                    project.dmri_config_file = create_subject_configuration_from_ref(
                        project, dwi_pipeline_config, 'diffusion')
                    run_dmri = True
                    print(f"\t ... Diffusion config created : {project.dmri_config_file}")

                if args.func_pipeline_config is not None:
                    if check_configuration_format(args.func_pipeline_config) == '.ini':
                        func_pipeline_config = convert_config_ini_2_json(args.func_pipeline_config)
                    else:
                        func_pipeline_config = args.func_pipeline_config
                    project.fmri_config_file = create_subject_configuration_from_ref(
                        project, func_pipeline_config, 'fMRI')
                    run_fmri = True
                    print(f"\t ... fMRI config created : {project.fmri_config_file}")

                if args.anat_pipeline_config is not None:
                    print("  .. INFO: Running pipelines : ")
                    print("\t\t- Anatomical MRI (segmentation and parcellation)")

                    if args.dwi_pipeline_config is not None:
                        print("\t\t- Diffusion MRI (structural connectivity matrices)")

                    if args.func_pipeline_config is not None:
                        print("\t\t- fMRI (functional connectivity matrices)")

                    if args.coverage:
                        if run_anat:
                            run_individual(
                                project.base_directory,
                                project.output_directory,
                                project.subject,
                                project.subject_session,
                                anat_pipeline_config=project.anat_config_file,
                                dwi_pipeline_config=(None if not run_dmri else project.dmri_config_file),
                                func_pipeline_config=(None if not run_fmri else project.fmri_config_file),
                                number_of_threads=number_of_threads)
                    else:
                        cmd = create_cmp_command(project=project,
                                                 run_anat=run_anat,
                                                 run_dmri=run_dmri,
                                                 run_fmri=run_fmri,
                                                 number_of_threads=number_of_threads)
                        print_blue("... cmd : {}".format(cmd))
cmd : {}".format(cmd)) if project.subject_session != "": log_file = '{}_{}_log.txt'.format( project.subject, project.subject_session) else: log_file = '{}_log.txt'.format(project.subject) proc = run(command=cmd, env={}, log_filename=os.path.join( project.output_directory, __cmp_directory__, project.subject, project.subject_session, log_file)) processes.append(proc) else: print("... Error: at least anatomical configuration file " "has to be specified (--anat_pipeline_config)") return 1 if not args.coverage: while len(processes) > 0: manage_processes(processes) clean_cache(args.bids_dir) # running group level; ultimately it will compute average connectivity matrices # elif args.analysis_level == "group": # brain_sizes = [] # for subject_label in subjects_to_analyze: # for brain_file in glob(os.path.join(args.output_dir, "sub-%s*.nii*"%subject_label)): # data = nibabel.load(brain_file).get_data() # # calcualte average mask size in voxels # brain_sizes.append((data != 0).sum()) # # with open(os.path.join(args.output_dir, "avg_brain_size.txt"), 'w') as fp: # fp.write("Average brain size is %g voxels"%numpy.array(brain_sizes).mean()) return 1
def select_subject(self):
    """Function to select the subject and session for which to inspect outputs."""
    print("> Selection of subject (and session) for which to inspect outputs")

    valid_selected_subject = False
    select = True
    aborted = False

    while not valid_selected_subject and not aborted:

        # Select subject from BIDS dataset
        np_res = self.project_info.configure_traits(view="subject_view")

        if not np_res:
            aborted = True
            break

        print("  .. INFO: Selected subject: {}".format(self.project_info.subject))

        # Select session if any
        bids_layout = BIDSLayout(self.project_info.base_directory)
        subject = self.project_info.subject.split("-")[1]
        sessions = bids_layout.get(target="session", return_type="id", subject=subject)

        if len(sessions) > 0:
            print("  .. INFO: Input dataset has sessions")
            print(sessions)

            self.project_info.subject_sessions = []
            for ses in sessions:
                self.project_info.subject_sessions.append("ses-" + str(ses))

            np_res = self.project_info.configure_traits(view="subject_session_view")

            if not np_res:
                aborted = True
                break

            self.project_info.anat_config_file = os.path.join(
                self.project_info.base_directory,
                "derivatives",
                __cmp_directory__,
                "{}".format(self.project_info.subject),
                "{}".format(self.project_info.subject_session),
                "{}_{}_anatomical_config.json".format(
                    self.project_info.subject, self.project_info.subject_session),
            )

            if os.access(self.project_info.anat_config_file, os.F_OK):
                print("> Initialize anatomical pipeline")
                self.anat_pipeline = project.init_anat_project(self.project_info, False)
            else:
                self.anat_pipeline = None

            if self.dmri_inputs_checked:
                self.project_info.dmri_config_file = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __cmp_directory__,
                    "{}".format(self.project_info.subject),
                    "{}".format(self.project_info.subject_session),
                    "{}_{}_diffusion_config.json".format(
                        self.project_info.subject, self.project_info.subject_session),
                )

                if os.access(self.project_info.dmri_config_file, os.F_OK):
                    print("> Initialize diffusion pipeline")
                    (
                        dmri_valid_inputs,
                        self.dmri_pipeline,
                    ) = project.init_dmri_project(self.project_info, bids_layout, False)
                else:
                    self.dmri_pipeline = None

                # self.dmri_pipeline.subject = self.project_info.subject
                # self.dmri_pipeline.global_conf.subject = self.project_info.subject

            if self.fmri_inputs_checked:
                self.project_info.fmri_config_file = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __cmp_directory__,
                    "{}".format(self.project_info.subject),
                    "{}".format(self.project_info.subject_session),
                    "{}_{}_fMRI_config.json".format(
                        self.project_info.subject, self.project_info.subject_session),
                )

                if os.access(self.project_info.fmri_config_file, os.F_OK):
                    print("> Initialize fMRI pipeline")
                    (
                        fmri_valid_inputs,
                        self.fmri_pipeline,
                    ) = project.init_fmri_project(self.project_info, bids_layout, False)
                else:
                    self.fmri_pipeline = None

                # self.fmri_pipeline.subject = self.project_info.subject
                # self.fmri_pipeline.global_conf.subject = self.project_info.subject

            # self.anat_pipeline.global_conf.subject_session = self.project_info.subject_session
            #
            # if self.dmri_pipeline is not None:
            #     self.dmri_pipeline.global_conf.subject_session = self.project_info.subject_session
            #
            # if self.fmri_pipeline is not None:
            #     self.fmri_pipeline.global_conf.subject_session = self.project_info.subject_session

            print("  .. INFO: Selected session %s" % self.project_info.subject_session)
INFO: Selected session %s" % self.project_info.subject_session) if self.anat_pipeline is not None: self.anat_pipeline.stages[ "Segmentation"].config.freesurfer_subject_id = os.path.join( self.project_info.base_directory, "derivatives", __freesurfer_directory__, "{}_{}".format(self.project_info.subject, self.project_info.subject_session), ) else: print(" .. INFO: No session detected") self.project_info.anat_config_file = os.path.join( self.project_info.base_directory, "derivatives", __cmp_directory__, "{}".format(self.project_info.subject), "{}_anatomical_config.json".format( self.project_info.subject), ) if os.access(self.project_info.anat_config_file, os.F_OK): self.anat_pipeline = project.init_anat_project( self.project_info, False) else: self.anat_pipeline = None if self.dmri_inputs_checked: self.project_info.dmri_config_file = os.path.join( self.project_info.base_directory, "derivatives", __cmp_directory__, "{}".format(self.project_info.subject), "{}_diffusion_config.json".format( self.project_info.subject), ) if os.access(self.project_info.dmri_config_file, os.F_OK): ( dmri_valid_inputs, self.dmri_pipeline, ) = project.init_dmri_project(self.project_info, bids_layout, False) else: self.dmri_pipeline = None # self.dmri_pipeline.subject = self.project_info.subject # self.dmri_pipeline.global_conf.subject = self.project_info.subject if self.fmri_inputs_checked: self.project_info.fmri_config_file = os.path.join( self.project_info.base_directory, "derivatives", __cmp_directory__, "{}".format(self.project_info.subject), "{}_fMRI_config.json".format( self.project_info.subject), ) if os.access(self.project_info.fmri_config_file, os.F_OK): ( fmri_valid_inputs, self.fmri_pipeline, ) = project.init_fmri_project(self.project_info, bids_layout, False) else: self.fmri_pipeline = None # self.fmri_pipeline.subject = self.project_info.subject # self.fmri_pipeline.global_conf.subject = self.project_info.subject # self.anat_pipeline.global_conf.subject_session = '' if self.anat_pipeline is not None: self.anat_pipeline.stages[ "Segmentation"].config.freesurfer_subjects_dir = os.path.join( self.project_info.base_directory, "derivatives", __freesurfer_directory__, "{}".format(self.project_info.subject), ) if self.anat_pipeline is not None: print("> Anatomical pipeline output inspection") self.anat_pipeline.view_mode = "inspect_outputs_view" for stage in list(self.anat_pipeline.stages.values()): print(" ... Inspect stage {}".format(stage)) stage.define_inspect_outputs() # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) if (len(stage.inspect_outputs) > 0) and (stage.inspect_outputs[0] != "Outputs not available"): self.output_anat_available = True if self.dmri_pipeline is not None: print("> Diffusion pipeline output inspection") self.dmri_pipeline.view_mode = "inspect_outputs_view" for stage in list(self.dmri_pipeline.stages.values()): print(" ... Inspect stage {}".format(stage)) stage.define_inspect_outputs() # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) if (len(stage.inspect_outputs) > 0) and (stage.inspect_outputs[0] != "Outputs not available"): self.output_dmri_available = True if self.fmri_pipeline is not None: print("> fMRI pipeline output inspection") self.fmri_pipeline.view_mode = "inspect_outputs_view" for stage in list(self.fmri_pipeline.stages.values()): print(" ... 
Inspect stage {}".format(stage)) stage.define_inspect_outputs() # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) if (len(stage.inspect_outputs) > 0) and (stage.inspect_outputs[0] != "Outputs not available"): self.output_fmri_available = True print_blue(" .. Anatomical output(s) available : %s" % self.output_anat_available) print_blue(" .. Diffusion output(s) available : %s" % self.output_dmri_available) print_blue(" .. fMRI output(s) available : %s" % self.output_fmri_available) if (self.output_anat_available or self.output_dmri_available or self.output_fmri_available): valid_selected_subject = True else: self.error_msg = ( " .. ERROR: No output available! " + "Please select another subject (and session if any)!") print_error(self.error_msg) select = error(message=self.error_msg, title="Error", buttons=["OK", "Cancel"]) aborded = not select return aborded
def main():
    """Main function that runs the connectomemapper3 python script.

    Returns
    -------
    exit_code : {0, 1}
        An exit code given to `sys.exit()` that can be:

            * '0' in case of successful completion

            * '1' in case of an error
    """
    # Parse script arguments
    parser = create_parser()
    args = parser.parse_args()

    # Check dependencies
    dep_check()

    # Add the current directory to the path; useful if the DTB_ binaries are not installed
    os.environ["PATH"] += os.pathsep + os.path.dirname(sys.argv[0])

    # Version and copyright message
    info()

    project = cmp.project.ProjectInfo()
    project.base_directory = os.path.abspath(args.bids_dir)
    project.output_directory = os.path.abspath(args.output_dir)
    project.subjects = ["{}".format(args.participant_label)]
    project.subject = "{}".format(args.participant_label)

    try:
        bids_layout = BIDSLayout(project.base_directory)
    except Exception:  # pragma: no cover
        print_error("  .. EXCEPTION: Raised at BIDSLayout")
        exit_code = 1
        return exit_code

    if args.session_label is not None:
        project.subject_sessions = ["{}".format(args.session_label)]
        project.subject_session = "{}".format(args.session_label)
        print("  .. INFO: Dataset has subject/session layout")
    else:
        print("  .. INFO: Dataset has basic subject layout")
        project.subject_sessions = [""]
        project.subject_session = ""

    project.anat_config_file = os.path.abspath(args.anat_pipeline_config)

    # Perform only the anatomical pipeline
    if args.dwi_pipeline_config is None and args.func_pipeline_config is None:

        anat_pipeline = cmp.project.init_anat_project(project, False)
        if anat_pipeline is not None:
            anat_valid_inputs = anat_pipeline.check_input(bids_layout, gui=False)

            if args.number_of_threads is not None:
                print(f"  .. INFO: Set FreeSurfer and ANTs to use {args.number_of_threads} threads by means of OpenMP")
                anat_pipeline.stages["Segmentation"].config.number_of_threads = args.number_of_threads

            if anat_valid_inputs:
                anat_pipeline.process()
            else:  # pragma: no cover
                print_error("  .. ERROR: Invalid inputs")
                exit_code = 1
                return exit_code

    # Perform the anatomical and the diffusion pipelines
    elif args.dwi_pipeline_config is not None and args.func_pipeline_config is None:

        project.dmri_config_file = os.path.abspath(args.dwi_pipeline_config)

        anat_pipeline = cmp.project.init_anat_project(project, False)
        if anat_pipeline is not None:
            anat_valid_inputs = anat_pipeline.check_input(bids_layout, gui=False)

            if args.number_of_threads is not None:
                print(f"  .. INFO: Set FreeSurfer and ANTs to use {args.number_of_threads} threads by means of OpenMP")
                anat_pipeline.stages["Segmentation"].config.number_of_threads = args.number_of_threads

            if anat_valid_inputs:
                print(">> Process anatomical pipeline")
                anat_pipeline.process()
            else:  # pragma: no cover
                print_error("  .. ERROR: Invalid inputs")
ERROR: Invalid inputs") exit_code = 1 return exit_code anat_valid_outputs, msg = anat_pipeline.check_output() project.freesurfer_subjects_dir = anat_pipeline.stages[ "Segmentation"].config.freesurfer_subjects_dir project.freesurfer_subject_id = anat_pipeline.stages[ "Segmentation"].config.freesurfer_subject_id if anat_valid_outputs: dmri_valid_inputs, dmri_pipeline = cmp.project.init_dmri_project( project, bids_layout, False) if dmri_pipeline is not None: dmri_pipeline.parcellation_scheme = anat_pipeline.parcellation_scheme dmri_pipeline.atlas_info = anat_pipeline.atlas_info if anat_pipeline.parcellation_scheme == "Custom": dmri_pipeline.custom_atlas_name = anat_pipeline.stages[ "Parcellation"].config.custom_parcellation.atlas dmri_pipeline.custom_atlas_res = anat_pipeline.stages[ "Parcellation"].config.custom_parcellation.res if dmri_valid_inputs: dmri_pipeline.process() else: # pragma: no cover print(" .. ERROR: Invalid inputs") exit_code = 1 return exit_code else: # pragma: no cover print_error( f" .. ERROR: Invalid anatomical outputs for diffusion pipeline" ) print_error(f"{msg}") exit_code = 1 return exit_code # Perform the anatomical and the fMRI pipelines elif args.dwi_pipeline_config is None and args.func_pipeline_config is not None: project.fmri_config_file = os.path.abspath(args.func_pipeline_config) anat_pipeline = cmp.project.init_anat_project(project, False) if anat_pipeline is not None: anat_valid_inputs = anat_pipeline.check_input(bids_layout, gui=False) if args.number_of_threads is not None: print( f" .. INFO: Set Freesurfer and ANTs to use {args.number_of_threads} threads by the means of OpenMP" ) anat_pipeline.stages[ "Segmentation"].config.number_of_threads = args.number_of_threads if anat_valid_inputs: print(">> Process anatomical pipeline") anat_pipeline.process() else: # pragma: no cover print_error(" .. ERROR: Invalid inputs") exit_code = 1 return exit_code anat_valid_outputs, msg = anat_pipeline.check_output() project.freesurfer_subjects_dir = anat_pipeline.stages[ "Segmentation"].config.freesurfer_subjects_dir project.freesurfer_subject_id = anat_pipeline.stages[ "Segmentation"].config.freesurfer_subject_id if anat_valid_outputs: fmri_valid_inputs, fmri_pipeline = cmp.project.init_fmri_project( project, bids_layout, False) if fmri_pipeline is not None: fmri_pipeline.parcellation_scheme = anat_pipeline.parcellation_scheme fmri_pipeline.atlas_info = anat_pipeline.atlas_info if anat_pipeline.parcellation_scheme == "Custom": fmri_pipeline.custom_atlas_name = anat_pipeline.stages[ "Parcellation"].config.custom_parcellation.atlas fmri_pipeline.custom_atlas_res = anat_pipeline.stages[ "Parcellation"].config.custom_parcellation.res if fmri_valid_inputs: print(">> Process fmri pipeline") fmri_pipeline.process() else: # pragma: no cover print(" .. ERROR: Invalid inputs") exit_code = 1 return exit_code else: # pragma: no cover print_error( f" .. ERROR: Invalid anatomical outputs for fMRI pipeline") print_error(f"{msg}") exit_code = 1 return exit_code # Perform all pipelines (anatomical/diffusion/fMRI) elif args.dwi_pipeline_config is not None and args.func_pipeline_config is not None: project.dmri_config_file = os.path.abspath(args.dwi_pipeline_config) project.fmri_config_file = os.path.abspath(args.func_pipeline_config) anat_pipeline = cmp.project.init_anat_project(project, False) if anat_pipeline is not None: anat_valid_inputs = anat_pipeline.check_input(bids_layout, gui=False) if args.number_of_threads is not None: print( f" .. 
                anat_pipeline.stages["Segmentation"].config.number_of_threads = args.number_of_threads

            if anat_valid_inputs:
                print(">> Process anatomical pipeline")
                anat_pipeline.process()
            else:  # pragma: no cover
                print_error("  .. ERROR: Invalid inputs")
                exit_code = 1
                return exit_code

            anat_valid_outputs, msg = anat_pipeline.check_output()

            project.freesurfer_subjects_dir = anat_pipeline.stages["Segmentation"].config.freesurfer_subjects_dir
            project.freesurfer_subject_id = anat_pipeline.stages["Segmentation"].config.freesurfer_subject_id

            if anat_valid_outputs:
                dmri_valid_inputs, dmri_pipeline = cmp.project.init_dmri_project(project, bids_layout, False)
                if dmri_pipeline is not None:
                    dmri_pipeline.parcellation_scheme = anat_pipeline.parcellation_scheme
                    dmri_pipeline.atlas_info = anat_pipeline.atlas_info

                    if anat_pipeline.parcellation_scheme == "Custom":
                        dmri_pipeline.custom_atlas_name = anat_pipeline.stages["Parcellation"].config.custom_parcellation.atlas
                        dmri_pipeline.custom_atlas_res = anat_pipeline.stages["Parcellation"].config.custom_parcellation.res

                    if dmri_valid_inputs:
                        print(">> Process diffusion pipeline")
                        dmri_pipeline.process()
                    else:  # pragma: no cover
                        print_error("  .. ERROR: Invalid inputs")
                        exit_code = 1
                        return exit_code

                fmri_valid_inputs, fmri_pipeline = cmp.project.init_fmri_project(project, bids_layout, False)
                if fmri_pipeline is not None:
                    fmri_pipeline.parcellation_scheme = anat_pipeline.parcellation_scheme
                    fmri_pipeline.atlas_info = anat_pipeline.atlas_info

                    if anat_pipeline.parcellation_scheme == "Custom":
                        fmri_pipeline.custom_atlas_name = anat_pipeline.stages["Parcellation"].config.custom_parcellation.atlas
                        fmri_pipeline.custom_atlas_res = anat_pipeline.stages["Parcellation"].config.custom_parcellation.res

                    if fmri_valid_inputs:
                        print(">> Process fMRI pipeline")
                        fmri_pipeline.process()
                    else:  # pragma: no cover
                        print_error("  .. ERROR: Invalid inputs")
                        exit_code = 1
                        return exit_code
            else:  # pragma: no cover
                print_error("  .. ERROR: Invalid anatomical outputs for diffusion and fMRI pipelines")
                print_error(f"{msg}")
                exit_code = 1
                return exit_code

    exit_code = 0
    return exit_code
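# Hedged sketch (standard Python convention, assumed rather than shown in
# this excerpt): the documented {0, 1} exit code is forwarded to the shell
# by the script entrypoint, e.g.
#
#   if __name__ == "__main__":
#       sys.exit(main())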
def save_configparser_as_json(config, config_json_path, ini_mode=False, debug=False):
    """Save a ConfigParser to JSON file.

    Parameters
    ----------
    config : Instance(configparser.ConfigParser)
        Instance of ConfigParser

    config_json_path : string
        Output path of JSON configuration file

    ini_mode : bool
        If `True`, handles all content stored in strings

    debug : bool
        If `True`, show additional prints
    """
    config_json = {}

    # In the case of the diffusion pipeline
    if 'diffusion_stage' in config.sections():
        recon_processing_tool = config['diffusion_stage'].get('recon_processing_tool')
        tracking_processing_tool = config['diffusion_stage'].get('tracking_processing_tool')

    for section in config.sections():
        config_json[section] = {}
        for name, value in config.items(section):

            # Keep only parameters that are used by the diffusion stage
            # of the diffusion pipeline. This simplifies the reading of
            # its configuration file.
            if 'diffusion_stage' in section:
                # Skip adding diffusion reconstruction parameters
                if recon_processing_tool == 'Dipy':
                    if 'mrtrix_recon_config' in name:
                        continue
                elif recon_processing_tool == 'MRtrix':
                    if 'dipy_recon_config' in name:
                        continue
                # Skip adding tracking parameters
                if tracking_processing_tool == 'Dipy':
                    if 'mrtrix_tracking_config' in name:
                        continue
                elif tracking_processing_tool == 'MRtrix':
                    if 'dipy_tracking_config' in name:
                        continue

            if '_editor' in name:
                if debug:
                    print_warning(f'  .. DEBUG: Skip parameter {section} / {name}')
                continue

            if 'log_visualization' in name:
                if debug:
                    print_warning(f'  .. DEBUG: Skip parameter {section} / {name}')
                continue

            if 'circular_layout' in name:
                if debug:
                    print_warning(f'  .. DEBUG: Skip parameter {section} / {name}')
                continue

            is_iterable = False

            if ini_mode:
                try:
                    if not (section == 'parcellation_stage' and name == 'ants_precision_type'):
                        value = eval(value)
                        if debug:
                            print_warning(f'  .. DEBUG: String {value} evaluated')
                    else:
                        if debug:
                            print_warning(f'  .. DEBUG: String {value} not evaluated')
                except Exception:
                    if debug:
                        print_error(f'  .. EXCEPTION: String {value} COULD NOT BE evaluated')

            if isinstance(value, dict):
                if debug:
                    print_warning(f'  .. DEBUG: Processing {section} / {name} / {value} as dict')
                config_json[section][name] = value
                is_iterable = True
            elif isinstance(value, list):
                if debug:
                    print_warning(f'  .. DEBUG: Processing {section} / {name} / {value} as list')
                config_json[section][name] = value
                is_iterable = True
            elif isinstance(value, Iterable) and not isinstance(value, str):
                if debug:
                    print_warning(f'  .. DEBUG: Processing {section} / {name} / {value} as iterable')
                config_json[section][name] = [x for x in value if x]
                is_iterable = True
            elif isinstance(value, bool):
                if debug:
                    print_warning(f'  .. DEBUG: Processing {section} / {name} / {value} as boolean')
                config_json[section][name] = [value]
            elif value and not isinstance(value, str):
                if debug:
                    print_warning(f'  .. DEBUG: Processing {section} / {name} / {value} as not a string')
                config_json[section][name] = [value]
            elif value and isinstance(value, str):
                value = value.strip()
                if value.isnumeric():
                    if debug:
                        print_warning(f'  .. DEBUG: Processing {section} / {name} / {value} as number')
                    value = float(value)
                    if value.is_integer():
                        value = int(value)
                    config_json[section][name] = [value]
                else:
                    if debug:
                        print_warning(f'  .. DEBUG: Processing {section} / {name} / {value} as string')
                    config_json[section][name] = [value]
            else:
                if debug:
                    print_warning(f'  .. DEBUG : Type: {type(value)} / value : {value}')
                config_json[section][name] = ''

            if not is_iterable:
                if len(config_json[section][name]) == 1:
                    config_json[section][name] = config_json[section][name][0]
                elif len(config_json[section][name]) == 0:
                    config_json[section][name] = ''

            if config_json[section][name] == '':
                del config_json[section][name]

    config_json['Global']['version'] = __version__

    if debug:
        print_blue(f'  .. DEBUG: {config_json}')

    with open(config_json_path, 'w') as outfile:
        json.dump(config_json, outfile, indent=4)
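# Illustration (hypothetical INI values) of the ini_mode coercion above:
#
#   '[0.5, 0.5, 0.5]' -> [0.5, 0.5, 0.5]  (eval'd to a list, kept as iterable)
#   'True'            -> True             (eval'd to a boolean, wrapped then unwrapped)
#   '2'               -> 2                (eval'd to an int, wrapped then unwrapped)
#   ''                -> entry dropped from the JSON output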
def set_pipeline_attributes_from_config(pipeline, config, debug=False):
    """Set the pipeline stage attributes given a configuration.

    Parameters
    ----------
    pipeline : Instance(Pipeline)
        Instance of pipeline

    config : Dict
        Dictionary of configuration parameters loaded from the JSON configuration file

    debug : bool
        If `True`, show additional prints
    """
    global_keys = [
        prop for prop in list(pipeline.global_conf.traits().keys())
        if 'trait' not in prop
    ]  # possibly dangerous..?

    for key in global_keys:
        if key != "subject" and \
                key != "subjects" and \
                key != "subject_session" and \
                key != "subject_sessions":
            if key in config['Global'].keys():
                conf_value = config['Global'][key]
                setattr(pipeline.global_conf, key, conf_value)

    for stage in list(pipeline.stages.values()):
        stage_keys = [
            prop for prop in list(stage.config.traits().keys())
            if 'trait' not in prop
        ]  # possibly dangerous..?
        for key in stage_keys:
            if 'config' in key:  # subconfig
                sub_config = getattr(stage.config, key)
                stage_sub_keys = [
                    prop for prop in list(sub_config.traits().keys())
                    if 'trait' not in prop
                ]
                for sub_key in stage_sub_keys:
                    if stage.name in config.keys():
                        tmp_key = key + '.' + sub_key
                        if tmp_key in config[stage.name].keys():
                            conf_value = config[stage.name][tmp_key]
                            try:
                                # Convert the parameter to the proper expected type
                                if isinstance(getattr(sub_config, sub_key), tuple):
                                    conf_value = tuple(conf_value)
                                elif isinstance(getattr(sub_config, sub_key), bool):
                                    conf_value = bool(conf_value)
                                elif isinstance(getattr(sub_config, sub_key), list):
                                    conf_value = list(conf_value)
                                elif isinstance(getattr(sub_config, sub_key), dict):
                                    conf_value = dict(conf_value)
                                elif isinstance(getattr(sub_config, sub_key), int):
                                    conf_value = int(float(conf_value))
                                elif isinstance(getattr(sub_config, sub_key), float):
                                    conf_value = float(conf_value)
                                setattr(sub_config, sub_key, conf_value)
                                if debug:
                                    print(f'  .. DEBUG: Set {sub_config}.{sub_key} to {conf_value}')
                            except Exception as e:
                                if debug:
                                    print_warning('  .. EXCEPTION raised while setting '
                                                  f'{sub_config}.{sub_key} to {conf_value}')
                                    print_error(f'  {e}')
            else:
                if stage.name in config.keys():
                    if key in config[stage.name].keys():
                        conf_value = config[stage.name][key]
                        try:
                            # Convert the parameter to the proper expected type
                            if isinstance(getattr(stage.config, key), tuple):
                                conf_value = tuple(conf_value)
                            elif isinstance(getattr(stage.config, key), bool):
                                conf_value = bool(conf_value)
                            elif isinstance(getattr(stage.config, key), list):
                                conf_value = list(conf_value)
                            elif isinstance(getattr(stage.config, key), dict):
                                conf_value = dict(conf_value)
                            elif isinstance(getattr(stage.config, key), int):
                                conf_value = int(float(conf_value))
                            elif isinstance(getattr(stage.config, key), float):
                                conf_value = float(conf_value)
                            setattr(stage.config, key, conf_value)
                            if debug:
                                print(f'  .. DEBUG: Set {stage.config}.{key} to {conf_value}')
                        except Exception as e:
                            if debug:
                                print_warning('  .. EXCEPTION raised while setting '
                                              f'{stage.config}.{key} to {conf_value}')
                                print_error(f'  {e}')

    setattr(pipeline, 'number_of_cores', int(config['Multi-processing']['number_of_cores']))
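# Minimal sketch (hypothetical stage and parameter names) of the dictionary
# shape this traversal expects; only the 'Global' and 'Multi-processing'
# sections are taken from the code above, the stage entry is illustrative:
#
#   config = {
#       'Global': {'version': 'v3.X.Y'},
#       'segmentation_stage': {'number_of_threads': 4},
#       'Multi-processing': {'number_of_cores': 2},
#   }
#   set_pipeline_attributes_from_config(pipeline, config)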