def check_configuration_version(config):
    """Compare the CMP3 version stored in a configuration with the running one.

    Parameters
    ----------
    config : Dict
        Dictionary of configuration parameters loaded from JSON file

    Returns
    -------
    is_same : bool
        `True` if the version used to generate the configuration
        matches the version currently used (`cmp.info.__version__`).
    """
    # No recorded version means we cannot claim a match.
    if 'version' not in config['Global']:
        return False

    stored_version = config['Global']['version']
    if stored_version == __version__:
        print(BColors.OKGREEN +
              ' .. INFO: Generated with the same CMP3 version' +
              BColors.ENDC)
        return True

    print_warning(
        f' .. WARNING: CMP3 version used to generate the configuration files ({stored_version}) '
        f' and version of CMP3 used ({__version__}) differ'
    )
    return False
def refresh_folder(derivatives_directory, subject, input_folders, session=None):
    """Create (if needed) the derivatives folder hierarchy for a subject.

    Parameters
    ----------
    derivatives_directory : string

    subject : string
        Subject label (``sub-XX``) for which we create the output folder hierarchy

    session : string
        Subject session label (``ses-YY``)

    input_folders : list of string
        List of folders to create in ``derivative_directory/sub-XX/(ses-YY)/``
        folder for the given ``subject``
    """
    has_session = session is not None and session != ""

    if has_session:
        # FreeSurfer uses a flat "sub-XX_ses-YY" folder name; the other
        # tools nest the session inside the subject folder.
        freesurfer_leaf = "%s_%s" % (subject, session)
        subject_parts = (subject, session)
    else:
        freesurfer_leaf = subject
        subject_parts = (subject,)

    targets = [
        os.path.join(derivatives_directory, __freesurfer_directory__, freesurfer_leaf),
        os.path.join(derivatives_directory, __cmp_directory__, *subject_parts),
        os.path.join(derivatives_directory, __nipype_directory__, *subject_parts),
    ]
    targets += [
        os.path.join(derivatives_directory, __cmp_directory__, *subject_parts, in_f)
        for in_f in input_folders
    ]

    for target in targets:
        if os.path.exists(target):
            continue
        try:
            os.makedirs(target)
        except os.error:
            print_warning(" .. INFO: %s was already existing" % target)
        finally:
            print(" .. INFO: Created directory %s" % target)
def info():
    """Print the BIDS App Manager banner: version, copyright and GUI backend."""
    separator = "------------------------------------------------------"
    print("\nConnectome Mapper {} - BIDS App Manager ".format(__version__))
    print_warning(separator)
    print_warning("""{}""".format(__copyright__))
    print_warning(separator)
    print(separator)
    print(" .. INFO: Use {} for graphical backend".format(ETSConfig.toolkit))
    print(separator + "\n")
def clean_cache(bids_root):
    """Clean cache stored in /tmp.

    Target issue related to that a dataset directory is mounted into /tmp
    and used for caching by java/matlab/matplotlib/xvfb-run in the
    container image.

    Parameters
    ----------
    bids_root : string
        BIDS root dataset directory
    """
    print_warning("> Clean generated docker image cache")

    # (glob pattern, is_directory) pairs, processed in the original order.
    # Matched directories are deleted recursively, plain files with os.remove().
    # NOTE(review): the leading space in " hsperfdata_cmp" only matches an
    # entry whose name starts with a space — looks like a typo; kept as-is,
    # confirm against the container layout.
    # NOTE(review): ".X11-unix" is already covered by ".X11*"; the second
    # glob simply finds nothing after the first pass removed it.
    targets = [
        (" hsperfdata_cmp", True),
        ("._java*", False),
        ("mri_segstats.tmp*", False),
        ("MCR_*", True),
        ("matplotlib*", True),
        ("xvfb-run.*", True),
        (".X11*", True),
        (".X11-unix", True),
        (".X99*", False),
    ]
    for pattern, is_directory in targets:
        for path in glob.glob(os.path.join(bids_root, pattern)):
            print_warning("... DEL: {}".format(path))
            if is_directory:
                shutil.rmtree(path)
            else:
                # BUGFIX: the original ".X99*" loop iterated with `f` but
                # called os.remove(d), deleting a stale path left over from
                # a previous loop (or raising NameError) instead of the
                # matched file.
                os.remove(path)
def init_dmri_project(project_info, bids_layout, is_new_project, gui=True):
    """Create and initialize a :class:`DiffusionPipelineUI` instance

    Parameters
    ----------
    project_info : ProjectInfoUI
        Instance of :class:`ProjectInfoUI` class

    bids_layout : bids.BIDSLayout
        PyBIDS BIDS Layout object describing the BIDS dataset

    is_new_project : bool
        If True, this is a new project which has been never processed

    gui : bool
        If True, display messages in GUI

    Returns
    -------
    ``(dmri_inputs_checked, dmri_pipeline)`` tuple, or ``None`` when the user
    declines to overwrite an existing configuration or an existing
    configuration fails to load.
    NOTE(review): callers therefore receive either a 2-tuple or ``None`` —
    confirm this mixed return contract is intended.
    """
    dmri_pipeline = diffusion_pipeline.DiffusionPipelineUI(project_info)
    derivatives_directory = os.path.join(project_info.base_directory, "derivatives")

    # Build the derivatives folder hierarchy; pass the session label only
    # when one is defined for this subject.
    if (project_info.subject_session != "") and (project_info.subject_session is not None):
        refresh_folder(
            derivatives_directory,
            project_info.subject,
            dmri_pipeline.input_folders,
            session=project_info.subject_session,
        )
    else:
        refresh_folder(derivatives_directory, project_info.subject,
                       dmri_pipeline.input_folders)

    # Validate the diffusion inputs present in the BIDS dataset.
    dmri_inputs_checked = dmri_pipeline.check_input(layout=bids_layout, gui=gui)
    if dmri_inputs_checked:
        if is_new_project and dmri_pipeline is not None:
            print("> Initialize dMRI project")
            if not os.path.exists(derivatives_directory):
                try:
                    os.makedirs(derivatives_directory)
                except os.error:
                    print_warning(" .. INFO: %s was already existing" % derivatives_directory)
                finally:
                    # NOTE(review): this "Created" message also prints when
                    # makedirs failed, since `finally` always runs.
                    print(" .. INFO: Created directory %s" % derivatives_directory)

            # Configuration file name embeds the session label when defined.
            if (project_info.subject_session != "") and (project_info.subject_session is not None):
                project_info.dmri_config_file = os.path.join(
                    derivatives_directory,
                    "%s_%s_diffusion_config.json" % (project_info.subject, project_info.subject_session),
                )
            else:
                project_info.dmri_config_file = os.path.join(
                    derivatives_directory,
                    "%s_diffusion_config.json" % project_info.subject,
                )

            if os.path.exists(project_info.dmri_config_file):
                # An old configuration exists: ask the user (Traits dialog)
                # before overwriting it.
                warn_res = project_info.configure_traits(view="dmri_warning_view")
                if warn_res:
                    print(" .. INFO: Read diffusion config file (%s)" % project_info.dmri_config_file)
                    dmri_save_config(dmri_pipeline, project_info.dmri_config_file)
                else:
                    # User cancelled the overwrite.
                    return None
            else:
                print(" .. INFO: Create diffusion config file (%s)" % project_info.dmri_config_file)
                dmri_save_config(dmri_pipeline, project_info.dmri_config_file)
        else:
            # Existing project: load its stored configuration instead.
            print("> Load dMRI project")
            dmri_conf_loaded = dmri_load_config_json(
                dmri_pipeline, project_info.dmri_config_file)
            if not dmri_conf_loaded:
                return None
            dmri_pipeline.config_file = project_info.dmri_config_file
    else:
        print_error(" .. ERROR: Missing diffusion inputs")

    return dmri_inputs_checked, dmri_pipeline
def info():
    """Print the Connectome Mapper version and copyright notice."""
    print_blue("\nConnectome Mapper {}".format(__version__))
    print_warning("{}\n".format(__copyright__))
def save_configparser_as_json(config, config_json_path, ini_mode=False, debug=False):
    """Save a ConfigParser to JSON file.

    Parameters
    ----------
    config : Instance(configparser.ConfigParser)
        Instance of ConfigParser

    config_json_path : string
        Output path of JSON configuration file

    ini_mode : bool
        If `True`, handles all content stored in strings

    debug : bool
        If `True`, show additional prints
    """
    config_json = {}

    # In the case of diffusion pipeline
    # (these tool names decide which recon/tracking parameters are kept below)
    if 'diffusion_stage' in config.sections():
        recon_processing_tool = config['diffusion_stage'].get(
            'recon_processing_tool')
        tracking_processing_tool = config['diffusion_stage'].get(
            'tracking_processing_tool')

    for section in config.sections():
        config_json[section] = {}
        for name, value in config.items(section):
            # Keep only parameters that are used by the diffusion stage
            # of the diffusion pipeline. This simplifies the reading of
            # its configuration file
            if 'diffusion_stage' in section:
                # Skip adding diffusion reconstruction parameters
                if recon_processing_tool == 'Dipy':
                    if 'mrtrix_recon_config' in name:
                        continue
                elif recon_processing_tool == 'MRtrix':
                    if 'dipy_recon_config' in name:
                        continue
                # Skip adding tracking parameters
                if tracking_processing_tool == 'Dipy':
                    if 'mrtrix_tracking_config' in name:
                        continue
                elif tracking_processing_tool == 'MRtrix':
                    if 'dipy_tracking_config' in name:
                        continue

            # GUI-only parameters are never persisted to JSON.
            if '_editor' in name:
                if debug:
                    print_warning(
                        f' .. DEBUG: Skip parameter {section} / {name}')
                continue

            if 'log_visualization' in name:
                if debug:
                    print_warning(
                        f' .. DEBUG: Skip parameter {section} / {name}')
                continue

            if 'circular_layout' in name:
                if debug:
                    print_warning(
                        f' .. DEBUG: Skip parameter {section} / {name}')
                continue

            is_iterable = False

            if ini_mode:
                # INI files store every value as a string; try to recover
                # the original Python object.
                # NOTE(review): eval() on configuration content executes
                # arbitrary expressions — assumes the INI file is trusted.
                try:
                    if not (section == 'parcellation_stage' and name == 'ants_precision_type'):
                        value = eval(value)
                        if debug:
                            print_warning(
                                f' .. DEBUG: String {value} evaluated')
                    else:
                        if debug:
                            print_warning(
                                f' .. DEBUG: String {value} not evaluated')
                except Exception:
                    if debug:
                        print_error(
                            f' .. EXCEPTION: String {value} COULD NOT BE evaluated'
                        )
                    pass

            # Type-dispatch: iterable containers are stored as-is; scalars
            # are wrapped in a one-element list and unwrapped below.
            if isinstance(value, dict):
                if debug:
                    print_warning(
                        f' .. DEBUG: Processing {section} / {name} / {value} as dict'
                    )
                config_json[section][name] = value
                is_iterable = True
            elif isinstance(value, list):
                if debug:
                    print_warning(
                        f' .. DEBUG: Processing {section} / {name} / {value} as list'
                    )
                config_json[section][name] = value
                is_iterable = True
            elif isinstance(value, Iterable) and not isinstance(value, str):
                if debug:
                    print_warning(
                        f' .. DEBUG: Processing {section} / {name} / {value} as iterable'
                    )
                # Falsy elements are dropped from generic iterables.
                config_json[section][name] = [x for x in value if x]
                is_iterable = True
            elif isinstance(value, bool):
                if debug:
                    print_warning(
                        f' .. DEBUG: Processing {section} / {name} / {value} as boolean'
                    )
                config_json[section][name] = [value]
            elif value and not isinstance(value, str):
                if debug:
                    print_warning(
                        f' .. DEBUG: Processing {section} / {name} / {value} as not a string'
                    )
                config_json[section][name] = [value]
            elif value and isinstance(value, str):
                value = value.strip()
                if value.isnumeric():
                    # NOTE(review): isnumeric() is False for negative or
                    # decimal literals — those fall through to the string
                    # branch below.
                    if debug:
                        print_warning(
                            f' .. DEBUG: Processing {section} / {name} / {value} as number'
                        )
                    value = float(value)
                    if value.is_integer():
                        value = int(value)
                    config_json[section][name] = [value]
                else:
                    if debug:
                        print_warning(
                            f' .. DEBUG: Processing {section} / {name} / {value} as string'
                        )
                    config_json[section][name] = [value]
            else:
                # Falsy, non-bool leftovers (None, empty string, 0) are
                # stored as '' and deleted just below.
                if debug:
                    print_warning(
                        f' .. DEBUG : Type: {type(value)} / value : {value}')
                config_json[section][name] = ''

            # Unwrap the one-element list used for scalar values.
            if not is_iterable:
                if len(config_json[section][name]) == 1:
                    config_json[section][name] = config_json[section][name][0]
                elif len(config_json[section][name]) == 0:
                    config_json[section][name] = ''

            if config_json[section][name] == '':
                del config_json[section][name]

    # NOTE(review): assumes a 'Global' section exists in the ConfigParser;
    # raises KeyError otherwise.
    config_json['Global']['version'] = __version__

    if debug:
        print_blue(f' .. DEBUG: {config_json}')

    with open(config_json_path, 'w') as outfile:
        json.dump(config_json, outfile, indent=4)
def _cast_to_attribute_type(current_value, conf_value):
    """Cast a JSON-loaded value to the type of the attribute it will replace.

    The check order matters: ``bool`` is tested before ``int`` because
    ``bool`` is a subclass of ``int``.  Values whose target type is not one
    of the handled types are returned unchanged.

    Parameters
    ----------
    current_value
        Current value of the attribute (defines the expected type)

    conf_value
        Value read from the JSON configuration file

    Returns
    -------
    The value converted to the attribute's type.
    """
    if isinstance(current_value, tuple):
        return tuple(conf_value)
    if isinstance(current_value, bool):
        return bool(conf_value)
    if isinstance(current_value, list):
        return list(conf_value)
    if isinstance(current_value, dict):
        return dict(conf_value)
    # int(float(...)) accepts both "3" and "3.0" style strings.
    if isinstance(current_value, int):
        return int(float(conf_value))
    if isinstance(current_value, float):
        return float(conf_value)
    return conf_value


def set_pipeline_attributes_from_config(pipeline, config, debug=False):
    """Set the pipeline stage attributes given a configuration.

    Parameters
    ----------
    pipeline : Instance(Pipeline)
        Instance of pipeline

    config : Dict
        Dictionary of configuration parameter loaded
        from the JSON configuration file

    debug : bool
        If `True`, show additional prints
    """
    global_keys = [
        prop for prop in list(pipeline.global_conf.traits().keys())
        if 'trait' not in prop
    ]  # possibly dangerous..?
    for key in global_keys:
        # Subject identifiers are managed by the project itself and must
        # not be overwritten from the configuration file.
        if key not in ("subject", "subjects", "subject_session", "subject_sessions"):
            if key in config['Global'].keys():
                conf_value = config['Global'][key]
                setattr(pipeline.global_conf, key, conf_value)

    for stage in list(pipeline.stages.values()):
        stage_keys = [
            prop for prop in list(stage.config.traits().keys())
            if 'trait' not in prop
        ]  # possibly dangerous..?
        for key in stage_keys:
            if 'config' in key:  # sub-configuration object (e.g. tool-specific config)
                sub_config = getattr(stage.config, key)
                stage_sub_keys = [
                    prop for prop in list(sub_config.traits().keys())
                    if 'trait' not in prop
                ]
                for sub_key in stage_sub_keys:
                    if stage.name not in config.keys():
                        continue
                    # Sub-config parameters are stored flattened as
                    # "<config attr>.<sub attr>" in the JSON file.
                    tmp_key = key + '.' + sub_key
                    if tmp_key not in config[stage.name].keys():
                        continue
                    conf_value = config[stage.name][tmp_key]
                    try:
                        # Convert parameter to proper expected type
                        conf_value = _cast_to_attribute_type(
                            getattr(sub_config, sub_key), conf_value)
                        setattr(sub_config, sub_key, conf_value)
                        if debug:
                            print(
                                f' .. DEBUG: Set {sub_config}.{sub_key} to {conf_value}'
                            )
                    except Exception as e:
                        # Best-effort: a single bad parameter must not abort
                        # loading the rest of the configuration.
                        if debug:
                            print_warning(
                                ' .. EXCEPTION raised while setting ' +
                                f'{sub_config}.{sub_key} to {conf_value}')
                            print_error(f' {e}')
                        pass
            else:
                if stage.name in config.keys():
                    if key in config[stage.name].keys():
                        conf_value = config[stage.name][key]
                        try:
                            # Convert parameter to proper expected type
                            conf_value = _cast_to_attribute_type(
                                getattr(stage.config, key), conf_value)
                            setattr(stage.config, key, conf_value)
                            if debug:
                                print(
                                    f' .. DEBUG: Set {stage.config}.{key} to {conf_value}'
                                )
                        except Exception as e:
                            if debug:
                                print_warning(
                                    ' .. EXCEPTION raised while setting ' +
                                    f'{stage.config}.{key} to {conf_value}')
                                print_error(f' {e}')
                            pass

    setattr(pipeline, 'number_of_cores',
            int(config['Multi-processing']['number_of_cores']))
def create_subject_configuration_from_ref(project, ref_conf_file, pipeline_type,
                                          multiproc_number_of_cores=1):
    """Create the pipeline configuration file for an individual subject from a reference given as input.

    Parameters
    ----------
    project : cmp.project.CMP_Project_Info
        Instance of `cmp.project.CMP_Project_Info`

    ref_conf_file : string
        Reference configuration file

    pipeline_type : 'anatomical', 'diffusion', 'fMRI'
        Type of pipeline

    multiproc_number_of_cores : int
        Number of threads used by Nipype

    Returns
    -------
    subject_conf_file : string
        Configuration file of the individual subject
    """
    subject_derivatives_dir = os.path.join(project.output_directory)

    # The configuration lives under derivatives/cmp/sub-XX/(ses-YY/),
    # and its file name embeds the session label when one is defined.
    if project.subject_session != '':
        conf_name = "{}_{}_{}_config.json".format(
            project.subject, project.subject_session, pipeline_type)
        subject_conf_file = os.path.join(
            subject_derivatives_dir, 'cmp', project.subject,
            project.subject_session, conf_name)
    else:
        conf_name = "{}_{}_config.json".format(project.subject, pipeline_type)
        subject_conf_file = os.path.join(
            subject_derivatives_dir, 'cmp', project.subject, conf_name)

    if os.path.isfile(subject_conf_file):
        print_warning(
            " .. WARNING: rewriting config file {}".format(subject_conf_file))
        os.remove(subject_conf_file)

    # Change relative path to absolute path if needed (required when using singularity)
    if not os.path.isabs(ref_conf_file):
        ref_conf_file = os.path.abspath(ref_conf_file)

    with open(ref_conf_file, 'r') as ref_file:
        config = json.load(ref_file)

    # Overwrite the subject-specific fields inherited from the reference.
    config['Global']['subject'] = project.subject
    config['Global']['subjects'] = project.subjects
    if 'subject_sessions' in config['Global']:
        config['Global']['subject_sessions'] = project.subject_sessions
    if 'subject_session' in config['Global']:
        config['Global']['subject_session'] = project.subject_session
    config['Multi-processing']['number_of_cores'] = multiproc_number_of_cores

    with open(subject_conf_file, 'w') as out_file:
        json.dump(config, out_file, indent=4)

    return subject_conf_file