def _quality_control_fired(self):
    """Callback of the "Inspector" button.

    This displays the Quality Control (Inspector) Window.
    """
    print_blue("[Open Quality Inspector Window]")

    if self.project_info.t1_available:
        if os.path.isfile(self.project_info.anat_config_file):
            print(" .. Anatomical config file : %s" % self.project_info.anat_config_file)

    if self.project_info.dmri_available:
        if os.path.isfile(self.project_info.dmri_config_file):
            print(" .. Diffusion config file : %s" % self.project_info.dmri_config_file)

    if self.project_info.fmri_available:
        if os.path.isfile(self.project_info.fmri_config_file):
            print(" .. fMRI config file : %s" % self.project_info.fmri_config_file)

    try:
        self.quality_control_ui = cmp.bidsappmanager.gui.qc.QualityInspectorWindow(
            project_info=self.project_info,
            anat_inputs_checked=self.project_info.t1_available,
            dmri_inputs_checked=self.project_info.dmri_available,
            fmri_inputs_checked=self.project_info.fmri_available,
        )
        self.quality_control_ui.configure_traits()
    except Exception as e:
        print(e)
def _configurator_fired(self):
    """Callback of the "Configurator" button.

    This displays the Configurator Window.
    """
    print_blue("[Open Pipeline Configurator Window]")

    if self.project_info.t1_available:
        if os.path.isfile(self.project_info.anat_config_file):
            print(" .. Anatomical config file : %s" % self.project_info.anat_config_file)

    if self.project_info.dmri_available:
        if os.path.isfile(self.project_info.dmri_config_file):
            print(" .. Diffusion config file : %s" % self.project_info.dmri_config_file)

    if self.project_info.fmri_available:
        if os.path.isfile(self.project_info.fmri_config_file):
            print(" .. fMRI config file : %s" % self.project_info.fmri_config_file)

    self.configurator_ui = cmp.bidsappmanager.gui.config.PipelineConfiguratorWindow(
        project_info=self.project_info,
        anat_pipeline=self.anat_pipeline,
        dmri_pipeline=self.dmri_pipeline,
        fmri_pipeline=self.fmri_pipeline,
        anat_inputs_checked=self.project_info.t1_available,
        dmri_inputs_checked=self.project_info.dmri_available,
        fmri_inputs_checked=self.project_info.fmri_available,
    )
    self.configurator_ui.configure_traits()
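# Hedged sketch (not part of CMP): the two handlers above follow the Traits
# "static notification" naming convention, in which a method named
# `_<trait>_fired` is called automatically whenever the corresponding Button
# trait is pressed in the TraitsUI view. The toy class below only illustrates
# that wiring; the class and trait names are invented for the example.
from traits.api import Button, HasTraits


class _ToyWindow(HasTraits):
    configurator = Button()

    def _configurator_fired(self):
        print("[Open Pipeline Configurator Window]")


if __name__ == "__main__":
    toy = _ToyWindow()
    toy.configurator = True  # assigning to a Button/Event trait fires _configurator_fired()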
def fmri_save_config(pipeline, config_path):
    """Save the JSON configuration file of an fMRI pipeline.

    Parameters
    ----------
    pipeline : Instance(cmp.pipelines.functional.fMRI.fMRIPipeline)
        Instance of fMRIPipeline

    config_path : string
        Path of the JSON configuration file
    """
    config = create_configparser_from_pipeline(pipeline)
    save_configparser_as_json(config, config_path)
    print_blue(' .. SAVE: Config json file (fMRI) saved as {}'.format(config_path))
def dmri_save_config(pipeline, config_path):
    """Save the JSON configuration file of a diffusion pipeline.

    Parameters
    ----------
    pipeline : Instance(cmp.pipelines.diffusion.diffusion.DiffusionPipeline)
        Instance of DiffusionPipeline

    config_path : string
        Path of the JSON configuration file
    """
    config = create_configparser_from_pipeline(pipeline)
    save_configparser_as_json(config, config_path)
    print_blue(' .. SAVE: Config json file (diffusion) saved as {}'.format(config_path))
def anat_save_config(pipeline, config_path):
    """Save the JSON configuration file of an anatomical pipeline.

    Parameters
    ----------
    pipeline : Instance(cmp.pipelines.anatomical.anatomical.AnatomicalPipeline)
        Instance of AnatomicalPipeline

    config_path : string
        Path of the JSON configuration file
    """
    config = create_configparser_from_pipeline(pipeline)
    save_configparser_as_json(config, config_path)
    print_blue(' .. SAVE: Config json file (anat) saved as {}'.format(config_path))
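# Hedged usage sketch (not from the source): the three *_save_config helpers
# above follow the same pattern -- serialise an already-initialised pipeline
# object to the JSON configuration later consumed by the BIDS App. The
# pipeline variables and output paths below are hypothetical; the
# "code/ref_*.json" naming mirrors the convention used by `_bidsapp_fired`
# later in this listing.
#
#   anat_save_config(anat_pipeline, "/bids_dir/code/ref_anatomical_config.json")
#   dmri_save_config(dmri_pipeline, "/bids_dir/code/ref_diffusion_config.json")
#   fmri_save_config(fmri_pipeline, "/bids_dir/code/ref_fMRI_config.json")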
def fmri_load_config_json(pipeline, config_path):
    """Load the JSON configuration file of an fMRI pipeline.

    Parameters
    ----------
    pipeline : Instance(cmp.pipelines.functional.fMRI.fMRIPipeline)
        Instance of fMRIPipeline

    config_path : string
        Path of the JSON configuration file
    """
    print_blue(' .. LOAD: Load fMRI config file : {}'.format(config_path))
    # datalad_is_available = is_tool('datalad')

    with open(config_path, 'r') as f:
        config = json.load(f)

    check_configuration_version(config)
    set_pipeline_attributes_from_config(pipeline, config)

    return True
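# Hedged usage sketch (not from the source): reloading a saved JSON
# configuration into an existing pipeline instance. `fmri_pipeline` and the
# path are hypothetical; note that the loader also calls
# `check_configuration_version()` on the parsed dictionary before applying
# it to the pipeline attributes.
#
#   fmri_load_config_json(fmri_pipeline, "/bids_dir/code/ref_fMRI_config.json")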
def _bidsapp_fired(self):
    """Callback of the "bidsapp" button.

    This displays the BIDS App Interface window.
    """
    print_blue("[Open BIDS App Window]")

    bids_layout = BIDSLayout(self.project_info.base_directory)
    subjects = bids_layout.get_subjects()

    anat_config = os.path.join(
        self.project_info.base_directory, "code/", "ref_anatomical_config.json"
    )
    dmri_config = os.path.join(
        self.project_info.base_directory, "code/", "ref_diffusion_config.json"
    )
    fmri_config = os.path.join(
        self.project_info.base_directory, "code/", "ref_fMRI_config.json"
    )

    self.bidsapp_ui = cmp.bidsappmanager.gui.bidsapp.BIDSAppInterfaceWindow(
        project_info=self.project_info,
        bids_root=self.project_info.base_directory,
        subjects=sorted(subjects),
        anat_config=anat_config,
        dmri_config=dmri_config,
        fmri_config=fmri_config,
    )
    self.bidsapp_ui.configure_traits()
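# Minimal, hedged pybids sketch: the same `BIDSLayout(...).get_subjects()`
# call that `_bidsapp_fired` uses above to populate the subject list of the
# BIDS App window. The dataset path is hypothetical.
from bids import BIDSLayout

layout = BIDSLayout("/data/ds-example")  # hypothetical BIDS dataset root
print(sorted(layout.get_subjects()))     # e.g. ['01', '02', ...]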
                project.base_directory,
                project.output_directory,
                project.subject,
                project.subject_session,
                project.anat_config_file,
                None,
                None,
                number_of_threads=number_of_threads)
        else:
            cmd = create_cmp_command(
                project=project,
                run_anat=run_anat,
                run_dmri=run_dmri,
                run_fmri=run_fmri,
                number_of_threads=number_of_threads)
            print_blue("... cmd : {}".format(cmd))
            proc = run(
                command=cmd,
                env={},
                log_filename=os.path.join(
                    project.output_directory, 'cmp',
                    project.subject, project.subject_session,
                    '{}_{}_log.txt'.format(project.subject, project.subject_session)))
            processes.append(proc)
    else:
        print(
            "... Error: at least anatomical configuration file has to be specified (--anat_pipeline_config)"
        )
def main():
    """Main function of the BIDS App entrypoint script."""
    # Parse script arguments
    cmp_parser = parser.get()
    args = cmp_parser.parse_args()

    print('> BIDS dataset: {}'.format(args.bids_dir))

    # if not args.skip_bids_validator:
    #     run('bids-validator %s' % args.bids_dir)

    if args.participant_label:  # only for a subset of subjects
        subjects_to_analyze = args.participant_label
    else:  # for all subjects
        subject_dirs = glob(os.path.join(args.bids_dir, "sub-*"))
        subjects_to_analyze = [
            subject_dir.split("-")[-1] for subject_dir in subject_dirs
        ]

    print("> Subjects to analyze : {}".format(subjects_to_analyze))

    # Derivatives directory creation if it does not exist
    derivatives_dir = os.path.abspath(args.output_dir)
    if not os.path.isdir(derivatives_dir):
        os.makedirs(derivatives_dir)

    tools = [__cmp_directory__, __freesurfer_directory__, __nipype_directory__]

    for tool in tools:
        tool_dir = os.path.join(args.output_dir, tool)
        if not os.path.isdir(tool_dir):
            os.makedirs(tool_dir)

    # Make sure freesurfer is happy with the license
    print('> Set $FS_LICENSE which points to FreeSurfer license location (BIDS App)')

    if os.access(os.path.join('/bids_dir', 'code', 'license.txt'), os.F_OK):
        os.environ['FS_LICENSE'] = os.path.join('/bids_dir', 'code', 'license.txt')
    elif args.fs_license:
        os.environ['FS_LICENSE'] = os.path.abspath(args.fs_license)
    else:
        print_error(
            " .. ERROR: Missing license.txt in code/ directory OR unspecified Freesurfer license with the option --fs_license"
        )
        return 1

    print(' .. INFO: $FS_LICENSE set to {}'.format(os.environ['FS_LICENSE']))

    parallel_number_of_subjects, number_of_threads = check_and_return_valid_nb_of_cores(args)

    # Set number of threads used by programs based on OpenMP multi-threading library
    # This includes AFNI, Dipy, Freesurfer, FSL, MRtrix3.
    # os.environ.update(OMP_NUM_THREADS=f'{number_of_threads}')
    # print(' * OMP_NUM_THREADS set to {} (total of cores: {})'.format(os.environ['OMP_NUM_THREADS'], max_number_of_cores))

    # Set number of threads used by ANTs if specified.
    # Otherwise use the same as the number of OpenMP threads
    if args.ants_number_of_threads is not None:
        os.environ['ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS'] = f'{args.ants_number_of_threads}'
        print(
            f' * ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS set to {os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"]}'
        )

    # Initialize random generator for enhanced reproducibility
    # Numpy needs to be imported after setting the different multi-threading environment variable
    # See https://stackoverflow.com/questions/30791550/limit-number-of-threads-in-numpy for more details
    # noinspection PyPep8
    numpy.random.seed(1234)

    # Set random generator seed of MRtrix if specified
    if args.mrtrix_random_seed is not None:
        os.environ['MRTRIX_RNG_SEED'] = f'{args.mrtrix_random_seed}'
        print(f' * MRTRIX_RNG_SEED set to {os.environ["MRTRIX_RNG_SEED"]}')

    # Set random generator seed of ANTs if specified
    if args.ants_random_seed is not None:
        os.environ['ANTS_RANDOM_SEED'] = f'{args.ants_random_seed}'
        print(f' * ANTS_RANDOM_SEED set to {os.environ["ANTS_RANDOM_SEED"]}')

    # running participant level
    if args.analysis_level == "participant":

        # report_app_run_to_google_analytics()
        if args.notrack is not True:
            report_usage('BIDS App', 'Run', __version__)

        maxprocs = parallel_number_of_subjects
        processes = []

        # find all T1s and skullstrip them
        for subject_label in subjects_to_analyze:

            project = ProjectInfo()
            project.base_directory = args.bids_dir
            project.output_directory = args.output_dir

            project.subjects = [
                'sub-{}'.format(label) for label in subjects_to_analyze
            ]
            project.subject = 'sub-{}'.format(subject_label)
            print('> Process subject {}'.format(project.subject))

            if args.session_label is not None:
                print("> Sessions specified by input args : {}".format(args.session_label))
                subject_session_labels = args.session_label
                project.subject_sessions = [
                    'ses-{}'.format(subject_session_label)
                    for subject_session_label in subject_session_labels
                ]
                # Check if session exists
                for session in project.subject_sessions:
                    session_path = os.path.join(args.bids_dir, project.subject, session)
                    if not os.path.exists(session_path):
                        print_error(
                            f' .. ERROR: The directory {session_path} corresponding '
                            f'to the session {session.split("-")[-1]} '
                            "specified by --session_label input flag DOES NOT exist."
                        )
                        return 1
                    else:
                        print(
                            f' .. INFO: The directory {session_path} corresponding '
                            f'to the session {session.split("-")[-1]} '
                            'specified by --session_label input flag DOES exist.'
                        )
            else:
                # Check if multiple session (sub-XX/ses-YY/anat/... structure or sub-XX/anat.. structure?)
                subject_session_dirs = glob(
                    os.path.join(args.bids_dir, project.subject, "ses-*"))
                project.subject_sessions = [
                    'ses-{}'.format(subject_session_dir.split("-")[-1])
                    for subject_session_dir in subject_session_dirs
                ]

            if len(project.subject_sessions) > 0:  # Session structure
                print("> Sessions to analyze : {}".format(project.subject_sessions))
            else:
                project.subject_sessions = ['']

            for session in project.subject_sessions:

                if not args.coverage:
                    while len(processes) == maxprocs:
                        manage_processes(processes)

                if session != "":
                    print('> Process session {}'.format(session))
                project.subject_session = session

                # Derivatives folder creation
                for tool in tools:
                    if project.subject_session == "":
                        derivatives_dir = os.path.join(args.output_dir, tool, project.subject)
                    elif project.subject_session != "" and tool == __freesurfer_directory__:
                        derivatives_dir = os.path.join(
                            args.output_dir, tool,
                            f'{project.subject}_{project.subject_session}')
                    elif project.subject_session != "" and tool != __freesurfer_directory__:
                        derivatives_dir = os.path.join(
                            args.output_dir, tool,
                            project.subject, project.subject_session)
                    if not os.path.isdir(derivatives_dir):
                        os.makedirs(derivatives_dir)

                run_anat = False
                run_dmri = False
                run_fmri = False

                if args.anat_pipeline_config is not None:
                    if check_configuration_format(args.anat_pipeline_config) == '.ini':
                        anat_pipeline_config = convert_config_ini_2_json(args.anat_pipeline_config)
                    else:
                        anat_pipeline_config = args.anat_pipeline_config
                    project.anat_config_file = create_subject_configuration_from_ref(
                        project, anat_pipeline_config, 'anatomical')
                    run_anat = True
                    print(f"\t ... Anatomical config created : {project.anat_config_file}")

                if args.dwi_pipeline_config is not None:
                    if check_configuration_format(args.dwi_pipeline_config) == '.ini':
                        dwi_pipeline_config = convert_config_ini_2_json(args.dwi_pipeline_config)
                    else:
                        dwi_pipeline_config = args.dwi_pipeline_config
                    project.dmri_config_file = create_subject_configuration_from_ref(
                        project, dwi_pipeline_config, 'diffusion')
                    run_dmri = True
                    print(f"\t ... Diffusion config created : {project.dmri_config_file}")

                if args.func_pipeline_config is not None:
                    if check_configuration_format(args.func_pipeline_config) == '.ini':
                        func_pipeline_config = convert_config_ini_2_json(args.func_pipeline_config)
                    else:
                        func_pipeline_config = args.func_pipeline_config
                    project.fmri_config_file = create_subject_configuration_from_ref(
                        project, func_pipeline_config, 'fMRI')
                    run_fmri = True
                    print(f"\t ... fMRI config created : {project.fmri_config_file}")

                if args.anat_pipeline_config is not None:
                    print(" .. INFO: Running pipelines : ")
                    print("\t\t- Anatomical MRI (segmentation and parcellation)")

                    if args.dwi_pipeline_config is not None:
                        print("\t\t- Diffusion MRI (structural connectivity matrices)")

                    if args.func_pipeline_config is not None:
                        print("\t\t- fMRI (functional connectivity matrices)")

                    if args.coverage:
                        if run_anat:
                            run_individual(
                                project.base_directory,
                                project.output_directory,
                                project.subject,
                                project.subject_session,
                                anat_pipeline_config=project.anat_config_file,
                                dwi_pipeline_config=(None if not run_dmri else project.dmri_config_file),
                                func_pipeline_config=(None if not run_fmri else project.fmri_config_file),
                                number_of_threads=number_of_threads)
                    else:
                        cmd = create_cmp_command(
                            project=project,
                            run_anat=run_anat,
                            run_dmri=run_dmri,
                            run_fmri=run_fmri,
                            number_of_threads=number_of_threads)
                        print_blue("... cmd : {}".format(cmd))

                        if project.subject_session != "":
                            log_file = '{}_{}_log.txt'.format(project.subject, project.subject_session)
                        else:
                            log_file = '{}_log.txt'.format(project.subject)

                        proc = run(
                            command=cmd,
                            env={},
                            log_filename=os.path.join(
                                project.output_directory, __cmp_directory__,
                                project.subject, project.subject_session,
                                log_file))
                        processes.append(proc)
                else:
                    print("... Error: at least anatomical configuration file "
                          "has to be specified (--anat_pipeline_config)")
                    return 1

        if not args.coverage:
            while len(processes) > 0:
                manage_processes(processes)

        clean_cache(args.bids_dir)

    # running group level; ultimately it will compute average connectivity matrices
    # elif args.analysis_level == "group":
    #     brain_sizes = []
    #     for subject_label in subjects_to_analyze:
    #         for brain_file in glob(os.path.join(args.output_dir, "sub-%s*.nii*" % subject_label)):
    #             data = nibabel.load(brain_file).get_data()
    #             # calculate average mask size in voxels
    #             brain_sizes.append((data != 0).sum())
    #
    #     with open(os.path.join(args.output_dir, "avg_brain_size.txt"), 'w') as fp:
    #         fp.write("Average brain size is %g voxels" % numpy.array(brain_sizes).mean())

    return 1
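# Hedged sketch of driving the entrypoint above programmatically. The
# positional arguments follow the standard BIDS App convention
# (bids_dir, output_dir, analysis_level); the optional flag spellings are
# inferred from the attribute names used in main() and from the messages it
# prints, and should be checked against the parser returned by `parser.get()`.
# All paths are hypothetical.
import sys

sys.argv = [
    "connectomemapper3",
    "/bids_dir", "/output_dir", "participant",
    "--participant_label", "01",
    "--anat_pipeline_config", "/bids_dir/code/ref_anatomical_config.json",
    "--fs_license", "/bids_dir/code/license.txt",
]
sys.exit(main())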
def select_subject(self):
    """Function to select the subject and session for which to inspect outputs."""
    print("> Selection of subject (and session) for which to inspect outputs")

    valid_selected_subject = False
    select = True
    aborted = False

    while not valid_selected_subject and not aborted:

        # Select subject from BIDS dataset
        np_res = self.project_info.configure_traits(view="subject_view")

        if not np_res:
            aborted = True
            break

        print(" .. INFO: Selected subject: {}".format(self.project_info.subject))

        # Select session if any
        bids_layout = BIDSLayout(self.project_info.base_directory)
        subject = self.project_info.subject.split("-")[1]

        sessions = bids_layout.get(target="session", return_type="id", subject=subject)

        if len(sessions) > 0:
            print(" .. INFO: Input dataset has sessions")
            print(sessions)

            self.project_info.subject_sessions = []

            for ses in sessions:
                self.project_info.subject_sessions.append("ses-" + str(ses))

            np_res = self.project_info.configure_traits(view="subject_session_view")

            if not np_res:
                aborted = True
                break

            self.project_info.anat_config_file = os.path.join(
                self.project_info.base_directory,
                "derivatives",
                __cmp_directory__,
                "{}".format(self.project_info.subject),
                "{}".format(self.project_info.subject_session),
                "{}_{}_anatomical_config.json".format(
                    self.project_info.subject, self.project_info.subject_session),
            )
            if os.access(self.project_info.anat_config_file, os.F_OK):
                print("> Initialize anatomical pipeline")
                self.anat_pipeline = project.init_anat_project(self.project_info, False)
            else:
                self.anat_pipeline = None

            if self.dmri_inputs_checked:
                self.project_info.dmri_config_file = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __cmp_directory__,
                    "{}".format(self.project_info.subject),
                    "{}".format(self.project_info.subject_session),
                    "{}_{}_diffusion_config.json".format(
                        self.project_info.subject, self.project_info.subject_session),
                )
                if os.access(self.project_info.dmri_config_file, os.F_OK):
                    print("> Initialize diffusion pipeline")
                    (
                        dmri_valid_inputs,
                        self.dmri_pipeline,
                    ) = project.init_dmri_project(self.project_info, bids_layout, False)
                else:
                    self.dmri_pipeline = None

                # self.dmri_pipeline.subject = self.project_info.subject
                # self.dmri_pipeline.global_conf.subject = self.project_info.subject

            if self.fmri_inputs_checked:
                self.project_info.fmri_config_file = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __cmp_directory__,
                    "{}".format(self.project_info.subject),
                    "{}".format(self.project_info.subject_session),
                    "{}_{}_fMRI_config.json".format(
                        self.project_info.subject, self.project_info.subject_session),
                )
                if os.access(self.project_info.fmri_config_file, os.F_OK):
                    print("> Initialize fMRI pipeline")
                    (
                        fmri_valid_inputs,
                        self.fmri_pipeline,
                    ) = project.init_fmri_project(self.project_info, bids_layout, False)
                else:
                    self.fmri_pipeline = None

                # self.fmri_pipeline.subject = self.project_info.subject
                # self.fmri_pipeline.global_conf.subject = self.project_info.subject

            # self.anat_pipeline.global_conf.subject_session = self.project_info.subject_session
            # if self.dmri_pipeline is not None:
            #     self.dmri_pipeline.global_conf.subject_session = self.project_info.subject_session
            #
            # if self.fmri_pipeline is not None:
            #     self.fmri_pipeline.global_conf.subject_session = self.project_info.subject_session

            print(" .. INFO: Selected session %s" % self.project_info.subject_session)

            if self.anat_pipeline is not None:
                self.anat_pipeline.stages["Segmentation"].config.freesurfer_subject_id = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __freesurfer_directory__,
                    "{}_{}".format(self.project_info.subject, self.project_info.subject_session),
                )
        else:
            print(" .. INFO: No session detected")

            self.project_info.anat_config_file = os.path.join(
                self.project_info.base_directory,
                "derivatives",
                __cmp_directory__,
                "{}".format(self.project_info.subject),
                "{}_anatomical_config.json".format(self.project_info.subject),
            )
            if os.access(self.project_info.anat_config_file, os.F_OK):
                self.anat_pipeline = project.init_anat_project(self.project_info, False)
            else:
                self.anat_pipeline = None

            if self.dmri_inputs_checked:
                self.project_info.dmri_config_file = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __cmp_directory__,
                    "{}".format(self.project_info.subject),
                    "{}_diffusion_config.json".format(self.project_info.subject),
                )
                if os.access(self.project_info.dmri_config_file, os.F_OK):
                    (
                        dmri_valid_inputs,
                        self.dmri_pipeline,
                    ) = project.init_dmri_project(self.project_info, bids_layout, False)
                else:
                    self.dmri_pipeline = None

                # self.dmri_pipeline.subject = self.project_info.subject
                # self.dmri_pipeline.global_conf.subject = self.project_info.subject

            if self.fmri_inputs_checked:
                self.project_info.fmri_config_file = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __cmp_directory__,
                    "{}".format(self.project_info.subject),
                    "{}_fMRI_config.json".format(self.project_info.subject),
                )
                if os.access(self.project_info.fmri_config_file, os.F_OK):
                    (
                        fmri_valid_inputs,
                        self.fmri_pipeline,
                    ) = project.init_fmri_project(self.project_info, bids_layout, False)
                else:
                    self.fmri_pipeline = None

                # self.fmri_pipeline.subject = self.project_info.subject
                # self.fmri_pipeline.global_conf.subject = self.project_info.subject

            # self.anat_pipeline.global_conf.subject_session = ''

            if self.anat_pipeline is not None:
                self.anat_pipeline.stages["Segmentation"].config.freesurfer_subjects_dir = os.path.join(
                    self.project_info.base_directory,
                    "derivatives",
                    __freesurfer_directory__,
                    "{}".format(self.project_info.subject),
                )

        if self.anat_pipeline is not None:
            print("> Anatomical pipeline output inspection")
            self.anat_pipeline.view_mode = "inspect_outputs_view"
            for stage in list(self.anat_pipeline.stages.values()):
                print(" ... Inspect stage {}".format(stage))
                stage.define_inspect_outputs()
                # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs))
                if (len(stage.inspect_outputs) > 0) and (
                    stage.inspect_outputs[0] != "Outputs not available"
                ):
                    self.output_anat_available = True

        if self.dmri_pipeline is not None:
            print("> Diffusion pipeline output inspection")
            self.dmri_pipeline.view_mode = "inspect_outputs_view"
            for stage in list(self.dmri_pipeline.stages.values()):
                print(" ... Inspect stage {}".format(stage))
                stage.define_inspect_outputs()
                # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs))
                if (len(stage.inspect_outputs) > 0) and (
                    stage.inspect_outputs[0] != "Outputs not available"
                ):
                    self.output_dmri_available = True

        if self.fmri_pipeline is not None:
            print("> fMRI pipeline output inspection")
            self.fmri_pipeline.view_mode = "inspect_outputs_view"
            for stage in list(self.fmri_pipeline.stages.values()):
                print(" ... Inspect stage {}".format(stage))
                stage.define_inspect_outputs()
                # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs))
                if (len(stage.inspect_outputs) > 0) and (
                    stage.inspect_outputs[0] != "Outputs not available"
                ):
                    self.output_fmri_available = True

        print_blue(" .. Anatomical output(s) available : %s" % self.output_anat_available)
        print_blue(" .. Diffusion output(s) available : %s" % self.output_dmri_available)
        print_blue(" .. fMRI output(s) available : %s" % self.output_fmri_available)

        if (self.output_anat_available or self.output_dmri_available or self.output_fmri_available):
            valid_selected_subject = True
        else:
            self.error_msg = (
                " .. ERROR: No output available! "
                + "Please select another subject (and session if any)!"
            )
            print_error(self.error_msg)
            select = error(
                message=self.error_msg, title="Error", buttons=["OK", "Cancel"]
            )
            aborted = not select

    return aborted
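# Hedged illustration (comments only) of the derivatives layout probed by
# select_subject() above; subject/session labels are hypothetical, and
# __cmp_directory__ / __freesurfer_directory__ are derivatives folder names
# defined elsewhere in the package:
#
#   <bids_root>/derivatives/<__cmp_directory__>/sub-01/ses-01/
#       sub-01_ses-01_anatomical_config.json
#       sub-01_ses-01_diffusion_config.json
#       sub-01_ses-01_fMRI_config.json
#   <bids_root>/derivatives/<__freesurfer_directory__>/sub-01_ses-01/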
def info():
    """Print version and copyright information."""
    print_blue(f"\nConnectome Mapper {__version__}")
    print_warning(f"{__copyright__}\n")
def save_configparser_as_json(config, config_json_path, ini_mode=False, debug=False):
    """Save a ConfigParser to JSON file.

    Parameters
    ----------
    config : Instance(configparser.ConfigParser)
        Instance of ConfigParser

    config_json_path : string
        Output path of JSON configuration file

    ini_mode : bool
        If `True`, handles all content stored in strings

    debug : bool
        If `True`, show additional prints
    """
    config_json = {}

    # In the case of diffusion pipeline
    if 'diffusion_stage' in config.sections():
        recon_processing_tool = config['diffusion_stage'].get('recon_processing_tool')
        tracking_processing_tool = config['diffusion_stage'].get('tracking_processing_tool')

    for section in config.sections():
        config_json[section] = {}
        for name, value in config.items(section):
            # Keep only parameters that are used by the diffusion stage
            # of the diffusion pipeline. This simplifies the reading of
            # its configuration file
            if 'diffusion_stage' in section:
                # Skip adding diffusion reconstruction parameters
                if recon_processing_tool == 'Dipy':
                    if 'mrtrix_recon_config' in name:
                        continue
                elif recon_processing_tool == 'MRtrix':
                    if 'dipy_recon_config' in name:
                        continue
                # Skip adding tracking parameters
                if tracking_processing_tool == 'Dipy':
                    if 'mrtrix_tracking_config' in name:
                        continue
                elif tracking_processing_tool == 'MRtrix':
                    if 'dipy_tracking_config' in name:
                        continue

            if '_editor' in name:
                if debug:
                    print_warning(f' .. DEBUG: Skip parameter {section} / {name}')
                continue

            if 'log_visualization' in name:
                if debug:
                    print_warning(f' .. DEBUG: Skip parameter {section} / {name}')
                continue

            if 'circular_layout' in name:
                if debug:
                    print_warning(f' .. DEBUG: Skip parameter {section} / {name}')
                continue

            is_iterable = False

            if ini_mode:
                try:
                    if not (section == 'parcellation_stage' and name == 'ants_precision_type'):
                        value = eval(value)
                        if debug:
                            print_warning(f' .. DEBUG: String {value} evaluated')
                    else:
                        if debug:
                            print_warning(f' .. DEBUG: String {value} not evaluated')
                except Exception:
                    if debug:
                        print_error(f' .. EXCEPTION: String {value} COULD NOT BE evaluated')
                    pass

            if isinstance(value, dict):
                if debug:
                    print_warning(f' .. DEBUG: Processing {section} / {name} / {value} as dict')
                config_json[section][name] = value
                is_iterable = True
            elif isinstance(value, list):
                if debug:
                    print_warning(f' .. DEBUG: Processing {section} / {name} / {value} as list')
                config_json[section][name] = value
                is_iterable = True
            elif isinstance(value, Iterable) and not isinstance(value, str):
                if debug:
                    print_warning(f' .. DEBUG: Processing {section} / {name} / {value} as iterable')
                config_json[section][name] = [x for x in value if x]
                is_iterable = True
            elif isinstance(value, bool):
                if debug:
                    print_warning(f' .. DEBUG: Processing {section} / {name} / {value} as boolean')
                config_json[section][name] = [value]
            elif value and not isinstance(value, str):
                if debug:
                    print_warning(f' .. DEBUG: Processing {section} / {name} / {value} as not a string')
                config_json[section][name] = [value]
            elif value and isinstance(value, str):
                value = value.strip()
                if value.isnumeric():
                    if debug:
                        print_warning(f' .. DEBUG: Processing {section} / {name} / {value} as number')
                    value = float(value)
                    if value.is_integer():
                        value = int(value)
                    config_json[section][name] = [value]
                else:
                    if debug:
                        print_warning(f' .. DEBUG: Processing {section} / {name} / {value} as string')
                    config_json[section][name] = [value]
            else:
                if debug:
                    print_warning(f' .. DEBUG : Type: {type(value)} / value : {value}')
                config_json[section][name] = ''

            if not is_iterable:
                if len(config_json[section][name]) == 1:
                    config_json[section][name] = config_json[section][name][0]
                elif len(config_json[section][name]) == 0:
                    config_json[section][name] = ''

            if config_json[section][name] == '':
                del config_json[section][name]

    config_json['Global']['version'] = __version__

    if debug:
        print_blue(f' .. DEBUG: {config_json}')

    with open(config_json_path, 'w') as outfile:
        json.dump(config_json, outfile, indent=4)
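# Minimal, hedged sketch for save_configparser_as_json(). The section and
# option names are invented for the example; a 'Global' section must be
# present because the function writes config_json['Global']['version'].
# With ini_mode=True, string values such as '4' are eval()-ed back to Python
# types before serialisation, while non-evaluable strings are kept as-is.
import configparser

toy = configparser.ConfigParser()
toy['Global'] = {'process_center': 'UNKNOWN'}
toy['segmentation_stage'] = {'seg_tool': 'Freesurfer', 'number_of_threads': '4'}

# Hypothetical output path; writes a JSON file with the same sections/options.
save_configparser_as_json(toy, '/tmp/toy_config.json', ini_mode=True, debug=False)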