def load_file(self, file, default=False):
    """Load a JSON credentials file.

    Args:
        file: base name of the file, without the '.json' extension.
        default: when True, read the packaged copy that lives next to
            this module instead of the user's credentials_home copy.
    Returns:
        parsed content, as returned by load_json.
    """
    if default:
        base_dir = os.path.dirname(os.path.abspath(__file__))
    else:
        base_dir = self.credentials_home
    return load_json(os.path.join(base_dir, f'{file}.json'))
def set_project(self, location):
    """Register a new remote *location* in the credentials folder.

    Appends the location to the LOCATION list of projects.json and
    creates '<location>.json' from the 'remote1.json' template, with
    the current username filled in. No-op when projects.json is absent.
    """
    projects_f = path.join(self.credentials_home, 'projects.json')
    if not path.exists(projects_f):
        return
    projects = load_json(projects_f)
    projects['LOCATION'].append(location)
    save_json(projects, projects_f)
    # 'remote1.json' acts as the template for any new remote location
    template = load_json(path.join(self.credentials_home, 'remote1.json'))
    template['USER']['user'] = self.username
    save_json(template, path.join(self.credentials_home, location + '.json'))
def chk_if_processed(self, abs_path2mr):
    """Check if any unconverted,
        - if not converted, update config file based on sidecar params (update_config())
        - redo run() up to repeat_lim

    Args:
        abs_path2mr: absolute path to the MR data, forwarded to
            run_dcm2bids() and cleaning_after_conversion().
    """
    # endswith() instead of substring test: a name that merely CONTAINS
    # '.nii.gz' (e.g. 'x.nii.gz.bak') must not be treated as unconverted
    ls_niigz_files = [
        i for i in os.listdir(self.sub_SUBJDIR_tmp) if i.endswith('.nii.gz')
    ]
    if ls_niigz_files:
        print(f'{" " *12}> remaining nii in {self.sub_SUBJDIR_tmp}')
        if self.repeat_updating < self.repeat_lim:
            self.update = False
            # let every sidecar get a chance to add a config criterion
            for niigz_f in ls_niigz_files:
                f_name = niigz_f.replace('.nii.gz', '')
                sidecar = f'{f_name}.json'
                self.sidecar_content = load_json(
                    os.path.join(self.sub_SUBJDIR_tmp, sidecar))
                self.update_config()
            if self.update:
                print(f'{" " *12}removing folder: {self.sub_SUBJDIR_tmp}')
                self.repeat_updating += 1
                self.rm_dir(self.sub_SUBJDIR_tmp)
                print(f'{" " *12}re-running dcm2bids')
                self.run_dcm2bids(abs_path2mr)
                print(f'{" " *12}looping to another chk_if_processed')
                self.chk_if_processed(abs_path2mr)
        # NOTE(review): when repeat_lim is reached with files still
        # unconverted, nothing is cleaned up or reported — confirm intended
    else:
        self.populate_bids_classifed()
        self.cleaning_after_conversion(abs_path2mr)
def chk_if_processed(self):
    """Check if any unconverted,
        - if not converted, update config file based on sidecar params (update_config())
        - redo run() up to repeat_lim
    """
    # endswith() instead of substring test: only true '.nii.gz' suffixes
    # count as unconverted output
    ls_niigz_files = [
        i for i in os.listdir(self.sub_SUBJDIR) if i.endswith('.nii.gz')
    ]
    if ls_niigz_files:
        print(" remaining nii in ", self.sub_SUBJDIR)
        if self.repeat_updating < self.repeat_lim:
            self.update = False
            for niigz_f in ls_niigz_files:
                f_name = niigz_f.replace('.nii.gz', '')
                sidecar = f'{f_name}.json'
                self.sidecar_content = load_json(
                    os.path.join(self.sub_SUBJDIR, sidecar))
                self.update_config()
            if self.update:
                print(' removing folder: ', self.sub_SUBJDIR)
                self.repeat_updating += 1
                self.rm_dir(self.sub_SUBJDIR)
                print(' re-running dcm2bids')
                self.run_dcm2bids()
                print(' looping to another chk_if_processed')
                self.chk_if_processed()
    else:
        print(' dcm2bids conversion DONE')
        if os.path.exists(self.sub_SUBJDIR):
            self.rm_dir(self.sub_SUBJDIR)
def get_subjects(self):
    """Load the new-subjects file into self.db_nl, if present.

    Reads DEFAULT.app_files[app]['new_subjects'] from NIMB_tmp.
    Reports an error when the file is missing (previously it failed
    silently, unlike the sibling get_subjects() implementation).
    """
    # subjects = self.db_nl.get_db()
    new_subjects_f_name = DEFAULT.app_files[self.app]["new_subjects"]
    new_subjects_f_path = os.path.join(self.NIMB_tmp, new_subjects_f_name)
    if os.path.isfile(new_subjects_f_path):
        print(' reading new subjects to process')
        new_subj = load_json(new_subjects_f_path)
        self.db_nl = new_subj
    else:
        # consistent with the newer get_subjects(): make the missing
        # file visible instead of silently leaving db unchanged
        print(' ERR: file with subjects is MISSING')
def get_subjects(self):
    """Load the new-subjects file into self.db_dp, reporting absence."""
    f_name = DEFAULT.app_files[self.app]["new_subjects"]
    f_path = os.path.join(self.NIMB_tmp, f_name)
    # guard clause: missing file is an error worth surfacing
    if not os.path.isfile(f_path):
        print(f'{LogLVL.lvl1}ERR: file with subjects is MISSING')
        return
    print(f'{LogLVL.lvl1}reading new subjects to process')
    self.db_dp = load_json(f_path)
def run(self, nimb_id='none', ses='none', nimb_classified_per_id=None):
    #run dcm2bids:
    '''
    if nimb_classified.json[nimb_id][archived]:
        extract from archive specific subject_session
    start dcm2bids for subject_session
    Args:
        nimb_id: subject id; 'none' when converting every id found
        ses: session name; 'none' when converting every session
        nimb_classified_per_id: classification dict for one id; when
            falsy, the nimb_classified file in DICOM_DIR is read instead
    Return:
        self.bids_classified = {'bids_id': {'anat': {'t1': ['local', 'PATH_TO_rawdata/bids_id_label/bids_id_session/modality/bids_label_ses-xx_run-xx_T1w.nii.gz']}, },
                                            {'dwi': {'dwi': ['local', 'PATH_TO_rawdata/bids_id_label/bids_id_session/modality/bids_label_ses-xx_run-xx_dwi.nii.gz'],
                                                    'bval': ['local', 'PATH_TO_rawdata/bids_id_label/bids_id_session/modality/bids_label_ses-xx_run-xx_dwi.bval'],
                                                    'bvec': ['local', 'PATH_TO_rawdata/bids_id_label/bids_id_session/modality/bids_label_ses-xx_run-xx_dwi.bvec']}
                                            }
                                }
    '''
    # FIX: the default was a shared mutable dict(); use a None sentinel
    self.id_classified = nimb_classified_per_id or dict()
    self.bids_classified = dict()
    print(f'{" " *8}folder with subjects is: {self.DICOM_DIR}')
    if self.id_classified:
        self.nimb_id = nimb_id
        self.ses = ses
        self.bids_id, self.bids_id_dir = self.make_bids_id(
            self.nimb_id, self.ses)
        self.start_stepwise_choice()
    else:
        self.nimb_classified = dict()
        try:
            self.nimb_classified = load_json(
                os.path.join(self.DICOM_DIR, DEFAULT.f_nimb_classified))
        except Exception as e:
            print(
                f'{" " *12} could not load the nimb_classified file at: {self.DICOM_DIR}'
            )
            sys.exit(0)
        if self.nimb_classified:
            self.nimb_ids = list(self.nimb_classified.keys())
            for self.nimb_id in self.nimb_ids:
                self.id_classified = self.nimb_classified[self.nimb_id]
                # every key except 'archived' is a session name
                for self.ses in [
                        i for i in self.id_classified
                        if i not in ('archived', )
                ]:
                    self.bids_id, self.bids_id_dir = self.make_bids_id(
                        self.nimb_id, self.ses)
                    self.start_stepwise_choice()
    # NOTE(review): when nimb_classified is empty, self.bids_id may be
    # unset here — confirm callers never hit that path
    return self.bids_classified, self.bids_id
def read_fdr_images(self):
    """Render one image per entry of the FDR significance JSON file."""
    images = load_json(self.param.sig_fdr_json)
    total = len(images)
    for pos, sig in enumerate(images):
        entry = images[sig]
        glmdir = path.join(self.param.PATH_img, entry['analysis_name'])
        self.make_images_results_fdr(entry['hemi'], glmdir,
                                     entry['analysis_name'],
                                     entry['fsgd_type_contrast'])
        # the tally includes the image just processed, as before
        print(
            f" \n\n\n{total - pos} images LEFT for extraction"
        )
def read_mc_images(self):
    """Render one image per entry of the Monte-Carlo significance JSON."""
    images = load_json(self.param.sig_mc_json)
    total = len(images)
    for pos, sig in enumerate(images):
        entry = images[sig]
        cwsig_mc_f = path.join(self.param.PATH_img, entry['cwsig_mc_f'])
        oannot_mc_f = path.join(self.param.PATH_img, entry['oannot_mc_f'])
        self.make_images_results_mc(entry['hemi'], entry['analysis_name'],
                                    entry['contrast'], entry['direction'],
                                    cwsig_mc_f, oannot_mc_f)
        # the tally includes the image just processed, as before
        print(
            f" \n\n\n{total - pos} images LEFT for extraction"
        )
def get_dict_4classification(self, dir_abspath):
    """Return the dict used to seed classification.

    When a nimb_classified file already exists and updating is enabled,
    its content is returned and the file is deleted so it can be
    rewritten; otherwise an empty dict is returned.
    NOTE(review): dir_abspath is currently unused — confirm with callers.
    """
    result = dict()
    self.f_nimb_classified = os.path.join(self.MAIN_DIR,
                                          DEFAULT.f_nimb_classified)
    # the classification file itself must not be classified
    if self.f_nimb_classified in self.dir_2classify:
        self.dir_2classify.remove(self.f_nimb_classified)
    # update the existing file ?
    if os.path.exists(self.f_nimb_classified) and self.update:
        print('updating file with ids')
        result = load_json(self.f_nimb_classified)
        os.remove(self.f_nimb_classified)
    return result
def update_config(self):
    """Ensure the dcm2bids config covers the current sidecar.

    Reads self.sidecar_content (set by the caller) and self.config_file.
    Looks for config 'descriptions' matching self.data_Type and
    self.modalityLabel; if the sidecar's SeriesDescription is already
    present, exits asking for an extra criterion; otherwise appends a
    new description and sets self.update so the caller re-runs dcm2bids.
    """
    self.add_criterion = False
    self.config = load_json(self.config_file)
    criterion1 = 'SeriesDescription'
    sidecar_crit1 = self.sidecar_content[criterion1]
    # candidates: descriptions with the same dataType and modalityLabel
    list_criteria = list()
    for des in self.config['descriptions']:
        if des['dataType'] == self.data_Type and \
            des["modalityLabel"] == self.modalityLabel:
            list_criteria.append(des)
    if len(list_criteria) > 0:
        print(
            f'{" " *12}> there is at least one configuration with dataType: {self.data_Type}'
        )
        # iterate a reversed copy so removing from list_criteria is safe
        for des in list_criteria[::-1]:
            if criterion1 in des['criteria']:
                if des['criteria'][criterion1] == sidecar_crit1:
                    # exact match already in config: a human must add a
                    # distinguishing criterion, so abort the whole run
                    print(
                        f'{" " *12} sidecar is present in the config file. Add another sidecar criterion in the dcm2bids_helper.py script'
                    )
                    self.add_criterion = True
                    sys.exit(0)
                else:
                    # same dataType/modality but different series: drop it
                    list_criteria.remove(des)
    if len(list_criteria) > 0:
        # remaining candidates lack the SeriesDescription criterion
        print(
            f'{" " *12}> cannot find a correct sidecar location. Please add more parameters.'
        )
    if len(list_criteria) == 0:
        # nothing matched: register a new description for this sidecar
        print(f'{" " *12}> updating config with value: {sidecar_crit1}')
        new_des = {
            'dataType': self.data_Type,
            'modalityLabel': self.modalityLabel,
            'criteria': {
                criterion1: sidecar_crit1
            }
        }
        self.config['descriptions'].append(new_des)
        self.update = True
    if self.update:
        # run_stt == 0 presumably signals the caller to retry — TODO confirm
        self.run_stt = 0
        save_json(self.config, self.config_file, print_space=12)
    else:
        print(f'{" " *12}criterion {criterion1} present in config file')
def set_projects(self):
    """retrieve projects_ids

    Seeds credentials_home with the packaged projects.json / stats.json
    on first use, then loads projects, stats variables and project ids.
    """
    pkg_dir = os.path.dirname(os.path.abspath(__file__))
    file = os.path.join(self.credentials_home, 'projects.json')
    default = False
    if not self.chk_if_defined('projects'):
        # first run: copy the packaged defaults into credentials_home
        self.define_credentials()
        default = True
        shutil.copy(os.path.join(pkg_dir, 'projects.json'), file)
        shutil.copy(os.path.join(pkg_dir, 'stats.json'),
                    os.path.join(self.credentials_home, 'stats.json'))
        print(
            f' CHECK PROJECTS AND VARIABLES in: {self.credentials_home}'
        )
    self.projects = self.load_file('projects', default=default)
    self.stats_vars = load_json(
        path.join(self.credentials_home, 'stats.json'))
    self.project_ids = self.get_projects_ids()
def run(self, nimb_id='none', ses='none'):
    #run dcm2bids:
    '''
    if nimb_classified.json[nimb_id][archived]:
        extract from archive specific subject_session
    start dcm2bids for subject_session
    '''
    print(f' folder with subjects is: {self.DICOM_DIR}')
    self.nimb_id = nimb_id
    self.ses = ses
    # bids id combines the subject id and the session name
    self.bids_id = f'sub-{self.nimb_id}_{self.ses}'
    # NOTE(review): assumes self.id_classified already exists on the
    # instance (set by __init__ or a prior call) — confirm, otherwise
    # this raises AttributeError
    if self.id_classified:
        self.start_stepwise_choice()
    else:
        self.nimb_classified = dict()
        try:
            self.nimb_classified = load_json(
                os.path.join(self.DICOM_DIR, DEFAULT.f_nimb_classified))
        except Exception as e:
            print(
                f' could not load the nimb_classified file at: {self.DICOM_DIR}'
            )
            sys.exit(0)
        if self.nimb_classified:
            self.nimb_ids = list(self.nimb_classified.keys())
            for self.nimb_id in self.nimb_ids:
                self.id_classified = self.nimb_classified[self.nimb_id]
                # every key except 'archived' is a session name
                for self.ses in [
                        i for i in self.id_classified
                        if i not in ('archived', )
                ]:
                    self.start_stepwise_choice()
    # only a single-subject call gets a meaningful bids_id back
    if nimb_id != 'none':
        return self.bids_id
    else:
        return 'none'
def __init__(self, all_vars, PATHglm, sig_fdr_thresh = 3.0):
    '''
    sig_fdr_thresh at 3.0 corresponds to p = 0.001;
    for p=0.05 use value 1.3, but it should be used
    ONLY for visualisation.

    Loads GLM input files, verifies all subjects exist in SUBJECTS_DIR,
    prepares output folders and, when everything is in place, runs the
    whole GLM loop and saves the significance results; exits otherwise.
    '''
    vars_fs = all_vars.location_vars['local']["FREESURFER"]
    self.FREESURFER_HOME = vars_fs["FREESURFER_HOME"]
    self.SUBJECTS_DIR = vars_fs["SUBJECTS_DIR"]
    self.measurements = vars_fs["GLM_measurements"]
    self.thresholds = vars_fs["GLM_thresholds"]
    self.mc_cache_thresh = vars_fs["GLM_MCz_cache"]

    param = fs_definitions.FSGLMParams(PATHglm)
    self.PATHglm = PATHglm
    self.sig_fdr_thresh = sig_fdr_thresh
    self.PATHglm_glm = param.PATHglm_glm
    self.PATH_img = param.PATH_img
    self.PATHglm_results = param.PATHglm_results
    self.sig_fdr_json = param.sig_fdr_json
    self.sig_mc_json = param.sig_mc_json
    self.err_mris_preproc_file = param.err_mris_preproc_file
    self.mcz_sim_direction = param.mcz_sim_direction
    self.hemispheres = fs_definitions.hemi
    self.GLM_sim_fwhm4csd = param.GLM_sim_fwhm4csd
    self.GLM_MCz_meas_codes = param.GLM_MCz_meas_codes
    self.cluster_stats = param.cluster_stats
    self.cluster_stats_2csv = param.cluster_stats_2csv
    self.sig_contrasts = param.sig_contrasts

    RUN = True
    # get files_glm.
    files_glm = dict()
    try:
        files_glm = load_json(param.files_for_glm)
        print(f' successfully uploaded file: {param.files_for_glm}')
    except Exception as e:
        # FIX: was `except ImportError`, which load_json never raises —
        # a missing/malformed file crashed instead of setting RUN=False
        print(e)
        print(f' file {param.files_for_glm} is missing')
        RUN = False
    # get file with subjects per group
    # FIX: pre-initialize so the check below cannot hit a NameError
    # when this load fails
    subjects_per_group = dict()
    try:
        subjects_per_group = load_json(param.subjects_per_group)
        print(f' successfully uploaded file: {param.subjects_per_group}')
    except Exception as e:
        print(e)
        print(f' file {param.subjects_per_group} is missing')
        RUN = False
    # checking that all subjects are present
    print(' subjects are located in: {}'.format(self.SUBJECTS_DIR))
    for group in subjects_per_group:
        for subject in subjects_per_group[group]:
            if subject not in os.listdir(self.SUBJECTS_DIR):
                print(f' subject is missing from FreeSurfer Subjects folder: {subject}')
                RUN = False
                break
    # prepare output folders and the significant-contrasts file
    for subdir in (self.PATHglm_glm, self.PATHglm_results, self.PATH_img):
        if not os.path.isdir(subdir):
            os.makedirs(subdir)
    if not os.path.isfile(self.sig_contrasts):
        open(self.sig_contrasts, 'w').close()

    if RUN:
        self.err_preproc = list()
        self.sig_fdr_data = dict()
        self.sig_mc_data = dict()
        self.run_loop(files_glm)
        if self.err_preproc:
            save_json(self.err_preproc, self.err_mris_preproc_file)
        if self.sig_fdr_data:
            save_json(self.sig_fdr_data, self.sig_fdr_json)
        if self.sig_mc_data:
            save_json(self.sig_mc_data, self.sig_mc_json)
        if os.path.exists(self.cluster_stats):
            ClusterFile2CSV(self.cluster_stats, self.cluster_stats_2csv)
        print('\n\nGLM DONE')
    else:
        sys.exit('some ERRORS were found. Cannot perform FreeSurfer GLM')