def extract_stats_from_archive(self, subjects, PROCESSED_FS_DIR):
    '''check which subjects in the list are archived;
       extract the "stats" folder of each archived subject
    '''
    from .manage_archive import is_archive, ZipArchiveManagement

    archived = list()
    for sub in subjects:
        path_2sub = get_path(PROCESSED_FS_DIR, sub)
        if not os.path.isdir(path_2sub):
            if is_archive(sub):
                archived.append(path_2sub)
    if archived:
        dirs2extract = ['stats', ]
        tmp_dir = os.path.join(self.NIMB_tmp, DEFAULT.nimb_tmp_dir)
        makedir_ifnot_exist(tmp_dir)
        PROCESSED_FS_DIR = tmp_dir
        for path_2sub in archived:
            print('extracting folder {} from archive {} to destination {}'.format(
                'stats', path_2sub, tmp_dir))
            ZipArchiveManagement(path_2sub,
                                 path2xtrct=tmp_dir,
                                 path_err=False,
                                 dirs2xtrct=dirs2extract,
                                 log=True)
    return PROCESSED_FS_DIR

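# A minimal sketch of what the imported is_archive helper is assumed to do:
# decide from the file name whether a subject entry is an archive. This is an
# illustrative stand-in; the authoritative version lives in .manage_archive.
def _is_archive_sketch(fname):
    # assumption: archives are recognized by their extension
    return fname.lower().endswith(('.zip', '.tar', '.tar.gz', '.tgz'))
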
def get_local_remote_dir(self, dir_data, _dir='None'):
    location = dir_data[0]
    dir_abspath = dir_data[1]
    print(f'{LogLVL.lvl2}folder {dir_abspath}')
    print(f'{LogLVL.lvl3}is located on: {location}')
    if location == 'local':
        if not os.path.exists(dir_abspath):
            dir_abspath = get_userdefined_paths(f'{_dir} folder',
                                                dir_abspath, '',
                                                create=False)
            makedir_ifnot_exist(dir_abspath)
            if _dir != 'None':
                from setup.get_credentials_home import _get_credentials_home
                if _dir in self.all_vars.projects[self.project]:
                    # update the project variable and save it back to projects.json
                    self.all_vars.projects[self.project][_dir][1] = dir_abspath
                    abs_path_projects = os.path.join(_get_credentials_home(),
                                                     'projects.json')
                    save_json(self.all_vars.projects, abs_path_projects)
                else:
                    print('    the folder to change is not defined in the projects.json variables')
            else:
                print('    the folder to change is not defined; cannot create a new one')
        return True, dir_abspath, 'local'
    else:
        return False, dir_abspath, location

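# Usage sketch (hypothetical instance and paths): dir_data is assumed to be a
# (location, abspath) pair as stored in projects.json; the method returns
# (is_local, abspath, location).
def _get_local_remote_dir_demo(app):
    is_local, path, location = app.get_local_remote_dir(
        ('local', '/home/user/projects/proj1/processed_fs'),
        _dir='PROCESSED_FS_DIR')
    print(is_local, path, location)
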
def extract_from_archive(self, archive_abspath, path2mr_, tmp_dir):
    tmp_dir_xtract = os.path.join(tmp_dir, 'tmp_for_classification')
    tmp_dir_err = os.path.join(tmp_dir, 'tmp_for_classification_err')
    makedir_ifnot_exist(tmp_dir_xtract)
    makedir_ifnot_exist(tmp_dir_err)
    ZipArchiveManagement(archive_abspath,
                         path2xtrct=tmp_dir_xtract,
                         path_err=tmp_dir_err,
                         dirs2xtrct=[path2mr_, ])
    # remove the error folder if extraction produced no errors
    if len(os.listdir(tmp_dir_err)) == 0:
        shutil.rmtree(tmp_dir_err, ignore_errors=True)
    return tmp_dir_xtract

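# Standalone version of the cleanup step above: drop a folder when it is
# empty (relies on the module-level os and shutil imports used throughout
# this file).
def _cleanup_if_empty(dir_path):
    if os.path.isdir(dir_path) and len(os.listdir(dir_path)) == 0:
        shutil.rmtree(dir_path, ignore_errors=True)
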
def classify_ready(self):
    ready = True
    for p in (self.locations['local']['NIMB_PATHS']['NIMB_NEW_SUBJECTS'],
              self.NIMB_HOME,
              self.NIMB_tmp):
        if not os.path.exists(p):
            try:
                # creation can fail, e.g., when the path starts with ~
                makedir_ifnot_exist(p)
            except Exception as e:
                print(e)
            if not os.path.exists(p):
                ready = False
                break
    return ready

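# Illustrative standalone variant of the readiness check above: try to create
# each required path and return the ones that are still missing (relies on the
# module-level os import; makedir_ifnot_exist is the repo helper).
def _missing_paths(paths):
    missing = []
    for p in paths:
        try:
            makedir_ifnot_exist(p)
        except Exception as e:
            print(e)
        if not os.path.exists(p):
            missing.append(p)
    return missing
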
def prep_4stats(self, fs=False):
    """create DIRs for stats (as per setup/stats.json)
       get the group file (provided by the user)
       return the final stats grid file that will be used for statistical analysis
    """
    dir_4stats = makedir_ifnot_exist(
        self.proj_vars["STATS_PATHS"]["STATS_HOME"])
    fname_groups = self.proj_vars['fname_groups']
    file_other_stats = []
    file_names = self.proj_vars["STATS_FILES"]
    for file in ["fname_fs_all_stats",
                 "fname_func_all_stats",
                 "fname_other_stats"]:
        file_name = self.proj_vars[file]
        if file_name:
            if file_name == "default":
                file_name = file_names[file]
            file_name = f'{file_name}.{file_names["file_type"]}'
            file_other_stats.append(file_name)
    for file in ["fname_Outcor", "fname_eTIVcor", "fname_NaNcor"]:
        file_name = f'{file_names[file]}.{file_names["file_type"]}'
        file_other_stats.append(file_name)
    if not self.get_files_for_stats(dir_4stats, [fname_groups, ]):
        sys.exit()
    self.get_files_for_stats(dir_4stats, file_other_stats)
    return fname_groups

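# Hypothetical example of the STATS_FILES block that prep_4stats expects in
# the project variables; the keys match the ones read above, the values are
# assumptions for illustration only.
_EXAMPLE_STATS_FILES = {
    "file_type":            "xlsx",
    "fname_fs_all_stats":   "fs_all_stats",
    "fname_func_all_stats": "func_all_stats",
    "fname_other_stats":    "other_stats",
    "fname_Outcor":         "stats_outcor",
    "fname_eTIVcor":        "stats_etivcor",
    "fname_NaNcor":         "stats_nancor",
}
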
def extract_from_archive(self, archive_abspath, path2mr_):
    if self.tmp_dir == 'none':
        self.tmp_dir = os.path.dirname(archive_abspath)
    tmp_dir_xtract = os.path.join(self.tmp_dir, 'tmp_for_classification')
    tmp_dir_err = os.path.join(self.tmp_dir, 'tmp_for_classification_err')
    # print(f'    extracting data: {path2mr_}')
    makedir_ifnot_exist(tmp_dir_xtract)
    makedir_ifnot_exist(tmp_dir_err)
    ZipArchiveManagement(archive_abspath,
                         path2xtrct=tmp_dir_xtract,
                         path_err=tmp_dir_err,
                         dirs2xtrct=[path2mr_, ])
    # remove the error folder if extraction produced no errors
    if len(os.listdir(tmp_dir_err)) == 0:
        shutil.rmtree(tmp_dir_err, ignore_errors=True)
    return tmp_dir_xtract

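# Illustrative call (hypothetical instance and paths): with tmp_dir left at
# its 'none' sentinel, the folder containing the archive is used for the
# temporary extraction directories.
def _extract_demo(app):
    extracted = app.extract_from_archive('/data/archives/subj01.zip',
                                         'subj01/ses-01')
    print('extracted to:', extracted)
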
def fs_ready(self):
    if self.locations['local']['FREESURFER']['install'] == 1:
        print('FreeSurfer is set to be installed on the local computer')
        if len(self.FREESURFER_HOME) < 1:
            print("FREESURFER_HOME is missing.")
            print("    Please define FREESURFER_HOME in the nimb/local.json file")
            return False
        if self.check_freesurfer_ready():
            SUBJECTS_DIR = self.locations['local']['FREESURFER']['SUBJECTS_DIR']
            if not os.path.exists(SUBJECTS_DIR):
                print('    creating path {}'.format(SUBJECTS_DIR))
                makedir_ifnot_exist(SUBJECTS_DIR)
            return self.fs_chk_fsaverage_ready(SUBJECTS_DIR)
        return False
    else:
        print('FreeSurfer is not installed yet.')
        print('    Please set FreeSurfer_install to 1 in the nimb/local.json file')
        return False

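# Minimal sketch of what fs_chk_fsaverage_ready is assumed to verify: that the
# fsaverage template is present in SUBJECTS_DIR (the real method may do more,
# e.g., copy it over from FREESURFER_HOME/subjects).
def _fsaverage_present(subjects_dir):
    return os.path.isdir(os.path.join(subjects_dir, 'fsaverage'))
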
def prep_4fs_glm(self, FS_GLM_dir, fname_groups):
    FS_GLM_dir = makedir_ifnot_exist(FS_GLM_dir)
    print('INITIATING: preparation to perform GLM with FreeSurfer')
    print('    in the folder:', FS_GLM_dir)
    if not self.get_files_for_stats(FS_GLM_dir,
                                    [fname_groups, DEFAULT.f_ids]):
        sys.exit()
    f_GLM_group = os.path.join(FS_GLM_dir, fname_groups)
    f_ids_processed = os.path.join(FS_GLM_dir, DEFAULT.f_ids)
    SUBJECTS_DIR = self.locations["local"]['FREESURFER']['SUBJECTS_DIR']
    if os.path.exists(f_GLM_group) and os.path.exists(f_ids_processed):
        from processing.freesurfer.fs_glm_prep import CheckIfReady4GLM
        ready, miss_ls = CheckIfReady4GLM(
            self.locations["local"]['NIMB_PATHS'],
            self.locations["local"]['FREESURFER'],
            self.proj_vars,
            f_ids_processed,
            f_GLM_group,
            FS_GLM_dir).chk_if_subjects_ready()
        print(f'    variables used for GLM are: {self.proj_vars["variables_for_glm"]}')
        print(f'    ID column is: {self.proj_vars["id_col"]}')
        print(f'    group column is: {self.proj_vars["group_col"]}')
        print(f'    variables EXCLUDED from GLM are: {self.proj_vars["other_params"]}')
        print(f'    for details check: credentials_path/projects.py')
        if miss_ls:
            dirs2extract = ['label', 'surf', ]
            print('    ATTENTION! some subjects could be prepared for the GLM analysis')
            print(f'    by extracting the folders: {dirs2extract}')
            if get_yes_no('    do you want to prepare the missing subjects? (y/n)') == 1:
                self.prep_4fs_glm_extract_dirs(miss_ls,
                                               SUBJECTS_DIR,
                                               dirs2extract)
                # re-run the preparation once the missing folders were extracted
                return self.prep_4fs_glm(FS_GLM_dir, fname_groups)
            return False
        else:
            print('    all ids are present in the analysis folder; ready for the GLM analysis')
            print('    the GLM file path is:', f_GLM_group)
            return f_GLM_group, FS_GLM_dir
    else:
        print('GLM files are missing: {}, {}'.format(f_GLM_group,
                                                     f_ids_processed))
        return False

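# Usage sketch (hypothetical instance and paths): prep_4fs_glm returns a
# (group_file, glm_dir) pair when all ids are ready and False otherwise, so
# callers should test the result before unpacking it.
def _prep_glm_demo(app):
    result = app.prep_4fs_glm('/data/fs_glm', 'groups.csv')
    if result:
        f_glm_group, fs_glm_dir = result
        print('ready for GLM:', f_glm_group, fs_glm_dir)
    else:
        print('GLM preparation incomplete')
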
def __init__(self, params, repeat_lim=2):
    self.proj_vars = dict()
    self.project = params.project
    self.id_classified = dict()
    self.run_stt = 0
    self.repeat_lim = int(repeat_lim)
    self.repeat_updating = 0
    self.DICOM_DIR = params.src
    self.tmp_dir = 'none'
    self.OUTPUT_DIR = makedir_ifnot_exist(params.o)
    self.archived = False

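# Hypothetical illustration of the `params` object this constructor expects
# (an assumption based on the attributes read above): an argparse-style
# namespace with project, src and o attributes.
def _example_params():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-project', default='proj1')
    parser.add_argument('-src', default='/data/dicom')
    parser.add_argument('-o', default='/data/bids_out')
    return parser.parse_args([])
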
def __init__(self, proj_vars, project,
             DICOM_DIR='default',
             tmp_dir='none',
             repeat_lim=10):
    self.proj_vars = proj_vars
    self.project = project
    self.run_stt = 0
    self.repeat_lim = repeat_lim
    self.repeat_updating = 0
    self.DICOM_DIR = DICOM_DIR
    if DICOM_DIR == 'default':
        self.DICOM_DIR = self.get_SUBJ_DIR()
    self.tmp_dir = tmp_dir
    self.OUTPUT_DIR = makedir_ifnot_exist(
        self.proj_vars['SOURCE_BIDS_DIR'][1])
    self.archived = False

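# Hypothetical fragment of proj_vars matching what this constructor reads:
# SOURCE_BIDS_DIR is assumed to be a (location, abspath) pair, consistent
# with the other project folders in projects.json.
_EXAMPLE_PROJ_VARS = {
    'SOURCE_BIDS_DIR': ['local', '/data/proj1/bids'],
}
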
def prep_4fs_stats(self, subjects=list()):
    '''create a DIR to store the stats files
       check if the processed subjects are on the local computer
       if yes: copy the corresponding stats files to the stats DIR
               and return the directory with the stats files
       else: return False
    '''
    dir_4stats = makedir_ifnot_exist(
        self.proj_vars["STATS_PATHS"]["STATS_HOME"])
    local, PROCESSED_FS_DIR, _ = self.get_local_remote_dir(
        self.proj_vars["PROCESSED_FS_DIR"])
    # the subjects argument is currently ignored: all entries found in
    # PROCESSED_FS_DIR are used
    subjects = os.listdir(PROCESSED_FS_DIR)
    if local:
        fname_groups = self.proj_vars['fname_groups']
        if self.get_files_for_stats(dir_4stats,
                                    [fname_groups, DEFAULT.f_ids]):
            print("subjects for stats are: ", subjects)
            return self.extract_stats_from_archive(subjects,
                                                   PROCESSED_FS_DIR)
    return False

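# Usage sketch (hypothetical instance): prep_4fs_stats returns the directory
# holding the per-subject stats folders when the data are local and the group
# files could be copied, otherwise False.
def _prep_stats_demo(app):
    stats_src = app.prep_4fs_stats()
    if stats_src:
        print('stats can be read from:', stats_src)
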