def __init__(self, mpath, lpath):
    self.mpath = self.abspath
    err('used to have line: self.lpath = mlib.file.abspath')
    from mlib.boot.lang import ismac, islinux
    # pick the platform-appropriate path: mpath on macOS, lpath on Linux
    if ismac():
        thispath = mpath
    else:
        assert islinux()
        thispath = lpath
    super(SyncedDataFolder, self).__init__(thispath)
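# Minimal sketch (illustration only, not from the source) of the same platform
# selection pattern used in __init__ above: ismac()/islinux() from
# mlib.boot.lang decide which of two candidate paths applies on this machine.
def _select_platform_path(mac_path, linux_path):
    from mlib.boot.lang import ismac, islinux
    if ismac():
        return mac_path
    assert islinux()
    return linux_path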
def dnn(cfg):
    mode = cfg.MODE
    log(f'MODE IS {mode}')
    Project.DNN_FIGS_FIGS_FOLDER.mkdirs()
    TEMP_FIGS_FOLDER = Folder(cfg.root)
    last_eg = get_last_exp_group()
    new_eg = None
    new_fig_folder = None
    muscle = Muscle(local=cfg.MUSCLE == 'local')

    if cfg.CLEAR_EG_DATA and islinux():
        Project.DNN_FIGS_FIGS_FOLDER.clear()
    if 'JUSTRUN' in mode and cfg.SAVE_DATA:
        TEMP_FIGS_FOLDER.mkdir().clear()
    if 'next_exp_id' not in Project.STATE:
        Project.STATE['next_exp_id'] = 1
    if 'last_submitted_exp_group_name' not in Project.STATE:
        Project.STATE['last_submitted_exp_group_name'] = ''

    def check(a):
        Project.STATE["last_submitted_exp_group_name"] = a
        figs_folder, message = get_figs_folder(a)
        return figs_folder is not None, figs_folder if figs_folder is not None else message

    if cfg.EXPERIMENT_NAME is None:
        new_fig_folder = answer_request.answer_request(
            Project.STATE["last_submitted_exp_group_name"],
            "Experiment Name:",
            check,
            gui=cfg.GUI
        )
    else:
        new_fig_folder = check(cfg.EXPERIMENT_NAME)[1]
    new_fig_folder = File(new_fig_folder)
    log(f'{new_fig_folder=}')

    if 'JUSTRUN' in mode or 'PUSH' in mode:
        if cfg.MUSCLE != 'local':
            SyncedFolder(pwd()).sync(config='mitili', lpath='mitili')

    if 'JUSTRUN' in mode:
        cfg.tic = str(mlog.TIC)
        experiments = experiments_from_cfg(cfg, advance_id=True)
        jobs = make_jobs(cfg, muscle=muscle, experiments=experiments)
        assert not cfg.GUI
        muscle.run_all_jobs_main(jobs, serial=cfg.RUN_EXPS_IN_SERIAL, gui=cfg.GUI)
        temp_eg = DNN_ExperimentGroup.temp(TEMP_FIGS_FOLDER)
        temp_eg.save_md(cfg)
        if cfg.SAVE_DATA:
            new_eg = muscle.pull_data(TEMP_FIGS_FOLDER, cfg, new_fig_folder)

    exp_group = new_eg or last_eg
    log(f'MODE IS {mode}')
    if 'COMPILE_TEST_ALL' in mode:
        log('in CTA!')
        analyze_exp_group(exp_group, cfg)
        # the stuff below is only temporarily commented out
        makefigs(exp_group.compile_folder, cfg.fig_backend, overwrite=True)
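# Sketch of the cfg attributes that dnn() reads, collected from the body above.
# The concrete values are hypothetical placeholders, not taken from the source.
from types import SimpleNamespace

example_dnn_cfg = SimpleNamespace(
    MODE='JUSTRUN_COMPILE_TEST_ALL',  # assumed string; checked via 'JUSTRUN' in mode, etc.
    root='_figs/temp',                # wrapped as TEMP_FIGS_FOLDER = Folder(cfg.root)
    MUSCLE='local',                   # 'local' skips the remote sync
    CLEAR_EG_DATA=False,
    SAVE_DATA=True,
    EXPERIMENT_NAME=None,             # None triggers the answer_request prompt
    GUI=False,
    RUN_EXPS_IN_SERIAL=True,
    fig_backend='matplotlib',         # hypothetical backend name
)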
def compare(self, fun: Type[Correlation], GPU=False):
    special_confuse_mat = zeros(len(self.data), len(self.data))
    if (fun == PearsonCorrelation) and any([min(x) == max(x) for x in self.data]):
        # Pearson's Correlation Coefficient fails if two arrays are compared
        # that have a zero standard deviation product (divide by zero).
        # This check prevents that case.
        raise MathFail
    data = self.data  # pleasework

    def _fun(i):  # cannot be lambda?
        return [(i, j, fun.fun(data[i, :], data[j, :])) for j in itr(data)]

    def _fun_tf(data):  # cannot be lambda?
        return fun.fun_tf(data)

    MULTIPROCESS = False
    from pathos.multiprocessing import ProcessPool
    if islinux() and MULTIPROCESS:
        # slower than GPU
        # BUGGY
        # not optimized
        with ProcessPool() as p:
            # if islinux():
            #     mapid = randrange(0, 10000)
            #     print(f'starting map {mapid}')
            r = p.map(_fun, itr(self.data))
            for results in r:
                for rr in results:
                    special_confuse_mat[rr[0], rr[1]] = rr[2]
    elif islinux() and GPU:
        import tensorflow as tf
        special_confuse_mat = tf.zeros((len(self.data), len(self.data)))
        with tf.device('/GPU:0'):
            special_confuse_mat = _fun_tf(self.data).numpy()
        # results[net] = rsa.numpy()
        # tfdata = tf.convert_to_tensor(self.data).cuda()
    else:
        r = listmap(_fun, itr(self.data))
        for results in r:
            for rr in results:
                special_confuse_mat[rr[0], rr[1]] = rr[2]
    return ComparisonMatrix(
        data=nan_above_eye(naneye(special_confuse_mat)),
        method_used=fun.__name__,
        ground_truth=self.ground_truth,
        class_set=self.class_set
    )
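# Worked example of why the PearsonCorrelation guard above raises MathFail:
# Pearson's r divides by the product of the two standard deviations, so a row
# with min(x) == max(x) (zero variance) makes that denominator zero.
# Illustration only, using numpy directly rather than the Correlation classes.
def _pearson_zero_variance_demo():
    import numpy as np
    constant = np.array([1.0, 1.0, 1.0])  # std == 0
    varying = np.array([1.0, 2.0, 3.0])
    with np.errstate(invalid='ignore', divide='ignore'):
        r = np.corrcoef(constant, varying)[0, 1]
    return r  # nan (0/0), which the explicit check above avoids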
def JET():
    if islinux():
        return DATA_FOLDER['jet.mat'].load(silent=True)['c']
    else:
        return File('jet.mat').load(silent=True)['c']
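# Usage sketch (assumption: the 'c' entry in jet.mat is an N x 3 array of RGB
# rows, i.e. a MATLAB-style colormap). The matplotlib wrapping is illustrative.
def _jet_as_mpl_colormap():
    from matplotlib.colors import ListedColormap
    return ListedColormap(JET(), name='jet_from_mat')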
from packaging import version

def vers(s):
    return version.parse(str(s))

def mexit(code, message):
    log(message)
    exit(code)

REMOTE_CWD = None
if islinux():
    REMOTE_CWD = pwd()

log('Defining Project')

class Project(SuperRunner, ABC):
    INPUT_FILE = File('_input.txt')
    REQS_FILE = File('reqs.json')
    STATE = PermaDict('data/_metastate.json')

    # noinspection PyMethodMayBeStatic,PyMethodParameters
    def _default_config():
        proto = {'placeholder1': None}
        alll = {'placeholder2': None}
        return {
            'profiles': {
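                # Note on vers() above (example values are hypothetical):
                # wrapping packaging.version.parse gives semantic ordering that
                # plain string comparison gets wrong, e.g.
                #     vers('1.10.0') > vers('1.9.2')    # True
                #     '1.10.0' > '1.9.2'                # False (lexicographic)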