def plot_one_best_func(list_res, path=None, criterion=np.argmin):
    """Plot the control function of the best run among a list of results.

    Arguments
    ---------
    list_res: list of nested-dict results (as produced by the learner)
    path: list of keys locating, in each result, the figure of merit used
        for the comparison. Defaults to ['fun'].
        (Was a mutable default `['fun']`; use a None sentinel instead.)
    criterion: callable mapping the list of figures of merit to the index
        of the best one (default np.argmin, i.e. smaller is better)
    """
    path = ['fun'] if path is None else path
    # Figure of merit of each run, then pick the best run
    run_fun = [ut.extract_from_nested(r, path) for r in list_res]
    best_arg = criterion(run_fun)
    best_run = list_res[best_arg]
    # Rebuild the stored function and plot it slightly beyond [0, T]
    best_func = pFunc_base.pFunc_base.build_pfunc(
        ut.extract_from_nested(best_run, ['func']))
    best_T = ut.extract_from_nested(best_run, ['config', 'testing_dico', 'T'])
    best_tt = np.linspace(-0.1, best_T + 0.1, 10000)
    best_func.plot_function(best_tt)
def collect_res_bug(cls, key_path=None, nameFile=None, allPrefix='res_',
                    folderName=None):
    """Extract results stored in (a) txt file(s) and group them according
    to some key values (where key_path provides the path in the potentially
    nested structure of the results to find the key(s))

    Output: a dictionary where key is the concatenation of the unique set of
    keys found and value is the res is a list of all res matching this key
    """
    # key_path=[] as a default would be a shared mutable default; use a
    # None sentinel instead (behavior unchanged for all callers).
    key_path = [] if key_path is None else key_path
    listRes = cls.read_res_bug(nameFile, allPrefix, folderName)
    # One tuple of key values per result, following each path in key_path
    res_keys = [tuple([ut.extract_from_nested(res, k) for k in key_path])
                for res in listRes]
    res_keys_unique = list(set(res_keys))
    # Group: concatenated-key string -> list of results sharing that key
    res = {ut.concat2String(*k_u): [listRes[n] for n, r in enumerate(res_keys)
                                    if r == k_u]
           for k_u in res_keys_unique}
    return res
def collect_res(cls, key_path=None, nameFile=None, allPrefix='res_',
                folderName=None, replace_func=None):
    """ collect results and group them according to some key values

    Arguments
    ---------
    key_path: <list> if not empty defines how to group results:
        provides a path in the dict structure of the results to get a
        value used for the grouping
    nameFile, allPrefix, folderName: cf. read_res()

    Output:
    -------
    a dictionary where key is the concatenation of the unique set of keys
    found and value is the res is a list of all res matching this key
    """
    # key_path=[] as a default would be a shared mutable default; use a
    # None sentinel instead (behavior unchanged for all callers).
    key_path = [] if key_path is None else key_path
    listRes, listNames = cls.read_res(nameFile, allPrefix, folderName,
                                      returnName=True,
                                      replace_func=replace_func)
    if(len(key_path) == 0):
        # No grouping requested: one entry per file, keyed by its name
        res = {k: v for k, v in zip(listNames, listRes)}
    else:
        # One tuple of key values per result, then group by unique tuple
        res_keys = [tuple([ut.extract_from_nested(res, k) for k in key_path])
                    for res in listRes]
        res_keys_unique = list(set(res_keys))
        res = {ut.concat2String(*k_u): [listRes[n]
                                        for n, r in enumerate(res_keys)
                                        if r == k_u]
               for k_u in res_keys_unique}
    return res
def get_keys_from_onefile(cls, name_file, path_keys, replace_func=None):
    """ Get concatenated keys from one res (passed as the name of the file)"""
    # Load the result structure from the file, then pull out each key value
    # along the paths in path_keys and join them with underscores.
    loaded = ut.eval_from_file(name_file, evfunc=pFunc_base.eval_with_pFunc,
                               replace_func=replace_func)
    pieces = (ut.extract_from_nested(loaded, one_path)
              for one_path in path_keys)
    return "_".join(pieces)
def get_keys_from_onefile_safe(cls, name_file, path_keys, replace_func=None):
    """ Get concatenated keys from one res (passed as the name of the file)

    Safe variant: on any error while reading/parsing the file, print a
    message and return None instead of raising.
    """
    try:
        res = ut.eval_from_file(name_file, evfunc=pFunc_base.eval_with_pFunc,
                                replace_func=replace_func)
        res_keys = "_".join([ut.extract_from_nested(res, k)
                             for k in path_keys])
    # Was a bare `except:` which also swallows KeyboardInterrupt/SystemExit;
    # narrow to Exception while keeping the best-effort behavior.
    except Exception:
        print("Error reading file {}".format(name_file))
        res_keys = None
    return res_keys
def _gen_name_res(cls, config, metadico = {}, type_output = '.txt'): """ Generate a name associated to a config Rules ----- if _OUT_NAME is not None and has a <dic> type: {'k':v} --> 'k_XXX' where XXX has been found in the config structure following path given by v elif _OUT_COUNTER is not None increment this counter for each new config add _OUT_PREFIX """ res_name = '' if(metadico.get('_OUT_NAME') is not None): name_rules = metadico['_OUT_NAME'] if(ut.is_dico(name_rules)): for k, v in name_rules.items(): res_name += (k + "_" + str(ut.extract_from_nested(config, v))) else: raise NotImplementedError() if((config.get("_RDM_RUN") is not None)): res_name += "_" res_name += str(config["_RDM_RUN"]) elif(metadico.get('_OUT_COUNTER') is not None): res_name += str(metadico['_OUT_COUNTER']) metadico['_OUT_COUNTER'] +=1 if(res_name == ''): res_name = str(RandomGenerator.RandomGenerator.gen_seed()) prefix = metadico.get('_OUT_PREFIX', '') res_name = prefix + res_name + type_output return res_name, metadico
# 3T: NM equiv to BO // Is it the same for pcw5 NO maybe because params nb increase=>less local minimas
# // Use Exploit strategy sooner
#
# ### CUSTOM METHODS ARE USED BECAUSE OF A BUG IN RES FILE
#==============================================================================
# ***GETDATA*** Linear benchmark
#==============================================================================
# Benchmark results live under <batch>/<batch>; group them by horizon T.
name_batch = 'bch_linear'
name_folder = name_batch + '/' + name_batch
key_T = ['config', 'testing_dico', 'T']
bch_res = learner.collect_res([key_T], None, 'benchmark', name_folder)
# For each group keep only the test figure of merit of the first result
bch_res_fom = {group_key: ut.extract_from_nested(group_res, [0, 'test_fom'])
               for group_key, group_res in bch_res.items()}

#==============================================================================
# ***GETDATA***
#==============================================================================
name_batch = 'crab_customs'
name_folder = name_batch + '/' + name_batch
# Inspect one result file to see which keys are available
one_res = learner.eval_from_onefile(name_folder + '/res120.txt')
names_keys_res = list(one_res.keys())
ut.print_nested_keys(one_res, ' +')

# Group the full result set by (optimizer flag, model flag)
key1 = ['config', 'optim_dico', '_FLAG_NAME']
key2 = ['config', 'model_dico', '_FLAG_NAME']
rawres = learner.collect_res([key1, key2], None, 'res', name_folder)
summary = learner.collect_and_process_res([key1, key2], None, 'res',
                                          name_folder, True, False)