def _setup_learner_options(self, model, **params_learner):
    """Record the learner's main characteristics, pull the algo-specific
    default hyper-parameters, and generate the parameter bounds and initial
    values (the tricky part). Result is stored in self.options_learner."""
    # Keep the raw caller-supplied parameters around, just in case.
    self._backup_initparams = params_learner
    algo = params_learner['algo']
    # Default hyper-parameters for this algo, overridden by caller values.
    defaults = self._ALGO_INFOS[algo][0]
    options = ut.merge_dico(defaults, params_learner, update_type = 4)
    options['model'] = model
    options['algo'] = algo
    options['nb_params'] = model.n_params
    options['bounds_obj'] = params_learner.get('bounds_obj')
    options['init_obj'] = params_learner.get('init_obj')
    options['rdm_gen'] = self.rdm_gen
    options['mp_obj'] = self.mp
    # Bounds are generated first so that 'bounds_params' is already present
    # in the options passed (via **options) to the init generator.
    options['bounds_params'] = self._gen_boundaries_params(**options)
    options['init_params'] = self._gen_init_params(**options)
    self.options_learner = options
def parse_meta_config(cls, input_file = 'inputfile.txt', update_rules = False, debug = False):
    """ Parsing an input file containing a meta-config into a list of configs.
    It relies on generating the cartesian product of the elements

    Example (txt file containing a meta config)
    -------
    ### _CONTEXT {'_T':5}
    key1 Val1 Val2
    key2 Val3
    ###
    --> [{key1: eval(Val1), key2: eval(Val3)}, {key1: eval(Val2), key2: eval(Val3)}]

    Syntax of the meta-config file:
    ---------------------------------
    Keys starting with a _ and in self.METAPARAMS_INFO.keys() are metaparameters
    of the parser. o.w. they will be keys of the final config dico.
    If the first line key = _CONTEXT the Val associated should be of the form
    {'_contextvar1':val,'_contextvar2':val1} i.e. a dico with keys starting
    with a '_'

    Arguments
    ---------
    input_file: str
        path of the meta-config file
    update_rules : bool
        When building the configs should the first value seen (e.g. Val1) be
        used as a reference. Only works if Vals are <dic>. If True example's
        output becomes: [{key1: eval(Val1), key2: eval(Val3)},
        {key1: eval(Val2).update(eval(Val2)), key2: eval(Val3)}]
    debug : bool
        debug mode (drops into pdb)

    Output
    ------
    list_configs_processing_meta_configs: <list<dic>>
    """
    if(debug):
        pdb.set_trace()
    use_context = False
    with open(input_file, 'r') as csvfile:
        reader = csv.reader(csvfile, delimiter = ' ')
        list_values = list([])
        list_keys = list([])
        nbline = 0
        dico_METAPARAMS = {}
        for line in reader:
            nbline += 1
            if(line in cls.EMPTY_LINE) or (line[0] in cls.LEX_NA):
                pass  # skip empty / non-applicable lines
            elif(line[0] == '_CONTEXT'):
                context = literal_eval(line[1])
                # FIX: np.product is deprecated and removed in NumPy 2.0;
                # all(...) is the equivalent truthiness test (an empty dict
                # is accepted by both forms) and also tolerates a degenerate
                # empty-string key instead of raising IndexError.
                if(ut.is_dico(context) and all(k.startswith('_') for k in context)):
                    use_context = True
            elif(line[0] in cls.METAPARAMS_NAME):
                # Meta-parameter of the parser: exactly one value expected.
                assert(len(line) == 2), 'batch input file: 1 arg expected in l.' + str(nbline) + ' (' + str(line[0]) + ')'
                dico_METAPARAMS[line[0]] = literal_eval(line[1])
            else:
                # Regular config key followed by one or more candidate values.
                assert (len(line) >= 2), 'batch input file: not enough args in l.' + str(nbline)
                list_keys.append(line[0])
                if(use_context):
                    line_with_context = [cls._apply_context(line[i], context) for i in range(1, len(line))]
                    # NOTE(security): eval() executes arbitrary expressions from
                    # the input file — only use with trusted meta-config files.
                    ev_tmp = [eval(lwc) for lwc in line_with_context]
                else:
                    ev_tmp = [literal_eval(line[i]) for i in range(1, len(line))]
                if(update_rules):
                    # Use the first value as a reference dict and merge each
                    # candidate value on top of (a copy of) it.
                    ref_value = copy.copy(ev_tmp[0])
                    ev_tmp_new = []
                    for ev_i in ev_tmp:
                        in_progress = ut.merge_dico(ref_value, ev_i, update_type = 0, copy = True)
                        ev_tmp_new.append(copy.copy(in_progress))
                    ev_tmp = ev_tmp_new
                # Accumulate the cartesian product of values seen so far.
                list_values = ut.cartesianProduct(list_values, ev_tmp, ut.appendList)
    list_configs = [ut.lists2dico(list_keys, l) for l in list_values]
    list_configs = cls._apply_metaparams(list_configs, dico_METAPARAMS)
    list_configs = [cls._processing_meta_configs(c) for c in list_configs]
    return list_configs