def _add_in_alljson_format(self, train_perf: float, incumbent_id: int,
                           incumbent: Configuration, budget: float,
                           ta_time_used: float, wallclock_time: float) -> None:
    """Adds entries to AClib2-like (but with configs as json) trajectory file

    Parameters
    ----------
    train_perf: float
        Estimated performance on training (sub)set
    incumbent_id: int
        Id of incumbent
    incumbent: Configuration
        Current incumbent configuration
    budget: float
        budget (cutoff) used in intensifier to limit TA (default: 0)
    ta_time_used: float
        CPU time used by the target algorithm
    wallclock_time: float
        Wallclock time used so far
    """
    traj_entry = {"cpu_time": ta_time_used,
                  "wallclock_time": wallclock_time,
                  "evaluations": self.stats.ta_runs,
                  "cost": train_perf,
                  "incumbent": incumbent.get_dictionary(),
                  "budget": budget,
                  "origin": incumbent.origin,
                  }

    with open(self.alljson_traj_fn, "a") as fp:
        json.dump(traj_entry, fp)
        fp.write("\n")

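# Since each call appends one JSON object followed by a newline, the
# trajectory file is effectively JSON Lines. A minimal sketch for reading
# it back (the path is hypothetical; it stands in for self.alljson_traj_fn):
import json

with open("smac-output/traj.json") as fp:
    # One trajectory entry per line, in the order the incumbent changed.
    trajectory = [json.loads(line) for line in fp if line.strip()]

for entry in trajectory:
    print(entry["wallclock_time"], entry["cost"], entry["incumbent"])
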
def hyperparameter_values(self, value: Configuration):
    """Encode hyperparameters from object to base64"""
    if value is None:
        d = {}
    else:
        d = value.get_dictionary()
    self.hyperparameter_values_64 = object_to_base_64(d)

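# object_to_base_64 is not defined above. A minimal sketch of such a
# helper, assuming pickle-based serialization (an assumption, not the
# library's confirmed implementation), together with its inverse:
import base64
import pickle

def object_to_base_64(obj) -> str:
    # Serialize with pickle, then encode the bytes as an ASCII base64 string.
    return base64.b64encode(pickle.dumps(obj)).decode("ascii")

def base_64_to_object(encoded: str):
    # Inverse: decode the base64 string and unpickle the original object.
    return pickle.loads(base64.b64decode(encoded.encode("ascii")))
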
class Config(object):
    """ This class is an extension of the ConfigurationSpace-Configuration,
    introducing module-members scenario, id, repetition etc. """

    def __init__(self, scen, ID, repetition=0, updates=""):
        """ This function creates a configuration with default parameters,
        which will (at the end of the function) be overwritten with the
        values in the updates dictionary. """
        if isinstance(updates, str):
            updates = self.dict_from_file(scen, ID)
        elif not isinstance(updates, dict):
            raise ValueError("updates to Config must be of type str (for "
                             "filepath) or dict.")
        with open("dlas/dlas.pcs", 'r') as f:
            configspace = pcs.read(f.readlines())
        self.default_config = configspace.get_default_configuration()
        config_dict = self.default_config.get_dictionary()
        config_dict.update(updates)
        self.config = Configuration(configspace, config_dict)

        self.scen = scen
        self.ID = ID
        self.rep = repetition

        self.use_validation = True
        self.result_path = "results/{}/{}/{}/".format(self.scen, self.ID,
                                                      self.rep)

    def __getitem__(self, attr):
        return self.config[attr]

    def get_dictionary(self):
        return self.config.get_dictionary()

    def dict_from_file(self, s, ID):
        # Note: `s` (the scenario) is currently unused; the file is
        # located via the experiment ID alone.
        with open("experiments/{}.txt".format(ID), 'r') as f:
            content = f.readlines()
        content = [tuple(line.strip("\n").split("="))
                   for line in content if line != "\n"]
        content = [(name.strip(), value.strip()) for name, value in content]
        content = dict(content)
        for c in content:
            try:
                # Coerce numeric strings: float first, then int if whole.
                content[c] = float(content[c])
                if content[c].is_integer():
                    content[c] = int(content[c])
            except ValueError:
                pass
        return content

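# A hypothetical experiments/<ID>.txt that dict_from_file would parse
# ("key = value" per line, blank lines skipped, numeric strings coerced):
#
#     n_estimators = 100
#     learning_rate = 0.05
#     kernel = rbf
#
# Under those parsing rules this file yields:
expected_updates = {"n_estimators": 100, "learning_rate": 0.05,
                    "kernel": "rbf"}
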
def _overwrite_configuration(self, config: Configuration,
                             overwrite_args: list):
    '''
    Overwrites a given configuration with some new settings.

    Arguments
    ---------
    config: Configuration
        initial configuration to be adapted
    overwrite_args: list
        new parameter settings as a list of strings

    Returns
    -------
    Configuration
    '''

    def pairwise(iterable):
        # s -> (s0, s1), (s1, s2), (s2, s3), ...
        a, b = tee(iterable)
        next(b, None)
        return zip(a, b)

    dict_conf = config.get_dictionary()
    for param, value in pairwise(overwrite_args):
        try:
            ok = self.cs.get_hyperparameter(param)
        except KeyError:
            ok = None
        if ok is not None:
            # Cast the string value according to the hyperparameter type.
            if type(self.cs.get_hyperparameter(
                    param)) is UniformIntegerHyperparameter:
                dict_conf[param] = int(value)
            elif type(self.cs.get_hyperparameter(
                    param)) is UniformFloatHyperparameter:
                dict_conf[param] = float(value)
            elif value == "True":
                dict_conf[param] = True
            elif value == "False":
                dict_conf[param] = False
            else:
                dict_conf[param] = value
        else:
            self.logger.warning("Unknown given parameter: %s %s" %
                                (param, value))
    config = Configuration(self.cs, values=dict_conf,
                           allow_inactive_with_values=True)
    return config

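# Note that pairwise yields *overlapping* pairs, so for an argument list
# like ["alpha", "0.5", "beta", "2"] the spurious pair ("0.5", "beta") is
# also tried; it falls through to the unknown-parameter branch while the
# real (param, value) pairs are applied. A standalone check of the helper
# (parameter names hypothetical):
from itertools import tee

def pairwise(iterable):
    a, b = tee(iterable)
    next(b, None)
    return zip(a, b)

print(list(pairwise(["alpha", "0.5", "beta", "2"])))
# [('alpha', '0.5'), ('0.5', 'beta'), ('beta', '2')]
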
def convert(self, php: Configuration):
    dict_ = php.get_dictionary()
    ret = {}
    for k, v in dict_.items():
        if isinstance(v, str):
            v = _decode(v)
        key_path = k.split(":")
        # A "...:__choice__" key selects a component; move the chosen
        # component name into the key path and start it as a nested dict.
        if key_path[-1] == "__choice__":
            key_path = key_path[:-1]
            if v is not None:
                key_path += [v]
                v = {}
        if "None" in key_path:
            continue
        self.set_kv(ret, key_path, v)  # self.split_key(k)
    return ret

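# set_kv and _decode are not shown above. Assuming set_kv writes a value
# into a nested dict along a key path, a flat ConfigSpace-style dictionary
# such as {"classifier:__choice__": "random_forest",
# "classifier:random_forest:n_estimators": 100} would convert to
# {"classifier": {"random_forest": {"n_estimators": 100}}}. A minimal
# sketch of that helper (names hypothetical):
def set_kv(d: dict, key_path: list, value):
    # Walk/create nested dicts along key_path, store value at the last key.
    for key in key_path[:-1]:
        d = d.setdefault(key, {})
    d[key_path[-1]] = value

nested = {}
set_kv(nested, ["classifier", "random_forest"], {})
set_kv(nested, ["classifier", "random_forest", "n_estimators"], 100)
print(nested)  # {'classifier': {'random_forest': {'n_estimators': 100}}}
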
def _overwrite_configuration(self, config: Configuration,
                             overwrite_args: list):
    '''
    Overwrites a given configuration with some new settings.

    Arguments
    ---------
    config: Configuration
        initial configuration to be adapted
    overwrite_args: list
        new parameter settings as a list of strings

    Returns
    -------
    Configuration
    '''

    def pairwise(iterable):
        a, b = tee(iterable)
        next(b, None)
        return zip(a, b)

    dict_conf = config.get_dictionary()
    for param, value in pairwise(overwrite_args):
        # Membership test (not truthiness) so parameters whose current
        # value is falsy (e.g. False or 0) are still recognized.
        if param in dict_conf:
            if type(self.cs.get_hyperparameter(
                    param)) is UniformIntegerHyperparameter:
                dict_conf[param] = int(value)
            elif type(self.cs.get_hyperparameter(
                    param)) is UniformFloatHyperparameter:
                dict_conf[param] = float(value)
            elif value == "True":
                dict_conf[param] = True
            elif value == "False":
                dict_conf[param] = False
            else:
                dict_conf[param] = value
        else:
            self.logger.warning(
                "Unknown given parameter: %s %s" % (param, value))
    config = Configuration(self.cs, values=dict_conf)
    return config

def set_hyperparameters(
        self,
        configuration: Configuration,
        init_params: Optional[Dict[str, Any]] = None) -> 'autoPyTorchChoice':
    """
    Applies a configuration to the given component.
    This method translates a hierarchical configuration key
    to an actual parameter of the autoPyTorch component.

    Args:
        configuration (Configuration): which configuration to apply to
            the chosen component
        init_params (Optional[Dict[str, any]]): Optional arguments to
            initialize the chosen component

    Returns:
        self: returns an instance of self
    """
    new_params = {}

    params = configuration.get_dictionary()
    choice = params['__choice__']
    del params['__choice__']

    for param, value in params.items():
        # Strip the "<choice>:" prefix so the key matches the chosen
        # component's own constructor argument.
        param = param.replace(choice + ':', '')
        new_params[param] = value

    if init_params is not None:
        for param, value in init_params.items():
            param = param.replace(choice + ':', '')
            new_params[param] = value

    new_params['random_state'] = self.random_state

    self.new_params = new_params
    self.choice = self.get_components()[choice](**new_params)
    return self

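# A sketch of the prefix handling above: for a configuration whose
# dictionary is {"__choice__": "adam", "adam:lr": 0.001, ...}, the loop
# strips the "adam:" prefix so the component constructor receives lr=0.001
# (component and parameter names here are hypothetical):
params = {"__choice__": "adam", "adam:lr": 0.001, "adam:weight_decay": 0.0}
choice = params.pop("__choice__")
new_params = {param.replace(choice + ":", ""): value
              for param, value in params.items()}
print(choice, new_params)  # adam {'lr': 0.001, 'weight_decay': 0.0}
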
def set_hyperparameters(self,
                        configuration: Configuration,
                        init_params: Optional[Dict[str, Any]] = None
                        ) -> 'ImageAugmenter':
    """
    Applies a configuration to the given component.
    This method translates a hierarchical configuration key
    to an actual parameter of the autoPyTorch component.

    Args:
        configuration (Configuration): which configuration to apply to
            the chosen component
        init_params (Optional[Dict[str, any]]): Optional arguments to
            initialize the chosen component

    Returns:
        self: returns an instance of self
    """
    available_augmenters = get_components()
    params = configuration.get_dictionary()
    for name, augmenter in available_augmenters.items():
        new_params = {}
        # Collect only the keys belonging to this augmenter and strip
        # its name (and the ":" separator) from them.
        for param, value in params.items():
            if name in param:
                param = param.replace(name, '').replace(':', '')
                new_params[param] = value
        if init_params is not None:
            for param, value in init_params.items():
                if name in param:
                    param = param.replace(name, '').replace(':', '')
                    new_params[param] = value
        new_params['random_state'] = self.random_state
        self.available_augmenters[name] = augmenter(**new_params)
    return self

def set_hyperparameters(
        self,
        configuration: Configuration,
        init_params: Optional[Dict[str, Any]] = None) -> BaseEstimator:
    """
    Applies a configuration to the given component.
    This method translates a hierarchical configuration key
    to an actual parameter of the autoPyTorch component.

    Args:
        configuration (Configuration): which configuration to apply to
            the chosen component
        init_params (Optional[Dict[str, any]]): Optional arguments to
            initialize the chosen component

    Returns:
        An instance of self
    """
    params = configuration.get_dictionary()

    for param, value in params.items():
        if not hasattr(self, param):
            raise ValueError('Cannot set hyperparameter %s for %s because '
                             'the hyperparameter does not exist.' %
                             (param, str(self)))
        setattr(self, param, value)

    if init_params is not None:
        for param, value in init_params.items():
            if not hasattr(self, param):
                raise ValueError('Cannot set init param %s for %s because '
                                 'the init param does not exist.' %
                                 (param, str(self)))
            setattr(self, param, value)

    return self

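# The flat variant above simply mirrors configuration keys onto existing
# attributes. A self-contained sketch of that pattern (class and
# hyperparameter names hypothetical):
class Component:
    def __init__(self):
        self.alpha = 1.0
        self.beta = 0.5

    def set_params_from_dict(self, params: dict) -> "Component":
        for param, value in params.items():
            # Refuse to create attributes that were never declared.
            if not hasattr(self, param):
                raise ValueError("Cannot set hyperparameter %s because "
                                 "it does not exist." % param)
            setattr(self, param, value)
        return self

c = Component().set_params_from_dict({"alpha": 0.1})
print(c.alpha)  # 0.1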