def handle_update_search_space(self, data):
    """Convert an NNI JSON search space into a ConfigSpace object.

    dict<dict> -> configspace

    Parameters
    ----------
    data: JSON object
        search space of this experiment

    Raises
    ------
    ValueError
        If a variable's ``_type`` is not one of the supported NNI types.
    """
    search_space = data
    cs = CS.ConfigurationSpace()
    for var in search_space:
        _type = str(search_space[var]["_type"])
        # Hoist the repeated lookup; every branch below reads this list.
        _value = search_space[var]["_value"]
        if _type == 'choice':
            cs.add_hyperparameter(CSH.CategoricalHyperparameter(
                var, choices=_value))
        elif _type == 'randint':
            # BUG FIX: NNI 'randint' with a single value samples integers in
            # [0, _value[0]) — the upper bound is EXCLUSIVE — while ConfigSpace
            # bounds are inclusive. Subtract 1 so _value[0] itself can no
            # longer be sampled (the sibling converter in this file already
            # applies the same -1 correction).
            cs.add_hyperparameter(CSH.UniformIntegerHyperparameter(
                var, lower=0, upper=_value[0] - 1))
        elif _type == 'uniform':
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=_value[0], upper=_value[1]))
        elif _type == 'quniform':
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=_value[0], upper=_value[1], q=_value[2]))
        elif _type == 'loguniform':
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=_value[0], upper=_value[1], log=True))
        elif _type == 'qloguniform':
            cs.add_hyperparameter(CSH.UniformFloatHyperparameter(
                var, lower=_value[0], upper=_value[1], q=_value[2],
                log=True))
        # The normal-family branches read indices 1 and 2 (and 3 for the
        # quantized forms); index 0 is unused here — presumably a label in the
        # NNI value layout, TODO confirm against the NNI search-space spec.
        elif _type == 'normal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=_value[1], sigma=_value[2]))
        elif _type == 'qnormal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=_value[1], sigma=_value[2], q=_value[3]))
        elif _type == 'lognormal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=_value[1], sigma=_value[2], log=True))
        elif _type == 'qlognormal':
            cs.add_hyperparameter(CSH.NormalFloatHyperparameter(
                var, mu=_value[1], sigma=_value[2], q=_value[3],
                log=True))
        else:
            raise ValueError(
                'unrecognized type in search_space, type is {}'.format(_type))
    self.search_space = cs
def make_normal():
    # Factory for an unbounded, non-quantized (q=None), linear-space
    # (log=False) normal float hyperparameter named after the enclosing
    # object's `self.name`.
    # NOTE(review): `a` and `b` are free variables from the enclosing scope —
    # presumably the mean (mu) and standard deviation (sigma) of the
    # distribution; confirm against the surrounding code.
    return csh.NormalFloatHyperparameter(self.name, mu=a, sigma=b,
                                         default_value=self.default_value,
                                         q=None, log=False)
def normal(quantization, loc, scale, discrete=False, **kwargs):
    """Build a normally distributed ConfigSpace hyperparameter.

    Args:
        quantization: quantization step, forwarded as ``q``.
        loc: mean of the distribution, forwarded as ``mu``.
        scale: standard deviation of the distribution, forwarded as ``sigma``.
        discrete (bool): when True, build an integer-valued hyperparameter
            instead of a float-valued one.
        **kwargs: forwarded verbatim to the ConfigSpace constructor
            (e.g. ``name``, ``default_value``).

    Returns:
        ``csh.NormalIntegerHyperparameter`` if ``discrete`` is True,
        otherwise ``csh.NormalFloatHyperparameter``.
    """
    # Select the constructor first; both take the same keyword arguments.
    hp_cls = csh.NormalIntegerHyperparameter if discrete else csh.NormalFloatHyperparameter
    return hp_cls(mu=loc, sigma=scale, q=quantization, **kwargs)
def test_add_good_dim(self):
    # Verify that every supported shortcut notation accepted by
    # HpProblem.add_hyperparameter maps to the expected ConfigSpace
    # hyperparameter object.
    from deephyper.problem import HpProblem
    pb = HpProblem()

    # (int, int) tuple -> uniform integer range.
    p0 = pb.add_hyperparameter((-10, 10), "p0")
    p0_csh = csh.UniformIntegerHyperparameter(
        name="p0", lower=-10, upper=10, log=False
    )
    assert p0 == p0_csh

    # (int, int, "log-uniform") -> log-scaled integer range.
    p1 = pb.add_hyperparameter((1, 100, "log-uniform"), "p1")
    p1_csh = csh.UniformIntegerHyperparameter(name="p1", lower=1, upper=100, log=True)
    assert p1 == p1_csh

    # (float, float) tuple -> uniform float range.
    p2 = pb.add_hyperparameter((-10.0, 10.0), "p2")
    p2_csh = csh.UniformFloatHyperparameter(
        name="p2", lower=-10.0, upper=10.0, log=False
    )
    assert p2 == p2_csh

    # (float, float, "log-uniform") -> log-scaled float range.
    p3 = pb.add_hyperparameter((1.0, 100.0, "log-uniform"), "p3")
    p3_csh = csh.UniformFloatHyperparameter(
        name="p3", lower=1.0, upper=100.0, log=True
    )
    assert p3 == p3_csh

    # Numeric lists -> ordinal hyperparameters.
    p4 = pb.add_hyperparameter([1, 2, 3, 4], "p4")
    p4_csh = csh.OrdinalHyperparameter(name="p4", sequence=[1, 2, 3, 4])
    assert p4 == p4_csh

    p5 = pb.add_hyperparameter([1.0, 2.0, 3.0, 4.0], "p5")
    p5_csh = csh.OrdinalHyperparameter(name="p5", sequence=[1.0, 2.0, 3.0, 4.0])
    assert p5 == p5_csh

    # String list -> categorical hyperparameter.
    p6 = pb.add_hyperparameter(["cat0", "cat1"], "p6")
    p6_csh = csh.CategoricalHyperparameter(name="p6", choices=["cat0", "cat1"])
    assert p6 == p6_csh

    # dict with integer mu/sigma -> normal integer hyperparameter.
    p7 = pb.add_hyperparameter({"mu": 0, "sigma": 1}, "p7")
    p7_csh = csh.NormalIntegerHyperparameter(name="p7", mu=0, sigma=1)
    assert p7 == p7_csh

    # Bounded normal distributions require a newer ConfigSpace release.
    # NOTE(review): this is a lexicographic string comparison, not a semantic
    # version compare — e.g. "0.4.9" > "0.4.20" — confirm intended.
    if cs.__version__ > "0.4.20":
        p8 = pb.add_hyperparameter(
            {"mu": 0, "sigma": 1, "lower": -5, "upper": 5}, "p8"
        )
        p8_csh = csh.NormalIntegerHyperparameter(
            name="p8", mu=0, sigma=1, lower=-5, upper=5
        )
        assert p8 == p8_csh

    # dict with float mu/sigma -> normal float hyperparameter.
    # NOTE(review): original indentation is ambiguous in the source; p9 is
    # assumed to sit OUTSIDE the version guard above — confirm.
    p9 = pb.add_hyperparameter({"mu": 0.0, "sigma": 1.0}, "p9")
    p9_csh = csh.NormalFloatHyperparameter(name="p9", mu=0, sigma=1)
    assert p9 == p9_csh
def handle_update_search_space(self, data):
    """Convert the received search space into a ConfigSpace object.

    change json format to ConfigSpace format dict<dict> -> configspace

    Parameters
    ----------
    data: JSON object
        search space of this experiment

    Raises
    ------
    ValueError
        If a variable's ``_type`` is not one of the supported NNI types
        (only reachable on the NNI-JSON path below).
    """
    search_space = data
    cs = None
    logger.debug(f'Received data: {data}')
    # A user-supplied ConfigSpace (.pcs) file takes precedence: when
    # self.config_space is set, the NNI-formatted search space is ignored
    # and the space is parsed directly from that file.
    if self.config_space:
        logger.info(
            f'Got a ConfigSpace file path, parsing the search space directly from {self.config_space}. '
            'The NNI search space is ignored.')
        with open(self.config_space, 'r') as fh:
            cs = pcs_new.read(fh)
    else:
        # Otherwise translate each NNI variable description into the
        # corresponding ConfigSpace hyperparameter.
        cs = CS.ConfigurationSpace()
        for var in search_space:
            _type = str(search_space[var]["_type"])
            if _type == 'choice':
                cs.add_hyperparameter(
                    CSH.CategoricalHyperparameter(
                        var, choices=search_space[var]["_value"]))
            elif _type == 'randint':
                # NNI 'randint' is [lower, upper) with an EXCLUSIVE upper
                # bound, while ConfigSpace bounds are inclusive — hence "- 1".
                cs.add_hyperparameter(
                    CSH.UniformIntegerHyperparameter(
                        var,
                        lower=search_space[var]["_value"][0],
                        upper=search_space[var]["_value"][1] - 1))
            elif _type == 'uniform':
                cs.add_hyperparameter(
                    CSH.UniformFloatHyperparameter(
                        var,
                        lower=search_space[var]["_value"][0],
                        upper=search_space[var]["_value"][1]))
            elif _type == 'quniform':
                cs.add_hyperparameter(
                    CSH.UniformFloatHyperparameter(
                        var,
                        lower=search_space[var]["_value"][0],
                        upper=search_space[var]["_value"][1],
                        q=search_space[var]["_value"][2]))
            elif _type == 'loguniform':
                cs.add_hyperparameter(
                    CSH.UniformFloatHyperparameter(
                        var,
                        lower=search_space[var]["_value"][0],
                        upper=search_space[var]["_value"][1],
                        log=True))
            elif _type == 'qloguniform':
                cs.add_hyperparameter(
                    CSH.UniformFloatHyperparameter(
                        var,
                        lower=search_space[var]["_value"][0],
                        upper=search_space[var]["_value"][1],
                        q=search_space[var]["_value"][2],
                        log=True))
            # The normal-family branches read '_value' indices 1/2 (and 3 for
            # quantized variants); index 0 is unused here — presumably a label
            # in the NNI value layout, TODO confirm.
            elif _type == 'normal':
                cs.add_hyperparameter(
                    CSH.NormalFloatHyperparameter(
                        var,
                        mu=search_space[var]["_value"][1],
                        sigma=search_space[var]["_value"][2]))
            elif _type == 'qnormal':
                cs.add_hyperparameter(
                    CSH.NormalFloatHyperparameter(
                        var,
                        mu=search_space[var]["_value"][1],
                        sigma=search_space[var]["_value"][2],
                        q=search_space[var]["_value"][3]))
            elif _type == 'lognormal':
                cs.add_hyperparameter(
                    CSH.NormalFloatHyperparameter(
                        var,
                        mu=search_space[var]["_value"][1],
                        sigma=search_space[var]["_value"][2],
                        log=True))
            elif _type == 'qlognormal':
                cs.add_hyperparameter(
                    CSH.NormalFloatHyperparameter(
                        var,
                        mu=search_space[var]["_value"][1],
                        sigma=search_space[var]["_value"][2],
                        q=search_space[var]["_value"][3],
                        log=True))
            else:
                raise ValueError(
                    'unrecognized type in search_space, type is {}'.format(
                        _type))
    self.search_space = cs
def check_hyperparameter(parameter, name=None, default_value=None):
    """Check if the passed parameter is a valid description of an hyperparameter.

    :meta private:

    Args:
        parameter (str|Hyperparameter): an instance of
            ``ConfigSpace.hyperparameters.hyperparameter`` or a synthetic
            description (e.g., ``list``, ``tuple``).
        name (str): the name of the hyperparameter. Only required when the
            parameter is not a ``ConfigSpace.hyperparameters.hyperparameter``.
        default_value: a default value for the hyperparameter.

    Returns:
        Hyperparameter: the ConfigSpace hyperparameter instance corresponding
        to the ``parameter`` description.

    Raises:
        ValueError: if the description is malformed or of an unsupported type.
    """
    # Already a ConfigSpace hyperparameter: pass it through untouched.
    if isinstance(parameter, csh.Hyperparameter):
        return parameter

    if not isinstance(parameter, (list, tuple, np.ndarray, dict)):
        raise ValueError(
            "Shortcut definition of an hyper-parameter has to be a list, tuple, array or dict."
        )

    if not isinstance(name, str):
        raise ValueError("The 'name' of an hyper-parameter should be a string!")

    kwargs = {}
    if default_value is not None:
        kwargs["default_value"] = default_value

    if type(parameter) is tuple:  # Range of reals or integers
        if len(parameter) == 2:
            prior = "uniform"
        elif len(parameter) == 3:
            prior = parameter[2]
            # BUG FIX: was an `assert`, which is stripped under `python -O`;
            # raise the module's usual ValueError instead.
            if prior not in ("uniform", "log-uniform"):
                raise ValueError(
                    f"Prior has to be 'uniform' or 'log-uniform' when {prior} was given for parameter '{name}'"
                )
            parameter = parameter[:2]
        else:
            # BUG FIX: tuples of length other than 2 or 3 previously left
            # `prior` unbound and crashed with a confusing NameError below.
            raise ValueError(
                f"Invalid dimension {name}: {parameter}. A range tuple must"
                f" have 2 or 3 elements."
            )
        log = prior == "log-uniform"
        if all(isinstance(p, int) for p in parameter):
            return csh.UniformIntegerHyperparameter(name=name,
                                                    lower=parameter[0],
                                                    upper=parameter[1],
                                                    log=log,
                                                    **kwargs)
        elif any(isinstance(p, float) for p in parameter):
            return csh.UniformFloatHyperparameter(name=name,
                                                  lower=parameter[0],
                                                  upper=parameter[1],
                                                  log=log,
                                                  **kwargs)
    elif type(parameter) is list:  # Categorical
        # Any string/bool element makes the whole list categorical; otherwise
        # an all-numeric list becomes an ordered (ordinal) dimension.
        if any(isinstance(p, (str, bool)) or isinstance(p, np.bool_)
               for p in parameter):
            return csh.CategoricalHyperparameter(name, choices=parameter,
                                                 **kwargs)
        elif all(isinstance(p, (int, float)) for p in parameter):
            return csh.OrdinalHyperparameter(name, sequence=parameter,
                                             **kwargs)
    elif type(parameter) is dict:  # Integer or Real distribution
        # Normal distribution; the type of 'mu' decides int vs float.
        if "mu" in parameter and "sigma" in parameter:
            if type(parameter["mu"]) is float:
                return csh.NormalFloatHyperparameter(name=name, **parameter,
                                                     **kwargs)
            elif type(parameter["mu"]) is int:
                return csh.NormalIntegerHyperparameter(name=name, **parameter,
                                                       **kwargs)
            else:
                raise ValueError(
                    "Wrong hyperparameter definition! 'mu' should be either a float or an integer."
                )

    # Anything that did not match (and did not return) above is unsupported.
    raise ValueError(
        f"Invalid dimension {name}: {parameter}. Read the documentation for"
        f" supported types.")
def handle_update_search_space(self, search_space):
    """Convert the NNI JSON search space into a ConfigSpace object.

    change json format to ConfigSpace format dict<dict> -> configspace

    Parameters
    ----------
    search_space: JSON object
        search space of this experiment

    Returns
    -------
    bool:
        True on success; the converted ConfigSpace is stored in
        ``self.search_space``.

    Raises
    ------
    ValueError
        If a variable's ``_type`` is not one of the supported NNI types.
    """
    cs = CS.ConfigurationSpace()
    for var in search_space:
        # BUG FIX: the original compared the type with `is "choice"` etc.
        # `is` tests object identity, and strings parsed from JSON are not
        # interned, so every comparison could fail and fall through to the
        # error branch. Compare values with `==` (and hoist the lookup,
        # matching the sibling converters in this file).
        _type = str(search_space[var]["_type"])
        if _type == "choice":
            cs.add_hyperparameter(
                CSH.CategoricalHyperparameter(
                    var, choices=search_space[var]["_value"]))
        elif _type == "randint":
            # NNI 'randint' samples integers in [0, _value[0]) — the upper
            # bound is exclusive — while ConfigSpace bounds are inclusive,
            # hence the "- 1" (confirm against the NNI search-space spec).
            cs.add_hyperparameter(
                CSH.UniformIntegerHyperparameter(
                    var, lower=0, upper=search_space[var]["_value"][0] - 1))
        elif _type == "uniform":
            cs.add_hyperparameter(
                CSH.UniformFloatHyperparameter(
                    var,
                    lower=search_space[var]["_value"][0],
                    upper=search_space[var]["_value"][1]))
        elif _type == "quniform":
            cs.add_hyperparameter(
                CSH.UniformFloatHyperparameter(
                    var,
                    lower=search_space[var]["_value"][0],
                    upper=search_space[var]["_value"][1],
                    q=search_space[var]["_value"][2]))
        elif _type == "loguniform":
            cs.add_hyperparameter(
                CSH.UniformFloatHyperparameter(
                    var,
                    lower=search_space[var]["_value"][0],
                    upper=search_space[var]["_value"][1],
                    log=True))
        elif _type == "qloguniform":
            cs.add_hyperparameter(
                CSH.UniformFloatHyperparameter(
                    var,
                    lower=search_space[var]["_value"][0],
                    upper=search_space[var]["_value"][1],
                    q=search_space[var]["_value"][2],
                    log=True))
        # Normal-family branches read '_value' indices 1/2 (and 3 when
        # quantized); index 0 is unused here — presumably a label, confirm.
        elif _type == "normal":
            cs.add_hyperparameter(
                CSH.NormalFloatHyperparameter(
                    var,
                    mu=search_space[var]["_value"][1],
                    sigma=search_space[var]["_value"][2]))
        elif _type == "qnormal":
            cs.add_hyperparameter(
                CSH.NormalFloatHyperparameter(
                    var,
                    mu=search_space[var]["_value"][1],
                    sigma=search_space[var]["_value"][2],
                    q=search_space[var]["_value"][3]))
        elif _type == "lognormal":
            cs.add_hyperparameter(
                CSH.NormalFloatHyperparameter(
                    var,
                    mu=search_space[var]["_value"][1],
                    sigma=search_space[var]["_value"][2],
                    log=True))
        elif _type == "qlognormal":
            cs.add_hyperparameter(
                CSH.NormalFloatHyperparameter(
                    var,
                    mu=search_space[var]["_value"][1],
                    sigma=search_space[var]["_value"][2],
                    q=search_space[var]["_value"][3],
                    log=True))
        else:
            # BUG FIX: the original accidentally embedded the format argument
            # inside the string literal, so the offending type was never shown.
            raise ValueError(
                'unrecognized type in search_space, type is {}'.format(_type))
    self.search_space = cs
    return True
def convert_simple_param(name, param):
    """
    Convert a simple labwatch parameter to a ConfigSpace parameter.

    Dispatches on ``param["_class"]`` and builds the matching ConfigSpace
    hyperparameter from the remaining dictionary entries.

    Parameters
    ----------
    name: str
        The name of the parameter.
    param: dict
        Dictionary describing the parameter.

    Returns
    -------
    ConfigSpace.hyperparameters.Hyperparameter:
        The converted hyperparameter.

    Raises
    ------
    ParamValueExcept
        If a Categorical choice is neither a basic type nor a Constant.
    ValueError
        If the parameter class (or a UniformNumber element type) is unknown.
    """
    if param["_class"] == 'Constant':
        return csh.Constant(name, param["value"])
    elif param["_class"] == 'Categorical':
        # convert the choices to only contain
        # basic types (they might contain Constant parameters)
        basic_choices = []
        for choice in param["choices"]:
            if isinstance(choice, dict):
                # Constant parameters appear as dicts; use their default value.
                basic_choices.append(choice["default"])
            elif not isinstance(choice, basic_types):
                err = "Choice parameter {} is not " \
                      "a base type or Constant!"
                raise ParamValueExcept(err.format(choice))
            else:
                basic_choices.append(choice)
        # The first choice doubles as the default value.
        return csh.CategoricalHyperparameter(name=name,
                                             choices=basic_choices,
                                             default_value=basic_choices[0])
    elif param["_class"] == 'UniformFloat':
        return csh.UniformFloatHyperparameter(name=name,
                                              lower=param["lower"],
                                              upper=param["upper"],
                                              default_value=param["default"],
                                              log=param["log_scale"])
    elif param["_class"] == 'UniformInt':
        return csh.UniformIntegerHyperparameter(name=name,
                                                lower=param["lower"],
                                                upper=param["upper"],
                                                default_value=param["default"],
                                                log=param["log_scale"])
    elif param["_class"] == 'UniformNumber':
        # UniformNumber carries its element type as a string; str_to_types
        # (defined elsewhere in this module) maps it to a Python type.
        ptype = str_to_types[param["type"]]
        if ptype == float:
            return csh.UniformFloatHyperparameter(
                name=name,
                lower=param["lower"],
                upper=param["upper"],
                default_value=param["default"],
                log=param["log_scale"])
        elif ptype == int:
            return csh.UniformIntegerHyperparameter(
                name=name,
                lower=param["lower"],
                upper=param["upper"],
                default_value=param["default"],
                log=param["log_scale"])
        else:
            raise ValueError("Don't know how to represent UniformNumber with "
                             "type: {} in ConfigSpace".format(param["type"]))
    elif param["_class"] == 'Gaussian':
        return csh.NormalFloatHyperparameter(name=name,
                                             mu=param["mu"],
                                             sigma=param["sigma"],
                                             log=param["log_scale"])
    else:
        raise ValueError("Don't know how to represent {} in ConfigSpace "
                         "notation.".format(param))
def fit_search_space(self, df):
    """Apply prior-guided transfer learning based on a DataFrame of results.

    Replaces every hyperparameter of ``self._problem.space`` that appears in
    ``df`` with a distribution biased toward the best observed configuration
    (normal around the best numeric value, re-weighted categorical choices),
    then stores the new space in ``self._opt_kwargs["dimensions"]``.

    :meta private:

    Args:
        df (str|DataFrame): a checkpoint from a previous search.
    """
    if type(df) is str and df[-4:] == ".csv":
        df = pd.read_csv(df)
    assert isinstance(df, pd.DataFrame)

    cst = self._problem.space
    if type(cst) != CS.ConfigurationSpace:
        # BUG FIX: corrected the "trainsfer" typo in the error message.
        logging.error(f"{type(cst)}: not supported for transfer learning")

    res_df = df
    res_df_names = res_df.columns.values
    # Best row = maximum objective (assumes maximization of "objective").
    best_index = np.argmax(res_df["objective"].values)
    best_param = res_df.iloc[best_index]

    # Exploitation strength: sigma divisor for numeric dims, extra weight
    # for the best categorical choice.
    fac_numeric = 8.0
    fac_categorical = 10.0

    cst_new = CS.ConfigurationSpace(seed=1234)
    hp_names = cst.get_hyperparameter_names()
    for hp_name in hp_names:
        hp = cst.get_hyperparameter(hp_name)
        if hp_name in res_df_names:
            if (type(hp) is csh.UniformIntegerHyperparameter
                    or type(hp) is csh.UniformFloatHyperparameter):
                # Replace the uniform dimension by a (truncated) normal
                # centered on the best observed value.
                mu = best_param[hp.name]
                lower = hp.lower
                upper = hp.upper
                sigma = max(1.0, (upper - lower) / fac_numeric)
                if type(hp) is csh.UniformIntegerHyperparameter:
                    param_new = csh.NormalIntegerHyperparameter(
                        name=hp.name,
                        default_value=mu,
                        mu=mu,
                        sigma=sigma,
                        lower=lower,
                        upper=upper,
                    )
                else:  # type is csh.UniformFloatHyperparameter:
                    param_new = csh.NormalFloatHyperparameter(
                        name=hp.name,
                        default_value=mu,
                        mu=mu,
                        sigma=sigma,
                        lower=lower,
                        upper=upper,
                    )
                cst_new.add_hyperparameter(param_new)
            elif type(hp) is csh.CategoricalHyperparameter:
                # Up-weight the best observed choice, then normalize.
                choices = hp.choices
                weights = len(hp.choices) * [1.0]
                index = choices.index(best_param[hp.name])
                weights[index] = fac_categorical
                norm_weights = [float(i) / sum(weights) for i in weights]
                param_new = csh.CategoricalHyperparameter(
                    name=hp.name, choices=choices, weights=norm_weights)
                cst_new.add_hyperparameter(param_new)
            else:
                # BUG FIX: the message was missing the f-prefix and logged the
                # literal text "{hp}".
                logging.warning(
                    f"Not fitting {hp} because it is not supported!")
                cst_new.add_hyperparameter(hp)
        else:
            # BUG FIX: same missing f-prefix as above.
            logging.warning(
                f"Not fitting {hp} because it was not found in the dataframe!"
            )
            cst_new.add_hyperparameter(hp)

    # Rebuild conditions against the new hyperparameter objects.
    for cond in cst.get_conditions():
        if type(cond) == CS.AndConjunction or type(
                cond) == CS.OrConjunction:
            cond_list = []
            for comp in cond.components:
                cond_list.append(self.return_cond(comp, cst_new))
            if type(cond) is CS.AndConjunction:
                cond_new = CS.AndConjunction(*cond_list)
            elif type(cond) is CS.OrConjunction:
                cond_new = CS.OrConjunction(*cond_list)
            else:
                logging.warning(
                    f"Condition {type(cond)} is not implemented!")
        else:
            cond_new = self.return_cond(cond, cst_new)
        cst_new.add_condition(cond_new)

    # Rebuild forbidden clauses against the new hyperparameter objects.
    for cond in cst.get_forbiddens():
        if type(cond) is CS.ForbiddenAndConjunction:
            cond_list = []
            for comp in cond.components:
                cond_list.append(self.return_forbid(comp, cst_new))
            cond_new = CS.ForbiddenAndConjunction(*cond_list)
        elif (type(cond) is CS.ForbiddenEqualsClause
              or type(cond) is CS.ForbiddenInClause):
            cond_new = self.return_forbid(cond, cst_new)
        else:
            logging.warning(f"Forbidden {type(cond)} is not implemented!")
        cst_new.add_forbidden_clause(cond_new)

    self._opt_kwargs["dimensions"] = cst_new
def fit_search_space(self, df, fac_numerical=0.125, fac_categorical=10):
    """Apply prior-guided transfer learning based on a DataFrame of results.

    Example Usage:

    >>> search = CBO(problem, evaluator)
    >>> search.fit_search_space("results.csv")

    Args:
        df (str|DataFrame): a checkpoint from a previous search.
        fac_numerical (float): the factor used to compute the sigma of a
            truncated normal distribution based on ``sigma = max(1.0, (upper -
            lower) * fac_numerical)``. A large factor increases exploration
            while a small factor increases exploitation around the
            best-configuration from the ``df`` parameter.
        fac_categorical (float): the weight given to a categorical feature
            part of the best configuration. A large weight ``> 1`` increases
            exploitation while a factor close to ``1`` increases exploration.
    """
    if type(df) is str and df[-4:] == ".csv":
        df = pd.read_csv(df)
    assert isinstance(df, pd.DataFrame)

    # check single or multiple objectives
    if "objective" in df.columns:
        # filter failures (objective values starting with "F")
        if pd.api.types.is_string_dtype(df.objective):
            df = df[~df.objective.str.startswith("F")]
            df.objective = df.objective.astype(float)
    else:
        # multi-objective: one "objective_<i>" column per objective
        # filter failures
        objcol = df.filter(regex=r"^objective_\d+$").columns
        for col in objcol:
            if pd.api.types.is_string_dtype(df[col]):
                df = df[~df[col].str.startswith("F")]
                df[col] = df[col].astype(float)

    cst = self._problem.space
    if type(cst) != CS.ConfigurationSpace:
        # NOTE(review): "trainsfer" typo in the runtime message; also this
        # logs but does not return, so execution continues below.
        logging.error(f"{type(cst)}: not supported for trainsfer learning")

    res_df = df
    res_df_names = res_df.columns.values
    # Pick the best configuration: argmax for single objective, first point
    # of the Pareto (non-dominated) set for multiple objectives.
    if "objective" in df.columns:
        best_index = np.argmax(res_df["objective"].values)
        best_param = res_df.iloc[best_index]
    else:
        best_index = non_dominated_set(
            -np.asarray(res_df[objcol]), return_mask=False)[0]
        best_param = res_df.iloc[best_index]

    cst_new = CS.ConfigurationSpace(
        seed=self._random_state.randint(0, 2**32))
    hp_names = cst.get_hyperparameter_names()
    for hp_name in hp_names:
        hp = cst.get_hyperparameter(hp_name)
        if hp_name in res_df_names:
            if (type(hp) is csh.UniformIntegerHyperparameter
                    or type(hp) is csh.UniformFloatHyperparameter):
                # Replace the uniform dimension by a (bounded) normal
                # distribution centered on the best observed value.
                mu = best_param[hp.name]
                lower = hp.lower
                upper = hp.upper
                sigma = max(1.0, (upper - lower) * fac_numerical)
                if type(hp) is csh.UniformIntegerHyperparameter:
                    param_new = csh.NormalIntegerHyperparameter(
                        name=hp.name,
                        default_value=mu,
                        mu=mu,
                        sigma=sigma,
                        lower=lower,
                        upper=upper,
                    )
                else:  # type is csh.UniformFloatHyperparameter:
                    param_new = csh.NormalFloatHyperparameter(
                        name=hp.name,
                        default_value=mu,
                        mu=mu,
                        sigma=sigma,
                        lower=lower,
                        upper=upper,
                    )
                cst_new.add_hyperparameter(param_new)
            elif (type(hp) is csh.CategoricalHyperparameter
                  or type(hp) is csh.OrdinalHyperparameter):
                # Up-weight the best observed choice, then normalize the
                # weights to a probability distribution.
                if type(hp) is csh.OrdinalHyperparameter:
                    choices = hp.sequence
                else:
                    choices = hp.choices
                weights = len(choices) * [1.0]
                index = choices.index(best_param[hp.name])
                weights[index] = fac_categorical
                norm_weights = [float(i) / sum(weights) for i in weights]
                param_new = csh.CategoricalHyperparameter(
                    name=hp.name, choices=choices, weights=norm_weights)
                cst_new.add_hyperparameter(param_new)
            else:
                # Unsupported hyperparameter kind: copy it over unchanged.
                logging.warning(
                    f"Not fitting {hp} because it is not supported!")
                cst_new.add_hyperparameter(hp)
        else:
            # Hyperparameter absent from the results: copy it over unchanged.
            logging.warning(
                f"Not fitting {hp} because it was not found in the dataframe!"
            )
            cst_new.add_hyperparameter(hp)

    # For conditions: rebuild each condition against the new hyperparameters.
    for cond in cst.get_conditions():
        if type(cond) == CS.AndConjunction or type(
                cond) == CS.OrConjunction:
            cond_list = []
            for comp in cond.components:
                cond_list.append(self._return_cond(comp, cst_new))
            if type(cond) is CS.AndConjunction:
                cond_new = CS.AndConjunction(*cond_list)
            elif type(cond) is CS.OrConjunction:
                cond_new = CS.OrConjunction(*cond_list)
            else:
                logging.warning(
                    f"Condition {type(cond)} is not implemented!")
        else:
            cond_new = self._return_cond(cond, cst_new)
        cst_new.add_condition(cond_new)

    # For forbiddens: rebuild each forbidden clause the same way.
    for cond in cst.get_forbiddens():
        if type(cond) is CS.ForbiddenAndConjunction:
            cond_list = []
            for comp in cond.components:
                cond_list.append(self._return_forbid(comp, cst_new))
            cond_new = CS.ForbiddenAndConjunction(*cond_list)
        elif (type(cond) is CS.ForbiddenEqualsClause
              or type(cond) is CS.ForbiddenInClause):
            cond_new = self._return_forbid(cond, cst_new)
        else:
            logging.warning(f"Forbidden {type(cond)} is not implemented!")
        cst_new.add_forbidden_clause(cond_new)

    self._opt_kwargs["dimensions"] = cst_new