def get_hyperparameter_search_space(dataset_properties=None):
    """Return the search space: integer "N" in [5, 20] and float "precond" in [0, 0.5]."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("N", 5, 20, default=10))
    cs.add_hyperparameter(
        UniformFloatHyperparameter("precond", 0, 0.5, default=0.1))
    return cs
def test_add_hyperparameters_with_equal_names(self):
    """Adding the same-named hyperparameter twice must raise ValueError."""
    space = ConfigurationSpace()
    param = UniformIntegerHyperparameter("name", 0, 10)
    space.add_hyperparameter(param)
    expected_msg = ("Hyperparameter 'name' is already in the "
                    "configuration space.")
    self.assertRaisesRegexp(ValueError, expected_msg,
                            space.add_hyperparameter, param)
def get_hyperparameter_search_space(dataset_properties=None):
    """Search space with a single categorical imputation "strategy"."""
    # TODO add replace by zero!
    cs = ConfigurationSpace()
    cs.add_hyperparameter(CategoricalHyperparameter(
        "strategy", ["mean", "median", "most_frequent"], default="mean"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """PCA-style search space: fraction of variance to keep plus a whitening switch."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "keep_variance", 0.5, 0.9999, default=0.9999))
    cs.add_hyperparameter(CategoricalHyperparameter(
        "whiten", ["False", "True"], default="False"))
    return cs
def test_write_log10(self):
    """Log-scale float hyperparameters serialize with a trailing 'l' marker."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformFloatHyperparameter("a", 10, 1000, log=True))
    written = pcs_parser.write(cs)
    self.assertEqual("a [10.0, 1000.0] [100.0]l", written)
def get_hyperparameter_search_space(dataset_properties=None):
    """Tree-ensemble classifier search space; several knobs are held constant.

    The return values of add_hyperparameter were bound to unused locals in
    the original; they are dropped here since nothing references them.
    """
    cs = ConfigurationSpace()
    cs.add_hyperparameter(Constant("n_estimators", 100))
    cs.add_hyperparameter(CategoricalHyperparameter(
        "criterion", ["gini", "entropy"], default="gini"))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "max_features", 0.5, 5, default=1))
    cs.add_hyperparameter(UnParametrizedHyperparameter(
        name="max_depth", value="None"))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "min_samples_split", 2, 20, default=2))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "min_samples_leaf", 1, 20, default=1))
    cs.add_hyperparameter(Constant('min_weight_fraction_leaf', 0.))
    cs.add_hyperparameter(CategoricalHyperparameter(
        "bootstrap", ["True", "False"], default="False"))
    return cs
def test_write_q_int(self):
    """Quantized integer parameters get a 'Q<q>_' name prefix and 'i' suffix."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("int_a", 16, 1024, q=16))
    written = pcs_parser.write(cs)
    self.assertEqual("Q16_int_a [16, 1024] [520]i", written)
def test_write_q_float(self):
    """Quantized float parameters get a 'Q<q>_' name prefix (no type suffix)."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformFloatHyperparameter("float_a", 16, 1024, q=16))
    written = pcs_parser.write(cs)
    self.assertEqual("Q16_float_a [16.0, 1024.0] [520.0]", written)
def get_hyperparameter_search_space(dataset_properties=None):
    """Search space with a single "strategy" switch (none vs. weighting)."""
    # TODO add replace by zero!
    cs = ConfigurationSpace()
    cs.add_hyperparameter(CategoricalHyperparameter(
        "strategy", ["none", "weighting"], default="none"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Configuration space holding only the "strategy" choice (none/weighting)."""
    # TODO add replace by zero!
    cs = ConfigurationSpace()
    strategy_choice = CategoricalHyperparameter(
        "strategy", ["none", "weighting"], default="none")
    cs.add_hyperparameter(strategy_choice)
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Linear-SVR search space.

    Forbids the combination dual=False with loss=epsilon_insensitive, which
    the underlying solver does not support.
    """
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "C", 0.03125, 32768, log=True, default=1.0))
    loss = cs.add_hyperparameter(CategoricalHyperparameter(
        "loss", ["epsilon_insensitive", "squared_epsilon_insensitive"],
        default="squared_epsilon_insensitive"))
    # Random Guess
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="epsilon", lower=0.001, upper=1, default=0.1, log=True))
    dual = cs.add_hyperparameter(Constant("dual", "False"))
    # These are set ad-hoc
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "tol", 1e-5, 1e-1, default=1e-4, log=True))
    cs.add_hyperparameter(Constant("fit_intercept", "True"))
    cs.add_hyperparameter(Constant("intercept_scaling", 1))
    cs.add_forbidden_clause(ForbiddenAndConjunction(
        ForbiddenEqualsClause(dual, "False"),
        ForbiddenEqualsClause(loss, "epsilon_insensitive")))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Kernel-approximation space: "gamma" plus log-scaled "n_components"."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformFloatHyperparameter("gamma", 0.3, 2.0, default=1.0))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "n_components", 50, 10000, default=100, log=True))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Search space with a single training-length knob, "max_epochs"."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("max_epochs", 1, 20, default=2))
    return cs
def test_add_hyperparameters_with_equal_names(self):
    """A duplicate hyperparameter name must be rejected with ValueError."""
    space = ConfigurationSpace()
    duplicate = UniformIntegerHyperparameter("name", 0, 10)
    space.add_hyperparameter(duplicate)
    self.assertRaisesRegexp(
        ValueError,
        "Hyperparameter 'name' is already in the "
        "configuration space.",
        space.add_hyperparameter, duplicate)
def get_hyperparameter_search_space(dataset_properties=None):
    """Search space with a single regularization knob, "reg_param"."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformFloatHyperparameter('reg_param', 0.0, 10.0, default=0.5))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Search space with a single output-dimensionality knob, "target_dim"."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("target_dim", 10, 256, default=128))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Ridge-style search space: log-scaled alpha and tol, fixed intercept."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "alpha", 10 ** -5, 10., log=True, default=1.))
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("fit_intercept", "True"))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "tol", 1e-5, 1e-1, default=1e-4, log=True))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Percentile-based feature selection with a fixed f_regression scorer."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "percentile", lower=1, upper=99, default=50))
    cs.add_hyperparameter(UnParametrizedHyperparameter(
        name="score_func", value="f_regression"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """One-hot-encoder space; minimum_fraction is only active when enabled."""
    cs = ConfigurationSpace()
    use_minimum_fraction = cs.add_hyperparameter(CategoricalHyperparameter(
        "use_minimum_fraction", ["True", "False"], default="True"))
    minimum_fraction = cs.add_hyperparameter(UniformFloatHyperparameter(
        "minimum_fraction", lower=.0001, upper=0.5, default=0.01, log=True))
    # minimum_fraction only matters when the switch is on.
    cs.add_condition(
        EqualsCondition(minimum_fraction, use_minimum_fraction, 'True'))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Search space with a single log-scaled smoothing knob, "alpha"."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="alpha", lower=0.0001, upper=10, default=1.0, log=True))
    return cs
def test_get_conditions(self):
    """get_conditions() is empty until a condition is added, then returns it."""
    cs = ConfigurationSpace()
    parent = CategoricalHyperparameter("parent", [0, 1])
    cs.add_hyperparameter(parent)
    child = UniformIntegerHyperparameter("child", 0, 10)
    cs.add_hyperparameter(child)
    self.assertEqual([], cs.get_conditions())
    condition = EqualsCondition(child, parent, 0)
    cs.add_condition(condition)
    self.assertEqual([condition], cs.get_conditions())
def get_hyperparameter_search_space(dataset_properties=None):
    """ARD-regression search space: precision priors plus pruning threshold.

    NOTE(review): alpha_1 is the only prior parameter without log=True;
    looks inconsistent with its siblings — confirm before changing.
    """
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("n_iter", value=300))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "tol", 10 ** -5, 10 ** -1, default=10 ** -4, log=True))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="alpha_1", lower=10 ** -10, upper=10 ** -3, default=10 ** -6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="alpha_2", log=True, lower=10 ** -10, upper=10 ** -3,
        default=10 ** -6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="lambda_1", log=True, lower=10 ** -10, upper=10 ** -3,
        default=10 ** -6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="lambda_2", log=True, lower=10 ** -10, upper=10 ** -3,
        default=10 ** -6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="threshold_lambda", log=True, lower=10 ** 3, upper=10 ** 5,
        default=10 ** 4))
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("fit_intercept", "True"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """k-nearest-neighbours search space: neighbour count, weighting, and p."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        name="n_neighbors", lower=1, upper=100, log=True, default=1))
    cs.add_hyperparameter(CategoricalHyperparameter(
        name="weights", choices=["uniform", "distance"], default="uniform"))
    cs.add_hyperparameter(CategoricalHyperparameter(
        name="p", choices=[1, 2], default=2))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Percentile feature selection with the f_regression score fixed."""
    percentile_hp = UniformFloatHyperparameter(
        "percentile", lower=1, upper=99, default=50)
    score_func_hp = UnParametrizedHyperparameter(
        name="score_func", value="f_regression")
    cs = ConfigurationSpace()
    cs.add_hyperparameter(percentile_hp)
    cs.add_hyperparameter(score_func_hp)
    return cs
def test_check_forbidden_with_sampled_vector_configuration(self):
    """_check_forbidden must reject a vector configuration hitting a clause."""
    cs = ConfigurationSpace()
    metric = CategoricalHyperparameter("metric", ["minkowski", "other"])
    cs.add_hyperparameter(metric)
    cs.add_forbidden_clause(ForbiddenEqualsClause(metric, "other"))
    # vector value 1 selects "other", which the clause forbids
    vector = np.ones(1, dtype=[('metric', int)])
    configuration = Configuration(cs, vector=vector)
    self.assertRaisesRegexp(ValueError, "violates forbidden clause",
                            cs._check_forbidden, configuration)
def get_hyperparameter_search_space(dataset_properties=None):
    """Passive-aggressive-style space: loss, intercept, iterations and C."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(CategoricalHyperparameter(
        "loss", ["hinge", "squared_hinge"], default="hinge"))
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("fit_intercept", "True"))
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("n_iter", 5, 1000, default=20))
    cs.add_hyperparameter(
        UniformFloatHyperparameter("C", 1e-5, 10, 1, log=True))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Gaussian-process space: nugget plus theta bounds, all log-scaled."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="nugget", lower=0.0001, upper=10, default=0.1, log=True))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="thetaL", lower=1e-6, upper=1e-3, default=1e-4, log=True))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="thetaU", lower=0.2, upper=10, default=1.0, log=True))
    return cs
def test_build_forbidden(self):
    """The writer expands a conjunction of in-clauses into all value pairs."""
    expected = "a {a, b, c} [a]\nb {a, b, c} [c]\n\n" \
               "{a=a, b=a}\n{a=a, b=b}\n{a=b, b=a}\n{a=b, b=b}"
    cs = ConfigurationSpace()
    a = CategoricalHyperparameter("a", ["a", "b", "c"], "a")
    b = CategoricalHyperparameter("b", ["a", "b", "c"], "c")
    cs.add_hyperparameter(a)
    cs.add_hyperparameter(b)
    cs.add_forbidden_clause(ForbiddenAndConjunction(
        ForbiddenInClause(a, ["a", "b"]),
        ForbiddenInClause(b, ["a", "b"])))
    self.assertIn(expected, pcs_parser.write(cs))
def get_hyperparameter_search_space(dataset_properties=None):
    """AdaBoost search space: ensemble size, learning rate, variant, depth."""
    cs = ConfigurationSpace()
    # base_estimator = Constant(name="base_estimator", value="None")
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        name="n_estimators", lower=50, upper=500, default=50, log=False))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="learning_rate", lower=0.0001, upper=2, default=0.1, log=True))
    cs.add_hyperparameter(CategoricalHyperparameter(
        name="algorithm", choices=["SAMME.R", "SAMME"], default="SAMME.R"))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        name="max_depth", lower=1, upper=10, default=1, log=False))
    return cs
def test_illegal_default_configuration(self):
    """add_forbidden_clause must reject a clause that forbids the default.

    Fix: the expected-message pattern contains regex escapes (\\( and \\))
    that were written inside a normal string literal, producing invalid
    string escape sequences (DeprecationWarning; SyntaxWarning/error in
    newer Python). Raw strings keep the regex semantics identical.
    """
    cs = ConfigurationSpace()
    hp1 = CategoricalHyperparameter("loss", ["l1", "l2"], default='l1')
    hp2 = CategoricalHyperparameter("penalty", ["l1", "l2"], default='l1')
    cs.add_hyperparameter(hp1)
    cs.add_hyperparameter(hp2)
    forb1 = ForbiddenEqualsClause(hp1, "l1")
    forb2 = ForbiddenEqualsClause(hp2, "l1")
    forb3 = ForbiddenAndConjunction(forb1, forb2)
    # cs.add_forbidden_clause(forb3)
    self.assertRaisesRegexp(ValueError,
                            r"Configuration:\n"
                            r" loss, Value: l1\n penalty, Value: l1\n"
                            r"violates forbidden clause \(Forbidden: loss == l1 && Forbidden: "
                            r"penalty == l1\)",
                            cs.add_forbidden_clause, forb3)
def test_add_forbidden_clause(self):
    """An added forbidden clause appears in the space's string representation."""
    cs = ConfigurationSpace()
    input1 = CategoricalHyperparameter("input1", [0, 1])
    cs.add_hyperparameter(input1)
    forb = ForbiddenEqualsClause(input1, 1)
    # TODO add checking whether a forbidden clause makes sense at all
    cs.add_forbidden_clause(forb)
    # TODO add something to properly retrieve the forbidden clauses
    expected = ("Configuration space object:\n "
                "Hyperparameters:\n input1, "
                "Type: Categorical, Choices: {0, 1}, "
                "Default: 0\n"
                " Forbidden Clauses:\n"
                " Forbidden: input1 == 1\n")
    self.assertEqual(str(cs), expected)
def get_hyperparameter_search_space(dataset_properties=None):
    """Percentile selection for classification; force chi2 on sparse data."""
    percentile = UniformFloatHyperparameter(
        name="percentile", lower=1, upper=99, default=50)
    score_func = CategoricalHyperparameter(
        name="score_func", choices=["chi2", "f_classif"], default="chi2")
    if dataset_properties is not None:
        # Chi2 can handle sparse data, so we respect this
        if "sparse" in dataset_properties and dataset_properties["sparse"]:
            score_func = Constant(name="score_func", value="chi2")
    cs = ConfigurationSpace()
    cs.add_hyperparameter(percentile)
    cs.add_hyperparameter(score_func)
    return cs
def test_illegal_default_configuration(self):
    """A forbidden clause matching the default configuration must be rejected.

    Fix: convert the expected-message pattern to raw strings — the original
    embedded \\( and \\) regex escapes in a plain string literal, which are
    invalid Python escape sequences (DeprecationWarning; SyntaxWarning/error
    in newer interpreters). The regex value is unchanged.
    """
    cs = ConfigurationSpace()
    hp1 = CategoricalHyperparameter("loss", ["l1", "l2"], default='l1')
    hp2 = CategoricalHyperparameter("penalty", ["l1", "l2"], default='l1')
    cs.add_hyperparameter(hp1)
    cs.add_hyperparameter(hp2)
    forb1 = ForbiddenEqualsClause(hp1, "l1")
    forb2 = ForbiddenEqualsClause(hp2, "l1")
    forb3 = ForbiddenAndConjunction(forb1, forb2)
    # cs.add_forbidden_clause(forb3)
    self.assertRaisesRegexp(
        ValueError,
        r"Configuration:\n"
        r" loss, Value: l1\n penalty, Value: l1\n"
        r"violates forbidden clause \(Forbidden: loss == l1 && Forbidden: "
        r"penalty == l1\)",
        cs.add_forbidden_clause, forb3)
def test_hyperparameters_with_valid_condition(self):
    """Adding a valid condition keeps both hyperparameters in the space."""
    cs = ConfigurationSpace()
    parent = CategoricalHyperparameter("parent", [0, 1])
    cs.add_hyperparameter(parent)
    child = UniformIntegerHyperparameter("child", 0, 10)
    cs.add_hyperparameter(child)
    cs.add_condition(EqualsCondition(child, parent, 0))
    self.assertEqual(len(cs._hyperparameters), 2)
def get_hyperparameter_search_space(dataset_properties=None):
    """Decision-tree regressor space with mostly fixed structural settings."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(Constant("criterion", "mse"))
    cs.add_hyperparameter(Constant("splitter", "best"))
    cs.add_hyperparameter(Constant("max_features", 1.0))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "max_depth", 0.0, 2.0, default=0.5))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "min_samples_split", 2, 20, default=2))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "min_samples_leaf", 1, 20, default=1))
    cs.add_hyperparameter(Constant("min_weight_fraction_leaf", 0.0))
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("max_leaf_nodes", "None"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Percentile selection (10-90 range); force chi2 when data is sparse."""
    percentile = UniformFloatHyperparameter(
        name="percentile", lower=10, upper=90, default=50)
    score_func = CategoricalHyperparameter(
        name="score_func", choices=["chi2", "f_classif"], default="chi2")
    if dataset_properties is not None:
        # Chi2 can handle sparse data, so we respect this
        if 'sparse' in dataset_properties and dataset_properties['sparse']:
            score_func = Constant(name="score_func", value="chi2")
    cs = ConfigurationSpace()
    cs.add_hyperparameter(percentile)
    cs.add_hyperparameter(score_func)
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Multinomial naive-Bayes space: smoothing alpha plus prior fitting."""
    cs = ConfigurationSpace()
    # the smoothing parameter is a non-negative float
    # I will limit it to 100 and put it on a logarithmic scale. (SF)
    # Please adjust that, if you know a proper range, this is just a guess.
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="alpha", lower=1e-2, upper=100, default=1, log=True))
    cs.add_hyperparameter(CategoricalHyperparameter(
        name="fit_prior", choices=["True", "False"], default="True"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Decision-tree regressor space; structural knobs largely held constant."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(Constant('criterion', 'mse'))
    cs.add_hyperparameter(Constant("splitter", "best"))
    cs.add_hyperparameter(Constant('max_features', 1.0))
    cs.add_hyperparameter(
        UniformFloatHyperparameter('max_depth', 0., 2., default=0.5))
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("min_samples_split", 2, 20, default=2))
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("min_samples_leaf", 1, 20, default=1))
    cs.add_hyperparameter(Constant("min_weight_fraction_leaf", 0.0))
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("max_leaf_nodes", "None"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """Random-forest-style regression space; several knobs deliberately fixed.

    Historical, disabled hyperparameters/conditions are kept below as
    comments for reference.
    """
    cs = ConfigurationSpace()
    cs.add_hyperparameter(Constant("n_estimators", 100))
    cs.add_hyperparameter(Constant("criterion", "mse"))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "max_features", 0.5, 5, default=1))
    cs.add_hyperparameter(UnParametrizedHyperparameter(
        name="max_depth", value="None"))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "min_samples_split", 2, 20, default=2))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "min_samples_leaf", 1, 20, default=1))
    # Unparametrized, we use min_samples as regularization
    # max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter(
    #     name="max_leaf_nodes_or_max_depth", value="max_depth")
    # CategoricalHyperparameter("max_leaf_nodes_or_max_depth",
    #     choices=["max_leaf_nodes", "max_depth"], default="max_depth")
    # min_weight_fraction_leaf = UniformFloatHyperparameter(
    #     "min_weight_fraction_leaf", 0.0, 0.1)
    # max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes",
    #     value="None")
    cs.add_hyperparameter(CategoricalHyperparameter(
        "bootstrap", ["True", "False"], default="False"))
    # Conditions
    # Not applicable because max_leaf_nodes is no legal value of the parent
    # cond_max_leaf_nodes_or_max_depth = \
    #     EqualsCondition(child=max_leaf_nodes,
    #                     parent=max_leaf_nodes_or_max_depth,
    #                     value="max_leaf_nodes")
    # cond2_max_leaf_nodes_or_max_depth = \
    #     EqualsCondition(child=use_max_depth,
    #                     parent=max_leaf_nodes_or_max_depth,
    #                     value="max_depth")
    # cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth,
    #                                  value="True")
    # cs.add_condition(cond_max_leaf_nodes_or_max_depth)
    # cs.add_condition(cond2_max_leaf_nodes_or_max_depth)
    # cs.add_condition(cond_max_depth)
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """ARD-regression space (duplicate of the spelled-out 10 ** variant).

    NOTE(review): alpha_1 lacks log=True while its siblings have it —
    possibly unintentional; confirm before changing.
    """
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("n_iter", value=300))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "tol", 10**-5, 10**-1, default=10**-4, log=True))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="alpha_1", lower=10**-10, upper=10**-3, default=10**-6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="alpha_2", log=True, lower=10**-10, upper=10**-3,
        default=10**-6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="lambda_1", log=True, lower=10**-10, upper=10**-3,
        default=10**-6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="lambda_2", log=True, lower=10**-10, upper=10**-3,
        default=10**-6))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="threshold_lambda", log=True, lower=10**3, upper=10**5,
        default=10**4))
    cs.add_hyperparameter(
        UnParametrizedHyperparameter("fit_intercept", "True"))
    return cs
def test_get_hyperparameter(self):
    """get_hyperparameter returns by name and raises KeyError for unknowns."""
    cs = ConfigurationSpace()
    parent = CategoricalHyperparameter("parent", [0, 1])
    cs.add_hyperparameter(parent)
    child = UniformIntegerHyperparameter("child", 0, 10)
    cs.add_hyperparameter(child)
    self.assertEqual(parent, cs.get_hyperparameter("parent"))
    self.assertEqual(child, cs.get_hyperparameter("child"))
    self.assertRaises(KeyError, cs.get_hyperparameter, "grandfather")
def test_add_configuration_space(self):
    """Merging a space under a prefix renames params, conditions and clauses."""
    inner = ConfigurationSpace()
    input1 = inner.add_hyperparameter(
        CategoricalHyperparameter("input1", [0, 1]))
    inner.add_forbidden_clause(ForbiddenEqualsClause(input1, 1))
    child = inner.add_hyperparameter(
        UniformIntegerHyperparameter("child", 0, 10))
    inner.add_condition(EqualsCondition(child, input1, 0))
    outer = ConfigurationSpace()
    outer.add_configuration_space('prefix', inner, delimiter='__')
    # NOTE(review): the expected literal below is copied verbatim from the
    # source; its internal whitespace looks like it may have been mangled
    # by a formatting pass — verify against the actual str() output.
    self.assertEqual(
        str(outer),
        '''Configuration space object: Hyperparameters: prefix__child, Type: UniformInteger, Range: [0, 10], Default: 5 prefix__input1, Type: Categorical, Choices: {0, 1}, Default: 0 Conditions: prefix__child | prefix__input1 == 0 Forbidden Clauses: Forbidden: prefix__input1 == 1 ''')
def test_condition_with_cycles(self):
    """Adding a condition that closes a parent/child cycle must raise.

    Fix: the expected-message pattern embeds regex escapes (\\[ and \\])
    in a plain string literal — invalid Python escape sequences
    (DeprecationWarning; SyntaxWarning/error in newer interpreters).
    Raw strings preserve the regex value exactly.
    """
    cs = ConfigurationSpace()
    hp1 = CategoricalHyperparameter("parent", [0, 1])
    cs.add_hyperparameter(hp1)
    hp2 = UniformIntegerHyperparameter("child", 0, 10)
    cs.add_hyperparameter(hp2)
    cond1 = EqualsCondition(hp2, hp1, 0)
    cs.add_condition(cond1)
    cond2 = EqualsCondition(hp1, hp2, 0)
    self.assertRaisesRegexp(
        ValueError,
        r"Hyperparameter configuration "
        r"contains a cycle \[\['child', 'parent'\]\]",
        cs.add_condition, cond2)
def get_hyperparameter_search_space(dataset_properties=None):
    """LDA-style space; shrinkage_factor is only active for manual shrinkage."""
    cs = ConfigurationSpace()
    shrinkage = cs.add_hyperparameter(CategoricalHyperparameter(
        "shrinkage", ["None", "auto", "manual"], default="None"))
    shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter(
        "shrinkage_factor", 0., 1., 0.5))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        'n_components', 1, 250, default=10))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "tol", 1e-5, 1e-1, default=1e-4, log=True))
    cs.add_condition(
        EqualsCondition(shrinkage_factor, shrinkage, "manual"))
    return cs
def test_repr(self):
    """__str__ grows as hyperparameters and conditions are added."""
    cs1 = ConfigurationSpace()
    self.assertEqual("Configuration space object:\n Hyperparameters:\n",
                     cs1.__str__())
    hp1 = CategoricalHyperparameter("parent", [0, 1])
    cs1.add_hyperparameter(hp1)
    self.assertEqual("Configuration space object:\n Hyperparameters:\n"
                     " %s\n" % str(hp1),
                     cs1.__str__())
    hp2 = UniformIntegerHyperparameter("child", 0, 10)
    cond1 = EqualsCondition(hp2, hp1, 0)
    cs1.add_hyperparameter(hp2)
    cs1.add_condition(cond1)
    self.assertEqual("Configuration space object:\n Hyperparameters:\n"
                     " %s\n %s\n Conditions:\n %s\n" %
                     (str(hp2), str(hp1), str(cond1)),
                     cs1.__str__())
def get_hyperparameter_search_space(dataset_properties=None):
    """Search space with sample count "N" and iteration cap "maxiter"."""
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("N", 50, 2000, default=100))
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("maxiter", 50, 500, default=100))
    return cs
def test_add_forbidden(self):
    """add_forbidden turns zeros in the match matrix into forbidden pairs."""
    m = numpy.ones([2, 3])
    preprocessors_list = ['pa', 'pb']
    classifier_list = ['ca', 'cb', 'cc']
    cs = ConfigurationSpace()
    cs.add_hyperparameter(CategoricalHyperparameter(
        name='preprocessor', choices=preprocessors_list))
    cs.add_hyperparameter(CategoricalHyperparameter(
        name='classifier', choices=classifier_list))
    # Every combination allowed -> no forbidden clause is generated.
    new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden(
        conf_space=cs, node_0_list=preprocessors_list,
        node_1_list=classifier_list, matches=m,
        node_0_name='preprocessor', node_1_name="classifier")
    self.assertEqual(len(new_cs.forbidden_clauses), 0)
    self.assertIsInstance(new_cs, ConfigurationSpace)
    # Disallow (pb, cb) -> exactly one forbidden conjunction appears.
    m[1, 1] = 0
    new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden(
        conf_space=cs, node_0_list=preprocessors_list,
        node_1_list=classifier_list, matches=m,
        node_0_name='preprocessor', node_1_name="classifier")
    self.assertEqual(len(new_cs.forbidden_clauses), 1)
    self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'cb')
    self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pb')
    self.assertIsInstance(new_cs, ConfigurationSpace)
def get_hyperparameter_search_space(dataset_properties=None):
    """Polynomial-features space: degree 2-3 plus interaction/bias switches."""
    # More than degree 3 is too expensive!
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("degree", 2, 3, 2))
    cs.add_hyperparameter(CategoricalHyperparameter(
        "interaction_only", ["False", "True"], "False"))
    cs.add_hyperparameter(CategoricalHyperparameter(
        "include_bias", ["True", "False"], "True"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """AdaBoost space (duplicate layout): size, rate, algorithm, tree depth."""
    cs = ConfigurationSpace()
    # base_estimator = Constant(name="base_estimator", value="None")
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        name="n_estimators", lower=50, upper=500, default=50, log=False))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        name="learning_rate", lower=0.0001, upper=2, default=0.1, log=True))
    cs.add_hyperparameter(CategoricalHyperparameter(
        name="algorithm", choices=["SAMME.R", "SAMME"], default="SAMME.R"))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        name="max_depth", lower=1, upper=10, default=1, log=False))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """LDA-style space; the factor only applies under manual shrinkage."""
    cs = ConfigurationSpace()
    shrinkage = cs.add_hyperparameter(CategoricalHyperparameter(
        "shrinkage", ["None", "auto", "manual"], default="None"))
    shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter(
        "shrinkage_factor", 0.0, 1.0, 0.5))
    cs.add_hyperparameter(UniformIntegerHyperparameter(
        "n_components", 1, 250, default=10))
    cs.add_hyperparameter(UniformFloatHyperparameter(
        "tol", 1e-5, 1e-1, default=1e-4, log=True))
    cs.add_condition(
        EqualsCondition(shrinkage_factor, shrinkage, "manual"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """FastICA space; n_components is only active when whitening is enabled."""
    cs = ConfigurationSpace()
    n_components = cs.add_hyperparameter(UniformIntegerHyperparameter(
        "n_components", 10, 2000, default=100))
    cs.add_hyperparameter(CategoricalHyperparameter(
        'algorithm', ['parallel', 'deflation'], 'parallel'))
    whiten = cs.add_hyperparameter(CategoricalHyperparameter(
        'whiten', ['False', 'True'], 'False'))
    cs.add_hyperparameter(CategoricalHyperparameter(
        'fun', ['logcosh', 'exp', 'cube'], 'logcosh'))
    cs.add_condition(EqualsCondition(n_components, whiten, "True"))
    return cs
def get_hyperparameter_search_space(dataset_properties=None):
    """PCA-style space (duplicate layout): retained variance and whitening."""
    keep_variance_hp = UniformFloatHyperparameter(
        "keep_variance", 0.5, 0.9999, default=0.9999)
    whiten_hp = CategoricalHyperparameter(
        "whiten", ["False", "True"], default="False")
    cs = ConfigurationSpace()
    cs.add_hyperparameter(keep_variance_hp)
    cs.add_hyperparameter(whiten_hp)
    return cs