Example #1
    def get_hyperparameter_search_space(dataset_properties=None):
        n_components = UniformIntegerHyperparameter("n_components",
                                                    10,
                                                    2000,
                                                    default_value=100)
        kernel = CategoricalHyperparameter(
            'kernel', ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf')
        gamma = UniformFloatHyperparameter(
            "gamma",
            3.0517578125e-05,
            8,
            log=True,
            default_value=0.01,
        )
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default_value=0)
        cs = ConfigurationSpace()
        cs.add_hyperparameters([n_components, kernel, degree, gamma, coef0])

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        gamma_condition = InCondition(gamma, kernel, ["poly", "rbf"])
        cs.add_conditions(
            [degree_depends_on_poly, coef0_condition, gamma_condition])
        return cs
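
The snippets on this page omit their imports; the block below lists the ones Example #1 relies on, followed by a minimal usage sketch (assuming the method above is callable as a plain function) showing that conditional hyperparameters are dropped from sampled configurations when their parent condition is not met:

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (
    CategoricalHyperparameter,
    UniformFloatHyperparameter,
    UniformIntegerHyperparameter,
)
from ConfigSpace.conditions import EqualsCondition, InCondition

cs = get_hyperparameter_search_space()
for config in cs.sample_configuration(5):
    # Inactive hyperparameters are simply absent: 'degree' only appears
    # when kernel == 'poly', 'gamma' only for 'poly' and 'rbf'.
    print(config.get_dictionary())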
Example #2
def create_hyperspace():
    cs = ConfigurationSpace()

    # training hyperparameters.
    learning_rate = UniformFloatHyperparameter("learning_rate", 1e-5, 5e-2, default_value=1e-4, q=3e-5, log=True)
    batch_size = UniformIntegerHyperparameter("batch_size", 16, 128, q=16, default_value=32)
    momentum = UniformFloatHyperparameter("momentum", 0., .5, default_value=0., q=.1)
    lr_decay = UniformFloatHyperparameter("lr_decay", .7, .99, default_value=9e-1, q=3e-2)
    dropout_value = UniformFloatHyperparameter("dropout", .1, .7, default_value=.5, q=.1)
    cs.add_hyperparameters([learning_rate, batch_size, momentum, lr_decay, dropout_value])

    # network architecture hyperparameters.
    num_pooling_layer = UniformIntegerHyperparameter("n_pooling_layer", 2, 3, default_value=2)
    num_conv_layer1 = UniformIntegerHyperparameter("n_conv_layer1", 16, 64, default_value=32, q=2)
    num_conv_layer2 = UniformIntegerHyperparameter("n_conv_layer2", 32, 96, default_value=64, q=2)
    num_conv_layer3 = UniformIntegerHyperparameter("n_conv_layer3", 32, 96, default_value=64, q=2)
    num_fully_layer = UniformIntegerHyperparameter("n_fully_unit", 128, 512, default_value=256, q=64)
    cs.add_hyperparameters([num_pooling_layer, num_conv_layer1, num_conv_layer2, num_conv_layer3, num_fully_layer])
    for i in [1, 2, 3]:
        kernel_init_stddev = UniformFloatHyperparameter(
            "kernel_init_stddev%d" % i, 1e-3, 5e-2, default_value=1e-2, q=2e-3)
        kernel_regularizer = UniformFloatHyperparameter(
            "kernel_regularizer%d" % i, 1e-9, 1e-4, default_value=1e-6, q=5e-7, log=True)
        cs.add_hyperparameters([kernel_init_stddev, kernel_regularizer])
        if i == 3:
            k_init_cond = InCondition(child=kernel_init_stddev, parent=num_pooling_layer, values=[3])
            k_reg_cond = InCondition(child=kernel_regularizer, parent=num_pooling_layer, values=[3])
            cs.add_conditions([k_init_cond, k_reg_cond])

    # configuration = cs.get_default_configuration()
    # print(configuration.configuration_space)
    # print(configuration.get_dictionary())
    return cs
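
The loop above attaches conditions only to the layer-3 kernel parameters, so they stay inactive unless a third pooling layer is sampled. A quick check (a sketch, not part of the original source):

cs = create_hyperspace()
config = cs.get_default_configuration()
# n_pooling_layer defaults to 2, so the layer-3 kernel parameters are inactive
assert "kernel_init_stddev3" not in config.get_dictionary()
assert "kernel_regularizer3" not in config.get_dictionary()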
Example #3
    def get_hyperparameter_search_space(dataset_properties=None, optimizer='smac'):
        if dataset_properties is not None and \
                (dataset_properties.get("sparse") is True or
                 dataset_properties.get("signed") is False):
            allow_chi2 = False
        else:
            allow_chi2 = True

        possible_kernels = ['poly', 'rbf', 'sigmoid', 'cosine']
        if allow_chi2:
            possible_kernels.append("chi2")
        kernel = CategoricalHyperparameter('kernel', possible_kernels, 'rbf')
        n_components = UniformIntegerHyperparameter(
            "n_components", 50, 5000, default_value=100, log=True)
        gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8,
                                           log=True, default_value=0.1)
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default_value=0)

        cs = ConfigurationSpace()
        cs.add_hyperparameters([kernel, degree, gamma, coef0, n_components])

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])

        gamma_kernels = ["poly", "rbf", "sigmoid"]
        if allow_chi2:
            gamma_kernels.append("chi2")
        gamma_condition = InCondition(gamma, kernel, gamma_kernels)
        cs.add_conditions([degree_depends_on_poly, coef0_condition, gamma_condition])
        return cs
Example #4
    def get_hyperparameter_search_space(**kwargs):
        n_components_factor = UniformFloatHyperparameter("n_components_factor",
                                                         0.,
                                                         1.,
                                                         default_value=1.)
        kernel = CategoricalHyperparameter(
            'kernel', ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf')
        gamma = UniformFloatHyperparameter("gamma",
                                           3.0517578125e-05,
                                           8,
                                           log=True,
                                           default_value=1.0)
        degree = UniformIntegerHyperparameter('degree', 2, 5, 3)
        coef0 = UniformFloatHyperparameter("coef0", -1., 1., default_value=0.)

        cs = ConfigurationSpace()
        cs.add_hyperparameters(
            [n_components_factor, kernel, degree, gamma, coef0])

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        gamma_condition = InCondition(gamma, kernel, ["poly", "rbf"])
        cs.add_conditions(
            [degree_depends_on_poly, coef0_condition, gamma_condition])
        return cs
Example #5
def optimize():
    # We load the iris-dataset (a widely used benchmark)
    iris = datasets.load_iris()

    #logger = logging.getLogger("SVMExample")
    logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output

    # Build Configuration Space which defines all parameters and their ranges
    cs = ConfigurationSpace()

    # We define a few possible types of SVM-kernels and add them as "kernel" to our cs
    kernel = CategoricalHyperparameter("kernel", ["linear", "rbf", "poly", "sigmoid"], default="poly")
    cs.add_hyperparameter(kernel)

    # There are some hyperparameters shared by all kernels
    C = UniformFloatHyperparameter("C", 0.001, 1000.0, default=1.0)
    shrinking = CategoricalHyperparameter("shrinking", ["true", "false"], default="true")
    cs.add_hyperparameters([C, shrinking])

    # Others are kernel-specific, so we can add conditions to limit the searchspace
    degree = UniformIntegerHyperparameter("degree", 1, 5, default=3)     # Only used by kernel poly
    coef0 = UniformFloatHyperparameter("coef0", 0.0, 10.0, default=0.0)  # poly, sigmoid
    cs.add_hyperparameters([degree, coef0])
    use_degree = InCondition(child=degree, parent=kernel, values=["poly"])
    use_coef0 = InCondition(child=coef0, parent=kernel, values=["poly", "sigmoid"])
    cs.add_conditions([use_degree, use_coef0])

    # This also works for parameters that are a mix of categorical and values from a range of numbers
    # For example, gamma can be either "auto" or a fixed float
    gamma = CategoricalHyperparameter("gamma", ["auto", "value"], default="auto")  # only rbf, poly, sigmoid
    gamma_value = UniformFloatHyperparameter("gamma_value", 0.0001, 8, default=1)
    cs.add_hyperparameters([gamma, gamma_value])
    # We only activate gamma_value if gamma is set to "value"
    cs.add_condition(InCondition(child=gamma_value, parent=gamma, values=["value"]))
    # And again we can restrict the use of gamma in general to the choice of the kernel
    cs.add_condition(InCondition(child=gamma, parent=kernel, values=["rbf", "poly", "sigmoid"]))


    # Scenario object
    scenario = Scenario("test/test_files/svm_scenario.txt")

    # Example call of the function
    # It returns: Status, Cost, Runtime, Additional Infos
    def_value = svm_from_cfg(cs.get_default_configuration())
    print("Default Value: %.2f" % (def_value))

    # Optimize, using a SMAC-object
    print("Optimizing! Depending on your machine, this might take a few minutes.")
    smac = SMAC(scenario=scenario, rng=np.random.RandomState(42),
            tae_runner=svm_from_cfg)

    incumbent = smac.optimize()
    inc_value = svm_from_cfg(incumbent)

    print("Optimized Value: %.2f" % (inc_value))
Example #6
    def get_hyperparameter_search_space(**kwargs):
        cs = ConfigurationSpace()
        penalty = CategoricalHyperparameter("penalty", ["l1", "l2", "elasticnet", "none"], default_value='l2')
        solver = CategoricalHyperparameter("solver", ["newton-cg", "lbfgs", "liblinear", "sag", "saga"],
                                           default_value="lbfgs")
        dual = CategoricalHyperparameter("dual", choices=[True, False], default_value=False)
        tol = UniformFloatHyperparameter("tol", lower=1e-7, upper=100., default_value=1.0e-4, log=True)
        C = UniformFloatHyperparameter("C", lower=1e-7, upper=100., default_value=1.0, log=True)
        fit_intercept = CategoricalHyperparameter("fit_intercept", choices=[True, False], default_value=True)
        intercept_scaling = UniformFloatHyperparameter("intercept_scaling", lower=0.0001, upper=2.0, default_value=1.0,
                                                       log=True)
        max_iter = UniformIntegerHyperparameter("max_iter", lower=50, upper=10000, default_value=100)
        multi_class = CategoricalHyperparameter("multi_class", ["ovr", "multinomial", "auto"], default_value="auto")
        l1_ratio = UniformFloatHyperparameter("l1_ratio", lower=0., upper=1., default_value=0.1)

        l1_ratio_condition = InCondition(l1_ratio, penalty, ["elasticnet"])
        dual_condition = AndConjunction(InCondition(dual, penalty, ["l2"]), InCondition(dual, solver, ["liblinear"]))
        cs.add_hyperparameters([penalty, solver, dual, tol, C, fit_intercept, intercept_scaling, max_iter, multi_class,
                                l1_ratio])

        penaltyAndLbfgs = ForbiddenAndConjunction(
            ForbiddenEqualsClause(solver, "lbfgs"),
            ForbiddenInClause(penalty, ["l1", "elasticnet"])
        )
        penaltyAndNewton = ForbiddenAndConjunction(
            ForbiddenEqualsClause(solver, "newton-cg"),
            ForbiddenInClause(penalty, ["l1", "elasticnet"])
        )
        penaltyAndSag = ForbiddenAndConjunction(
            ForbiddenEqualsClause(solver, "sag"),
            ForbiddenInClause(penalty, ["l1", "elasticnet"])
        )
        penaltyAndSaga = ForbiddenAndConjunction(
            ForbiddenInClause(penalty, ["elasticnet"]),
            ForbiddenInClause(solver, ["newton-cg", "lbfgs", "sag"])
        )
        penaltyAndSagaa = ForbiddenAndConjunction(
            ForbiddenInClause(penalty, ["elasticnet", "none"]),
            ForbiddenInClause(solver, ["liblinear"])
        )
        penaltyAndSagaaa = ForbiddenAndConjunction(
            ForbiddenInClause(multi_class, ["multinomial"]),
            ForbiddenInClause(solver, ["liblinear"])
        )

        cs.add_forbidden_clause(penaltyAndLbfgs)
        cs.add_forbidden_clause(penaltyAndNewton)
        cs.add_forbidden_clause(penaltyAndSag)
        cs.add_forbidden_clause(penaltyAndSagaa)
        cs.add_forbidden_clause(penaltyAndSaga)
        cs.add_forbidden_clause(penaltyAndSagaaa)
        cs.add_condition(l1_ratio_condition)
        cs.add_condition(dual_condition)
        return cs
Example #7
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()

        loss = CategoricalHyperparameter(
            "loss",
            ["squared_loss", "huber", "epsilon_insensitive", "squared_epsilon_insensitive"],
            default_value="squared_loss",
            )
        penalty = CategoricalHyperparameter(
            "penalty", ["l1", "l2", "elasticnet"], default_value="l2")
        alpha = UniformFloatHyperparameter(
            "alpha", 1e-7, 1e-1, log=True, default_value=0.0001)
        l1_ratio = UniformFloatHyperparameter(
            "l1_ratio", 1e-9, 1., log=True, default_value=0.15)
        fit_intercept = UnParametrizedHyperparameter(
            "fit_intercept", "True")
        tol = UniformFloatHyperparameter(
            "tol", 1e-5, 1e-1, default_value=1e-4, log=True)
        epsilon = UniformFloatHyperparameter(
            "epsilon", 1e-5, 1e-1, default_value=0.1, log=True)
        learning_rate = CategoricalHyperparameter(
            "learning_rate", ["optimal", "invscaling", "constant"],
            default_value="invscaling")
        eta0 = UniformFloatHyperparameter(
            "eta0", 1e-7, 1e-1, default_value=0.01, log=True)
        power_t = UniformFloatHyperparameter(
            "power_t", 1e-5, 1, default_value=0.25)
        average = CategoricalHyperparameter(
            "average", ["False", "True"], default_value="False")

        cs.add_hyperparameters([loss, penalty, alpha, l1_ratio, fit_intercept,
                                tol, epsilon, learning_rate, eta0,
                                power_t, average])

        # TODO add passive/aggressive here, although not properly documented?
        elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet")
        epsilon_condition = InCondition(
            epsilon,
            loss,
            ["huber", "epsilon_insensitive", "squared_epsilon_insensitive"],
            )

        # eta0 is only relevant if learning_rate!='optimal' according to code
        # https://github.com/scikit-learn/scikit-learn/blob/0.19.X/sklearn/
        # linear_model/sgd_fast.pyx#L603
        eta0_in_inv_con = InCondition(eta0, learning_rate, ["invscaling",
                                                            "constant"])
        power_t_condition = EqualsCondition(power_t, learning_rate,
                                            "invscaling")

        cs.add_conditions([elasticnet, epsilon_condition, power_t_condition,
                           eta0_in_inv_con])

        return cs
Example #8
    def test_in_condition(self):
        hp1 = CategoricalHyperparameter("parent", range(0, 11))
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cond = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        cond_ = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        self.assertEqual(cond, cond_)

        cond_reverse = InCondition(hp1, hp2, [0, 1, 2, 3, 4, 5])
        self.assertNotEqual(cond, cond_reverse)

        self.assertNotEqual(cond, dict())

        self.assertEqual("child | parent in {0, 1, 2, 3, 4, 5}", str(cond))
Example #9
    def get_hyperparameter_search_space(dataset_properties=None):
        # Copied from libsvm_c
        C = UniformFloatHyperparameter(
            name="C", lower=0.03125, upper=32768, log=True, default=1.0)

        kernel = CategoricalHyperparameter(
            name="kernel", choices=['linear', 'poly', 'rbf', 'sigmoid'],
            default="rbf")
        degree = UniformIntegerHyperparameter(
            name="degree", lower=1, upper=5, default=3)

        # Changed the gamma value to 0.0 (is 0.1 for classification)
        gamma = UniformFloatHyperparameter(
            name="gamma", lower=3.0517578125e-05, upper=8, log=True, default=0.1)

        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter(
            name="coef0", lower=-1, upper=1, default=0)
        # probability is no hyperparameter, but an argument to the SVM algo
        shrinking = CategoricalHyperparameter(
            name="shrinking", choices=["True", "False"], default="True")
        tol = UniformFloatHyperparameter(
            name="tol", lower=1e-5, upper=1e-1, default=1e-3, log=True)
        max_iter = UnParametrizedHyperparameter("max_iter", -1)

        # Random Guess
        epsilon = UniformFloatHyperparameter(name="epsilon", lower=0.001,
                                             upper=1, default=0.1, log=True)
        cs = ConfigurationSpace()
        cs.add_hyperparameter(C)
        cs.add_hyperparameter(kernel)
        cs.add_hyperparameter(degree)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(coef0)
        cs.add_hyperparameter(shrinking)
        cs.add_hyperparameter(tol)
        cs.add_hyperparameter(max_iter)
        cs.add_hyperparameter(epsilon)

        degree_depends_on_kernel = InCondition(child=degree, parent=kernel,
                                               values=('poly', 'rbf', 'sigmoid'))
        gamma_depends_on_kernel = InCondition(child=gamma, parent=kernel,
                                              values=('poly', 'rbf'))
        coef0_depends_on_kernel = InCondition(child=coef0, parent=kernel,
                                              values=('poly', 'sigmoid'))
        cs.add_condition(degree_depends_on_kernel)
        cs.add_condition(gamma_depends_on_kernel)
        cs.add_condition(coef0_depends_on_kernel)
        return cs
Example #10
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = Constant("loss", "auto")
        learning_rate = UniformFloatHyperparameter(name="learning_rate",
                                                   lower=0.01,
                                                   upper=1,
                                                   default_value=0.1,
                                                   log=True)
        max_iter = UniformIntegerHyperparameter("max_iter",
                                                32,
                                                512,
                                                default_value=100)
        min_samples_leaf = UniformIntegerHyperparameter(
            name="min_samples_leaf",
            lower=1,
            upper=200,
            default_value=20,
            log=True)
        max_depth = UnParametrizedHyperparameter(name="max_depth",
                                                 value="None")
        max_leaf_nodes = UniformIntegerHyperparameter(name="max_leaf_nodes",
                                                      lower=3,
                                                      upper=2047,
                                                      default_value=31,
                                                      log=True)
        max_bins = Constant("max_bins", 256)
        l2_regularization = UniformFloatHyperparameter(
            name="l2_regularization",
            lower=1E-10,
            upper=1,
            default_value=1E-10,
            log=True)
        early_stop = CategoricalHyperparameter(
            name="early_stop",
            choices=["off", "train", "valid"],
            default_value="off")
        tol = UnParametrizedHyperparameter(name="tol", value=1e-7)
        scoring = UnParametrizedHyperparameter(name="scoring", value="loss")
        n_iter_no_change = UniformIntegerHyperparameter(
            name="n_iter_no_change", lower=1, upper=20, default_value=10)
        validation_fraction = UniformFloatHyperparameter(
            name="validation_fraction",
            lower=0.01,
            upper=0.4,
            default_value=0.1)

        cs.add_hyperparameters([
            loss, learning_rate, max_iter, min_samples_leaf, max_depth,
            max_leaf_nodes, max_bins, l2_regularization, early_stop, tol,
            scoring, n_iter_no_change, validation_fraction
        ])

        n_iter_no_change_cond = InCondition(n_iter_no_change, early_stop,
                                            ["valid", "train"])
        validation_fraction_cond = EqualsCondition(validation_fraction,
                                                   early_stop, "valid")

        cs.add_conditions([n_iter_no_change_cond, validation_fraction_cond])

        return cs
Example #11
    def get_hyperparameter_search_space(dataset_properties=None):
        C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True,
                                       default_value=1.0)
        # No linear kernel here, because we have liblinear
        kernel = CategoricalHyperparameter(name="kernel",
                                           choices=["rbf", "poly", "sigmoid"],
                                           default_value="rbf")
        degree = UniformIntegerHyperparameter("degree", 2, 5, default_value=3)
        gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8,
                                           log=True, default_value=0.1)
        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default_value=0)
        # probability is no hyperparameter, but an argument to the SVM algo
        shrinking = CategoricalHyperparameter("shrinking", ["True", "False"],
                                              default_value="True")
        tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default_value=1e-3,
                                         log=True)
        # cache size is not a hyperparameter, but an argument to the program!
        max_iter = UnParametrizedHyperparameter("max_iter", -1)

        cs = ConfigurationSpace()
        cs.add_hyperparameters([C, kernel, degree, gamma, coef0, shrinking,
                                tol, max_iter])

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)

        return cs
Example #12
    def add_params(cs: ConfigurationSpace, cutoff: int):
        '''
            adds parameters to ConfigurationSpace

            Arguments
            ---------
            cs: ConfigurationSpace
                configuration space to add new parameters and conditions
            cutoff: int
                maximal possible time for aspeed
        '''

        pre_solving = CategoricalHyperparameter("presolving",
                                                choices=[True, False],
                                                default_value=False)
        cs.add_hyperparameter(pre_solving)
        pre_cutoff = UniformIntegerHyperparameter("pre:cutoff",
                                                  lower=1,
                                                  upper=cutoff,
                                                  default_value=math.ceil(
                                                      cutoff * 0.1),
                                                  log=True)
        cs.add_hyperparameter(pre_cutoff)
        cond = InCondition(child=pre_cutoff, parent=pre_solving, values=[True])
        cs.add_condition(cond)
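
A short usage sketch for add_params (the cutoff value here is arbitrary; math is imported explicitly since the snippet relies on it):

import math

cs = ConfigurationSpace()
add_params(cs, cutoff=300)
# "pre:cutoff" is only active in configurations where presolving == True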
Example #13
def get_hyperparameter_search_space(seed):

    imputation = CategoricalHyperparameter('imputation__strategy', ['mean', 'median', 'most_frequent'])

    C = UniformFloatHyperparameter("classifier__C", 0.03125, 32768, log=True, default_value=1.0)
    # No linear kernel here, because we have liblinear
    kernel = CategoricalHyperparameter(name="classifier__kernel", choices=["rbf", "poly", "sigmoid"], default_value="rbf")
    degree = UniformIntegerHyperparameter("classifier__degree", 2, 5, default_value=3)
    gamma = UniformFloatHyperparameter("classifier__gamma", 3.0517578125e-05, 8, log=True, default_value=0.1)
    # TODO this is totally ad-hoc
    coef0 = UniformFloatHyperparameter("classifier__coef0", -1, 1, default_value=0)
    # probability is no hyperparameter, but an argument to the SVM algo
    shrinking = CategoricalHyperparameter("classifier__shrinking", [True, False], default_value=True)
    tol = UniformFloatHyperparameter("classifier__tol", 1e-5, 1e-1, default_value=1e-3, log=True)
    # cache size is not a hyperparameter, but an argument to the program!
    max_iter = UnParametrizedHyperparameter("classifier__max_iter", -1)

    cs = ConfigurationSpace('sklearn.svm.SVC', seed)
    cs.add_hyperparameters([imputation, C, kernel, degree, gamma, coef0, shrinking, tol, max_iter])

    degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
    coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
    cs.add_condition(degree_depends_on_poly)
    cs.add_condition(coef0_condition)

    return cs
Example #14
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = CategoricalHyperparameter(
            "loss", ["ls", "lad", "huber", "quantile"], default_value="ls")
        learning_rate = UniformFloatHyperparameter(
            name="learning_rate", lower=0.01, upper=1, default_value=0.1, log=True)
        n_estimators = UniformIntegerHyperparameter(
            "n_estimators", 50, 500, default_value=100)
        max_depth = UniformIntegerHyperparameter(
            name="max_depth", lower=1, upper=10, default_value=3)
        min_samples_split = UniformIntegerHyperparameter(
            name="min_samples_split", lower=2, upper=20, default_value=2, log=False)
        min_samples_leaf = UniformIntegerHyperparameter(
            name="min_samples_leaf", lower=1, upper=20, default_value=1, log=False)
        min_weight_fraction_leaf = UnParametrizedHyperparameter(
            "min_weight_fraction_leaf", 0.)
        subsample = UniformFloatHyperparameter(
            name="subsample", lower=0.01, upper=1.0, default_value=1.0, log=False)
        max_features = UniformFloatHyperparameter(
            "max_features", 0.1, 1.0, default_value=1)
        max_leaf_nodes = UnParametrizedHyperparameter(
            name="max_leaf_nodes", value="None")
        min_impurity_decrease = UnParametrizedHyperparameter(
            name='min_impurity_decrease', value=0.0)
        alpha = UniformFloatHyperparameter(
            "alpha", lower=0.75, upper=0.99, default_value=0.9)

        cs.add_hyperparameters([loss, learning_rate, n_estimators, max_depth,
                                min_samples_split, min_samples_leaf,
                                min_weight_fraction_leaf, subsample, max_features,
                                max_leaf_nodes, min_impurity_decrease, alpha])

        cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile']))
        return cs
Example #15
    def _union_to_config_space(self, name, hp_value):
        union_child = []
        union_config = []
        for union_name, union_hp_value in hp_value.configuration.items():
            unique_union_name = "{}_{}".format(name, union_name)
            if isinstance(union_hp_value,
                          (hyperparams.Bounded, hyperparams.Uniform,
                           hyperparams.UniformInt)):
                child = self._bounded_to_config_space(unique_union_name,
                                                      union_hp_value)
            elif isinstance(
                    union_hp_value,
                (hyperparams.Enumeration, hyperparams.UniformBool)):
                child = self._enumeration_to_config_space(
                    unique_union_name, union_hp_value)
            elif isinstance(union_hp_value, hyperparams.Constant):
                child = self._constant_to_config_space(unique_union_name,
                                                       union_hp_value)
            else:
                # Without this branch, `child` would silently carry over
                # from the previous iteration on an unknown member type.
                raise TypeError("Unsupported union member type: %s"
                                % type(union_hp_value))
            union_child.append(unique_union_name)
            union_config.append(child)

        # params_config = CategoricalHyperparameter(name=name, choices=union_child, default_value=hp_value.get_default())
        params_config = CategoricalHyperparameter(name=name,
                                                  choices=union_child)
        self.cs.add_hyperparameter(params_config)
        for item in union_config:
            self.cs.add_condition(
                InCondition(child=item,
                            parent=params_config,
                            values=[item.name]))
        return params_config
Example #16
    def get_hyperparameter_search_space(**kwargs):
        cs = ConfigurationSpace()
        n_components = UniformIntegerHyperparameter('n_components_factor',
                                                    1,
                                                    250,
                                                    default_value=10)
        max_iter = UniformIntegerHyperparameter("max_iter",
                                                10,
                                                2000,
                                                default_value=1000)
        tol = UniformFloatHyperparameter("tol",
                                         1e-5,
                                         1e-1,
                                         default_value=1e-2,
                                         log=True)
        svd_method = CategoricalHyperparameter("svd_method",
                                               ["lapack", "randomized"],
                                               default_value="randomized")
        iterated_power = UniformIntegerHyperparameter("iterated_power",
                                                      1,
                                                      10,
                                                      default_value=3)
        cs.add_hyperparameters(
            [n_components, max_iter, tol, svd_method, iterated_power])

        iterated_power_condition = InCondition(iterated_power, svd_method,
                                               ["randomized"])
        cs.add_condition(iterated_power_condition)

        return cs
Example #17
def make_cs():
    cs = ConfigurationSpace()
    cs.add_hyperparameter(
        UniformIntegerHyperparameter("n_estimators", 1, 30, default=10))

    max_features = CategoricalHyperparameter('max_features', ['auto', 'value'],
                                             default='auto')
    max_features_value = UniformFloatHyperparameter('max_features_value', .1,
                                                    1)
    cs.add_hyperparameters([max_features, max_features_value])
    cs.add_condition(
        InCondition(child=max_features_value,
                    parent=max_features,
                    values=['value']))

    max_depth = CategoricalHyperparameter('max_depth', [None, 'value'])
    max_depth_value = UniformIntegerHyperparameter("max_depth_value", 1, 10)
    cs.add_hyperparameters([max_depth, max_depth_value])
    cs.add_condition(
        InCondition(child=max_depth_value, parent=max_depth, values=['value']))

    min_samples_split = UniformFloatHyperparameter("min_samples_split", .1, 1)
    cs.add_hyperparameter(min_samples_split)

    min_samples_leaf = UniformFloatHyperparameter("min_samples_leaf", .1, .5)
    cs.add_hyperparameter(min_samples_leaf)

    min_weight_fraction_leaf = UniformFloatHyperparameter(
        "min_weight_fraction_leaf", 0, .5)
    cs.add_hyperparameter(min_weight_fraction_leaf)

    max_leaf_nodes = CategoricalHyperparameter('max_leaf_nodes',
                                               [None, 'value'])
    max_leaf_nodes_value = UniformIntegerHyperparameter(
        'max_leaf_nodes_value', 2, 100)
    cs.add_hyperparameters([max_leaf_nodes, max_leaf_nodes_value])
    cs.add_condition(
        InCondition(child=max_leaf_nodes_value,
                    parent=max_leaf_nodes,
                    values=['value']))

    min_impurity_split = UniformFloatHyperparameter('min_impurity_split', 0, 1)
    cs.add_hyperparameter(min_impurity_split)

    bootstrap = CategoricalHyperparameter('bootstrap', [True, False],
                                          default=True)
    cs.add_hyperparameter(bootstrap)
    return cs
Example #18
    def getPCS(self):
        '''
        maxIter: [1,100] maximum number of iterations, default 50
        regParam: [0,0.4] regularization parameter, default 0
        tol: [1e-6,1e-1] convergence tolerance of the iterative algorithm, default 1e-6
        family, link and variancePower correspond as follows:
        - "gaussian" -> "identity", "log", "inverse"
        - "binomial" -> "logit", "probit", "cloglog"
        - "poisson" -> "log", "identity", "sqrt"
        - "gamma" -> "inverse", "identity", "log"
        - "tweedie" -> power link function specified through "linkPower";
          the default link power in the tweedie family is 1 - variancePower.
        '''
        # Build Configuration Space which defines all parameters and their
        # ranges
        cs = ConfigurationSpace()
        maxIter = UniformIntegerHyperparameter("maxIter",
                                               1,
                                               100,
                                               default_value=50)
        regParam = UniformFloatHyperparameter("regParam",
                                              0,
                                              0.4,
                                              default_value=1e-04)
        tol = UniformFloatHyperparameter("tol",
                                         1e-06,
                                         1e-01,
                                         default_value=1e-06)
        family = CategoricalHyperparameter("family", ["gaussian", "poisson"],
                                           default_value="gaussian")
        gaussianLink = CategoricalHyperparameter(
            "gaussianLink", ["identity", "log", "inverse"],
            default_value="identity")
        poissonLink = CategoricalHyperparameter("poissonLink",
                                                ["log", "identity", "sqrt"],
                                                default_value="log")
        cs.add_hyperparameters(
            [maxIter, regParam, tol, family, gaussianLink, poissonLink])
        cs.add_condition(
            InCondition(child=gaussianLink, parent=family,
                        values=["gaussian"]))
        cs.add_condition(
            InCondition(child=poissonLink, parent=family, values=["poisson"]))
        return cs
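
Only one of the two link parameters can ever be active at a time. A quick check against the default configuration (a sketch, assuming an instance of the surrounding class named model):

cs = model.getPCS()
config = cs.get_default_configuration()
# family defaults to "gaussian", so poissonLink is inactive
assert "poissonLink" not in config.get_dictionary()
assert config["gaussianLink"] == "identity"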
Example #19
def get_complete_configspace():
    """Creates a configspace that includes all kinds of parameters with
    complicated values. The idea is to provide a configspace that can be
    used to check modules using ConfigSpace as a dependency to check
    compatibility with e.g. Constants, log-scale, etc.

    Returns
    -------
    cs: ConfigurationSpace
        cs containing all kinds of parameters
    """
    cs = ConfigurationSpace()

    hp = {}
    # Add Constants for all allowed types ('int', 'float', 'string')
    hp['alpha'] = Constant("alpha", 0.0001)
    hp['tol'] = Constant("tol", '0.0001')
    hp['verbose'] = Constant("verbose", 1)
    # Add numericals
    # Add Floats
    hp['beta1'] = UniformFloatHyperparameter("beta1", 0.85, 0.95, log=False)
    hp['power_t'] = NormalFloatHyperparameter("power_t", mu=0.5, sigma=0.1, log=False)
    # Add Ints
    hp['momentum'] = UniformIntegerHyperparameter("momentum", 0, 100, log=False)
    hp['beta2'] = NormalIntegerHyperparameter("beta2", mu=1, sigma=0.001, log=False)
    # Add Floats (log)
    hp['learning_rate_init'] = UniformFloatHyperparameter("learning_rate_init", 0.0001, 0.1, log=True)
    hp['random1'] = NormalFloatHyperparameter("NormalFloat", mu=0, sigma=1, default_value=1, log=True)
    # Add Ints (log)
    hp['random2'] = UniformIntegerHyperparameter("UniformInt", 2, 100, log=True)
    hp['random3'] = NormalIntegerHyperparameter("NormalInt", mu=0, sigma=1, default_value=1, log=True)
    # Add Categorical for allowed types
    hp['activation'] = CategoricalHyperparameter('activation', choices=['identity', 'logistic', 'tanh', 'relu'])
    hp['solver'] = CategoricalHyperparameter('solver', choices=[-2, 0, 2])  # corresponds to: 'lbfgs', 'sgd', 'adam'
    hp['batch_size_auto'] = CategoricalHyperparameter('batch_size_auto', choices=[True, False])
    hp['learning_rate'] = CategoricalHyperparameter('learning_rate', choices=[-0.5, 0.0, 0.5])  # corresponds to {'constant', 'invscaling', 'adaptive'}
    # Add Ordinal
    hp['batch_size'] = OrdinalHyperparameter('batch_size', sequence=[32, 64.0, '128'])

    for k, v in hp.items():
        cs.add_hyperparameter(v)

    # learning_rate only with sgd
    cs.add_condition(EqualsCondition(hp['learning_rate'], hp['solver'], 0))
    # momentum only with sgd
    cs.add_condition(EqualsCondition(hp['momentum'], hp['solver'], 0))
    # learning_rate_init only with sgd or adam
    cs.add_condition(OrConjunction(EqualsCondition(hp['learning_rate_init'], hp['solver'], 0),  # sgd
                                   EqualsCondition(hp['learning_rate_init'], hp['solver'], 2)))  # adam
    # batch_size only with not batch_size_auto
    cs.add_condition(NotEqualsCondition(hp['batch_size'], hp['batch_size_auto'], True))
    # complicated way for solver == sgd
    #cs.add_condition(AndConjunction(LessThanCondition(hp['power_t'], hp['solver'], 1),
    #                                GreaterThanCondition(hp['power_t'], hp['solver'], -1)))
    # betas with adam
    cs.add_condition(EqualsCondition(hp['beta1'], hp['solver'], 2))
    cs.add_condition(EqualsCondition(hp['beta2'], hp['solver'], 2))

    return cs
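
A consumer library can then be smoke-tested against every parameter kind in one pass (a sketch; check_compatibility stands in for the code under test):

cs = get_complete_configspace()
for config in cs.sample_configuration(10):
    check_compatibility(config)  # hypothetical consumer of ConfigSpace objects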
Example #20
    def test_write_AndConjunction_condition(self):
        expected = "lp '--lp ' c {mi,bo}\nls '--ls ' c {sa,ca,ny}\ntemp '--temp ' r (0.500000, 1.000000)|  ls  %in%  c(sa,ca)  &&  lp  %in%  c(bo)\n"

        temp = UniformFloatHyperparameter("temp", 0.5, 1)
        ls = CategoricalHyperparameter("ls", ["sa", "ca", "ny"], "sa")
        lp = CategoricalHyperparameter("lp", ["mi", "bo"], "bo")

        cs = ConfigurationSpace()
        cs.add_hyperparameter(temp)
        cs.add_hyperparameter(lp)
        cs.add_hyperparameter(ls)

        c1 = InCondition(temp, ls, ['sa','ca'])
        c2 = InCondition(temp, lp, ['bo'])
        c3 = AndConjunction(c1, c2)
        cs.add_condition(c3)
        value = irace.write(cs)
        self.assertEqual(expected, value)
Example #21
def _construct_in_condition(
    condition: Dict,
    cs: ConfigurationSpace,
) -> InCondition:
    return InCondition(
        child=cs.get_hyperparameter(condition['child']),
        parent=cs.get_hyperparameter(condition['parent']),
        values=condition['values'],
    )
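
Hypothetical input for this helper; the 'degree' and 'kernel' hyperparameters must already be registered in cs:

cond = _construct_in_condition(
    {'child': 'degree', 'parent': 'kernel', 'values': ['poly']},
    cs,
)
cs.add_condition(cond)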
Example #22
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        hidden_layer_depth = UniformIntegerHyperparameter(name="hidden_layer_depth",
                                                          lower=1, upper=3, default_value=1)
        num_nodes_per_layer = UniformIntegerHyperparameter(name="num_nodes_per_layer",
                                                           lower=16, upper=264, default_value=32,
                                                           log=True)
        activation = CategoricalHyperparameter(name="activation", choices=['tanh', 'relu'],
                                               default_value='tanh')
        alpha = UniformFloatHyperparameter(name="alpha", lower=1e-7, upper=1e-1, default_value=1e-4,
                                           log=True)

        learning_rate_init = UniformFloatHyperparameter(name="learning_rate_init",
                                                        lower=1e-4, upper=0.5, default_value=1e-3,
                                                        log=True)

        # Not allowing to turn off early stopping
        early_stopping = CategoricalHyperparameter(name="early_stopping",
                                                   choices=["valid", "train"],  # , "off"],
                                                   default_value="valid")
        # Constants
        n_iter_no_change = Constant(name="n_iter_no_change", value=32)  # default=10 is too low
        validation_fraction = Constant(name="validation_fraction", value=0.1)
        tol = UnParametrizedHyperparameter(name="tol", value=1e-4)
        solver = Constant(name="solver", value='adam')

        # Relying on sklearn defaults for now
        batch_size = UnParametrizedHyperparameter(name="batch_size", value="auto")
        shuffle = UnParametrizedHyperparameter(name="shuffle", value="True")
        beta_1 = UnParametrizedHyperparameter(name="beta_1", value=0.9)
        beta_2 = UnParametrizedHyperparameter(name="beta_2", value=0.999)
        epsilon = UnParametrizedHyperparameter(name="epsilon", value=1e-8)

        # Not used
        # solver=["sgd", "lbfgs"] --> not used to keep searchspace simpler
        # learning_rate --> only used when using solver=sgd
        # power_t --> only used when using solver=sgd & learning_rate=invscaling
        # momentum --> only used when solver=sgd
        # nesterovs_momentum --> only used when solver=sgd
        # max_fun --> only used when solver=lbfgs
        # activation=["identity", "logistic"] --> not useful for classification

        cs.add_hyperparameters([hidden_layer_depth, num_nodes_per_layer,
                                activation, alpha,
                                learning_rate_init, early_stopping,
                                n_iter_no_change, validation_fraction, tol,
                                solver, batch_size, shuffle,
                                beta_1, beta_2, epsilon])

        validation_fraction_cond = InCondition(validation_fraction, early_stopping, ["valid"])
        cs.add_conditions([validation_fraction_cond])
        # We always use early stopping
        # n_iter_no_change_cond = InCondition(n_iter_no_change, early_stopping, ["valid", "train"])
        # tol_cond = InCondition(n_iter_no_change, early_stopping, ["valid", "train"])
        # cs.add_conditions([n_iter_no_change_cond, tol_cond])

        return cs
Example #23
    def test_in_condition(self):
        hp1 = CategoricalHyperparameter("parent", range(0, 11))
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cond = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        cond_ = InCondition(hp2, hp1, [0, 1, 2, 3, 4, 5])
        self.assertEqual(cond, cond_)

        # Test vector value:
        self.assertEqual(
            cond.vector_values,
            [hp1._inverse_transform(i) for i in [0, 1, 2, 3, 4, 5]])
        self.assertEqual(cond.vector_values, cond_.vector_values)

        cond_reverse = InCondition(hp1, hp2, [0, 1, 2, 3, 4, 5])
        self.assertNotEqual(cond, cond_reverse)

        self.assertNotEqual(cond, dict())

        self.assertEqual("child | parent in {0, 1, 2, 3, 4, 5}", str(cond))
Example #24
    def add_params(cs: ConfigurationSpace):
        '''
            adds parameters to ConfigurationSpace 
        '''

        selector = cs.get_hyperparameter("selector")
        regressor = cs.get_hyperparameter("regressor")
        if "PairwiseRegressor" in selector.choices:
            cond = InCondition(child=regressor, parent=selector, values=["PairwiseRegressor"])
            cs.add_condition(cond)
Example #25
def get_libsvm_svc_default_search_space():
    classif_prefix = "classifier:libsvm_svc:"

    model_type = CategoricalHyperparameter('classifier:__choice__',
                                           ['libsvm_svc'])
    imputation = CategoricalHyperparameter('imputation:strategy',
                                           ['mean', 'median', 'most_frequent'])

    C = UniformFloatHyperparameter(classif_prefix + "C",
                                   0.03125,
                                   32768,
                                   log=True,
                                   default_value=1.0)
    # No linear kernel here, because we have liblinear
    kernel = CategoricalHyperparameter(name=classif_prefix + "kernel",
                                       choices=["rbf", "poly", "sigmoid"],
                                       default_value="rbf")
    degree = UniformIntegerHyperparameter(classif_prefix + "degree",
                                          2,
                                          5,
                                          default_value=3)
    gamma = UniformFloatHyperparameter(classif_prefix + "gamma",
                                       3.0517578125e-05,
                                       8,
                                       log=True,
                                       default_value=0.1)
    # TODO this is totally ad-hoc
    coef0 = UniformFloatHyperparameter(classif_prefix + "coef0",
                                       -1,
                                       1,
                                       default_value=0)
    # probability is no hyperparameter, but an argument to the SVM algo
    shrinking = CategoricalHyperparameter(classif_prefix + "shrinking",
                                          ["True", "False"],
                                          default_value="True")
    tol = UniformFloatHyperparameter(classif_prefix + "tol",
                                     1e-5,
                                     1e-1,
                                     default_value=1e-3,
                                     log=True)
    # cache size is not a hyperparameter, but an argument to the program!
    max_iter = UnParametrizedHyperparameter(classif_prefix + "max_iter", -1)

    cs = ConfigurationSpace()
    cs.add_hyperparameters([
        model_type, imputation, C, kernel, degree, gamma, coef0, shrinking,
        tol, max_iter
    ])

    degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
    coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
    cs.add_condition(degree_depends_on_poly)
    cs.add_condition(coef0_condition)

    return cs
Example #26
    def get_hyperparameter_search_space(dataset_properties=None):
        cs = ConfigurationSpace()
        loss = cs.add_hyperparameter(
            CategoricalHyperparameter("loss",
                                      ["ls", "lad", "huber", "quantile"],
                                      default="ls"))
        learning_rate = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="learning_rate",
                                       lower=0.01,
                                       upper=1,
                                       default=0.1,
                                       log=True))
        n_estimators = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="n_estimators",
                                         lower=50,
                                         upper=500,
                                         default=100))
        max_depth = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="max_depth",
                                         lower=1,
                                         upper=10,
                                         default=3))
        min_samples_split = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_split",
                                         lower=2,
                                         upper=20,
                                         default=2,
                                         log=False))
        min_samples_leaf = cs.add_hyperparameter(
            UniformIntegerHyperparameter(name="min_samples_leaf",
                                         lower=1,
                                         upper=20,
                                         default=1,
                                         log=False))
        min_weight_fraction_leaf = cs.add_hyperparameter(
            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
        subsample = cs.add_hyperparameter(
            UniformFloatHyperparameter(name="subsample",
                                       lower=0.01,
                                       upper=1.0,
                                       default=1.0,
                                       log=False))
        max_features = cs.add_hyperparameter(
            UniformFloatHyperparameter("max_features", 0.5, 5, default=1))
        max_leaf_nodes = cs.add_hyperparameter(
            UnParametrizedHyperparameter(name="max_leaf_nodes", value="None"))
        alpha = cs.add_hyperparameter(
            UniformFloatHyperparameter("alpha",
                                       lower=0.75,
                                       upper=0.99,
                                       default=0.9))

        cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile']))
        return cs
Example #27
    def get_hyperparameter_search_space(dataset_properties=None):
        C = UniformFloatHyperparameter(
            name="C", lower=0.03125, upper=32768, log=True, default_value=1.0)
        # Random Guess
        epsilon = UniformFloatHyperparameter(name="epsilon", lower=0.001,
                                             upper=1, default_value=0.1,
                                             log=True)

        kernel = CategoricalHyperparameter(
            name="kernel", choices=['linear', 'poly', 'rbf', 'sigmoid'],
            default_value="rbf")
        degree = UniformIntegerHyperparameter(
            name="degree", lower=2, upper=5, default_value=3)

        gamma = UniformFloatHyperparameter(
            name="gamma", lower=3.0517578125e-05, upper=8, log=True, default_value=0.1)

        # TODO this is totally ad-hoc
        coef0 = UniformFloatHyperparameter(
            name="coef0", lower=-1, upper=1, default_value=0)
        # probability is no hyperparameter, but an argument to the SVM algo
        shrinking = CategoricalHyperparameter(
            name="shrinking", choices=["True", "False"], default_value="True")
        tol = UniformFloatHyperparameter(
            name="tol", lower=1e-5, upper=1e-1, default_value=1e-3, log=True)
        max_iter = UnParametrizedHyperparameter("max_iter", -1)

        cs = ConfigurationSpace()
        cs.add_hyperparameters([C, kernel, degree, gamma, coef0, shrinking,
                               tol, max_iter, epsilon])

        degree_depends_on_kernel = InCondition(child=degree, parent=kernel,
                                               values=('poly', 'rbf', 'sigmoid'))
        gamma_depends_on_kernel = InCondition(child=gamma, parent=kernel,
                                              values=('poly', 'rbf'))
        coef0_depends_on_kernel = InCondition(child=coef0, parent=kernel,
                                              values=('poly', 'sigmoid'))
        cs.add_conditions([degree_depends_on_kernel, gamma_depends_on_kernel,
                           coef0_depends_on_kernel])

        return cs
Example #28
    def add_params(cs: ConfigurationSpace):
        '''
            adds parameters to ConfigurationSpace 
        '''

        selector = cs.get_hyperparameter("selector")
        classifier = cs.get_hyperparameter("classifier")
        if "MultiClassifier" in selector.choices:
            cond = InCondition(child=classifier,
                               parent=selector,
                               values=["MultiClassifier"])
            cs.add_condition(cond)
Example #29
def main_loop(problem):
    logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output

    cs = ConfigurationSpace()

    n_neighbors = UniformIntegerHyperparameter("n_neighbors", 2, 10, default_value=5)
    cs.add_hyperparameter(n_neighbors)

    weights = CategoricalHyperparameter("weights", ["uniform", "distance"], default_value="uniform")
    algorithm = CategoricalHyperparameter("algorithm", ["ball_tree", "kd_tree", "brute", "auto"], default_value="auto")
    cs.add_hyperparameters([weights, algorithm])

    leaf_size = UniformIntegerHyperparameter("leaf_size", 1, 100, default_value=50)
    cs.add_hyperparameter(leaf_size)
    use_leaf_size = InCondition(child=leaf_size, parent=algorithm, values=["ball_tree", "kd_tree"])
    cs.add_condition(use_leaf_size)

    p = UniformIntegerHyperparameter("p", 1, 3, default_value=2)
    cs.add_hyperparameter(p)

    # Scenario object
    max_eval = 100000
    scenario = Scenario({"run_obj": "quality",   # we optimize quality (alternatively runtime)
                         "runcount-limit": max_eval,  # maximum function evaluations
                         "cs": cs,                        # configuration space
                         "shared_model": True,
                         "output_dir": "/home/naamah/Documents/CatES/result_All/smac/KNN/run_{}_{}_{}".format(max_eval,
                                                                                                           datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d_%H:%M:%S'),
                                                                                                              problem)

                         # "output_dir": "/home/naamah/Documents/CatES/result_All/smac/KNN/{}/run_{}_{}".format(problem,max_eval, datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d_%H:%M:%S_%f')),
                         # "input_psmac_dirs":"/home/naamah/Documents/CatES/result_All/",
                         # "deterministic": "true"
                         })

    # Example call of the function
    # It returns: Status, Cost, Runtime, Additional Infos
    def_value = svm_from_cfg(cs.get_default_configuration())
    print("Default Value: %.2f" % (def_value))

    # Optimize, using a SMAC-object
    print("Optimizing! Depending on your machine, this might take a few minutes.")
    smac = SMAC(scenario=scenario, tae_runner=svm_from_cfg)

    incumbent = smac.optimize()

    inc_value = svm_from_cfg(incumbent)
    print("Optimized Value: %.2f" % (inc_value))

    return incumbent


# main_loop()
Example #30
    def config_space(self):
        """SVC hyperparameter space."""

        C_param = UniformFloatHyperparameter(
            'C', lower=1e-8, upper=100.0, default_value=1.0
        )
        shrinking = CategoricalHyperparameter(
            'shrinking', [True, False], default_value=True
        )
        kernel = CategoricalHyperparameter(
            'kernel', ['linear', 'rbf', 'poly', 'sigmoid'],
        )
        degree = UniformIntegerHyperparameter(
            'degree', lower=1, upper=5, default_value=2
        )
        coef0 = UniformFloatHyperparameter(
            'coef0', lower=0.0, upper=10.0, default_value=0.0
        )
        # Add hyperparameters to config space.
        config = ConfigurationSpace()
        config.seed(self.random_state)
        config.add_hyperparameters(
            (
                C_param,
                shrinking,
                kernel,
                degree,
                coef0,
            )
        )
        # Conditionals on hyperparameters specific to kernels.
        config.add_conditions(
            (
                InCondition(child=degree, parent=kernel, values=['poly']),
                InCondition(
                    child=coef0, parent=kernel, values=['poly', 'sigmoid']
                )
            )
        )
        return config