Example #1
    def set_optimizer_space(cs: ConfigurationSpace):
        '''
        Set hyperparameters for optimizers
        '''
        optimizer = CategoricalHyperparameter('optimizer', ['SGD', 'Adam'],
                                              default_value='Adam')
        sgd_lr = UniformFloatHyperparameter('sgd_lr',
                                            0.00001,
                                            0.1,
                                            default_value=0.005,
                                            log=True)  # log scale
        sgd_decay = UniformFloatHyperparameter('sgd_decay',
                                               0.0001,
                                               0.1,
                                               default_value=0.05,
                                               log=True)  # log scale
        sgd_momentum = UniformFloatHyperparameter('sgd_momentum',
                                                  0.3,
                                                  0.99,
                                                  default_value=0.9)
        adam_lr = UniformFloatHyperparameter('adam_lr',
                                             0.00001,
                                             0.1,
                                             default_value=0.005,
                                             log=True)  # log scale
        adam_decay = UniformFloatHyperparameter('adam_decay',
                                                0.0001,
                                                0.1,
                                                default_value=0.05,
                                                log=True)  # log scale

        sgd_lr_cond = InCondition(child=sgd_lr,
                                  parent=optimizer,
                                  values=['SGD'])
        sgd_decay_cond = InCondition(child=sgd_decay,
                                     parent=optimizer,
                                     values=['SGD'])
        sgd_momentum_cond = InCondition(child=sgd_momentum,
                                        parent=optimizer,
                                        values=['SGD'])
        adam_lr_cond = InCondition(child=adam_lr,
                                   parent=optimizer,
                                   values=['Adam'])
        adam_decay_cond = InCondition(child=adam_decay,
                                      parent=optimizer,
                                      values=['Adam'])

        cs.add_hyperparameters(
            [optimizer, sgd_lr, sgd_decay, sgd_momentum, adam_lr, adam_decay])
        cs.add_conditions([
            sgd_lr_cond, sgd_decay_cond, sgd_momentum_cond, adam_lr_cond,
            adam_decay_cond
        ])
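
A brief usage sketch (an addition, not from the original source): assuming set_optimizer_space above is reachable as a plain function (in its project it may be a static or instance method), sampling from the resulting space yields only the hyperparameters of the chosen optimizer branch.

from ConfigSpace import ConfigurationSpace

cs = ConfigurationSpace(seed=0)
set_optimizer_space(cs)
for config in cs.sample_configuration(3):
    # Only the active branch shows up: 'sgd_*' keys appear when
    # optimizer == 'SGD', 'adam_*' keys when optimizer == 'Adam'.
    print(config.get_dictionary())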
Example #2
    def test_sample_configuration(self):
        cs = ConfigurationSpace()
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        cs.add_hyperparameter(hp1)
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cs.add_hyperparameter(hp2)
        cond1 = EqualsCondition(hp2, hp1, 0)
        cs.add_condition(cond1)
        # This automatically checks the configuration!
        Configuration(cs, dict(parent=0, child=5))

        # and now for something more complicated
        cs = ConfigurationSpace(seed=1)
        hp1 = CategoricalHyperparameter("input1", [0, 1])
        cs.add_hyperparameter(hp1)
        hp2 = CategoricalHyperparameter("input2", [0, 1])
        cs.add_hyperparameter(hp2)
        hp3 = CategoricalHyperparameter("input3", [0, 1])
        cs.add_hyperparameter(hp3)
        hp4 = CategoricalHyperparameter("input4", [0, 1])
        cs.add_hyperparameter(hp4)
        hp5 = CategoricalHyperparameter("input5", [0, 1])
        cs.add_hyperparameter(hp5)
        hp6 = Constant("AND", "True")
        cs.add_hyperparameter(hp6)

        cond1 = EqualsCondition(hp6, hp1, 1)
        cond2 = NotEqualsCondition(hp6, hp2, 1)
        cond3 = InCondition(hp6, hp3, [1])
        cond4 = EqualsCondition(hp5, hp3, 1)
        cond5 = EqualsCondition(hp4, hp5, 1)
        cond6 = EqualsCondition(hp6, hp4, 1)
        cond7 = EqualsCondition(hp6, hp5, 1)

        conj1 = AndConjunction(cond1, cond2)
        conj2 = OrConjunction(conj1, cond3)
        conj3 = AndConjunction(conj2, cond6, cond7)
        cs.add_condition(cond4)
        cs.add_condition(cond5)
        cs.add_condition(conj3)

        samples = []
        for i in range(5):
            cs.seed(1)
            samples.append([])
            for j in range(100):
                sample = cs.sample_configuration()
                samples[-1].append(sample)

            if i > 0:
                for j in range(100):
                    self.assertEqual(samples[-1][j], samples[-2][j])
Example #3
    def test_get_hyperparameters_topological_sort(self):
        # and now for something more complicated
        cs = ConfigurationSpace()
        hp1 = CategoricalHyperparameter("input1", [0, 1])
        hp2 = CategoricalHyperparameter("input2", [0, 1])
        hp3 = CategoricalHyperparameter("input3", [0, 1])
        hp4 = CategoricalHyperparameter("input4", [0, 1])
        hp5 = CategoricalHyperparameter("input5", [0, 1])
        hp6 = Constant("AND", "True")
        # More top-level hyperparameters
        hp7 = CategoricalHyperparameter("input7", [0, 1])
        # Somewhat shuffled
        hyperparameters = [hp1, hp2, hp3, hp4, hp5, hp6, hp7]

        for hp in hyperparameters:
            cs.add_hyperparameter(hp)

        cond1 = EqualsCondition(hp6, hp1, 1)
        cond2 = NotEqualsCondition(hp6, hp2, 1)
        cond3 = InCondition(hp6, hp3, [1])
        cond4 = EqualsCondition(hp5, hp3, 1)
        cond5 = EqualsCondition(hp4, hp5, 1)
        cond6 = EqualsCondition(hp6, hp4, 1)
        cond7 = EqualsCondition(hp6, hp5, 1)

        conj1 = AndConjunction(cond1, cond2)
        conj2 = OrConjunction(conj1, cond3)
        conj3 = AndConjunction(conj2, cond6, cond7)

        cs.add_condition(cond4)
        hps = cs.get_hyperparameters()
        # AND is moved to the front because of alphabetical sorting
        for hp, idx in zip(hyperparameters, [1, 2, 3, 4, 6, 0, 5]):
            self.assertEqual(hps.index(hp), idx)
            self.assertEqual(cs._hyperparameter_idx[hp.name], idx)
            self.assertEqual(cs._idx_to_hyperparameter[idx], hp.name)

        cs.add_condition(cond5)
        hps = cs.get_hyperparameters()
        for hp, idx in zip(hyperparameters, [1, 2, 3, 6, 5, 0, 4]):
            self.assertEqual(hps.index(hp), idx)
            self.assertEqual(cs._hyperparameter_idx[hp.name], idx)
            self.assertEqual(cs._idx_to_hyperparameter[idx], hp.name)

        cs.add_condition(conj3)
        hps = cs.get_hyperparameters()
        # print(hps, hyperparameters)
        for hp, idx in zip(hyperparameters, [0, 1, 2, 5, 4, 6, 3]):
            # print(hp, idx)
            self.assertEqual(hps.index(hp), idx)
            self.assertEqual(cs._hyperparameter_idx[hp.name], idx)
        self.assertEqual(cs._idx_to_hyperparameter[idx], hp.name)
Example #4
    def test_sample_configuration_with_or_conjunction(self):
        cs = ConfigurationSpace(seed=1)

        hyper_params = {}
        hyper_params["hp5"] = CategoricalHyperparameter("hp5", ['0', '1', '2'])
        hyper_params["hp7"] = CategoricalHyperparameter("hp7", ['3', '4', '5'])
        hyper_params["hp8"] = CategoricalHyperparameter("hp8", ['6', '7', '8'])
        for key in hyper_params:
            cs.add_hyperparameter(hyper_params[key])

        cs.add_condition(
            InCondition(hyper_params["hp5"], hyper_params["hp8"], ['6']))

        cs.add_condition(
            OrConjunction(
                InCondition(hyper_params["hp7"], hyper_params["hp8"], ['7']),
                InCondition(hyper_params["hp7"], hyper_params["hp5"], ['1'])))

        for cfg, fixture in zip(cs.sample_configuration(10),
                                [[1, np.nan, 2], [0, 2, np.nan], [0, 1, 1],
                                 [1, np.nan, 2], [1, np.nan, 2]]):
            np.testing.assert_array_almost_equal(cfg.get_array(), fixture)
Example #5
def __condition(self, item: Dict, store: Dict):
    # item names the child/parent hyperparameters under "_child"/"_parent"
    # and carries the triggering value(s) under "_values";
    # store maps those names to the hyperparameter objects.
    child = store[item["_child"]]
    parent = store[item["_parent"]]
    value = item["_values"]
    if isinstance(value, list) and len(value) == 1:
        value = value[0]
    if isinstance(value, list):
        cond = InCondition(child, parent, list(map(hp_def._encode, value)))
    else:
        cond = EqualsCondition(child, parent, hp_def._encode(value))
    return cond
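
For clarity, here is a simplified, self-contained sketch of the same dispatch logic. The "_child"/"_parent"/"_values" keys and the name-to-hyperparameter store follow the snippet above; the project-specific hp_def._encode step is omitted, and the concrete hyperparameters are made up for illustration.

from ConfigSpace.hyperparameters import CategoricalHyperparameter
from ConfigSpace.conditions import EqualsCondition, InCondition

def build_condition(item, store):
    # Resolve names to hyperparameter objects, then pick the condition type
    # based on whether one value or several are given.
    child = store[item["_child"]]
    parent = store[item["_parent"]]
    value = item["_values"]
    if isinstance(value, list) and len(value) == 1:
        value = value[0]
    if isinstance(value, list):
        return InCondition(child, parent, value)
    return EqualsCondition(child, parent, value)

store = {
    "kernel": CategoricalHyperparameter("kernel", ["rbf", "poly"]),
    "degree": CategoricalHyperparameter("degree", ["2", "3", "4"]),
}
# 'degree' becomes active only when 'kernel' takes the value 'poly'.
cond = build_condition({"_child": "degree", "_parent": "kernel",
                        "_values": ["poly"]}, store)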
Example #6
def __condition(self, item: Dict, store: Dict, leader_model):
    child = add_leader_model(item["_child"], leader_model,
                             SERIES_CONNECT_LEADER_TOKEN)
    child = store[child]
    parent = add_leader_model(item["_parent"], leader_model,
                              SERIES_CONNECT_LEADER_TOKEN)
    parent = store[parent]
    value = item["_values"]
    if isinstance(value, list) and len(value) == 1:
        value = value[0]
    if isinstance(value, list):
        cond = InCondition(child, parent,
                           list(map(smac_hdl._encode, value)))
    else:
        cond = EqualsCondition(child, parent, smac_hdl._encode(value))
    return cond
Example #7
def string2condition(cond_desc: str, hp_dict: dict):
    # Supports EqualsCondition and InCondition
    pattern_in = r'(.*?)\sin\s(.*?)}'
    pattern_equal = r'(.*?)\s==\s(.*)'
    matchobj_equal = re.match(pattern_equal, cond_desc)
    matchobj_in = re.match(pattern_in, cond_desc)
    if matchobj_equal:
        two_elements = matchobj_equal.group(1).split('|')
        child_name = two_elements[0][4:-1]
        parent_name = two_elements[1][1:]
        target_value = matchobj_equal.group(2)[1:-1]
        cond = EqualsCondition(hp_dict[child_name], hp_dict[parent_name], target_value)
    elif matchobj_in:
        two_elements = matchobj_in.group(1).split('|')
        child_name = two_elements[0][4:-1]
        parent_name = two_elements[1][1:]
        choice_str = matchobj_in.group(2).split(',')
        choices = [choice[2:-1] for choice in choice_str]
        cond = InCondition(hp_dict[child_name], hp_dict[parent_name], choices)
    else:
        raise ValueError("Unsupported condition type in config_space!")
    return cond
Example #8
    def get_hyperparameter_search_space():
        C = UniformFloatHyperparameter("C",
                                       0.03125,
                                       32768,
                                       log=True,
                                       default_value=1.0)
        # No linear kernel here, because we have liblinear
        kernel = CategoricalHyperparameter(name="kernel",
                                           choices=["rbf", "poly", "sigmoid"],
                                           default_value="rbf")
        degree = UniformIntegerHyperparameter("degree", 2, 5, default_value=3)
        gamma = UniformFloatHyperparameter("gamma",
                                           3.0517578125e-05,
                                           8,
                                           log=True,
                                           default_value=0.1)
        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default_value=0)
        shrinking = CategoricalHyperparameter("shrinking", ["True", "False"],
                                              default_value="True")
        tol = UniformFloatHyperparameter("tol",
                                         1e-5,
                                         1e-1,
                                         default_value=1e-3,
                                         log=True)
        # cache size is not a hyperparameter, but an argument to the program!
        max_iter = UnParametrizedHyperparameter("max_iter", 10000)

        cs = ConfigurationSpace()
        cs.add_hyperparameters(
            [C, kernel, degree, gamma, coef0, shrinking, tol, max_iter])

        degree_depends_on_poly = EqualsCondition(degree, kernel, "poly")
        coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"])
        cs.add_condition(degree_depends_on_poly)
        cs.add_condition(coef0_condition)

        return cs
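
A short sampling sketch (an addition, not part of the source): it assumes get_hyperparameter_search_space above can be called directly (in its source project it is most likely a static method of an SVM component) and shows that the conditional hyperparameters only appear for matching kernels.

cs = get_hyperparameter_search_space()
for config in cs.sample_configuration(5):
    params = config.get_dictionary()
    # 'degree' is present only for kernel == 'poly',
    # 'coef0' only for 'poly' and 'sigmoid'.
    print(params["kernel"], "degree" in params, "coef0" in params)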
Example #9
    def test_check_configuration(self):
        # TODO this is only a smoke test
        # TODO actually, this rather tests the evaluate methods in the
        # conditions module!
        cs = ConfigurationSpace()
        hp1 = CategoricalHyperparameter("parent", [0, 1])
        cs.add_hyperparameter(hp1)
        hp2 = UniformIntegerHyperparameter("child", 0, 10)
        cs.add_hyperparameter(hp2)
        cond1 = EqualsCondition(hp2, hp1, 0)
        cs.add_condition(cond1)
        # This automatically checks the configuration!
        Configuration(cs, dict(parent=0, child=5))

        # and now for something more complicated
        cs = ConfigurationSpace()
        hp1 = CategoricalHyperparameter("input1", [0, 1])
        cs.add_hyperparameter(hp1)
        hp2 = CategoricalHyperparameter("input2", [0, 1])
        cs.add_hyperparameter(hp2)
        hp3 = CategoricalHyperparameter("input3", [0, 1])
        cs.add_hyperparameter(hp3)
        hp4 = CategoricalHyperparameter("input4", [0, 1])
        cs.add_hyperparameter(hp4)
        hp5 = CategoricalHyperparameter("input5", [0, 1])
        cs.add_hyperparameter(hp5)
        hp6 = Constant("AND", "True")
        cs.add_hyperparameter(hp6)

        cond1 = EqualsCondition(hp6, hp1, 1)
        cond2 = NotEqualsCondition(hp6, hp2, 1)
        cond3 = InCondition(hp6, hp3, [1])
        cond4 = EqualsCondition(hp6, hp4, 1)
        cond5 = EqualsCondition(hp6, hp5, 1)

        conj1 = AndConjunction(cond1, cond2)
        conj2 = OrConjunction(conj1, cond3)
        conj3 = AndConjunction(conj2, cond4, cond5)
        cs.add_condition(conj3)

        expected_outcomes = [
            False, False, False, False, False, False, False, True, False,
            False, False, False, False, False, False, True, False, False,
            False, True, False, False, False, True, False, False, False, False,
            False, False, False, True
        ]

        for idx, values in enumerate(product([0, 1], repeat=5)):
            # The hyperparameters aren't sorted, but the test assumes them to
            #  be sorted.
            hyperparameters = sorted(cs.get_hyperparameters(),
                                     key=lambda t: t.name)
            instantiations = {
                hyperparameters[jdx + 1].name: values[jdx]
                for jdx in range(len(values))
            }

            evaluation = conj3.evaluate(instantiations)
            self.assertEqual(expected_outcomes[idx], evaluation)

            if not evaluation:
                self.assertRaisesRegex(
                    ValueError,
                    r"Inactive hyperparameter 'AND' must "
                    r"not be specified, but has the vector value: "
                    r"'0.0'.",
                    Configuration,
                    cs,
                    values={
                        "input1": values[0],
                        "input2": values[1],
                        "input3": values[2],
                        "input4": values[3],
                        "input5": values[4],
                        "AND": "True",
                    },
                )
            else:
                Configuration(
                    cs,
                    values={
                        "input1": values[0],
                        "input2": values[1],
                        "input3": values[2],
                        "input4": values[3],
                        "input5": values[4],
                        "AND": "True",
                    },
                )
Example #10
    def get_hyperparameter_search_space(dataset_properties=None):
        max_num_layers = 7  # Maximum number of layers coded

        # Hacky way to condition layer params on the number of layers
        # 'c'=1, 'd'=2, 'e'=3, 'f'=4, 'g'=5, 'h'=6 + output_layer
        layer_choices = [
            chr(i) for i in range(ord('c'),
                                  ord('b') + max_num_layers)
        ]

        batch_size = UniformIntegerHyperparameter("batch_size",
                                                  32,
                                                  4096,
                                                  log=True,
                                                  default=32)

        number_epochs = UniformIntegerHyperparameter("number_epochs",
                                                     2,
                                                     80,
                                                     default=5)

        num_layers = CategoricalHyperparameter("num_layers",
                                               choices=layer_choices,
                                               default='c')

        lr = UniformFloatHyperparameter("learning_rate",
                                        1e-6,
                                        1.0,
                                        log=True,
                                        default=0.01)

        l2 = UniformFloatHyperparameter("lambda2",
                                        1e-7,
                                        1e-2,
                                        log=True,
                                        default=1e-4)

        dropout_output = UniformFloatHyperparameter("dropout_output",
                                                    0.0,
                                                    0.99,
                                                    default=0.5)

        # Define the basic hyperparameters and the config space;
        # basic means they are independent of the number of layers

        cs = ConfigurationSpace()
        cs.add_hyperparameter(number_epochs)
        cs.add_hyperparameter(batch_size)
        cs.add_hyperparameter(num_layers)
        cs.add_hyperparameter(lr)
        cs.add_hyperparameter(l2)
        cs.add_hyperparameter(dropout_output)

        #  Define parameters with different child parameters and conditions
        solver_choices = [
            "adam", "adadelta", "adagrad", "sgd", "momentum", "nesterov",
            "smorm3s"
        ]

        solver = CategoricalHyperparameter(name="solver",
                                           choices=solver_choices,
                                           default="smorm3s")

        beta1 = UniformFloatHyperparameter("beta1",
                                           1e-4,
                                           0.1,
                                           log=True,
                                           default=0.1)

        beta2 = UniformFloatHyperparameter("beta2",
                                           1e-4,
                                           0.1,
                                           log=True,
                                           default=0.01)

        rho = UniformFloatHyperparameter("rho",
                                         0.05,
                                         0.99,
                                         log=True,
                                         default=0.95)

        momentum = UniformFloatHyperparameter("momentum",
                                              0.3,
                                              0.999,
                                              default=0.9)

        # TODO: Add policy based on this sklearn sgd
        policy_choices = ['fixed', 'inv', 'exp', 'step']

        lr_policy = CategoricalHyperparameter(name="lr_policy",
                                              choices=policy_choices,
                                              default='fixed')

        gamma = UniformFloatHyperparameter(name="gamma",
                                           lower=1e-3,
                                           upper=1e-1,
                                           default=1e-2)

        power = UniformFloatHyperparameter("power", 0.0, 1.0, default=0.5)

        epoch_step = UniformIntegerHyperparameter("epoch_step",
                                                  2,
                                                  20,
                                                  default=5)

        cs.add_hyperparameter(solver)
        cs.add_hyperparameter(beta1)
        cs.add_hyperparameter(beta2)
        cs.add_hyperparameter(momentum)
        cs.add_hyperparameter(rho)
        cs.add_hyperparameter(lr_policy)
        cs.add_hyperparameter(gamma)
        cs.add_hyperparameter(power)
        cs.add_hyperparameter(epoch_step)

        # Define parameters that are needed for each layer
        output_activation_choices = ['softmax', 'sigmoid', 'softplus', 'tanh']

        activations_choices = [
            'sigmoid', 'tanh', 'scaledTanh', 'elu', 'relu', 'leaky', 'linear'
        ]

        weight_choices = [
            'constant', 'normal', 'uniform', 'glorot_normal', 'glorot_uniform',
            'he_normal', 'he_uniform', 'ortogonal', 'sparse'
        ]

        # Iterate over parameters that are used in each layer
        for i in range(1, max_num_layers):
            layer_units = UniformIntegerHyperparameter("num_units_layer_" +
                                                       str(i),
                                                       64,
                                                       4096,
                                                       log=True,
                                                       default=128)
            cs.add_hyperparameter(layer_units)
            layer_dropout = UniformFloatHyperparameter("dropout_layer_" +
                                                       str(i),
                                                       0.0,
                                                       0.99,
                                                       default=0.5)
            cs.add_hyperparameter(layer_dropout)
            weight_initialization = CategoricalHyperparameter(
                'weight_init_' + str(i),
                choices=weight_choices,
                default='he_normal')
            cs.add_hyperparameter(weight_initialization)
            layer_std = UniformFloatHyperparameter("std_layer_" + str(i),
                                                   1e-6,
                                                   0.1,
                                                   log=True,
                                                   default=0.005)
            cs.add_hyperparameter(layer_std)
            layer_activation = CategoricalHyperparameter(
                "activation_layer_" + str(i),
                choices=activations_choices,
                default="relu")
            cs.add_hyperparameter(layer_activation)
            layer_leakiness = UniformFloatHyperparameter('leakiness_layer_' +
                                                         str(i),
                                                         0.01,
                                                         0.99,
                                                         default=0.3)

            cs.add_hyperparameter(layer_leakiness)
            layer_tanh_alpha = UniformFloatHyperparameter('tanh_alpha_layer_' +
                                                          str(i),
                                                          0.5,
                                                          1.0,
                                                          default=2. / 3.)
            cs.add_hyperparameter(layer_tanh_alpha)
            layer_tanh_beta = UniformFloatHyperparameter('tanh_beta_layer_' +
                                                         str(i),
                                                         1.1,
                                                         3.0,
                                                         log=True,
                                                         default=1.7159)
            cs.add_hyperparameter(layer_tanh_beta)

        # TODO: Could be in a function in a new module
        for i in range(2, max_num_layers):
            # Condition layers parameter on layer choice
            layer_unit_param = cs.get_hyperparameter("num_units_layer_" +
                                                     str(i))
            layer_cond = InCondition(child=layer_unit_param,
                                     parent=num_layers,
                                     values=[l for l in layer_choices[i - 1:]])
            cs.add_condition(layer_cond)
            # Condition dropout parameter on layer choice
            layer_dropout_param = cs.get_hyperparameter("dropout_layer_" +
                                                        str(i))
            layer_cond = InCondition(child=layer_dropout_param,
                                     parent=num_layers,
                                     values=[l for l in layer_choices[i - 1:]])
            cs.add_condition(layer_cond)
            # Condition weight initialization on layer choice
            layer_weight_param = cs.get_hyperparameter("weight_init_" + str(i))
            layer_cond = InCondition(child=layer_weight_param,
                                     parent=num_layers,
                                     values=[l for l in layer_choices[i - 1:]])
            cs.add_condition(layer_cond)
            # Condition std parameter on weight layer initialization choice
            layer_std_param = cs.get_hyperparameter("std_layer_" + str(i))
            weight_cond = EqualsCondition(child=layer_std_param,
                                          parent=layer_weight_param,
                                          value='normal')
            cs.add_condition(weight_cond)
            # Condition activation parameter on layer choice
            layer_activation_param = cs.get_hyperparameter(
                "activation_layer_" + str(i))
            layer_cond = InCondition(child=layer_activation_param,
                                     parent=num_layers,
                                     values=[l for l in layer_choices[i - 1:]])
            cs.add_condition(layer_cond)
            # Condition leakiness on activation choice
            layer_leakiness_param = cs.get_hyperparameter("leakiness_layer_" +
                                                          str(i))
            activation_cond = EqualsCondition(child=layer_leakiness_param,
                                              parent=layer_activation_param,
                                              value='leaky')
            cs.add_condition(activation_cond)
            # Condition tanh on activation choice
            layer_tanh_alpha_param = cs.get_hyperparameter(
                "tanh_alpha_layer_" + str(i))
            activation_cond = EqualsCondition(child=layer_tanh_alpha_param,
                                              parent=layer_activation_param,
                                              value='scaledTanh')
            cs.add_condition(activation_cond)
            layer_tanh_beta_param = cs.get_hyperparameter("tanh_beta_layer_" +
                                                          str(i))
            activation_cond = EqualsCondition(child=layer_tanh_beta_param,
                                              parent=layer_activation_param,
                                              value='scaledTanh')
            cs.add_condition(activation_cond)

        # Conditioning on solver
        momentum_depends_on_solver = InCondition(
            momentum, solver, values=["momentum", "nesterov"])
        beta1_depends_on_solver = EqualsCondition(beta1, solver, "adam")
        beta2_depends_on_solver = EqualsCondition(beta2, solver, "adam")
        rho_depends_on_solver = EqualsCondition(rho, solver, "adadelta")

        cs.add_condition(momentum_depends_on_solver)
        cs.add_condition(beta1_depends_on_solver)
        cs.add_condition(beta2_depends_on_solver)
        cs.add_condition(rho_depends_on_solver)

        # Conditioning on learning rate policy
        lr_policy_depends_on_solver = InCondition(
            lr_policy, solver,
            ["adadelta", "adagrad", "sgd", "momentum", "nesterov"])
        gamma_depends_on_policy = InCondition(child=gamma,
                                              parent=lr_policy,
                                              values=["inv", "exp", "step"])
        power_depends_on_policy = EqualsCondition(power, lr_policy, "inv")
        epoch_step_depends_on_policy = EqualsCondition(epoch_step, lr_policy,
                                                       "step")

        cs.add_condition(lr_policy_depends_on_solver)
        cs.add_condition(gamma_depends_on_policy)
        cs.add_condition(power_depends_on_policy)
        cs.add_condition(epoch_step_depends_on_policy)

        return cs
Example #11
x4 = CategoricalHyperparameter("x9", ['x1', 'x2', 'x3'])
x5 = CategoricalHyperparameter("x5", ['x1', 'x2', 'x3'])
x6 = CategoricalHyperparameter("x6", ['x1', 'x2', 'x3'])
x7 = CategoricalHyperparameter("x7", ['x1', 'x3', 'x2'], default_value='x2')
cs1.add_hyperparameters([x1, x2, x3, x7, x6, x5, x4])

cond1 = ForbiddenAndConjunction(
    ForbiddenEqualsClause(x3, "x1"),
    ForbiddenEqualsClause(x4, "x2"),
    ForbiddenEqualsClause(x5, "x3")
)
cs1.add_forbidden_clause(cond1)

cond2 = EqualsCondition(x1, x5, "x1")
cond7 = EqualsCondition(x2, x6, "x1")
cond3 = InCondition(x2, x6, ["x1", "x2", "x3"])
cs1.add_condition(cond3)
cs1.add_condition(cond2)

cond4 = ForbiddenEqualsClause(x4, 'x3')
cond5 = ForbiddenInClause(x7, ['x1', 'x3'])
cs1.add_forbidden_clause(cond5)
cs1.add_forbidden_clause(cond4)

from litebo.utils.config_space.space_utils import config_space2string, string2config_space

config_str = config_space2string(cs1)
print(config_str)
cs2 = string2config_space(config_str)
print(cs2 == cs1)
exit()