Example #1
def test_hyperparameters_added(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Int("a", -100, 100)

    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", direction="max"),
        max_trials=20,
        hyperparameters=hps,
        num_initial_points=2,
    )
    oracle._set_project_dir(tmp_dir, "untitled")

    # Populate initial trials.
    for i in range(10):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values["a"] = 10 * i
        trial.score = i
        trial.status = "COMPLETED"
        oracle.trials[trial.trial_id] = trial

    # Simulate a new trial discovering new hps, which are synced to oracle.hyperparameters.
    new_hps = hp_module.HyperParameters()
    new_hps.Float("b", 3.2, 6.4, step=0.2, default=3.6)
    new_hps.Boolean("c", default=True)
    oracle.update_space(new_hps)

    # Create a new trial; it should have both `b` and `c` set.
    trial = oracle.create_trial("tuner0")
    assert trial.status == "RUNNING"
    assert "b" in trial.hyperparameters.values
    assert "c" in trial.hyperparameters.values
def test_merge():
    hp = hp_module.HyperParameters()
    hp.Int("a", 0, 100)
    hp.Fixed("b", 2)

    hp2 = hp_module.HyperParameters()
    hp2.Fixed("a", 3)
    hp.Int("c", 10, 100, default=30)

    hp.merge(hp2)

    assert hp.get("a") == 3
    assert hp.get("b") == 2
    assert hp.get("c") == 30

    hp3 = hp_module.HyperParameters()
    hp3.Fixed("a", 5)
    hp3.Choice("d", [1, 2, 3], default=1)

    hp.merge(hp3, overwrite=False)

    assert hp.get("a") == 3
    assert hp.get("b") == 2
    assert hp.get("c") == 30
    assert hp.get("d") == 1

def test_merge_inactive_hp_with_conditional_scopes():
    hp = hp_module.HyperParameters()
    hp.Choice("a", [1, 2, 3], default=3)
    assert hp.get("a") == 3
    with hp.conditional_scope("a", 2):
        hp.Fixed("b", 4)

    hp2 = hp_module.HyperParameters()
    hp2.merge(hp)
    # Only active hyperparameters should be included in `values`.
    assert "a" in hp2.values
    assert "b" not in hp2.values
Example #4
    def test_convert_study_config_fixed(self):
        hps = hp_module.HyperParameters()
        hps.Fixed('beta', 0.1)
        study_config_float = cloud_tuner_utils.make_study_config(
            objective='accuracy', hyperparams=hps)
        self.assertEqual(study_config_float, STUDY_CONFIG_FIXED_FLOAT)

        hps = hp_module.HyperParameters()
        hps.Fixed('type', 'WIDE_AND_DEEP')
        study_config_categorical = cloud_tuner_utils.make_study_config(
            objective='accuracy', hyperparams=hps)
        self.assertEqual(study_config_categorical,
                         STUDY_CONFIG_FIXED_CATEGORICAL)
Example #5
def test_model_construction_factor_zero():
    hp = hp_module.HyperParameters()
    hm = aug_module.HyperImageAugment(input_shape=(None, None, 3))
    model = hm.build(hp)
    # `augment_layers` is searched over [0, 4] by default, with a default of 0.
    assert len(model.layers) == 1

    hp = hp_module.HyperParameters()
    hm = aug_module.HyperImageAugment(input_shape=(None, None, 3),
                                      augment_layers=0)
    model = hm.build(hp)
    # With all factors defaulting to zero, the model should contain only the input layer.
    assert len(model.layers) == 1

def test_build_with_conditional_scope():
    def build_model(hp):
        model_type = hp.Choice("model", ["v1", "v2"])
        with hp.conditional_scope("model", "v1"):
            v1_params = {
                "layers": hp.Int("layers", 1, 3),
                "units": hp.Int("units", 16, 32),
            }
        with hp.conditional_scope("model", "v2"):
            v2_params = {
                "layers": hp.Int("layers", 2, 4),
                "units": hp.Int("units", 32, 64),
            }

        params = v1_params if model_type == "v1" else v2_params
        inputs = keras.Input(10)
        x = inputs
        for _ in range(params["layers"]):
            x = keras.layers.Dense(params["units"])(x)
        outputs = keras.layers.Dense(1)(x)
        model = keras.Model(inputs, outputs)
        model.compile("sgd", "mse")
        return model

    hp = hp_module.HyperParameters()
    build_model(hp)
    assert hp.values == {
        "model": "v1",
        "layers": 1,
        "units": 16,
    }
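
For context, a build function like the one above is what gets handed to a tuner. A minimal usage sketch, assuming keras-tuner's public `RandomSearch` entry point and synthetic data (the directory and project names are placeholders):

import numpy as np
import kerastuner as kt

x = np.random.rand(64, 10)
y = np.random.rand(64, 1)

tuner = kt.RandomSearch(build_model, objective="val_loss", max_trials=3,
                        directory="demo_dir", project_name="demo")
tuner.search(x, y, validation_split=0.25, epochs=1)
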
def test_input_tensor():
    hp = hp_module.HyperParameters()
    inputs = tf.keras.Input(shape=(256, 256, 3))
    hypermodel = efficientnet.HyperEfficientNet(input_tensor=inputs,
                                                classes=10)
    model = hypermodel.build(hp)
    assert model.inputs == [inputs]
Example #8
def test_bayesian_oracle_maximize(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Int("a", -100, 100)

    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", direction="max"),
        max_trials=20,
        hyperparameters=hps,
        num_initial_points=2,
    )
    oracle._set_project_dir(tmp_dir, "untitled")

    # Make examples with high 'a' and high score.
    for i in range(5):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values["a"] = 10 * i
        trial.score = i
        trial.status = "COMPLETED"
        oracle.trials[trial.trial_id] = trial

    # Make examples with low 'a' and low score.
    for i in range(5):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values["a"] = -10 * i
        trial.score = -i
        trial.status = "COMPLETED"
        oracle.trials[trial.trial_id] = trial

    trial = oracle.create_trial("tuner0")
    assert trial.status == "RUNNING"
    # Assert that the oracle suggests hps it thinks will maximize.
    assert trial.hyperparameters.get("a") > 0
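
Once trials are recorded, the top performers can be read back from the oracle. A short follow-on sketch, assuming the oracle's public `get_best_trials` method:

best = oracle.get_best_trials(num_trials=1)[0]
print(best.hyperparameters.get("a"), best.score)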
Example #9
def test_bayesian_save_reload(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Choice("a", [1, 2], default=1)
    hps.Choice("b", [3, 4], default=3)
    hps.Choice("c", [5, 6], default=5)
    hps.Choice("d", [7, 8], default=7)
    hps.Choice("e", [9, 0], default=9)
    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", "max"),
        max_trials=20,
        hyperparameters=hps,
    )
    oracle._set_project_dir(tmp_dir, "untitled")

    for _ in range(3):
        trial = oracle.create_trial("tuner_id")
        oracle.update_trial(trial.trial_id, {"score": 1.0})
        oracle.end_trial(trial.trial_id, "COMPLETED")

    oracle.save()
    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", "max"),
        max_trials=20,
        hyperparameters=hps,
    )
    oracle._set_project_dir(tmp_dir, "untitled")
    oracle.reload()

    for _ in range(3):
        trial = oracle.create_trial("tuner_id")
        oracle.update_trial(trial.trial_id, {"score": 1.0})
        oracle.end_trial(trial.trial_id, "COMPLETED")

    assert len(oracle.trials) == 6

def test_nested_conditional_scopes_and_name_scopes():
    hp = hp_module.HyperParameters()
    a = hp.Choice("a", [1, 2, 3], default=3)
    with hp.conditional_scope("a", [1, 3]):
        b = hp.Choice("b", [4, 5, 6], default=6)
        with hp.conditional_scope("b", 6):
            c = hp.Choice("c", [7, 8, 9])
            with hp.name_scope("d"):
                e = hp.Choice("e", [10, 11, 12])
    with hp.conditional_scope("a", 2):
        f = hp.Choice("f", [13, 14, 15])
        with hp.name_scope("g"):
            h = hp.Int("h", 0, 10)

    assert hp.values == {
        "a": 3,
        "b": 6,
        "c": 7,
        "d/e": 10,
    }
    # Declaring an active conditional hyperparameter returns its value.
    assert a == 3
    assert b == 6
    assert c == 7
    assert e == 10
    # Declaring an inactive conditional hyperparameter returns `None`.
    assert f is None
    assert h is None
Example #11
    def test_convert_study_config_fixed(self, name, value, expected_config):
        hps = hp_module.HyperParameters()
        hps.Fixed(name, value)
        study_config = utils.make_study_config(
            objective="accuracy", hyperparams=hps
        )
        self._assert_study_config_equal(study_config, expected_config)
Example #12
def test_hyperparameter_existence_and_hp_defaults_rand_aug():
    hp = hp_module.HyperParameters()
    hm = aug_module.HyperImageAugment(input_shape=(32, 32, 3),
                                      augment_layers=[2, 5],
                                      contrast=False)
    hm.build(hp)
    assert hp.get("augment_layers") == 2
Example #13
    def _random_values(self):
        """Fills the hyperparameter space with random values.

        Returns:
            A dictionary mapping parameter names to suggested values.
        """
        collisions = 0
        while True:
            hps = hp_module.HyperParameters()
            # Generate a set of random values.
            for hp in self.hyperparameters.space:
                hps.merge([hp])
                if hps.is_active(hp):  # Only active params in `values`.
                    hps.values[hp.name] = hp.random_sample(self._seed_state)
                    self._seed_state += 1
            values = hps.values
            # Keep trying until the set of values is unique,
            # or until we exit due to too many collisions.
            values_hash = self._compute_values_hash(values)
            if values_hash in self._tried_so_far:
                collisions += 1
                if collisions > self._max_collisions:
                    return None
                continue
            self._tried_so_far.add(values_hash)
            break
        return values
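
The loop above boils down to "hash the sampled values and retry until the hash is unseen". A self-contained sketch of the same idea without the oracle's private helpers (the hashing scheme here is illustrative, not necessarily what keras-tuner uses internally):

import hashlib
import random

tried_so_far = set()
max_collisions = 5

def sample_unique_values(space_names):
    collisions = 0
    while True:
        values = {name: random.randint(-100, 100) for name in space_names}
        key = hashlib.sha256(repr(sorted(values.items())).encode()).hexdigest()
        if key in tried_so_far:
            collisions += 1
            if collisions > max_collisions:
                return None  # give up, mirroring `_random_values`
            continue
        tried_so_far.add(key)
        return values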
Example #14
    def test_convert_optimizer_trial_to_hps(self):
        hps = hp_module.HyperParameters()
        hps.Choice('learning_rate', [1e-4, 1e-3, 1e-2])
        optimizer_trial = {
            'name': 'trial_name',
            'state': 'ACTIVE',
            'parameters': [
                {'parameter': 'learning_rate', 'floatValue': 0.0001},
                {'parameter': 'num_layers', 'intValue': '2'},
                {'parameter': 'units_0', 'floatValue': 96},
                {'parameter': 'units_1', 'floatValue': 352},
            ],
        }
        trial_hps = cloud_tuner_utils.convert_optimizer_trial_to_hps(
            hps, optimizer_trial)
        self.assertEqual(trial_hps.values, EXPECTED_TRIAL_HPS)

def test_include_top_false():
    hp = hp_module.HyperParameters()
    hypermodel = xception.HyperXception(input_shape=(256, 256, 3),
                                        classes=10,
                                        include_top=False)
    model = hypermodel.build(hp)
    assert not model.optimizer

def test_trial_proto():
    hps = hp_module.HyperParameters()
    hps.Int("a", 0, 10, default=3)
    trial = trial_module.Trial(hps, trial_id="trial1", status="COMPLETED")
    trial.metrics.register("score", direction="max")
    trial.metrics.update("score", 10, step=1)

    proto = trial.to_proto()
    assert len(proto.hyperparameters.space.int_space) == 1
    assert proto.hyperparameters.values.values["a"].int_value == 3
    assert not proto.HasField("score")

    new_trial = trial_module.Trial.from_proto(proto)
    assert new_trial.status == "COMPLETED"
    assert new_trial.hyperparameters.get("a") == 3
    assert new_trial.trial_id == "trial1"
    assert new_trial.score is None
    assert new_trial.best_step is None

    trial.score = -10
    trial.best_step = 3

    proto = trial.to_proto()
    assert proto.HasField("score")
    assert proto.score.value == -10
    assert proto.score.step == 3

    new_trial = trial_module.Trial.from_proto(proto)
    assert new_trial.score == -10
    assert new_trial.best_step == 3
    assert new_trial.metrics.get_history("score") == [
        metrics_tracking.MetricObservation(10, step=1)
    ]
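
Because `to_proto` returns a standard protobuf message, a trial can be shipped across processes as bytes. A minimal round-trip sketch, assuming only the standard protobuf `SerializeToString`/`FromString` methods:

data = trial.to_proto().SerializeToString()
# ...transport `data`, then on the receiving side:
proto = type(trial.to_proto()).FromString(data)
restored = trial_module.Trial.from_proto(proto)
assert restored.trial_id == trial.trial_id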
Example #17
    def test_convert_hyperparams_to_hparams_fixed_bool(self):
        hps = hp_module.HyperParameters()
        hps.Fixed("condition", True)
        hparams = utils.convert_hyperparams_to_hparams(hps)
        expected_hparams = {
            hparams_api.HParam("condition", hparams_api.Discrete([True])): True,
        }
        self.assertEqual(repr(hparams), repr(expected_hparams))

def test_hyperparameter_override():
    hp = hp_module.HyperParameters()
    hp.Choice("pooling", ["flatten"])
    hp.Choice("num_dense_layers", [2])
    hypermodel = xception.HyperXception(input_shape=(256, 256, 3), classes=10)
    hypermodel.build(hp)
    assert hp.get("pooling") == "flatten"
    assert hp.get("num_dense_layers") == 2

def test_augmentation_param_fixed_model():
    hp = hp_module.HyperParameters()
    aug_model = tf.keras.Sequential(name="aug")
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(32, 32, 3),
                                                classes=10,
                                                augmentation_model=aug_model)
    model = hypermodel.build(hp)
    assert model.layers[1].name == "aug"

def test_include_top_false():
    hp = hp_module.HyperParameters()
    hypermodel = resnet.HyperResNet(input_shape=(256, 256, 3),
                                    classes=10,
                                    include_top=False)
    model = hypermodel.build(hp)
    # Check that model wasn't compiled.
    assert not model.optimizer
Example #21
    def test_convert_hyperparams_to_hparams_fixed(self, name, value):
        hps = hp_module.HyperParameters()
        hps.Fixed(name, value)
        hparams = utils.convert_hyperparams_to_hparams(hps)
        expected_hparams = {
            hparams_api.HParam(name, hparams_api.Discrete([value])): value,
        }
        self.assertEqual(repr(hparams), repr(expected_hparams))

def test_hyperparameter_override():
    hp = hp_module.HyperParameters()
    hp.Choice("version", ["v1"])
    hp.Fixed("conv3_depth", 10)
    hypermodel = resnet.HyperResNet(input_shape=(256, 256, 3), classes=10)
    hypermodel.build(hp)
    assert hp.get("version") == "v1"
    assert hp.get("conv3_depth") == 10
    assert hp.get("conv4_depth") == 6

def test_hyperparameter_existence_and_defaults():
    hp = hp_module.HyperParameters()
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(224, 224, 3),
                                                classes=10)
    hypermodel.build(hp)
    assert hp.get("version") == "B0"
    assert hp.get("top_dropout_rate") == 0.2
    assert hp.get("learning_rate") == 0.01
    assert hp.get("pooling") == "avg"
Example #24
def test_hyperparameter_override_rand_aug():
    hp = hp_module.HyperParameters()
    hp.Fixed("randaug_mag", 1.0)
    hp.Choice("randaug_count", [4])
    hm = aug_module.HyperImageAugment(input_shape=(32, 32, 3),
                                      augment_layers=[2, 4])
    hm.build(hp)
    assert hp.get("randaug_mag") == 1.0
    assert hp.get("randaug_count") == 4
Example #25
    def test_convert_hyperparams_to_hparams_boolean(self):
        hps = hp_module.HyperParameters()
        hps.Boolean("has_beta")
        hparams = utils.convert_hyperparams_to_hparams(hps)
        expected_hparams = {
            hparams_api.HParam("has_beta",
                               hparams_api.Discrete([True, False])): False,
        }
        self.assertEqual(repr(hparams), repr(expected_hparams))
Example #26
    def test_convert_hyperparams_to_hparams_choice(self):
        hps = hp_module.HyperParameters()
        hps.Choice("learning_rate", [1e-4, 1e-3, 1e-2])
        hparams = utils.convert_hyperparams_to_hparams(hps)
        expected_hparams = {
            hparams_api.HParam("learning_rate",
                               hparams_api.Discrete([1e-4, 1e-3, 1e-2])): 1e-4,
        }
        self.assertEqual(repr(hparams), repr(expected_hparams))
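
Mappings like these feed the TensorBoard HParams plugin. A minimal logging sketch, assuming the standard `tensorboard.plugins.hparams` API (the log directory and metric value are placeholders):

import tensorflow as tf
from tensorboard.plugins.hparams import api as hparams_api

with tf.summary.create_file_writer("logs/hparams_demo").as_default():
    # The plugin accepts plain names (or HParam objects) as keys.
    hparams_api.hparams({"learning_rate": 1e-4, "has_beta": False})
    tf.summary.scalar("accuracy", 0.9, step=1)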
Example #27
    def test_convert_study_config_categorical(self):
        hps = hp_module.HyperParameters()
        hps.Choice("model_type", ["LINEAR", "WIDE_AND_DEEP"])
        study_config = utils.make_study_config(
            objective="accuracy", hyperparams=hps)
        self._assert_study_config_equal(study_config, STUDY_CONFIG_CATEGORICAL)

        actual_hps = utils.convert_study_config_to_hps(study_config)
        self._assert_hps_equal(actual_hps, hps)

def test_parent_name():
    hp = hp_module.HyperParameters()
    hp.Choice("a", [1, 2, 3], default=2)
    b1 = hp.Int("b", 0, 10, parent_name="a", parent_values=1, default=5)
    b2 = hp.Int("b", 0, 100, parent_name="a", parent_values=2, default=4)
    assert b1 is None
    assert b2 == 4
    # Only active values appear in `values`.
    assert hp.values == {"a": 2, "b": 4}

def test_hyperparameter_existence_and_defaults():
    hp = hp_module.HyperParameters()
    hypermodel = resnet.HyperResNet(input_shape=(256, 256, 3), classes=10)
    hypermodel.build(hp)
    assert hp.get("version") == "v2"
    assert hp.get("conv3_depth") == 4
    assert hp.get("conv4_depth") == 6
    assert hp.get("learning_rate") == 0.01
    assert hp.get("pooling") == "avg"

def test_hyperparameter_override():
    hp = hp_module.HyperParameters()
    hp.Choice("version", ["B1"])
    hp.Fixed("top_dropout_rate", 0.5)
    hypermodel = efficientnet.HyperEfficientNet(input_shape=(256, 256, 3),
                                                classes=10)
    hypermodel.build(hp)
    assert hp.get("version") == "B1"
    assert hp.get("top_dropout_rate") == 0.5