Example #1
def test_hyperparameters_added(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Int("a", -100, 100)

    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", direction="max"),
        max_trials=20,
        hyperparameters=hps,
        num_initial_points=2,
    )
    oracle._set_project_dir(tmp_dir, "untitled")

    # Populate initial trials.
    for i in range(10):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values["a"] = 10 * i
        trial.score = i
        trial.status = "COMPLETED"
        oracle.trials[trial.trial_id] = trial

    # Update the space.
    new_hps = hp_module.HyperParameters()
    new_hps.Float("b", 3.2, 6.4, step=0.2, default=3.6)
    new_hps.Boolean("c", default=True)
    oracle.update_space(new_hps)

    # Make a new trial; it should have 'b' and 'c' set.
    trial = oracle.create_trial("tuner0")
    assert trial.status == "RUNNING"
    assert "b" in trial.hyperparameters.values
    assert "c" in trial.hyperparameters.values
Example #2
def test_merge():
    hp = hp_module.HyperParameters()
    hp.Int("a", 0, 100)
    hp.Fixed("b", 2)

    hp2 = hp_module.HyperParameters()
    hp2.Fixed("a", 3)
    hp.Int("c", 10, 100, default=30)

    hp.merge(hp2)

    assert hp.get("a") == 3
    assert hp.get("b") == 2
    assert hp.get("c") == 30

    hp3 = hp_module.HyperParameters()
    hp3.Fixed("a", 5)
    hp3.Choice("d", [1, 2, 3], default=1)

    hp.merge(hp3, overwrite=False)

    assert hp.get("a") == 3
    assert hp.get("b") == 2
    assert hp.get("c") == 30
    assert hp.get("d") == 1
Example #3
def test_hyperparameters_added(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Int('a', -100, 100)

    oracle = bo_module.BayesianOptimizationOracle(objective=kt.Objective(
        'score', direction='max'),
                                                  max_trials=20,
                                                  hyperparameters=hps,
                                                  num_initial_points=2)
    oracle._set_project_dir(tmp_dir, 'untitled')

    # Populate initial trials.
    for i in range(10):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values['a'] = 10 * i
        trial.score = i
        trial.status = 'COMPLETED'
        oracle.trials[trial.trial_id] = trial

    # Update the space.
    new_hps = hp_module.HyperParameters()
    new_hps.Float('b', 3.2, 6.4, step=0.2, default=3.6)
    new_hps.Boolean('c', default=True)
    oracle.update_space(new_hps)

    # Make a new trial; it should have 'b' and 'c' set.
    trial = oracle.create_trial('tuner0')
    assert trial.status == 'RUNNING'
    assert 'b' in trial.hyperparameters.values
    assert 'c' in trial.hyperparameters.values
Example #4
def test_merge():
    hp = hp_module.HyperParameters()
    hp.Int('a', 0, 100)
    hp.Fixed('b', 2)

    hp2 = hp_module.HyperParameters()
    hp2.Fixed('a', 3)
    hp.Int('c', 10, 100, default=30)

    hp.merge(hp2)

    assert hp.get('a') == 3
    assert hp.get('b') == 2
    assert hp.get('c') == 30

    hp3 = hp_module.HyperParameters()
    hp3.Fixed('a', 5)
    hp3.Choice('d', [1, 2, 3], default=1)

    hp.merge(hp3, overwrite=False)

    assert hp.get('a') == 3
    assert hp.get('b') == 2
    assert hp.get('c') == 30
    assert hp.get('d') == 1
Example #5
def test_merge_inactive_hp_with_conditional_scopes():
    hp = hp_module.HyperParameters()
    hp.Choice('a', [1, 2, 3], default=3)
    assert hp.get('a') == 3
    with hp.conditional_scope('a', 2):
        hp.Fixed('b', 4)

    hp2 = hp_module.HyperParameters()
    hp2.merge(hp)
    # Only active hyperparameters should be included in the values.
    assert 'a' in hp2.values
    assert 'b' not in hp2.values
Example #6
    def convert_study_config_fixed(self):
        hps = hp_module.HyperParameters()
        hps.Fixed("beta", 0.1)
        study_config_float = utils.make_study_config(objective="accuracy",
                                                     hyperparams=hps)
        self.assertEqual(study_config_float, STUDY_CONFIG_FIXED_FLOAT)

        hps = hp_module.HyperParameters()
        hps.Fixed("type", "WIDE_AND_DEEP")
        study_config_categorical = utils.make_study_config(
            objective="accuracy", hyperparams=hps)
        self.assertEqual(study_config_categorical,
                         STUDY_CONFIG_FIXED_CATEGORICAL)
Example #7
    def convert_study_config_fixed(self):
        hps = hp_module.HyperParameters()
        hps.Fixed('beta', 0.1)
        study_config_float = cloud_tuner_utils.make_study_config(
            objective='accuracy', hyperparams=hps)
        self.assertEqual(study_config_float, STUDY_CONFIG_FIXED_FLOAT)

        hps = hp_module.HyperParameters()
        hps.Fixed('type', 'WIDE_AND_DEEP')
        study_config_categorical = cloud_tuner_utils.make_study_config(
            objective='accuracy', hyperparams=hps)
        self.assertEqual(study_config_categorical,
                         STUDY_CONFIG_FIXED_CATEGORICAL)
Example #8
def test_model_construction_factor_zero():
    hp = hp_module.HyperParameters()
    hm = aug_module.HyperImageAugment(input_shape=(None, None, 3))
    model = hm.build(hp)
    # 'augment_layers' searches the default space [0, 4], with a default of 0.
    assert len(model.layers) == 1

    hp = hp_module.HyperParameters()
    hm = aug_module.HyperImageAugment(input_shape=(None, None, 3),
                                      augment_layers=0)
    model = hm.build(hp)
    # All transform factors default to zero, so the model should have only the input layer.
    assert len(model.layers) == 1
Example #9
def test_trial_proto():
    hps = hp_module.HyperParameters()
    hps.Int("a", 0, 10, default=3)
    trial = trial_module.Trial(hps, trial_id="trial1", status="COMPLETED")
    trial.metrics.register("score", direction="max")
    trial.metrics.update("score", 10, step=1)

    proto = trial.to_proto()
    assert len(proto.hyperparameters.space.int_space) == 1
    assert proto.hyperparameters.values.values["a"].int_value == 3
    assert not proto.HasField("score")

    new_trial = trial_module.Trial.from_proto(proto)
    assert new_trial.status == "COMPLETED"
    assert new_trial.hyperparameters.get("a") == 3
    assert new_trial.trial_id == "trial1"
    assert new_trial.score is None
    assert new_trial.best_step is None

    trial.score = -10
    trial.best_step = 3

    proto = trial.to_proto()
    assert proto.HasField("score")
    assert proto.score.value == -10
    assert proto.score.step == 3

    new_trial = trial_module.Trial.from_proto(proto)
    assert new_trial.score == -10
    assert new_trial.best_step == 3
    assert new_trial.metrics.get_history("score") == [
        metrics_tracking.MetricObservation(10, step=1)
    ]
Example #10
def test_nested_conditional_scopes_and_name_scopes():
    hp = hp_module.HyperParameters()
    a = hp.Choice('a', [1, 2, 3], default=3)
    with hp.conditional_scope('a', [1, 3]):
        b = hp.Choice('b', [4, 5, 6], default=6)
        with hp.conditional_scope('b', 6):
            c = hp.Choice('c', [7, 8, 9])
            with hp.name_scope('d'):
                e = hp.Choice('e', [10, 11, 12])
    with hp.conditional_scope('a', 2):
        f = hp.Choice('f', [13, 14, 15])
        with hp.name_scope('g'):
            h = hp.Int('h', 0, 10)

    assert hp.values == {
        'a': 3,
        'b': 6,
        'c': 7,
        'd/e': 10,
    }
    # Assignment to an active conditional hyperparameter returns the value.
    assert a == 3
    assert b == 6
    assert c == 7
    assert e == 10
    # Assignment to a non-active conditional hyperparameter returns `None`.
    assert f is None
    assert h is None
Example #11
def test_trial_proto():
    hps = hp_module.HyperParameters()
    hps.Int('a', 0, 10, default=3)
    trial = trial_module.Trial(hps, trial_id='trial1', status='COMPLETED')
    trial.metrics.register('score', direction='max')
    trial.metrics.update('score', 10, step=1)

    proto = trial.to_proto()
    assert len(proto.hyperparameters.space.int_space) == 1
    assert proto.hyperparameters.values.values['a'].int_value == 3
    assert not proto.HasField('score')

    new_trial = trial_module.Trial.from_proto(proto)
    assert new_trial.status == 'COMPLETED'
    assert new_trial.hyperparameters.get('a') == 3
    assert new_trial.trial_id == 'trial1'
    assert new_trial.score is None
    assert new_trial.best_step is None

    trial.score = -10
    trial.best_step = 3

    proto = trial.to_proto()
    assert proto.HasField('score')
    assert proto.score.value == -10
    assert proto.score.step == 3

    new_trial = trial_module.Trial.from_proto(proto)
    assert new_trial.score == -10
    assert new_trial.best_step == 3
    assert new_trial.metrics.get_history('score') == [
        metrics_tracking.MetricObservation(10, step=1)
    ]
Example #12
def test_include_top_false():
    hp = hp_module.HyperParameters()
    hypermodel = xception.HyperXception(input_shape=(256, 256, 3),
                                        classes=10,
                                        include_top=False)
    model = hypermodel.build(hp)
    assert not model.optimizer
Example #13
def test_nested_conditional_scopes_and_name_scopes():
    hp = hp_module.HyperParameters()
    a = hp.Choice('a', [1, 2, 3], default=2)
    with hp.conditional_scope('a', [1, 3]):
        b = hp.Choice('b', [4, 5, 6])
        with hp.conditional_scope('b', 6):
            c = hp.Choice('c', [7, 8, 9])
            with hp.name_scope('d'):
                e = hp.Choice('e', [10, 11, 12])
    with hp.conditional_scope('a', 2):
        f = hp.Choice('f', [13, 14, 15])

    assert hp.values == {
        'a': 2,
        'a=1,3/b': 4,
        'a=1,3/b=6/c': 7,
        'a=1,3/b=6/d/e': 10,
        'a=2/f': 13
    }
    # Assignment to an active conditional hyperparameter returns the value.
    assert a == 2
    assert f == 13
    # Assignment to a non-active conditional hyperparameter returns `None`.
    assert b is None
    assert c is None
    assert e is None
Example #14
    def test_convert_optimizer_trial_to_hps(self):
        hps = hp_module.HyperParameters()
        hps.Choice('learning_rate', [1e-4, 1e-3, 1e-2])
        optimizer_trial = {
            'name': 'trial_name',
            'state': 'ACTIVE',
            'parameters': [
                {'parameter': 'learning_rate', 'floatValue': 0.0001},
                {'parameter': 'num_layers', 'intValue': '2'},
                {'parameter': 'units_0', 'floatValue': 96},
                {'parameter': 'units_1', 'floatValue': 352},
            ],
        }
        trial_hps = cloud_tuner_utils.convert_optimizer_trial_to_hps(
            hps, optimizer_trial)
        self.assertEqual(trial_hps.values, EXPECTED_TRIAL_HPS)
Example #15
def test_hyperband_tuner(patch_fit, patch_load, tmp_dir):
    x = np.random.rand(10, 2, 2).astype('float32')
    y = np.random.randint(0, 1, (10, ))
    val_x = np.random.rand(10, 2, 2).astype('float32')
    val_y = np.random.randint(0, 1, (10, ))

    tuner = HyperbandStub(build_model,
                          objective='val_accuracy',
                          max_trials=15,
                          factor=2,
                          min_epochs=1,
                          max_epochs=2,
                          executions_per_trial=3,
                          directory=tmp_dir)

    hp = hyperparameters.HyperParameters()
    hp.values['tuner/epochs'] = 10
    trial_id = '1'
    hp.values['tuner/trial_id'] = trial_id

    tuner.run_trial(
        trial_module.Trial(trial_id, hp, 5, base_directory=tmp_dir), hp, [], {
            'x': x,
            'y': y,
            'epochs': 1,
            'validation_data': (val_x, val_y)
        })
    assert patch_fit.called
    assert patch_load.called
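This test (like Example #24 below) calls a build_model helper that isn't shown on this page. A minimal sketch of what such a builder could look like for the (10, 2, 2) inputs above; this is a hypothetical stand-in, not the original helper:

import tensorflow as tf

def build_model(hp):
    # Hypothetical stand-in for the build_model helper the Hyperband
    # tests assume; the hidden-layer width is the tunable knob.
    model = tf.keras.Sequential([
        tf.keras.layers.Flatten(input_shape=(2, 2)),
        tf.keras.layers.Dense(hp.Int('units', 8, 32, step=8),
                              activation='relu'),
        tf.keras.layers.Dense(1, activation='sigmoid'),
    ])
    model.compile(optimizer='adam',
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model

Compiling with an 'accuracy' metric is what makes the 'val_accuracy' objective in these tests observable.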
Example #16
def test_bayesian_oracle_maximize(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Int("a", -100, 100)

    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", direction="max"),
        max_trials=20,
        hyperparameters=hps,
        num_initial_points=2,
    )
    oracle._set_project_dir(tmp_dir, "untitled")

    # Make examples with high 'a' and high score.
    for i in range(5):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values["a"] = 10 * i
        trial.score = i
        trial.status = "COMPLETED"
        oracle.trials[trial.trial_id] = trial

    # Make examples with low 'a' and low score.
    for i in range(5):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values["a"] = -10 * i
        trial.score = -i
        trial.status = "COMPLETED"
        oracle.trials[trial.trial_id] = trial

    trial = oracle.create_trial("tuner0")
    assert trial.status == "RUNNING"
    # Assert that the oracle suggests hps it thinks will maximize.
    assert trial.hyperparameters.get("a") > 0
Example #17
def test_bayesian_save_reload(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Choice("a", [1, 2], default=1)
    hps.Choice("b", [3, 4], default=3)
    hps.Choice("c", [5, 6], default=5)
    hps.Choice("d", [7, 8], default=7)
    hps.Choice("e", [9, 0], default=9)
    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", "max"), max_trials=20, hyperparameters=hps
    )
    oracle._set_project_dir(tmp_dir, "untitled")

    for _ in range(3):
        trial = oracle.create_trial("tuner_id")
        oracle.update_trial(trial.trial_id, {"score": 1.0})
        oracle.end_trial(trial.trial_id, "COMPLETED")

    oracle.save()
    oracle = bo_module.BayesianOptimizationOracle(
        objective=kt.Objective("score", "max"), max_trials=20, hyperparameters=hps
    )
    oracle._set_project_dir(tmp_dir, "untitled")
    oracle.reload()

    for _ in range(3):
        trial = oracle.create_trial("tuner_id")
        oracle.update_trial(trial.trial_id, {"score": 1.0})
        oracle.end_trial(trial.trial_id, "COMPLETED")

    assert len(oracle.trials) == 6
Example #18
def test_return_default_value_if_not_populated():
    hp = hp_module.HyperParameters()

    assert hp.Choice(
        'hp_name',
        ['hp_value', 'hp_value_default'],
        default='hp_value_default') == 'hp_value_default'
Example #19
def test_input_tensor():
    hp = hp_module.HyperParameters()
    inputs = tf.keras.Input(shape=(256, 256, 3))
    hypermodel = efficientnet.HyperEfficientNet(input_tensor=inputs,
                                                classes=10)
    model = hypermodel.build(hp)
    assert model.inputs == [inputs]
Example #20
def test_bayesian_save_reload(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Choice('a', [1, 2], default=1)
    hps.Choice('b', [3, 4], default=3)
    hps.Choice('c', [5, 6], default=5)
    hps.Choice('d', [7, 8], default=7)
    hps.Choice('e', [9, 0], default=9)
    oracle = bo_module.BayesianOptimizationOracle(objective=kt.Objective(
        'score', 'max'),
                                                  max_trials=20,
                                                  hyperparameters=hps)
    oracle._set_project_dir(tmp_dir, 'untitled')

    for _ in range(3):
        trial = oracle.create_trial('tuner_id')
        oracle.update_trial(trial.trial_id, {'score': 1.})
        oracle.end_trial(trial.trial_id, "COMPLETED")

    oracle.save()
    oracle = bo_module.BayesianOptimizationOracle(objective=kt.Objective(
        'score', 'max'),
                                                  max_trials=20,
                                                  hyperparameters=hps)
    oracle._set_project_dir(tmp_dir, 'untitled')
    oracle.reload()

    for _ in range(3):
        trial = oracle.create_trial('tuner_id')
        oracle.update_trial(trial.trial_id, {'score': 1.})
        oracle.end_trial(trial.trial_id, "COMPLETED")

    assert len(oracle.trials) == 6
Example #21
def test_float_optimization(tmp_dir):
    def build_model(hp):
        # Maximum at a=-1, b=1, c=1, d=0 with score=3
        return -1 * hp['a']**3 + hp['b']**3 + hp['c'] - abs(hp['d'])

    class PolynomialTuner(kt.engine.base_tuner.BaseTuner):
        def run_trial(self, trial):
            hps = trial.hyperparameters
            score = self.hypermodel.build(hps)
            self.oracle.update_trial(trial.trial_id, {'score': score})

    hps = hp_module.HyperParameters()
    hps.Float('a', -1, 1)
    hps.Float('b', -1, 1)
    hps.Float('c', -1, 1)
    hps.Float('d', -1, 1)

    tuner = PolynomialTuner(hypermodel=build_model,
                            oracle=kt.oracles.BayesianOptimization(
                                objective=kt.Objective('score', 'max'),
                                hyperparameters=hps,
                                max_trials=50),
                            directory=tmp_dir)

    tuner.search()

    atol, rtol = 1e-2, 1e-2
    best_trial = tuner.oracle.get_best_trials()[0]
    best_hps = best_trial.hyperparameters

    assert np.isclose(best_trial.score, 3, atol=atol, rtol=rtol)
    assert np.isclose(best_hps['a'], -1, atol=atol, rtol=rtol)
    assert np.isclose(best_hps['b'], 1, atol=atol, rtol=rtol)
    assert np.isclose(best_hps['c'], 1, atol=atol, rtol=rtol)
    assert np.isclose(best_hps['d'], 0, atol=atol, rtol=rtol)
Example #22
def test_include_top_false():
    hp = hp_module.HyperParameters()
    hypermodel = resnet.HyperResNet(
        input_shape=(256, 256, 3), classes=10, include_top=False)
    model = hypermodel.build(hp)
    # Check that model wasn't compiled.
    assert not model.optimizer
Example #23
def test_input_tensor():
    hp = hp_module.HyperParameters()
    inputs = tf.keras.Input((256, 256, 3))
    hypermodel = xception.HyperXception(
        input_tensor=inputs, include_top=False)
    model = hypermodel.build(hp)
    assert model.inputs == [inputs]
Example #24
def test_hyperband_tuner(patch_fit, patch_load, tmp_dir):
    x = np.random.rand(10, 2, 2).astype('float32')
    y = np.random.randint(0, 1, (10, ))
    val_x = np.random.rand(10, 2, 2).astype('float32')
    val_y = np.random.randint(0, 1, (10, ))

    tuner = hyperband_module.Hyperband(build_model,
                                       objective='val_accuracy',
                                       max_trials=15,
                                       factor=2,
                                       min_epochs=1,
                                       max_epochs=2,
                                       directory=tmp_dir)

    hp = hyperparameters.HyperParameters()
    history_trial = trial_module.Trial(hyperparameters=hp.copy())
    history_trial.score = 1
    history_trial.best_step = 0
    hp.values['tuner/epochs'] = 10
    hp.values['tuner/trial_id'] = history_trial.trial_id
    tuner.oracle.trials[history_trial.trial_id] = history_trial

    trial = trial_module.Trial(hyperparameters=hp)
    tuner.oracle.trials[trial.trial_id] = trial
    tuner.run_trial(trial, x=x, y=y, epochs=1, validation_data=(val_x, val_y))
    assert patch_fit.called
    assert patch_load.called
Example #25
def test_hyperparameter_existence_and_hp_defaults_rand_aug():
    hp = hp_module.HyperParameters()
    hm = aug_module.HyperImageAugment(input_shape=(32, 32, 3),
                                      augment_layers=[2, 5],
                                      contrast=False)
    hm.build(hp)
    assert hp.get("augment_layers") == 2
Example #26
def test_build_with_conditional_scope():
    def build_model(hp):
        model = hp.Choice("model", ["v1", "v2"])
        with hp.conditional_scope("model", "v1"):
            v1_params = {
                "layers": hp.Int("layers", 1, 3),
                "units": hp.Int("units", 16, 32),
            }
        with hp.conditional_scope("model", "v2"):
            v2_params = {
                "layers": hp.Int("layers", 2, 4),
                "units": hp.Int("units", 32, 64),
            }

        params = v1_params if model == "v1" else v2_params
        inputs = keras.Input(10)
        x = inputs
        for _ in range(params["layers"]):
            x = keras.layers.Dense(params["units"])(x)
        outputs = keras.layers.Dense(1)(x)
        model = keras.Model(inputs, outputs)
        model.compile("sgd", "mse")
        return model

    hp = hp_module.HyperParameters()
    build_model(hp)
    assert hp.values == {
        "model": "v1",
        "layers": 1,
        "units": 16,
    }
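For context, a sketch of how a conditional build function like this might be handed to a tuner; the data, trial count, and directory here are made up for illustration:

import numpy as np
import kerastuner as kt

# Hypothetical usage: reuses build_model from the test above with
# random data; settings are arbitrary.
x = np.random.rand(8, 10).astype('float32')
y = np.random.rand(8, 1).astype('float32')
tuner = kt.RandomSearch(build_model, objective='loss', max_trials=2,
                        directory='/tmp/conditional_scope_demo')
tuner.search(x, y, epochs=1, verbose=0)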
Example #27
def test_build_with_conditional_scope():

    def build_model(hp):
        model = hp.Choice('model', ['v1', 'v2'])
        with hp.conditional_scope('model', 'v1'):
            v1_params = {'layers': hp.Int('layers', 1, 3),
                         'units': hp.Int('units', 16, 32)}
        with hp.conditional_scope('model', 'v2'):
            v2_params = {'layers': hp.Int('layers', 2, 4),
                         'units': hp.Int('units', 32, 64)}

        params = v1_params if model == 'v1' else v2_params
        inputs = keras.Input(10)
        x = inputs
        for _ in range(params['layers']):
            x = keras.layers.Dense(params['units'])(x)
        outputs = keras.layers.Dense(1)(x)
        model = keras.Model(inputs, outputs)
        model.compile('sgd', 'mse')
        return model

    hp = hp_module.HyperParameters()
    build_model(hp)
    assert hp.values == {
        'model': 'v1',
        'model=v1/layers': 1,
        'model=v1/units': 16,
        'model=v2/layers': 2,
        'model=v2/units': 32
    }
Example #28
def test_nested_conditional_scopes_and_name_scopes():
    hp = hp_module.HyperParameters()
    a = hp.Choice("a", [1, 2, 3], default=3)
    with hp.conditional_scope("a", [1, 3]):
        b = hp.Choice("b", [4, 5, 6], default=6)
        with hp.conditional_scope("b", 6):
            c = hp.Choice("c", [7, 8, 9])
            with hp.name_scope("d"):
                e = hp.Choice("e", [10, 11, 12])
    with hp.conditional_scope("a", 2):
        f = hp.Choice("f", [13, 14, 15])
        with hp.name_scope("g"):
            h = hp.Int("h", 0, 10)

    assert hp.values == {
        "a": 3,
        "b": 6,
        "c": 7,
        "d/e": 10,
    }
    # Assignment to an active conditional hyperparameter returns the value.
    assert a == 3
    assert b == 6
    assert c == 7
    assert e == 10
    # Assignment to a non-active conditional hyperparameter returns `None`.
    assert f is None
    assert h is None
Example #29
def test_hyperband_save_load_at_beginning(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Choice('a', [1, 2], default=1)
    hps.Choice('b', [3, 4], default=3)
    hps.Choice('c', [5, 6], default=5)
    hps.Choice('d', [7, 8], default=7)
    hps.Choice('e', [9, 0], default=9)
    oracle = hyperband_module.HyperbandOracle(objective='score',
                                              max_trials=50,
                                              hyperparameters=hps)
    oracle._set_project_dir(tmp_dir, 'untitled')

    oracle.save()
    oracle = hyperband_module.HyperbandOracle(objective='score',
                                              max_trials=50,
                                              hyperparameters=hps)
    oracle._set_project_dir(tmp_dir, 'untitled')
    oracle.reload()

    trials = []
    for i in range(oracle._model_sequence[0]):
        trial = oracle.create_trial(i)
        trials.append(trial)
        assert trial.status == 'RUNNING'
        oracle.update_trial(trial.trial_id, {'score': 1})

    trial = oracle.create_trial('idle0')
    assert trial.status == 'IDLE'

    for trial in trials:
        oracle.end_trial(trial.trial_id, 'COMPLETED')
Example #30
def test_bayesian_oracle_maximize(tmp_dir):
    hps = hp_module.HyperParameters()
    hps.Int('a', -100, 100)

    oracle = bo_module.BayesianOptimizationOracle(objective=kt.Objective(
        'score', direction='max'),
                                                  max_trials=20,
                                                  hyperparameters=hps,
                                                  num_initial_points=2)
    oracle._set_project_dir(tmp_dir, 'untitled')

    # Make examples with high 'a' and high score.
    for i in range(5):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values['a'] = 10 * i
        trial.score = i
        trial.status = 'COMPLETED'
        oracle.trials[trial.trial_id] = trial

    # Make examples with low 'a' and low score.
    for i in range(5):
        trial = trial_module.Trial(hyperparameters=hps.copy())
        trial.hyperparameters.values['a'] = -10 * i
        trial.score = -i
        trial.status = 'COMPLETED'
        oracle.trials[trial.trial_id] = trial

    trial = oracle.create_trial('tuner0')
    assert trial.status == 'RUNNING'
    # Assert that the oracle suggests hps it thinks will maximize.
    assert trial.hyperparameters.get('a') > 0