def test_Int():
    """Int hyperparameter survives a get_config/from_config round-trip."""
    original = hp_module.Int('rg', min_value=5, max_value=9, step=1, default=6)
    restored = hp_module.Int.from_config(original.get_config())
    assert restored.default == 6
    # Samples stay within [min_value, max_value] and are ints.
    assert 5 <= restored.random_sample() <= 9
    assert isinstance(restored.random_sample(), int)
    # Sampling with the same seed is deterministic.
    assert restored.random_sample(123) == restored.random_sample(123)
    # When no default is given, min_value becomes the default.
    no_default = hp_module.Int('rg', min_value=5, max_value=9, step=1)
    assert no_default.default == 5
def test_sampling_arg():
    """`sampling` is preserved through config round-trips; bad values raise."""
    log_float = hp_module.Float('f', 1e-20, 1e10, sampling='log')
    log_float = hp_module.Float.from_config(log_float.get_config())
    assert log_float.sampling == 'log'

    linear_int = hp_module.Int('i', 0, 10, sampling='linear')
    linear_int = hp_module.Int.from_config(linear_int.get_config())
    assert linear_int.sampling == 'linear'

    # An unrecognized sampling mode is rejected.
    with pytest.raises(ValueError, match='`sampling` must be one of'):
        hp_module.Int('j', 0, 10, sampling='invalid')
def test_bayesian_oracle_with_zero_y(tmp_dir):
    """Oracle keeps proposing trials when every reported score is zero."""
    hp_list = [
        hp_module.Choice('a', [1, 2], default=1),
        hp_module.Int('b', 3, 10, default=3),
        hp_module.Float('c', 0, 1, 0.1, default=0),
        hp_module.Fixed('d', 7),
        hp_module.Choice('e', [9, 0], default=9),
    ]
    oracle = bo_module.BayesianOptimizationOracle()
    # Feed 100 trials, all with a constant (zero) objective value.
    for trial in range(100):
        trial_id = str(trial)
        oracle.populate_space(trial_id, hp_list)
        oracle.result(trial_id, 0)
def test_save_before_result(tmp_dir):
    """Oracle can be saved after populating a trial but before its result."""
    hp_list = [
        hp_module.Choice('a', [1, 2], default=1),
        hp_module.Int('b', 3, 10, default=3),
        hp_module.Float('c', 0, 1, 0.1, default=0),
        hp_module.Fixed('d', 7),
        hp_module.Choice('e', [9, 0], default=9),
    ]
    oracle = bo_module.BayesianOptimizationOracle()
    trial_id = str(1)
    oracle.populate_space(trial_id, hp_list)
    # Saving mid-trial (no result reported yet) must not fail.
    oracle.save(os.path.join(tmp_dir, 'temp_oracle'))
    oracle.result(trial_id, 0)
def test_sampling_arg():
    """`sampling` is preserved through config round-trips and validated.

    NOTE(review): another function named ``test_sampling_arg`` exists earlier
    in this file; pytest only collects the last definition of a given name —
    confirm the duplication is intended.
    """
    f = hp_module.Float("f", 1e-20, 1e10, sampling="log")
    f = hp_module.Float.from_config(f.get_config())
    assert f.sampling == "log"

    i = hp_module.Int("i", 0, 10, sampling="linear")
    i = hp_module.Int.from_config(i.get_config())
    assert i.sampling == "linear"

    # An unrecognized sampling mode is rejected.
    with pytest.raises(ValueError, match="`sampling` must be one of"):
        hp_module.Int("j", 0, 10, sampling="invalid")

    # min_value > max_value is rejected. (The original repeated this exact
    # check twice back-to-back; the redundant duplicate was removed.)
    with pytest.raises(
        ValueError,
        match="`sampling` `min_value` 1 is greater than the `max_value` 0",
    ):
        hp_module.Int("k", 1, 0, sampling="linear")
def test_bayesian_dynamic_space(tmp_dir):
    """Oracle incorporates hyperparameters added after the initial trials."""
    hp_list = [hp_module.Choice('a', [1, 2], default=1)]
    oracle = bo_module.BayesianOptimizationOracle()
    # Seed the oracle with ten completed trials on the initial space.
    for trial in range(10):
        trial_id = str(trial)
        oracle.populate_space(trial_id, hp_list)
        oracle.result(trial_id, trial)

    # Grow the space one hyperparameter at a time; each new name must show
    # up in the values proposed for the next trial.
    additions = [
        ('b', hp_module.Int('b', 3, 10, default=3)),
        ('c', hp_module.Float('c', 0, 1, 0.1, default=0)),
        ('d', hp_module.Fixed('d', 7)),
        ('e', hp_module.Choice('e', [9, 0], default=9)),
    ]
    for index, (name, new_hp) in enumerate(additions):
        hp_list.append(new_hp)
        proposal = oracle.populate_space('1_{}'.format(index), hp_list)
        assert name in proposal['values']
def test_int_proto():
    """Int hyperparameter round-trips through its protobuf representation."""
    hp = hp_module.Int('a', 1, 100, sampling='log')

    proto = hp.to_proto()
    assert proto.name == 'a'
    assert proto.min_value == 1
    assert proto.max_value == 100
    assert proto.sampling == kerastuner_pb2.Sampling.LOG
    # The proto materializes the implicit default (min_value) and step.
    assert proto.default == 1
    assert proto.step == 1

    recovered = hp_module.Int.from_proto(proto)
    assert recovered._default == 1
    # Clear the now-explicit default so the configs compare equal.
    recovered._default = None
    assert recovered.get_config() == hp.get_config()
def assemble(self, input_node):
    """Build an ImageBlock, narrowing the search space for small datasets.

    For small images (both spatial dims < 32) with fewer than 10000 samples,
    the resnet conv4 depths are pinned to 6 and the xception residual-block
    count is restricted to [2, 4].
    """
    block = hyperblock.ImageBlock()
    small_image = max(self._shape[0], self._shape[1]) < 32
    if small_image and self._num_samples < 10000:
        resnet_names = [
            block.name + '_resnet/v1/conv4_depth',
            block.name + '_resnet/v2/conv4_depth',
            block.name + '_resnet/next/conv4_depth',
        ]
        for name in resnet_names:
            self.hps.append(hp_module.Choice(name, [6], default=6))
        self.hps.append(hp_module.Int(
            block.name + '_xception/num_residual_blocks', 2, 4, default=4))
    return block(input_node)