def test_challenger_list_callback(self, patch_sample, patch_ei, patch_impute):
    """Verify that ``_maximize`` is invoked lazily by the challenger generator.

    The callback must fire only when the generator actually needs a
    non-random configuration, as steered by the random-configuration chooser.
    """
    values = (10, 1, 9, 2, 8, 3, 7, 4, 6, 5)
    patch_sample.return_value = ConfigurationMock(1)
    patch_ei.return_value = np.array([[value] for value in values], dtype=float)
    patch_impute.side_effect = lambda configs: values

    config_space = ConfigurationSpace()
    acq_func = EI(None)
    optimizer = RandomSearch(acq_func, config_space)

    # Without a chooser: the inner maximizer runs once, on first iteration.
    optimizer._maximize = unittest.mock.Mock()
    optimizer._maximize.return_value = [(0, 0)]
    challengers = optimizer.maximize(
        runhistory=None,
        stats=None,
        num_points=10,
    )
    self.assertEqual(optimizer._maximize.call_count, 0)
    next(challengers)
    self.assertEqual(optimizer._maximize.call_count, 1)

    # With a chooser: first draw is random, second triggers the callback,
    # third reuses the cached result.
    random_configuration_chooser = unittest.mock.Mock()
    random_configuration_chooser.check.side_effect = [
        True, False, False, False
    ]
    optimizer._maximize = unittest.mock.Mock()
    optimizer._maximize.return_value = [(0, 0), (1, 1)]
    challengers = optimizer.maximize(
        runhistory=None,
        stats=None,
        num_points=10,
        random_configuration_chooser=random_configuration_chooser,
    )
    self.assertEqual(optimizer._maximize.call_count, 0)

    # The first configuration is chosen at random (see the random_configuration_chooser mock)
    conf = next(challengers)
    self.assertIsInstance(conf, ConfigurationMock)
    self.assertEqual(optimizer._maximize.call_count, 0)

    # The 2nd configuration triggers the call to the callback (see the random_configuration_chooser mock)
    conf = next(challengers)
    self.assertEqual(optimizer._maximize.call_count, 1)
    self.assertEqual(conf, 0)

    # The 3rd configuration doesn't trigger the callback any more
    conf = next(challengers)
    self.assertEqual(optimizer._maximize.call_count, 1)
    self.assertEqual(conf, 1)

    with self.assertRaises(StopIteration):
        next(challengers)
def test_get_next_by_random_search_sorted(self, patch_sample, patch_ei, patch_impute):
    """Sorted random search must return configurations ordered by descending
    acquisition value and tag each with the 'Random Search (sorted)' origin."""
    values = (10, 1, 9, 2, 8, 3, 7, 4, 6, 5)
    patch_sample.return_value = [ConfigurationMock(value) for value in values]
    patch_ei.return_value = np.array([[value] for value in values], dtype=float)
    patch_impute.side_effect = lambda configs: values

    config_space = ConfigurationSpace()
    acq_func = EI(None)
    optimizer = RandomSearch(acq_func, config_space)
    result = optimizer._maximize(
        runhistory=None,
        stats=None,
        num_points=10,
        _sorted=True,
    )

    self.assertEqual(len(result), 10)
    for rank, (acq_value, config) in enumerate(result):
        # Descending order: 10, 9, ..., 1 for both the value and the score.
        self.assertIsInstance(config, ConfigurationMock)
        self.assertEqual(config.value, 10 - rank)
        self.assertEqual(acq_value, 10 - rank)
        self.assertEqual(config.origin, 'Random Search (sorted)')

    # Check that config.get_array works as desired and imputation is used
    # in between, we therefore have to retrieve the value from the mock!
    np.testing.assert_allclose(
        [v.value for v in patch_ei.call_args[0][0]],
        np.array(values, dtype=float),
    )
def test_get_next_by_random_search(self, patch):
    """Unsorted random search must yield ``num_points`` configurations with a
    zero acquisition score and the plain 'Random Search' origin."""
    patch.side_effect = lambda size: [ConfigurationMock()] * size

    config_space = ConfigurationSpace()
    acq_func = EI(None)
    optimizer = RandomSearch(acq_func, config_space)
    result = optimizer._maximize(
        runhistory=None,
        stats=None,
        num_points=10,
        _sorted=False,
    )

    self.assertEqual(len(result), 10)
    for acq_value, config in result:
        self.assertIsInstance(config, ConfigurationMock)
        self.assertEqual(config.origin, 'Random Search')
        self.assertEqual(acq_value, 0)
class InterleavedLocalAndRandomSearch(AcquisitionFunctionMaximizer):
    """Implements SMAC's default acquisition function optimization.

    This optimizer performs local search from the previous best points
    according to the acquisition function, uses the acquisition function to
    sort randomly sampled configurations and interleaves unsorted, randomly
    sampled configurations in between.

    Parameters
    ----------
    acquisition_function : ~smac.optimizer.acquisition.AbstractAcquisitionFunction

    config_space : ~smac.configspace.ConfigurationSpace

    rng : np.random.RandomState or int, optional
    """
    def __init__(
            self,
            acquisition_function: AbstractAcquisitionFunction,
            config_space: ConfigurationSpace,
            # Annotation fixed: was Union[bool, np.random.RandomState], which
            # contradicted the docstring ("int, optional") and excluded the
            # None default.
            rng: Union[int, np.random.RandomState, None] = None,
    ):
        super().__init__(acquisition_function, config_space, rng)
        self.random_search = RandomSearch(
            acquisition_function,
            config_space,
            rng,
        )
        self.local_search = LocalSearch(
            acquisition_function,
            config_space,
            rng,
        )
        # Largest acquisition value of the most recent challenger generation;
        # starts at the smallest positive float so any real value replaces it.
        self.max_acq_value = sys.float_info.min

    def maximize(
            self,
            runhistory: RunHistory,
            stats: Stats,
            num_points: int,
            *args
    ) -> Iterable[Configuration]:
        """Maximize the acquisition function.

        Combines local search around incumbents with acquisition-sorted
        random sampling, then returns the merged candidates wrapped in a
        ``ChallengerList`` (which interleaves purely random configurations).

        Parameters
        ----------
        runhistory : ~smac.runhistory.runhistory.RunHistory
        stats : ~smac.stats.stats.Stats
        num_points : int
            Total number of candidates; local search contributes 10 of them,
            the remainder comes from sorted random search.

        Returns
        -------
        Iterable[Configuration]
        """
        next_configs_by_local_search = self.local_search._maximize(
            runhistory, stats, 10,
        )

        # Get configurations sorted by EI
        next_configs_by_random_search_sorted = self.random_search._maximize(
            runhistory,
            stats,
            num_points - len(next_configs_by_local_search),
            _sorted=True,
        )

        # Having the configurations from random search, sorted by their
        # acquisition function value is important for the first few iterations
        # of SMAC. As long as the random forest predicts constant value, we
        # want to use only random configurations. Having them at the beginning
        # of the list ensures this (even after adding the configurations by
        # local search, and then sorting them)
        next_configs_by_acq_value = (
            next_configs_by_random_search_sorted
            + next_configs_by_local_search
        )
        next_configs_by_acq_value.sort(reverse=True, key=lambda x: x[0])
        self.logger.debug(
            "First 10 acq func (origin) values of selected configurations: %s",
            str([[_[0], _[1].origin] for _ in next_configs_by_acq_value[:10]]))
        # store the max last expansion (challengers generation)
        self.max_acq_value = next_configs_by_acq_value[0][0]
        next_configs_by_acq_value = [_[1] for _ in next_configs_by_acq_value]

        challengers = ChallengerList(next_configs_by_acq_value,
                                     self.config_space)
        return challengers

    def _maximize(
            self,
            runhistory: RunHistory,
            stats: Stats,
            num_points: int
    ) -> Iterable[Tuple[float, Configuration]]:
        # Public maximize() orchestrates the two sub-optimizers directly, so
        # the single-strategy hook is intentionally unsupported here.
        raise NotImplementedError()