def test_seeding_noisy_grid_search(space):
    """Check that the seed given at construction controls the sampled points."""
    def build(seed):
        # Fresh optimizer configured with the given noisygridsearch seed.
        return PrimaryAlgo(space, {'noisygridsearch': {'seed': seed}})

    seeded = build(1)
    reference = seeded.suggest(1)[0]
    # Consecutive suggestions from the same optimizer must differ.
    assert not numpy.allclose(reference, seeded.suggest(1)[0])
    # Re-creating with the same seed reproduces the first suggestion.
    assert numpy.allclose(reference, build(1).suggest(1)[0])
    # A different seed yields a different first suggestion.
    assert not numpy.allclose(reference, build(2).suggest(1)[0])
def test_seeding(space):
    """Seeding the TPE optimizer makes its sampling reproducible."""
    optimizer = PrimaryAlgo(space, 'tpeoptimizer')
    optimizer.seed_rng(1)
    first = optimizer.suggest(1)[0]
    # Without reseeding, the next draw must differ from the first.
    assert not numpy.allclose(first, optimizer.suggest(1)[0])
    # Reseeding with the same value replays the original draw.
    optimizer.seed_rng(1)
    assert numpy.allclose(first, optimizer.suggest(1)[0])
def test_seeding(space, algo):
    """Seeding after construction must not reset the sampling sequence."""
    optimizer = PrimaryAlgo(space, algo)
    optimizer.seed_rng(1)
    first = optimizer.suggest(1)[0]
    # Consecutive draws differ, as usual.
    assert not numpy.allclose(first, optimizer.suggest(1)[0])
    # Post-init reseeding is expected to be a no-op for this algorithm,
    # so the sequence keeps advancing instead of replaying.
    optimizer.seed_rng(1)
    assert not numpy.allclose(first, optimizer.suggest(1)[0])
def test_seeding(space):
    """Seeding Hyperband makes its sampling deterministic."""
    optimizer = PrimaryAlgo(space, 'hyperband')
    optimizer.seed_rng(1)
    first = optimizer.suggest(1)[0]
    # Without reseeding, the next draw must differ from the first.
    assert not numpy.allclose(first, optimizer.suggest(1)[0])
    # Reseeding with the same value replays the original draw.
    optimizer.seed_rng(1)
    assert numpy.allclose(first, optimizer.suggest(1)[0])
def test_seeding(space):
    """Seeding the Bayesian optimizer makes its sampling deterministic."""
    optimizer = PrimaryAlgo(space, 'bayesianoptimizer')
    optimizer.seed_rng(1)
    first = optimizer.suggest(1)[0]
    # Without reseeding, the next draw must differ from the first.
    assert not numpy.allclose(first, optimizer.suggest(1)[0])
    # Reseeding with the same value replays the original draw.
    optimizer.seed_rng(1)
    assert numpy.allclose(first, optimizer.suggest(1)[0])
def test_seed_rng(space):
    """Hyperband's suggestions are fully determined by the RNG seed."""
    optimizer = PrimaryAlgo(space, 'hyperband')
    optimizer.seed_rng(1)
    rung = optimizer.suggest(1)
    # Hyperband will always return the full first rung,
    # so asking again yields the same points.
    assert numpy.allclose(rung, optimizer.suggest(1))
    # A different seed produces a different rung.
    optimizer.seed_rng(2)
    assert not numpy.allclose(rung, optimizer.suggest(1))
def test_seeding(space):
    """Seeding mesh-adaptive direct search makes its sampling deterministic."""
    optimizer = PrimaryAlgo(space, "meshadaptivedirectsearch")
    optimizer.seed_rng(1)
    first = optimizer.suggest(1)[0]
    # Without reseeding, the next suggestion must not be identical...
    with pytest.raises(AssertionError):
        numpy.testing.assert_equal(first, optimizer.suggest(1)[0])
    # ...while reseeding replays the exact same point.
    optimizer.seed_rng(1)
    numpy.testing.assert_equal(first, optimizer.suggest(1)[0])
def test_set_state(space):
    """Restoring a saved state replays Hyperband's sampling."""
    optimizer = PrimaryAlgo(space, 'hyperband')
    optimizer.seed_rng(1)
    snapshot = optimizer.state_dict
    first = optimizer.suggest(1)[0]
    # The sequence keeps advancing without a state reset.
    assert not numpy.allclose(first, optimizer.suggest(1)[0])
    # Rewinding to the snapshot reproduces the first suggestion.
    optimizer.set_state(snapshot)
    assert numpy.allclose(first, optimizer.suggest(1)[0])
def test_set_state(space):
    """Restoring a saved state replays the Bayesian optimizer's sampling."""
    optimizer = PrimaryAlgo(space, 'bayesianoptimizer')
    optimizer.seed_rng(1)
    snapshot = optimizer.state_dict
    first = optimizer.suggest(1)[0]
    # The sequence keeps advancing without a state reset.
    assert not numpy.allclose(first, optimizer.suggest(1)[0])
    # Rewinding to the snapshot reproduces the first suggestion.
    optimizer.set_state(snapshot)
    assert numpy.allclose(first, optimizer.suggest(1)[0])
def _instantiate_algo(space, max_trials, config=None, ignore_unavailable=False):
    """Instantiate the algorithm object

    Parameters
    ----------
    space:
        Parameter space handed to ``PrimaryAlgo``.
    max_trials: int
        Forwarded to the instantiated algorithm as ``algo.algorithm.max_trials``.
    config: dict, optional
        Configuration of the algorithm. If None or empty, system's defaults
        are used (orion.core.config.experiment.algorithms).
    ignore_unavailable: bool, optional
        If True and algorithm is not available (plugin not installed),
        return the configuration. Otherwise, raise Factory error from
        PrimaryAlgo

    Returns
    -------
    The instantiated ``PrimaryAlgo``, or the raw ``config`` when the
    algorithm is unavailable and ``ignore_unavailable`` is True.

    """
    if not config:
        config = orion.core.config.experiment.algorithms

    try:
        algo = PrimaryAlgo(space, config)
        algo.algorithm.max_trials = max_trials
    except NotImplementedError as e:
        if not ignore_unavailable:
            # Idiomatic bare `raise` re-raises the active exception
            # (was `raise e`, which needlessly rebinds it).
            raise
        log.warning(str(e))
        log.warning("Algorithm will not be instantiated.")
        algo = config

    return algo
def _instantiate_config(self, config): """Check before dispatching experiment whether configuration corresponds to a executable experiment environment. 1. Check `refers` and instantiate `Adapter` objects from it. 2. Try to build parameter space from user arguments. 3. Check whether configured algorithms correspond to [known]/valid implementations of the ``Algorithm`` class. Instantiate these objects. 4. Check if experiment `is_done`, prompt for larger `max_trials` if it is. (TODO) """ # Just overwrite everything else given for section, value in config.items(): if section not in self.__slots__: log.info( "Found section '%s' in configuration. Experiments " "do not support this option. Ignoring.", section) continue if section.startswith('_'): log.info( "Found section '%s' in configuration. " "Cannot set private attributes. Ignoring.", section) continue # Copy sub configuration to value confusing side-effects # Only copy at this level, not `config` directly to avoid TypeErrors if config contains # non-serializable objects (copy.deepcopy complains otherwise). if isinstance(value, dict): value = copy.deepcopy(value) setattr(self, section, value) try: space_builder = SpaceBuilder() space = space_builder.build_from(config['metadata']['user_args']) if not space: raise ValueError( "Parameter space is empty. There is nothing to optimize.") # Instantiate algorithms self.algorithms = PrimaryAlgo(space, self.algorithms) except KeyError: pass if self.refers and not isinstance(self.refers.get('adapter'), BaseAdapter): self.refers['adapter'] = Adapter.build(self.refers['adapter']) if not self.producer.get('strategy'): self.producer = { 'strategy': Strategy(of_type="MaxParallelStrategy") } elif not isinstance(self.producer.get('strategy'), BaseParallelStrategy): self.producer = { 'strategy': Strategy(of_type=self.producer['strategy']) }
class BayesianOptimizer:
    """Wrapper driving Orion's ``BayesianOptimizer`` through ``PrimaryAlgo``.

    Exposes a simple sample/observe loop bounded by ``max_trials``.
    """

    def __init__(self, space, max_trials, seed, **kwargs):
        # PrimaryAlgo validates the space and builds the named algorithm.
        self.primary = PrimaryAlgo(space, {'BayesianOptimizer': kwargs})
        self.primary.algorithm.random_state = seed
        self.max_trials = max_trials
        # Number of results observed so far; drives is_completed().
        self.trial_count = 0

    @property
    def space(self):
        """Parameter space of the wrapped optimizer."""
        return self.primary.space

    def is_completed(self):
        """Return True once ``max_trials`` results have been observed."""
        return self.trial_count >= self.max_trials

    def get_params(self, seed=None):
        """Sample one parameter set; ``seed`` controls the draw (random if None)."""
        if seed is None:
            seed = random.randint(0, 100000)

        self.primary.algorithm._init_optimizer()
        optimizer = self.primary.algorithm.optimizer
        optimizer.rng.seed(seed)
        # Giving the same seed could be problematic since optimizer.rng and
        # optimizer.base_estimator.rng would be synchronized and sample the same values.
        optimizer.base_estimator_.random_state = optimizer.rng.randint(
            0, 100000)

        params = unflatten(
            dict(zip(self.space.keys(), self.primary.suggest()[0])))
        logger.debug('Sampling:\n{}'.format(pprint.pformat(params)))
        return params

    def observe(self, params, objective):
        """Report ``objective`` for ``params`` back to the optimizer."""
        params = flatten(params)
        params = [[params[param_name] for param_name in self.space.keys()]]
        results = [dict(objective=objective)]
        self.primary.observe(params, results)
        # Fix: count the observation. Nothing else in this class updated
        # trial_count, so is_completed() could never become True.
        self.trial_count += 1
def palgo(dumbalgo, space, fixed_suggestion):
    """Set up a PrimaryAlgo with dumb configuration."""
    # Nested DumbAlgo under 'subone' exercises sub-algorithm instantiation.
    nested = {'DumbAlgo': dict(value=6, scoring=5)}
    config = {'DumbAlgo': {'value': fixed_suggestion, 'subone': nested}}
    return PrimaryAlgo(space, config)
def palgo(dumbalgo, space, fixed_suggestion):
    """Set up a PrimaryAlgo with dumb configuration."""
    algo_config = {
        "DumbAlgo": {
            "value": fixed_suggestion,
            # Nested algorithm configuration under 'subone'.
            "subone": {"DumbAlgo": {"value": 6, "scoring": 5}},
        }
    }
    return PrimaryAlgo(space, algo_config)
def _instantiate_algo(space, config):
    """Instantiate the algorithm object

    Parameters
    ----------
    space:
        Parameter space handed to ``PrimaryAlgo``.
    config: dict, optional
        Configuration of the algorithm. If None or empty, system's defaults
        are used (orion.core.config.experiment.algorithms).

    Returns
    -------
    The instantiated ``PrimaryAlgo``.

    """
    # Docstring fixes: this configures the *algorithm* (not the strategy),
    # and "If None of empty" was a typo for "If None or empty".
    if not config:
        config = orion.core.config.experiment.algorithms

    return PrimaryAlgo(space, config)
def test_deltas_noisy_grid_search(monkeypatch, space):
    """Each grid point must be shifted by half a delta (in log space for yolo2)."""
    deltas = {'yolo1': 3, 'yolo2': 1}

    class OnesRandomState():
        """Stand-in RNG: uniform() always returns an array of ones."""

        def __init__(self, seed):
            pass

        def uniform(self, a, b, size):
            return numpy.ones(size)

    # Make the noise deterministic by replacing numpy's RandomState.
    monkeypatch.setattr('numpy.random.RandomState', OnesRandomState)

    config = {'seed': 3, 'deltas': deltas, 'n_points': 2}
    optimizer = PrimaryAlgo(space, {'noisygridsearch': config})
    points = optimizer.suggest(4)

    # Expected 2x2 grid: yolo1 in {-3, 3} (linear), yolo2 in {1, 10} (log scale).
    grid = [(-3, 1), (-3, 10), (3, 1), (3, 10)]
    for point, (linear_base, log_base) in zip(points, grid):
        assert point[0] == linear_base + deltas['yolo1'] / 2
        assert point[1] == numpy.exp(numpy.log(log_base) + deltas['yolo2'] / 2)
class TPEOptimizer:
    """Wrapper driving Orion's ``TPEOptimizer`` through ``PrimaryAlgo``.

    Exposes a simple sample/observe loop bounded by ``max_trials``.
    """

    def __init__(self, space, max_trials, seed, **kwargs):
        # PrimaryAlgo validates the space and builds the named algorithm.
        self.primary = PrimaryAlgo(space, {'TPEOptimizer': kwargs})
        self.primary.algorithm.random_state = seed
        self.max_trials = max_trials
        # Number of results observed so far; drives is_completed().
        self.trial_count = 0

    @property
    def space(self):
        """Parameter space of the wrapped optimizer."""
        return self.primary.space

    def is_completed(self):
        """Return True once ``max_trials`` results have been observed."""
        return self.trial_count >= self.max_trials

    def get_params(self, seed=None):
        """Sample one parameter set; ``seed`` controls the draw (random if None).

        Fix: ``seed`` now defaults to None, matching the ``if seed is None``
        branch below and the sibling BayesianOptimizer wrapper's signature.
        """
        if seed is None:
            seed = random.randint(0, 100000)

        # Seed both the TPE sampler and its fallback random sampler.
        self.primary.algorithm.study.sampler.rng.seed(seed)
        self.primary.algorithm.study.sampler.random_sampler.rng.seed(seed)

        params = unflatten(
            dict(zip(self.space.keys(), self.primary.suggest()[0])))
        logger.debug('Sampling:\n{}'.format(pprint.pformat(params)))
        return params

    def observe(self, params, objective):
        """Report ``objective`` for ``params`` back to the optimizer."""
        params = flatten(params)
        params = [[params[param_name] for param_name in self.space.keys()]]
        results = [dict(objective=objective)]
        self.primary.observe(params, results)
        # Fix: count the observation. Nothing else in this class updated
        # trial_count, so is_completed() could never become True.
        self.trial_count += 1
def _instantiate_config(self, config): """Check before dispatching experiment whether configuration corresponds to a executable experiment environment. 1. Check `refers` and instantiate `Experiment` objects from it. (TODO) 2. Try to build parameter space from user arguments. 3. Check whether configured algorithms correspond to [known]/valid implementations of the ``Algorithm`` class. Instantiate these objects. 4. Check if experiment `is_done`, prompt for larger `max_trials` if it is. (TODO) """ # Just overwrite everything else given for section, value in config.items(): if section == 'status': continue if section not in self.__slots__: log.warning( "Found section '%s' in configuration. Experiments " "do not support this option. Ignoring.", section) continue if section.startswith('_'): log.warning( "Found section '%s' in configuration. " "Cannot set private attributes. Ignoring.", section) continue setattr(self, section, value) try: space = SpaceBuilder().build_from(config['metadata']['user_args']) if not space: raise ValueError( "Parameter space is empty. There is nothing to optimize.") # Instantiate algorithms self.algorithms = PrimaryAlgo(space, self.algorithms) except KeyError: pass
def algorithm(space):
    """Fixture: PrimaryAlgo wrapping the dumb test algorithm."""
    dumb = PrimaryAlgo(space, 'dumbalgo')
    return dumb
def __init__(self, space, max_trials, seed, **kwargs):
    """Build the wrapped TPE optimizer and seed its random state."""
    # PrimaryAlgo validates the space and instantiates the named algorithm.
    algo_config = {'TPEOptimizer': kwargs}
    self.primary = PrimaryAlgo(space, algo_config)
    self.primary.algorithm.random_state = seed
    self.max_trials = max_trials
    # Number of trials observed so far.
    self.trial_count = 0
def test_set_state(space):
    """set_state rewinds Hyperband to a previously saved RNG state."""
    optimizer = PrimaryAlgo(space, 'hyperband')
    optimizer.seed_rng(1)
    snapshot = optimizer.state_dict
    rung = optimizer.suggest(1)
    # Hyperband will always return the full first rung,
    # so asking again yields the same points.
    assert numpy.allclose(rung, optimizer.suggest(1))
    # A new seed changes the suggestions...
    optimizer.seed_rng(2)
    assert not numpy.allclose(rung, optimizer.suggest(1))
    # ...and restoring the snapshot brings the original rung back.
    optimizer.set_state(snapshot)
    assert numpy.allclose(rung, optimizer.suggest(1))