def test_hyperband_api():
    """Exercise the Hyperband suggest/observe API rung by rung.

    Checks that suggest() raises once a rung's budget is exhausted, and that
    the optimizer reports completion after all rungs have been observed.
    """
    import random

    space = Space.from_dict({'a': 'uniform(0, 1)'})
    hpo = Hyperband(Fidelity(0, 100, 10, 'epochs'), space)

    assert not hpo.is_done()

    for _rung in range(3):
        suggested = hpo.suggest()

        for idx, trial_params in enumerate(suggested):
            print(idx, trial_params)

        # A second suggest() before any observation must not hand out trials:
        # it should raise WaitingForTrials (or OptimizationIsDone at the end).
        try:
            hpo.suggest()
            raise RuntimeError()
        except WaitingForTrials:
            pass
        except OptimizationIsDone:
            pass

        # Report a result for every suggested trial; force the last one to be
        # the clear winner so it is promoted to the next rung.
        for idx, trial_params in enumerate(suggested):
            score = random.uniform(0, 1)
            if idx == len(suggested) - 1:
                score = 1e-10
            hpo.observe(trial_params, score)

        print('-------')

    assert hpo.is_done()
    print(hpo.result())
    print(hpo.info())
def run(uri, database, namespace, function, fidelity, space, count, variables,
        plot_filename, objective, defaults, save_dir='.', sleep_time=60,
        register=True):
    """Register a random-search HPO, wait for it to finish, then plot results.

    Parameters
    ----------
    uri, database: connection info forwarded to ``new_client``.
    namespace: unique name identifying this HPO run.
    function: the remote objective to optimize.
    fidelity: fidelity dict; defaults to a single-epoch ``Fidelity`` when None.
    space: search-space definition.
    count: number of trials to run.
    variables: overrides merged on top of ``defaults`` and used to name the
        fetched validation curves.
    plot_filename, objective: forwarded to ``plot``.
    defaults: baseline trial arguments (not modified by this call).
    save_dir: directory where results are saved.
    sleep_time: seconds between status polls.
    register: when False, assume the HPO is already registered.
    """
    if fidelity is None:
        fidelity = Fidelity(1, 1, name='epoch').to_dict()

    # Merge into a fresh dict instead of mutating the caller's `defaults`
    # in place (the original `defaults.update(variables)` leaked the change
    # back to the caller).
    defaults = {**defaults, **variables}

    config = {
        'name': 'random_search',
        'fidelity': fidelity,
        'space': space,
        'count': count
    }

    client = new_client(uri, database)

    if not is_registered(client, namespace) and register:
        register_hpo(client, namespace, function, config, defaults=defaults)

    # Poll until the HPO reports completion.
    while not is_hpo_completed(client, namespace):
        print_status(client, namespace)
        time.sleep(sleep_time)

    # get the result of the HPO
    print('HPO is done')  # plain string: the f-string had no placeholders
    # sorted() already returns a list; no need to wrap it in list().
    data = fetch_hpo_valid_curves(client, namespace, sorted(variables.keys()))
    save_results(namespace, data, save_dir)

    plot(space, objective, data, plot_filename, model_seed=1)
def build_robo(model_type, n_init=2, count=5):
    """Build a RoBO-backed HPOptimizer over the 2-D Branin search space.

    Parameters
    ----------
    model_type: RoBO surrogate model identifier.
    n_init: number of initial random evaluations.
    count: total number of trials.
    """
    search_space = {
        'x': 'uniform(-5, 10)',
        'y': 'uniform(0, 15)',
    }
    return HPOptimizer(
        'robo',
        fidelity=Fidelity(0, 100, 10, 'epoch').to_dict(),
        space=search_space,
        model_type=model_type,
        count=count,
        n_init=n_init,
    )
def test_hyperband_simple_sequential():
    """Drive Hyperband sequentially via its iterator protocol until done."""
    import random

    space = Space.from_dict({'a': 'uniform(0, 1)'})
    hpo = Hyperband(Fidelity(0, 100, 10, 'epochs'), space)

    # Iterating the optimizer yields one configuration at a time; feeding a
    # random score back for each should eventually exhaust the budget.
    for trial in hpo:
        hpo.observe(trial, result=random.uniform(0, 1))

    assert hpo.is_done()
    print(hpo.result())
    print(hpo.info())
def check():
    """Smoke-test the hyperband optimizer end to end on a toy objective."""

    def add(a, b, **kwargs):
        # Toy objective; **kwargs swallows any extra arguments the optimizer
        # attaches to a trial (e.g. fidelity values).
        return a + b

    hpo = HPOptimizer(
        'hyperband',
        fidelity=Fidelity(1, 30, 2),
        space={
            'a': 'uniform(0, 1)',
            'b': 'uniform(0, 1)'
        },
    )

    while not hpo.is_done():
        for trial in hpo:
            # try a new configuration
            score = add(**trial)
            # forward the result to the optimizer
            hpo.observe(trial, score)

    print(hpo.result())
import pytest import logging import os from olympus.utils.log import set_log_level set_log_level(logging.DEBUG) from olympus.hpo.worker import HPOWorkGroup, make_remote_call from olympus.utils.testing import my_trial from olympus.hpo.fidelity import Fidelity from olympus.hpo import HPOptimizer FIDELITY = Fidelity(1, 30, 10).to_dict() def run_nomad_hpo(hpo_name, uri, launch_server=True, fidelity=FIDELITY, group='nomad', workers=10): """Worker are converted to HPO when new trials are needed then killed""" with HPOWorkGroup(uri, 'olympus', f'classification-{group}-1', clean=True, launch_server=launch_server) as group: group.launch_workers(workers) params = { 'a': 'uniform(0, 1)',
import numpy import pytest from sspace import Space from robo.fmin import bayesian_optimization from olympus.hpo import HPOptimizer from olympus.hpo.fidelity import Fidelity from olympus.hpo.robo import RoBO, build_bounds from olympus.hpo.optimizer import WaitingForTrials FIDELITY = Fidelity(0, 100, 10, 'epoch') def branin(x, y, epoch=0, uid=None, other=None, experiment_name=None, client=None): b = (5.1 / (4.*numpy.pi**2)) c = (5. / numpy.pi) t = (1. / (8.*numpy.pi)) return 1.*(y-b*x**2+c*x-6.)**2+10.*(1-t)*numpy.cos(x)+10. def branin_for_original_robo(x): return branin(x[0], x[1]) def get_robo_results(count): lower = numpy.array([-5, 0]) upper = numpy.array([10, 15])