def evaluate(mth, run_i, seed):
    """Random-search baseline: run `max_runs` iterations of SMBO with random
    sampling on the module-level `problem` and collect per-iteration traces.

    Relies on module-level globals: problem, cs, max_runs,
    time_limit_per_trial, task_id.

    Returns:
        (config_list, perf_list, time_list): evaluated configs, their scalar
        objective values, and the wall-clock time elapsed at each iteration.
    """
    print(mth, run_i, seed, '===== start =====', flush=True)

    def objective_function(config):
        # Wrap the scalar objective into the dict format SMBO expects.
        return {
            'config': config,
            'objs': (problem.evaluate_config(config), ),
            'constraints': None,
        }

    bo = SMBO(objective_function, cs,
              sample_strategy='random',
              init_strategy='random',
              max_runs=max_runs,
              time_limit_per_trial=time_limit_per_trial,
              task_id=task_id,
              random_state=seed)
    # bo.run()

    config_list, perf_list, time_list = [], [], []
    global_start_time = time.time()
    for i in range(max_runs):
        config, trial_state, objs, trial_info = bo.iterate()
        global_time = time.time() - global_start_time
        print(seed, i, objs, config, trial_state, trial_info, 'time=', global_time)
        config_list.append(config)
        perf_list.append(objs[0])
        time_list.append(global_time)
    return config_list, perf_list, time_list
def evaluate(mth, run_i, seed):
    """Multi-objective BO run: optimize `problem` for `max_runs` iterations and
    track the hypervolume difference to the known optimum after every trial.

    Relies on module-level globals: problem, cs, surrogate_type, acq_type,
    acq_optimizer_type, initial_runs, init_strategy, max_runs,
    time_limit_per_trial, task_id, plot_mode, problem_str.

    Returns:
        (hv_diffs, pf, config_list, perf_list, time_list)
    """
    print(mth, run_i, seed, '===== start =====', flush=True)

    def objective_function(config):
        # Attach the config to the result dict so it can be recovered from
        # the optimizer history later.
        res = problem.evaluate_config(config)
        res['config'] = config
        return res

    bo = SMBO(objective_function, cs,
              num_objs=problem.num_objs,
              num_constraints=0,
              surrogate_type=surrogate_type,          # default: gp
              acq_type=acq_type,                      # default: ehvi
              acq_optimizer_type=acq_optimizer_type,  # default: random_scipy
              initial_runs=initial_runs,              # default: 2 * (problem.dim + 1)
              init_strategy=init_strategy,            # default: sobol
              max_runs=max_runs,
              ref_point=problem.ref_point,
              time_limit_per_trial=time_limit_per_trial,
              task_id=task_id,
              random_state=seed)
    # bo.run()
    hv_diffs = []
    config_list = []
    perf_list = []
    time_list = []
    global_start_time = time.time()
    for i in range(max_runs):
        # Drive the optimizer one trial at a time so per-iteration metrics
        # (wall-clock time, hypervolume) can be logged.
        config, trial_state, objs, trial_info = bo.iterate()
        global_time = time.time() - global_start_time
        print(seed, i, objs, config, trial_state, trial_info, 'time=', global_time)
        # Hypervolume of the current Pareto front w.r.t. the reference point;
        # hv_diff is the gap to the problem's known maximal hypervolume.
        hv = Hypervolume(problem.ref_point).compute(bo.get_history().get_pareto_front())
        hv_diff = problem.max_hv - hv
        print(seed, i, 'hypervolume =', hv)
        print(seed, i, 'hv diff =', hv_diff)
        hv_diffs.append(hv_diff)
        config_list.append(config)
        perf_list.append(objs)
        time_list.append(global_time)
    pf = np.asarray(bo.get_history().get_pareto_front())
    # plot for debugging
    if plot_mode == 1:
        Y_init = None
        plot_pf(problem, problem_str, mth, pf, Y_init)
    return hv_diffs, pf, config_list, perf_list, time_list
def evaluate(mth, run_i, seed):
    """Constrained single-objective BO run on `problem`.

    After the run, objective values of configurations that violated the
    (single visible) constraint are replaced by a large penalty so that
    downstream aggregation treats them as failures.

    Relies on module-level globals: problem, cs, num_constraints,
    surrogate_type, acq_optimizer_type, initial_runs, init_strategy,
    max_runs, time_limit_per_trial, task_id.

    Returns:
        (config_list, _perf_list, time_list)
    """
    print(mth, run_i, seed, '===== start =====', flush=True)

    def objective_function(config):
        # problem.evaluate_config is expected to return the full result dict
        # (objectives + constraints) -- TODO confirm against the problem class.
        y = problem.evaluate_config(config)
        return y

    bo = SMBO(objective_function, cs,
              num_constraints=num_constraints,
              surrogate_type=surrogate_type,          # default: gp
              acq_optimizer_type=acq_optimizer_type,  # default: random_scipy
              initial_runs=initial_runs,              # default: 3
              init_strategy=init_strategy,            # default: random_explore_first
              max_runs=max_runs + initial_runs,
              time_limit_per_trial=time_limit_per_trial,
              task_id=task_id,
              random_state=seed)
    # bo.run()
    config_list = []
    perf_list = []
    time_list = []
    global_start_time = time.time()
    for i in range(max_runs):
        config, trial_state, objs, trial_info = bo.iterate()
        global_time = time.time() - global_start_time
        print(seed, i, objs, config, trial_state, trial_info, 'time=', global_time)
        # if any(c > 0 for c in constraints):
        #     objs = np.max(self.perfs, axis=0) if self.perfs else objs
        config_list.append(config)
        perf_list.append(objs[0])
        time_list.append(global_time)
    # Post-pass: constraint value > 0 means violated; replace the objective
    # with a large penalty so infeasible points never look like incumbents.
    _perf_list = []
    for i, c in enumerate(bo.config_advisor.constraint_perfs[0]):
        if c > 0:
            _perf_list.append(9999999)
        else:
            _perf_list.append(perf_list[i])
    return config_list, _perf_list, time_list
def evaluate(problem, seed):
    """Run SMBO on `problem` until `max_runs` trials or `runtime_limit`
    wall-clock seconds have elapsed, whichever comes first.

    Relies on module-level globals: cs, surrogate_type, acq_optimizer_type,
    initial_runs, init_strategy, max_runs, time_limit_per_trial, task_id,
    runtime_limit.

    Returns:
        (config_list, perf_list, time_list)
    """

    def objective_function(config):
        # Wrap the scalar objective into the dict format SMBO expects.
        return {
            'config': config,
            'objs': (problem.evaluate_config(config), ),
            'constraints': None,
        }

    from litebo.optimizer.generic_smbo import SMBO
    bo = SMBO(objective_function, cs,
              surrogate_type=surrogate_type,          # default: prf
              acq_optimizer_type=acq_optimizer_type,  # default: local_random
              initial_runs=initial_runs,              # default: 3
              init_strategy=init_strategy,            # default: random_explore_first
              max_runs=max_runs,
              time_limit_per_trial=time_limit_per_trial,
              task_id=task_id,
              random_state=seed)
    # bo.run()

    config_list, perf_list, time_list = [], [], []
    global_start_time = time.time()
    for i in range(max_runs):
        config, trial_state, objs, trial_info = bo.iterate()
        global_time = time.time() - global_start_time
        print(seed, i, objs, config, trial_state, trial_info, 'time=', global_time)
        config_list.append(config)
        perf_list.append(objs[0])
        time_list.append(global_time)
        if global_time >= runtime_limit:
            # Stop early once the wall-clock budget is exhausted.
            break
    return config_list, perf_list, time_list
def evaluate(mth, run_i, seed):
    """Random-search baseline for the constrained problem: sample configs at
    random, then penalize every configuration whose constraint was violated.

    Relies on module-level globals: problem, cs, num_constraints, max_runs,
    time_limit_per_trial, task_id.

    Returns:
        (config_list, _perf_list, time_list)
    """
    print(mth, run_i, seed, '===== start =====', flush=True)

    def objective_function(config):
        # problem.evaluate_config is expected to return the full result dict
        # (objectives + constraints) -- TODO confirm against the problem class.
        return problem.evaluate_config(config)

    bo = SMBO(objective_function, cs,
              num_constraints=num_constraints,
              sample_strategy='random',
              init_strategy='random',
              max_runs=max_runs,
              time_limit_per_trial=time_limit_per_trial,
              task_id=task_id,
              random_state=seed)
    # bo.run()

    config_list, perf_list, time_list = [], [], []
    global_start_time = time.time()
    for i in range(max_runs):
        config, trial_state, objs, trial_info = bo.iterate()
        global_time = time.time() - global_start_time
        print(seed, i, objs, config, trial_state, trial_info, 'time=', global_time)
        config_list.append(config)
        perf_list.append(objs[0])
        time_list.append(global_time)

    # Constraint value > 0 means violated: replace the objective with a large
    # penalty so infeasible points never look like incumbents.
    _perf_list = [9999999 if c > 0 else perf_list[idx]
                  for idx, c in enumerate(bo.config_advisor.constraint_perfs[0])]
    return config_list, _perf_list, time_list
import os
import sys
import numpy as np
import matplotlib.pyplot as plt

sys.path.insert(0, os.getcwd())
from litebo.optimizer.generic_smbo import SMBO
from litebo.benchmark.objective_functions.synthetic import BraninCurrin

# Demo: multi-objective BO with the Monte-Carlo advisor and the MC-EHVI
# acquisition on the (unconstrained) Branin-Currin benchmark.
prob = BraninCurrin()
bo = SMBO(prob.evaluate, prob.config_space,
          advisor_type='mcadvisor',
          task_id='mcehvi',
          num_objs=prob.num_objs,
          num_constraints=prob.num_constraints,
          acq_type='mcehvi',
          ref_point=prob.ref_point,
          max_runs=100,
          random_state=2)
bo.run()

hvs = bo.get_history().hv_data
# Log-scale gap between the known maximal hypervolume and the achieved one.
log_hv_diff = np.log10(prob.max_hv - np.asarray(hvs))
pf = np.asarray(bo.get_history().get_pareto_front())
plt.scatter(pf[:, 0], pf[:, 1])
# plt.plot(log_hv_diff)
# plt.show()
# NOTE(review): chunk starts mid-function -- the `def` header and the
# definitions of a, b, x1, x2 lie outside this view. Presumably this is the
# standard Branin function returning the SMBO result dict; confirm against
# the full file.
    c = 5. / np.pi
    r = 6.
    s = 10.
    t = 1. / (8. * np.pi)
    ret = a * (x2 - b * x1**2 + c * x1 - r)**2 + s * (1 - t) * np.cos(x1) + s
    return {'objs': (ret, )}


# Search space: the usual Branin domain x1 in [-5, 10], x2 in [0, 15].
cs = ConfigurationSpace()
x1 = UniformFloatHyperparameter("x1", -5, 10, default_value=0)
x2 = UniformFloatHyperparameter("x2", 0, 15, default_value=0)
cs.add_hyperparameters([x1, x2])

seed = np.random.randint(100)
# random search
# bo = SMBO(branin, cs, sample_strategy='random', max_runs=100, task_id='mcei', random_state=seed)
# Monte-Carlo advisor with the MC-EI acquisition (10 MC samples).
bo = SMBO(branin, cs,
          advisor_type='mcadvisor',
          acq_type='mcei',
          mc_times=10,
          max_runs=50,
          task_id='mcei',
          random_state=seed)
bo.run()

inc_value = bo.get_incumbent()
print('BO', '=' * 30)
print(inc_value)
import os
import sys
import numpy as np
import matplotlib.pyplot as plt

sys.path.insert(0, os.getcwd())
from litebo.optimizer.generic_smbo import SMBO
from litebo.benchmark.objective_functions.synthetic import BraninCurrin

# Demo: constrained multi-objective BO (Monte-Carlo ParEGO with constraints)
# on the constrained Branin-Currin benchmark.
prob = BraninCurrin(constrained=True)
bo = SMBO(prob.evaluate, prob.config_space,
          advisor_type='mcadvisor',
          task_id='mccmo',
          num_objs=prob.num_objs,
          num_constraints=prob.num_constraints,
          acq_type='mcparegoc',
          ref_point=prob.ref_point,
          max_runs=100,
          random_state=2)
bo.run()

hvs = bo.get_history().hv_data
# Log-scale gap between the known maximal hypervolume and the achieved one.
log_hv_diff = np.log10(prob.max_hv - np.asarray(hvs))
plt.plot(log_hv_diff)
plt.show()
# NOTE(review): chunk starts mid-function -- the `def branin_currin(...)`
# header and the definitions of px1, px2, x1, x2 lie outside this view.
    res = dict()
    # Branin term (f1) and Currin term (f2) of the bi-objective benchmark.
    f1 = (px2 - 5.1/(4*np.pi**2) * px1**2 + 5/np.pi * px1 - 6)**2 + 10 * (1 - 1/(8*np.pi)) * np.cos(px1) + 10
    f2 = (1 - np.exp(-1/(2*x2))) * (2300 * x1**3 + 1900 * x1**2 + 2092 * x1 + 60) / (100 * x1**3 + 500 * x1**2 + 4*x1 + 20)
    res['objs'] = [f1, f2]
    res['constraints'] = []
    return res


# Two float hyperparameters on [0, 1], default 0.5 each.
bc_params = {
    'float': {
        'x1': (0, 1, 0.5),
        'x2': (0, 1, 0.5)
    }
}
bc_cs = ConfigurationSpace()
bc_cs.add_hyperparameters([UniformFloatHyperparameter(e, *bc_params['float'][e])
                           for e in bc_params['float']])
# Known maximal hypervolume and reference point for Branin-Currin.
bc_max_hv = 59.36011874867746
bc_ref_point = [18., 6.]

bo = SMBO(branin_currin, bc_cs,
          task_id='parego',
          num_objs=2,
          acq_type='parego',
          ref_point=bc_ref_point,
          max_runs=100)
bo.run()

hvs = bo.get_history().hv_data
log_hv_diff = np.log10(bc_max_hv - np.asarray(hvs))
# NOTE(review): chunk starts mid-function -- the `def branin(xs)` header and
# the `x1 = xs['x1']` line presumably lie outside this view; confirm against
# the full file.
    x2 = xs['x2']
    # Standard Branin coefficients.
    a = 1.
    b = 5.1 / (4. * np.pi**2)
    c = 5. / np.pi
    r = 6.
    s = 10.
    t = 1. / (8. * np.pi)
    ret = a * (x2 - b * x1**2 + c * x1 - r)**2 + s * (1 - t) * np.cos(x1) + s
    return {'objs': (ret, )}


# Search space: the usual Branin domain x1 in [-5, 10], x2 in [0, 15].
cs = ConfigurationSpace()
x1 = UniformFloatHyperparameter("x1", -5, 10, default_value=0)
x2 = UniformFloatHyperparameter("x2", 0, 15, default_value=0)
cs.add_hyperparameters([x1, x2])

# Default advisor; each trial is capped at 3 seconds.
bo = SMBO(branin, cs,
          advisor_type='default',
          max_runs=50,
          time_limit_per_trial=3,
          task_id='hp1')
bo.run()

inc_value = bo.get_incumbent()
print('BO', '=' * 30)
print(inc_value)

# Execute the following command:
# Step 1: `python test/visualization/visualize_smbo.py`
# Step 2: `tensorboard --logdir logs/hp1`
import numpy as np

from litebo.optimizer.generic_smbo import SMBO
from litebo.utils.config_space import ConfigurationSpace, UniformFloatHyperparameter


def townsend(config):
    """Townsend test function (negated for minimization) with one black-box
    constraint; a constraint value > 0 is treated as violated by SMBO."""
    X = np.array(list(config.get_dictionary().values()))
    obj = -(np.cos((X[0] - 0.1) * X[1])**2 + X[0] * np.sin(3 * X[0] + X[1]))
    cons = -(-np.cos(1.5 * X[0] + np.pi) * np.cos(1.5 * X[1]) +
             np.sin(1.5 * X[0] + np.pi) * np.sin(1.5 * X[1]))
    return {'objs': (obj, ), 'constraints': (cons, )}


townsend_params = {'float': {'x1': (-2.25, 2.5, 0), 'x2': (-2.5, 1.75, 0)}}
townsend_cs = ConfigurationSpace()
townsend_cs.add_hyperparameters([
    UniformFloatHyperparameter(name, *townsend_params['float'][name])
    for name in townsend_params['float']
])

# Constrained BO with a scipy-assisted random acquisition optimizer.
bo = SMBO(townsend, townsend_cs,
          num_constraints=1,
          acq_optimizer_type='random_scipy',
          max_runs=60,
          task_id='smbo_eic')
bo.run()
# NOTE(review): chunk starts mid-scope -- `setup`, `dataset`, `data_dir`,
# `cs`, `mth`, `max_runs`, `seed`, `rand_prob`, `sample_num`, `load_data` and
# the original `multi_objective_func` are defined outside this view; the
# chunk is also cut off inside the iteration loop at the bottom.
num_inputs = setup['num_inputs']
num_objs = setup['num_objs']
referencePoint = setup['referencePoint']
real_hv = setup['real_hv']
time_limit_per_trial = 2 * setup['time_limit']

_x, _y = load_data(dataset, data_dir)
# Bind the dataset into the objective so SMBO only passes the config.
multi_objective_func = partial(multi_objective_func, x=_x, y=_y)

# Evaluate mth
bo = SMBO(multi_objective_func, cs,
          num_objs=num_objs,
          max_runs=max_runs,
          # surrogate_type='gp_rbf',  # use default
          acq_type=mth,
          # initial_configurations=X_init,
          initial_runs=10,
          time_limit_per_trial=time_limit_per_trial,
          task_id='mo',
          random_state=seed)
# Reach into the advisor to override knobs not exposed by the constructor.
bo.config_advisor.optimizer.random_chooser.prob = rand_prob  # set rand_prob, default 0
bo.config_advisor.acquisition_function.sample_num = sample_num  # set sample_num
bo.config_advisor.acquisition_function.random_state = seed  # set random_state
bo.config_advisor.optimizer.num_mc = 10000  # MESMO optimizer only
bo.config_advisor.optimizer.num_opt = 10  # MESMO optimizer only
print(mth, '===== start =====')
# bo.run()
hv_diffs = []
for i in range(max_runs):
    config, trial_state, objs, trial_info = bo.iterate()
def evaluate(mth, run_i, seed):
    """Constrained multi-objective BO run on `problem`, tracking the
    hypervolume difference after every trial.

    Objectives of infeasible configurations (any constraint value > 0) are
    replaced by a large penalty before the hypervolume is computed.

    Relies on module-level globals: problem, cs, surrogate_type, acq_type,
    acq_optimizer_type, initial_runs, init_strategy, max_runs,
    time_limit_per_trial, task_id, plot_mode, problem_str.

    Returns:
        (hv_diffs, pf, config_list, perf_list, time_list)
    """
    print(mth, run_i, seed, '===== start =====', flush=True)

    def objective_function(config):
        res = problem.evaluate_config(config)
        res['config'] = config
        # Normalize objectives/constraints to plain Python lists.
        res['objs'] = np.asarray(res['objs']).tolist()
        res['constraints'] = np.asarray(res['constraints']).tolist()
        return res

    bo = SMBO(objective_function, cs,
              num_objs=problem.num_objs,
              num_constraints=problem.num_constraints,
              surrogate_type=surrogate_type,          # default: gp
              acq_type=acq_type,                      # default: ehvic
              acq_optimizer_type=acq_optimizer_type,  # default: random_scipy
              initial_runs=initial_runs,              # default: 2 * (problem.dim + 1)
              init_strategy=init_strategy,            # default: sobol
              max_runs=max_runs,
              ref_point=problem.ref_point,
              time_limit_per_trial=time_limit_per_trial,
              task_id=task_id,
              random_state=seed)
    # bo.run()
    hv_diffs = []
    config_list = []
    perf_list = []
    time_list = []
    global_start_time = time.time()
    for i in range(max_runs):
        config, trial_state, origin_objs, trial_info = bo.iterate()
        global_time = time.time() - global_start_time
        # Latest constraint values for this trial; > 0 means violated.
        # (The comprehension's `i` is scoped to the comprehension and does
        # not clobber the outer loop index.)
        constraints = [
            bo.config_advisor.constraint_perfs[i][-1]
            for i in range(problem.num_constraints)
        ]
        if any(c > 0 for c in constraints):
            # Penalize infeasible points so they cannot dominate the front.
            objs = [9999999.0] * problem.num_objs
        else:
            objs = origin_objs
        print(seed, i, origin_objs, objs, constraints, config, trial_state,
              trial_info, 'time=', global_time)
        assert len(bo.config_advisor.constraint_perfs[0]
                   ) == i + 1  # make sure no repeat or failed config
        config_list.append(config)
        perf_list.append(objs)
        time_list.append(global_time)
        # Hypervolume of all (penalized) observations w.r.t. the ref point.
        hv = Hypervolume(problem.ref_point).compute(perf_list)
        hv_diff = problem.max_hv - hv
        hv_diffs.append(hv_diff)
        print(seed, i, 'hypervolume =', hv)
        print(seed, i, 'hv diff =', hv_diff)
    pf = np.asarray(bo.get_history().get_pareto_front())
    # plot for debugging
    if plot_mode == 1:
        Y_init = None
        plot_pf(problem, problem_str, mth, pf, Y_init)
    return hv_diffs, pf, config_list, perf_list, time_list
# NOTE(review): chunk starts mid-function -- only the trailing `return res`
# of the objective (presumably branin_currin) is visible here.
    return res


# Two float hyperparameters on [0, 1], default 0.5 each.
bc_params = {'float': {'x1': (0, 1, 0.5), 'x2': (0, 1, 0.5)}}
bc_cs = ConfigurationSpace()
bc_cs.add_hyperparameters([
    UniformFloatHyperparameter(e, *bc_params['float'][e])
    for e in bc_params['float']
])
# Known maximal hypervolume and reference point for Branin-Currin.
bc_max_hv = 59.36011874867746
bc_ref_point = [18., 6.]

# Multi-objective BO with the Monte-Carlo ParEGO advisor.
bo = SMBO(branin_currin, bc_cs,
          advisor_type='mcadvisor',
          task_id='mcparego',
          num_objs=2,
          acq_type='mcparego',
          ref_point=bc_ref_point,
          max_runs=100,
          random_state=2)
bo.run()

hvs = bo.get_history().hv_data
# Log-scale gap between the known maximal hypervolume and the achieved one.
log_hv_diff = np.log10(bc_max_hv - np.asarray(hvs))

import matplotlib.pyplot as plt
plt.plot(log_hv_diff)
# plt.savefig('plt.pdf')
plt.show()
import numpy as np

from litebo.optimizer.generic_smbo import SMBO
from litebo.utils.config_space import ConfigurationSpace, UniformFloatHyperparameter


def townsend(config):
    """Townsend test function (negated for minimization) with one black-box
    constraint; a constraint value > 0 is treated as violated by SMBO."""
    X = np.array(list(config.get_dictionary().values()))
    obj = -(np.cos((X[0] - 0.1) * X[1])**2 + X[0] * np.sin(3 * X[0] + X[1]))
    cons = -(-np.cos(1.5 * X[0] + np.pi) * np.cos(1.5 * X[1]) +
             np.sin(1.5 * X[0] + np.pi) * np.sin(1.5 * X[1]))
    return {'objs': (obj, ), 'constraints': (cons, )}


townsend_params = {'float': {'x1': (-2.25, 2.5, 0), 'x2': (-2.5, 1.75, 0)}}
townsend_cs = ConfigurationSpace()
townsend_cs.add_hyperparameters([
    UniformFloatHyperparameter(name, *townsend_params['float'][name])
    for name in townsend_params['float']
])

# Constrained BO with the Monte-Carlo advisor and the MC-EIC acquisition.
bo = SMBO(townsend, townsend_cs,
          advisor_type='mcadvisor',
          acq_type='mceic',
          num_constraints=1,
          max_runs=60,
          task_id='mceic')
bo.run()
from litebo.benchmark.objective_functions.synthetic import *
from litebo.optimizer.generic_smbo import SMBO

# Plain single-objective BO on the Bukin function with a GP surrogate.
problem = Bukin()
bo = SMBO(problem.evaluate, problem.config_space,
          surrogate_type='gp',
          initial_runs=10,
          max_runs=60,
          task_id='bo')
bo.run()

# Constrained BO on Ackley (two black-box constraints).
c_problem = Ackley(constrained=True)
cbo = SMBO(c_problem.evaluate, c_problem.config_space,
           num_constraints=2,
           surrogate_type='gp',
           initial_runs=10,
           max_runs=110,
           task_id='cbo')
cbo.run()

# Random-search baseline for the same constrained problem.
# NOTE(review): `trial_id` is not defined in this chunk -- presumably set
# elsewhere in the file; confirm before running standalone.
cbor = SMBO(c_problem.evaluate, c_problem.config_space,
            num_constraints=2,
            sample_strategy='random',
            initial_runs=10,
            max_runs=110,
            task_id='c_random',
            random_state=trial_id)
# Constrained multi-objective BO (EHVIC) on the CONSTR benchmark.
# NOTE(review): imports for CONSTR, SMBO, np and plt are not visible in this
# chunk; the chunk is also cut off inside the `elif` plotting branch below.
num_inputs = 2
num_objs = 2
prob = CONSTR()
acq_optimizer_type = 'random_scipy'
seed = 1
initial_runs = 2 * (num_inputs + 1)
max_runs = 100 + initial_runs

bo = SMBO(prob.evaluate, prob.config_space,
          task_id='ehvic',
          num_objs=prob.num_objs,
          num_constraints=prob.num_constraints,
          acq_type='ehvic',
          acq_optimizer_type=acq_optimizer_type,
          surrogate_type='gp',
          ref_point=prob.ref_point,
          max_runs=max_runs,
          initial_runs=initial_runs,
          init_strategy='sobol',
          random_state=seed)
bo.run()

hvs = bo.get_history().hv_data
pf = np.asarray(bo.get_history().get_pareto_front())
# Plot the Pareto front in 2D or 3D depending on the number of objectives.
if pf.shape[-1] == 2:
    plt.scatter(pf[:, 0], pf[:, 1])
elif pf.shape[-1] == 3:
    ax = plt.axes(projection='3d')
from litebo.optimizer.generic_smbo import SMBO
from litebo.benchmark.objective_functions.synthetic import Ackley

# TuRBO-style run: Monte-Carlo advisor with trust-region optimization
# (use_trust_region=True) on the 10-dimensional Ackley function.
# NOTE(review): `plt` is used below but not imported in this chunk --
# presumably imported earlier in the file; confirm before running standalone.
num_inputs = 10
acq_optimizer_type = 'random_scipy'
seed = 1
prob = Ackley(dim=num_inputs, constrained=False)
initial_runs = 2 * (num_inputs + 1)
max_runs = 250

bo = SMBO(prob.evaluate, prob.config_space,
          task_id='turbo',
          advisor_type='mcadvisor',
          num_objs=prob.num_objs,
          num_constraints=prob.num_constraints,
          acq_type='mcei',
          acq_optimizer_type=acq_optimizer_type,
          use_trust_region=True,
          surrogate_type='gp',
          max_runs=max_runs,
          initial_runs=initial_runs,
          init_strategy='latin_hypercube',
          random_state=seed)
bo.run()

# Plot the raw observed objective values in evaluation order.
values = list(bo.get_history().data.values())
plt.plot(values)
plt.show()