def test_init(self):
    # Test default parameters.
    opt = SimpleBayesianOptimizer(None)
    assert_equal(opt.initial_random_runs, 10)
    assert_is_none(opt.acquisition_hyperparams)
    assert_equal(opt.num_gp_restarts, 10)
    assert_true(isinstance(opt.acquisition_function, ExpectedImprovement))
    assert_dict_equal(opt.kernel_params, {})
    assert_equal(opt.kernel, "matern52")
    assert_false(opt.mcmc)
    assert_equal(opt.num_precomputed, 10)

    # Test correct initialization.
    opt_arguments = {
        "initial_random_runs": 5,
        "acquisition_hyperparams": {},
        "num_gp_restarts": 5,
        "acquisition": ProbabilityOfImprovement,
        "kernel_params": {},
        "kernel": "matern52",
        "mcmc": True,
        "num_precomputed": 5
    }
    opt = SimpleBayesianOptimizer(opt_arguments)
    assert_equal(opt.initial_random_runs, 5)
    assert_dict_equal(opt.acquisition_hyperparams, {})
    assert_equal(opt.num_gp_restarts, 5)
    assert_true(isinstance(opt.acquisition_function, ProbabilityOfImprovement))
    assert_dict_equal(opt.kernel_params, {})
    assert_equal(opt.kernel, "matern52")
    if import_if_exists("pymcmc")[0]:
        assert_true(opt.mcmc)
    else:
        assert_false(opt.mcmc)
    assert_equal(opt.num_precomputed, 5)
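# The mcmc branch above relies on the return convention of
# apsis.utilities.import_utils.import_if_exists, which the optimizer module
# unpacks as "mcmc_imported, pm = import_if_exists('pymcmc')": a
# (succeeded, module) tuple. Below is a minimal sketch of that convention,
# assuming this behaviour; _import_if_exists_sketch is a hypothetical helper
# for illustration only and the real apsis helper may differ in details.

import importlib


def _import_if_exists_sketch(module_name):
    """Return (True, module) if module_name is importable, else (False, None)."""
    try:
        return True, importlib.import_module(module_name)
    except ImportError:
        return False, None

# Usage mirroring the test's guard and the module-level import:
#     mcmc_available, pymcmc_module = _import_if_exists_sketch("pymcmc")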
__author__ = 'Frederik Diehl'

from apsis.optimizers.optimizer import Optimizer
from apsis.optimizers.random_search import RandomSearch
from apsis.models.parameter_definition import *
from apsis.utilities.randomization import check_random_state
from apsis.models.candidate import Candidate
from apsis.optimizers.bayesian.acquisition_functions import *
from apsis.utilities.import_utils import import_if_exists
import GPy
import logging

mcmc_imported, pm = import_if_exists("pymcmc")


class SimpleBayesianOptimizer(Optimizer):
    """
    This implements a simple Bayesian optimizer.

    It is simple because it implements only the simplest form - no
    freeze-thaw, (currently) no multiple workers, and only numeric
    parameters.

    Attributes
    ----------
    SUPPORTED_PARAM_TYPES : list of ParamDefs
        The supported parameter types. Currently only numeric and position.
    kernel : GPy kernel
        The kernel to be used with the GP.
    acquisition_function : acquisition_function
        The acquisition function to use.
    acquisition_hyperparams :
        The acquisition hyperparameters.