def test_branin(self):
    """Smoke-test the Branin task: evaluation shape contract and known optima.

    Checks that evaluate() returns a 2-D (n, 1) column vector for both batch
    and single inputs, and that every listed optimum evaluates to fopt.
    """
    branin = Branin()

    # Check batch computation: scale uniform samples into the task's box bounds.
    n_points = 10
    X = np.random.rand(n_points, branin.n_dims)
    # Plain scalar multiplication. The original used ndarray.dot(scalar), which
    # numpy defines as equivalent to multiply for 0-d operands but which
    # obscures the intent of a simple affine rescale.
    X[:, 0] = X[:, 0] * (branin.X_upper[0] - branin.X_lower[0]) + branin.X_lower[0]
    X[:, 1] = X[:, 1] * (branin.X_upper[1] - branin.X_lower[1]) + branin.X_lower[1]
    y = branin.evaluate(X)
    # evaluate() must return a 2-D column vector of shape (n_points, 1).
    assert len(y.shape) == 2
    assert y.shape[0] == n_points
    assert y.shape[1] == 1

    # Check single computation: a single point must keep the 2-D shape contract.
    X = np.array([np.random.rand(branin.n_dims)])
    X[:, 0] = X[:, 0] * (branin.X_upper[0] - branin.X_lower[0]) + branin.X_lower[0]
    X[:, 1] = X[:, 1] * (branin.X_upper[1] - branin.X_lower[1]) + branin.X_lower[1]
    y = branin.evaluate(X)
    assert y.shape[0] == 1

    # Check optima: all known optimizers of the task must evaluate to fopt
    # (compared after rounding to 6 decimals).
    X = branin.opt
    y = branin.evaluate(X)
    assert np.all(np.round(y, 6) == np.array([branin.fopt]))
class BraninInBillionDims(REMBO):
    """Branin embedded in a one-million-dimensional input space for REMBO.

    Only the first two coordinates influence the objective; the remaining
    999998 dummy dimensions are bounded in [0, 1] and ignored.
    """

    def __init__(self):
        self.b = Branin()
        n_dummy = 999998
        lower = np.concatenate((self.b.X_lower, np.zeros([n_dummy])))
        upper = np.concatenate((self.b.X_upper, np.ones([n_dummy])))
        super(BraninInBillionDims, self).__init__(lower, upper, d=2)

    def objective_function(self, x):
        """Evaluate the wrapped Branin on the first two coordinates of x."""
        return self.b.objective_function(x[:, :2])
def setUp(self):
    """Build a deterministic fixture: a GP fitted on a few random Branin
    points, plus an Expected-Improvement acquisition function over it."""
    self.branin = Branin()
    # Small training set; the seeded RNG keeps the fixture reproducible.
    n_points = 5
    rng = np.random.RandomState(42)
    self.X = init_random_uniform(self.branin.X_lower, self.branin.X_upper, n_points, rng=rng)
    self.Y = self.branin.evaluate(self.X)
    # Matern 5/2 kernel over the task's full input dimensionality.
    kernel = GPy.kern.Matern52(input_dim=self.branin.n_dims)
    # Small fixed noise; several restarts to stabilize hyperparameter fitting.
    self.model = GPyModel(kernel, optimize=True, noise_variance=1e-4, num_restarts=10)
    self.model.train(self.X, self.Y)
    # Expected Improvement with exploration trade-off parameter par=0.1.
    self.acquisition_func = EI(self.model, X_upper=self.branin.X_upper, X_lower=self.branin.X_lower, par=0.1)
class TestMaximizers2D(unittest.TestCase):
    """Checks that each acquisition maximizer returns a single point that
    lies inside Branin's box bounds.

    The three maximizer tests previously triplicated the same seven
    assertions; they are factored into one private helper.
    """

    def setUp(self):
        """Fit a GP on a few random Branin points and build the EI acquisition."""
        self.branin = Branin()
        n_points = 5
        rng = np.random.RandomState(42)
        self.X = init_random_uniform(self.branin.X_lower, self.branin.X_upper, n_points, rng=rng)
        self.Y = self.branin.evaluate(self.X)
        kernel = GPy.kern.Matern52(input_dim=self.branin.n_dims)
        self.model = GPyModel(kernel, optimize=True, noise_variance=1e-4, num_restarts=10)
        self.model.train(self.X, self.Y)
        self.acquisition_func = EI(self.model, X_upper=self.branin.X_upper, X_lower=self.branin.X_lower, par=0.1)

    def _check_maximizer_result(self, x):
        """Shared assertions: x has shape (1, n_dims) and lies in the bounds."""
        assert x.shape[0] == 1
        assert x.shape[1] == self.branin.n_dims
        assert np.all(x[:, 0] >= self.branin.X_lower[0])
        assert np.all(x[:, 1] >= self.branin.X_lower[1])
        assert np.all(x[:, 0] <= self.branin.X_upper[0])
        assert np.all(x[:, 1] <= self.branin.X_upper[1])
        # NOTE(review): this strict '<' is redundant with (and stricter than)
        # the '<=' checks above; kept for parity with the original tests —
        # confirm whether the upper bound is meant to be attainable.
        assert np.all(x < self.branin.X_upper)

    def test_direct(self):
        maximizer = Direct(self.acquisition_func, self.branin.X_lower, self.branin.X_upper)
        self._check_maximizer_result(maximizer.maximize())

    def test_stochastic_local_search(self):
        maximizer = StochasticLocalSearch(self.acquisition_func, self.branin.X_lower, self.branin.X_upper)
        self._check_maximizer_result(maximizer.maximize())

    def test_cmaes(self):
        maximizer = CMAES(self.acquisition_func, self.branin.X_lower, self.branin.X_upper)
        self._check_maximizer_result(maximizer.maximize())
Created on Jun 23, 2015

@author: Aaron Klein
'''
from robo.models.random_forest import RandomForest
from robo.acquisition.ei import EI
#from robo.maximizers.direct import Direct
from robo.maximizers.cmaes import CMAES
from robo.task.synthetic_functions.branin import Branin
from robo.solver.bayesian_optimization import BayesianOptimization
from robo.incumbent.posterior_optimization import PosteriorMeanAndStdOptimization

# Specifies the task object that defines the objective functions and
# the bounds of the input space
branin = Branin()

# Instantiate the random forest. Branin does not have any categorical
# values thus we pass a np.zero vector here.
model = RandomForest(branin.types)

# Define the acquisition function
acquisition_func = EI(model, X_upper=branin.X_upper, X_lower=branin.X_lower, par=0.1)

# Strategy of estimating the incumbent
rec = PosteriorMeanAndStdOptimization(model, branin.X_lower, branin.X_upper,
                                      # NOTE(review): this call is truncated
                                      # at this chunk boundary; remaining
                                      # arguments continue past this view.
        # NOTE(review): continuation of a prior-sampling method whose `def`
        # lies before this chunk; `p0`, `n_samples`, `self.tophat`, and
        # `self.horseshoe` are defined above this view.
        # Draw one lengthscale sample column per remaining dimension from the
        # tophat prior; transpose so rows index hyperparameter samples.
        ls_sample = np.array([
            self.tophat.sample_from_prior(n_samples)[:, 0]
            for _ in range(1, (self.n_dims - 1))
        ]).T
        p0[:, 1:(self.n_dims - 1)] = ls_sample
        # Noise
        # Last column holds the noise hyperparameter drawn from the horseshoe prior.
        p0[:, -1] = self.horseshoe.sample_from_prior(n_samples)[:, 0]
        return p0

# MCMC settings for sampling the GP hyperparameters.
burnin = 100
chain_length = 200
n_hypers = 20

task = Branin()

# Matern 5/2 kernel with unit initial lengthscales, scaled by cov_amp.
cov_amp = 1.0
config_kernel = george.kernels.Matern52Kernel(np.ones([task.n_dims]),
                                              ndim=task.n_dims)
kernel = cov_amp * config_kernel

# Prior over all kernel hyperparameters plus the noise term (hence +1).
prior = MyPrior(len(kernel) + 1)

model = GaussianProcessMCMC(kernel, prior=prior, burnin=burnin,
                            chain_length=chain_length, n_hypers=n_hypers)
def __init__(self):
    """Wrap a 2-d Branin task and pad its box bounds out to 10^6 dimensions.

    The extra 999998 dummy dimensions are bounded in [0, 1]; REMBO is told
    the effective dimensionality is d=2.
    """
    self.b = Branin()
    n_dummy = 999998
    low = np.concatenate((self.b.X_lower, np.zeros([n_dummy])))
    high = np.concatenate((self.b.X_upper, np.ones([n_dummy])))
    super(BraninInBillionDims, self).__init__(low, high, d=2)