def test_grid_search(self):
    """GridSearch.maximize() must return exactly one flat (1-D) point
    that lies inside the box constraints [lower, upper]."""
    grid_maximizer = GridSearch(self.objective_function, self.lower, self.upper)
    best = grid_maximizer.maximize()
    # A single incumbent, returned as a 1-D array of length 1.
    assert best.shape == (1,)
    # The incumbent respects both bounds (inclusive).
    assert np.all(self.lower <= best)
    assert np.all(best <= self.upper)
def test_grid_search(self):
    """GridSearch.maximize() must return a single point with the expected
    dimensionality that lies within the optimization bounds.

    Fix: the original only compared dimension 0 against the bounds and
    finished with a strict ``x < self.X_upper`` — that both skipped the
    remaining dimensions for the lower bound and rejected a point that
    legitimately sits on the upper boundary (a grid search evaluates the
    grid endpoints). All dimensions are now checked inclusively.
    """
    maximizer = GridSearch(self.acquisition_func, self.X_lower, self.X_upper)
    x = maximizer.maximize()
    # Exactly one candidate point with one column per input dimension.
    assert x.shape[0] == 1
    assert x.shape[1] == self.dims
    # Every dimension must respect both bounds (inclusive).
    assert np.all(x >= self.X_lower)
    assert np.all(x <= self.X_upper)
# Grid search will serve as the acquisition-function optimizer.
maximizer = GridSearch(acquisition_func, task.X_lower, task.X_upper)

# Bootstrap Bayesian optimization with a single uniformly sampled point.
X = np.array([np.random.uniform(task.X_lower, task.X_upper, task.n_dims)])
Y = task.objective_function(X)

# Main BO loop: refit the model, pick the next point, observe it, repeat.
for iteration in range(10):
    # Refit the surrogate model on everything observed so far.
    model.train(X, Y)

    # Let the acquisition function know about the freshly trained model.
    acquisition_func.update(model)

    # Propose the next candidate by maximizing the acquisition function.
    new_x = maximizer.maximize()

    # Observe the objective at the candidate and extend the data set.
    new_y = task.objective_function(np.array(new_x))
    X = np.append(X, new_x, axis=0)
    Y = np.append(Y, new_y, axis=0)

    # Show the objective, the surrogate model, and the acquisition function.
    fig, (ax1, ax2) = plt.subplots(2, sharex=True)
    ax1 = plot_objective_function(task.objective_function, task.X_lower,
                                  task.X_upper, X, Y, ax1)
    ax1 = plot_model(model, task.X_lower, task.X_upper, ax1)
    ax2 = plot_acquisition_function(acquisition_func, task.X_lower,
                                    task.X_upper, ax2)
    plt.show(block=True)