def test_acquisition_gradient_cookbook():
    """Analytic acquisition gradients must agree with finite differences.

    Fits a GP on random data, then checks LCB, PI and EI gradients at a
    fresh query point via ``check_gradient_correctness``.
    """
    rng = np.random.RandomState(0)
    X_train = rng.randn(20, 5)
    y_train = rng.randn(20)
    X_query = rng.randn(5)

    gpr = cook_estimator("GP", Space(((-5.0, 5.0), )), random_state=0)
    gpr.fit(X_train, y_train)

    y_best = np.max(y_train)
    for acq_func in ("LCB", "PI", "EI"):
        check_gradient_correctness(X_query, gpr, acq_func, y_best)
def test_acquisition_per_second_gradient(acq_func):
    """Gradient correctness for per-second acquisition with a 2-output model.

    A ``MultiOutputRegressor`` wraps one GP per target; the second target
    (``|x0|**3``) is made large so that mean_grad and std_grad do not
    vanish. Two independent query points are checked.
    """
    rng = np.random.RandomState(0)
    X = rng.randn(20, 10)
    # Second target component grows cubically so mean_grad and std_grad
    # stay away from zero.
    y = np.vstack((X[:, 0], np.abs(X[:, 0]) ** 3)).T

    for _ in range(2):
        X_new = rng.randn(10)
        gpr = cook_estimator("GP", Space(((-5.0, 5.0), )), random_state=0)
        mor = MultiOutputRegressor(gpr)
        mor.fit(X, y)
        check_gradient_correctness(X_new, mor, acq_func, 1.5)
def fit(self, X, y):
    """Fit the two-stage surrogate and return ``self``.

    The first estimator returns a constant value.  The second estimator
    is a gaussian process regressor that models the logarithm of the
    time.
    """
    X = np.array(X)
    # NOTE(review): the GP is fitted against log(ravel(X)), not y —
    # the flattened inputs themselves serve as the log-time targets here.
    gpr = cook_estimator("GP", self.space, normalize_y=False)
    gpr.fit(X, np.log(np.ravel(X)))
    self.estimators_ = [ConstSurrogate(), gpr]
    return self
def test_use_given_estimator():
    """Test that gp_minimize does not use default estimator if one is
    passed in explicitly."""
    noise_correct = 1e5
    noise_fake = 1e-10
    domain = [(1.0, 2.0), (3.0, 4.0)]

    estimator = cook_estimator("GP", domain, noise=noise_correct)
    res = gp_minimize(
        branin,
        domain,
        n_calls=4,
        n_initial_points=2,
        base_estimator=estimator,
        noise=noise_fake,
    )

    # The fitted models must carry the estimator's noise, not the
    # ``noise`` keyword passed to gp_minimize.
    assert res["models"][-1].noise == noise_correct
def test_categorical_gp_has_gradients():
    """A GP over a categorical space must report no analytic gradients."""
    categorical_space = Space([("a", "b")])
    estimator = cook_estimator("GP", space=categorical_space)
    assert not has_gradients(estimator)
def test_has_gradients(estimator, gradients):
    """``has_gradients`` must match the expected flag for each estimator."""
    continuous_space = Space([(-2.0, 2.0)])
    cooked = cook_estimator(estimator, space=continuous_space)
    assert has_gradients(cooked) == gradients