Example 1
    def bay_opt(self):
        # self.x, self.y_obj and self.y_constraint are assumed to hold the
        # initial samples; each iteration refits both surrogates from scratch.
        for i in range(self.niter):
            # Surrogate for the objective: constant amplitude times an
            # anisotropic RBF over the 12 input dimensions.
            kernel_obj = ConstantKernel(1) * RBF(np.array([100] * 12))
            self.gpr_obj = GaussianProcessRegressor(kernel=kernel_obj,
                                                    alpha=10,
                                                    noise="gaussian")

            # Surrogate for the constraint, with the same kernel structure.
            kernel_constraint = ConstantKernel(1) * RBF(np.array([100] * 12))
            self.gpr_constraint = GaussianProcessRegressor(kernel=kernel_constraint,
                                                           alpha=10,
                                                           noise="gaussian")

            # Update both Gaussian processes with the existing samples.
            self.gpr_obj.fit(self.x, self.y_obj)
            self.gpr_constraint.fit(self.x, self.y_constraint)

            # Obtain the next sampling point from the acquisition function
            # (expected improvement).
            X_next = self.propose_location()

            # Obtain the next noisy samples from the objective and constraint.
            Y_next1 = np.array([self.obj_func(X_next)]).reshape(-1, 1)
            Y_next2 = np.array([self.constraint_func(X_next)]).reshape(-1, 1)

            # Append the new sample to the previous ones.
            self.x = np.vstack((self.x, X_next))
            self.y_obj = np.vstack((self.y_obj, Y_next1))
            self.y_constraint = np.vstack((self.y_constraint, Y_next2))

        # Best feasible point: smallest objective among samples with a
        # positive constraint value.
        idx = np.where(self.y_constraint > 0)[0]
        t = idx[np.argmin(self.y_obj[idx])]
        self.f_best = self.y_obj[t]
        self.min_x = self.x[t]
        return self.f_best, self.min_x
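The loop relies on a `propose_location()` helper that is not shown. A minimal sketch of what such a helper could look like, assuming constrained expected improvement (EI weighted by the probability that the constraint GP predicts a positive, i.e. feasible, value); the name, signature and multi-start scheme below are illustrative, not the original implementation:

import numpy as np
from scipy.optimize import minimize
from scipy.stats import norm

def propose_location_sketch(gpr_obj, gpr_constraint, y_obj, bounds, n_restarts=10):
    """Maximize EI(x) * P(constraint(x) > 0) over the box `bounds` (d x 2)."""
    y_best = np.min(y_obj)

    def neg_constrained_ei(x):
        x = x.reshape(1, -1)
        mu, sigma = gpr_obj.predict(x, return_std=True)
        mu_c, sigma_c = gpr_constraint.predict(x, return_std=True)
        mu, mu_c = np.ravel(mu), np.ravel(mu_c)
        sigma = np.maximum(sigma, 1e-9)
        z = (y_best - mu) / sigma
        ei = (y_best - mu) * norm.cdf(z) + sigma * norm.pdf(z)
        p_feasible = norm.cdf(mu_c / np.maximum(sigma_c, 1e-9))
        return -(ei * p_feasible).item()

    best_x, best_val = None, np.inf
    # Multi-start L-BFGS-B from random points inside the box.
    starts = np.random.uniform(bounds[:, 0], bounds[:, 1],
                               size=(n_restarts, bounds.shape[0]))
    for x0 in starts:
        res = minimize(neg_constrained_ei, x0, bounds=bounds, method="L-BFGS-B")
        if res.fun < best_val:
            best_x, best_val = res.x, res.fun
    return best_x.reshape(1, -1)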
Example 2
def minimal_gp(request):
    kernel = ConstantKernel(
        constant_value=1 ** 2, constant_value_bounds=(0.01 ** 2, 1 ** 2)
    ) * RBF(length_scale=1.0, length_scale_bounds=(0.5, 1.5))
    gp = BayesGPR(
        random_state=1, normalize_y=False, kernel=kernel, warp_inputs=request.param
    )
    return gp
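`BayesGPR` here is bayes-skopt's Gaussian process regressor; the `request` argument suggests this function is used as a parametrized pytest fixture, with `request.param` toggling input warping on and off.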
Example 3
def kriging_surrogates():
    surrogate_model = GaussianProcessRegressor(normalize_y=True)
    kernels = [
        1.0 * Matern(nu=5 / 2),
        1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0))
    ]
    params = [kernels]
    return surrogate_model, params
Example 4
def test_mean_gradient():
    # rng, predict_wrapper and optimize come from the module-level setup
    # shown in Examples 10 and 12.
    length_scale = np.arange(1, 6)
    X = rng.randn(10, 5)
    y = rng.randn(10)
    X_new = rng.randn(5)

    rbf = RBF(length_scale=length_scale, length_scale_bounds="fixed")
    gpr = GaussianProcessRegressor(rbf, random_state=0).fit(X, y)

    mean, std, mean_grad = gpr.predict(
        np.expand_dims(X_new, axis=0),
        return_std=True, return_cov=False, return_mean_grad=True)
    num_grad = optimize.approx_fprime(
        X_new, lambda x: predict_wrapper(x, gpr)[0], 1e-4)
    assert_array_almost_equal(mean_grad, num_grad, decimal=3)
Example 5
def optimize(x0, y0, n_calls):
    estimator = GaussianProcessRegressor(alpha=1e-4,
                                         normalize_y=True,
                                         noise='gaussian',
                                         n_restarts_optimizer=10,
                                         kernel=RBF())

    w = gp_minimize(black_box, [(low, high)] * dim,
                    base_estimator=estimator,
                    acq_func="EI",
                    n_calls=n_calls,
                    verbose=False,
                    x0=x0,
                    y0=y0,
                    n_random_starts=1,
                    n_jobs=-1)
    return w.x_iters, w.func_vals.tolist()
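This helper assumes `black_box`, `low`, `high` and `dim` are defined at module scope. The result object returned by `gp_minimize` exposes every evaluated point in `x_iters` and the corresponding objective values in `func_vals`.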
Example 6
def test_guess_priors():
    """Construct a complicated kernel and check if priors are constructed
    correctly."""
    kernel = Exponentiation(
        ConstantKernel(constant_value_bounds="fixed") * Matern() +
        WhiteKernel() + CompoundKernel([RBF(), Matern()]),
        2.0,
    )

    priors = guess_priors(kernel)

    assert len(priors) == 4
    expected = [
        -1.737085713764618,
        -4.107091211892862,
        -1.737085713764618,
        -1.737085713764618,
    ]
    for p, v in zip(priors, expected):
        assert_almost_equal(p(0.0), v)
Example 7
def test_guess_priors():
    """Construct a complicated kernel and check if priors are constructed
    correctly."""
    kernel = Exponentiation(
        ConstantKernel(constant_value_bounds="fixed") * Matern() +
        WhiteKernel() + RBF(length_scale=(1.0, 1.0)),
        2.0,
    )

    priors = guess_priors(kernel)

    assert len(priors) == 4
    expected = [
        -0.02116327824572739,
        -2.112906921232193,
        -0.02116327824572739,
        -0.02116327824572739,
    ]
    for p, v in zip(priors, expected):
        assert_almost_equal(p(-0.9), v)
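In both variants of `test_guess_priors`, the fixed-bounds `ConstantKernel` contributes no prior, while each remaining tunable hyperparameter (the `Matern` and `RBF` length scales and the `WhiteKernel` noise level) contributes one, giving the four expected priors. Each prior is a callable that evaluates a log prior density at a given log-transformed hyperparameter value, which is what the expected constants encode.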
Example 8
import numpy as np
import matplotlib.pyplot as plt

from skopt.learning import GaussianProcessRegressor
from skopt.learning.gaussian_process.kernels import RBF
from utils import black_box

all_x = np.reshape(np.linspace(0, 6, 100), (-1, 1))
all_f = [black_box(xi) for xi in all_x]

# Plot the true function on the full interval.
plt.plot(all_x, all_f)

# Train on only one third of the interval.
X = np.reshape(np.linspace(4, 6, 10), (-1, 1))
y = [black_box(xi) for xi in X]

# Use RBF kernel.
rbf = RBF(length_scale=1.0)
gpr = GaussianProcessRegressor(kernel=rbf, alpha=1e-12)
gpr.fit(X, y)
plt.plot(np.ravel(X), y, "ro", label="Fit points")

# Predict on all data.
y_pred, y_std = gpr.predict(all_x, return_std=True)
all_x_plot = np.ravel(all_x)
upper_bound = y_pred + 1.96 * y_std
lower_bound = y_pred - 1.96 * y_std

plt.plot(all_x_plot, y_pred, "r--", label="Predictions")
plt.plot(all_x_plot, lower_bound, color="red")
plt.plot(all_x_plot, upper_bound, color="red")
plt.fill_between(all_x_plot, lower_bound, upper_bound, facecolor="lightcoral")
plt.legend()
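The `1.96 * y_std` band is the usual approximate 95% confidence interval of the Gaussian predictive distribution; since the model only sees data on [4, 6], the band should widen noticeably over the unseen part of the interval.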
Example 9
import numpy as np

from skopt.learning.gaussian_process import GaussianProcessRegressor
from skopt.learning.gaussian_process.kernels import ConstantKernel
from skopt.learning.gaussian_process.kernels import DotProduct
from skopt.learning.gaussian_process.kernels import ExpSineSquared
from skopt.learning.gaussian_process.kernels import HammingKernel
from skopt.learning.gaussian_process.kernels import Matern
from skopt.learning.gaussian_process.kernels import RationalQuadratic
from skopt.learning.gaussian_process.kernels import RBF
from skopt.learning.gaussian_process.kernels import WhiteKernel

KERNELS = []

for length_scale in [np.arange(1, 6), [0.2, 0.3, 0.5, 0.6, 0.1]]:
    KERNELS.extend([
        RBF(length_scale=length_scale),
        Matern(length_scale=length_scale, nu=0.5),
        Matern(length_scale=length_scale, nu=1.5),
        Matern(length_scale=length_scale, nu=2.5),
        RationalQuadratic(alpha=2.0, length_scale=2.0),
        ExpSineSquared(length_scale=2.0, periodicity=3.0),
        ConstantKernel(constant_value=1.0),
        WhiteKernel(noise_level=2.0),
        Matern(length_scale=length_scale, nu=2.5)**3.0,
        RBF(length_scale=length_scale) +
        Matern(length_scale=length_scale, nu=1.5),
        RBF(length_scale=length_scale) *
        Matern(length_scale=length_scale, nu=1.5),
        DotProduct(sigma_0=2.0)
    ])
Example 10
import numpy as np

from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
import pytest

from skopt.learning import GaussianProcessRegressor
from skopt.learning.gaussian_process.kernels import RBF
from skopt.learning.gaussian_process.kernels import Matern
from skopt.learning.gaussian_process.kernels import WhiteKernel
from skopt.learning.gaussian_process.gpr import _param_for_white_kernel_in_Sum

rng = np.random.RandomState(0)
X = rng.randn(5, 5)
y = rng.randn(5)

rbf = RBF()
wk = WhiteKernel()
mat = Matern()
kernel1 = rbf
kernel2 = mat + rbf
kernel3 = mat * rbf
kernel4 = wk * rbf
kernel5 = mat + rbf * wk


def predict_wrapper(X, gpr):
    """Predict that can handle 1-D input"""
    X = np.expand_dims(X, axis=0)
    return gpr.predict(X, return_std=True)
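`predict_wrapper` is what Example 4 hands to `scipy.optimize.approx_fprime`: the finite-difference routine perturbs a flat 1-D vector, while `gpr.predict` expects a 2-D array of shape `(n_samples, n_features)`, so the wrapper re-adds the batch axis.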

Example 11
first_loop_legal_upper_bounds = [i for i in range(3, 301, 3)]


def crop_number(n):
    # Snap n to the nearest legal upper bound for the first loop.
    return min(first_loop_legal_upper_bounds, key=lambda t: abs(t - n))


kernel = Product(ConstantKernel(1), RBF(1)) + ConstantKernel(1)

model = GaussianProcessRegressor(alpha=0,
                                 normalize_y=True,
                                 noise='gaussian',
                                 n_restarts_optimizer=10,
                                 kernel=kernel)

optimizer = Optimizer([[low_constraint, high_constraint]] * dim,
                      model,
                      n_initial_points=n_initial_points,
                      acq_func='EI',
                      acq_optimizer='lbfgs',
                      random_state=None)

Example 12
import numpy as np
from scipy import optimize
from sklearn.utils.testing import assert_array_almost_equal

from skopt.learning.gaussian_process.kernels import ConstantKernel
from skopt.learning.gaussian_process.kernels import DotProduct
from skopt.learning.gaussian_process.kernels import Exponentiation
from skopt.learning.gaussian_process.kernels import ExpSineSquared
from skopt.learning.gaussian_process.kernels import Matern
from skopt.learning.gaussian_process.kernels import RationalQuadratic
from skopt.learning.gaussian_process.kernels import RBF
from skopt.learning.gaussian_process.kernels import WhiteKernel

length_scale = np.arange(1, 6)
KERNELS = [
    RBF(length_scale=length_scale),
    Matern(length_scale=length_scale, nu=0.5),
    Matern(length_scale=length_scale, nu=1.5),
    Matern(length_scale=length_scale, nu=2.5),
    RationalQuadratic(alpha=2.0, length_scale=2.0),
    ExpSineSquared(length_scale=2.0, periodicity=3.0),
    ConstantKernel(constant_value=1.0),
    WhiteKernel(noise_level=2.0),
    Matern(length_scale=length_scale, nu=2.5)**3.0,
    RBF(length_scale=length_scale) + Matern(length_scale=length_scale, nu=1.5),
    RBF(length_scale=length_scale) * Matern(length_scale=length_scale, nu=1.5),
    DotProduct(sigma_0=2.0)
]

rng = np.random.RandomState(0)
X = rng.randn(5)
Example 13
def cook_estimator(base_estimator, space=None, **kwargs):
    """
    Cook a default estimator.
    For the special base_estimator called "DUMMY" the return value is None.
    This corresponds to sampling points at random, hence there is no need
    for an estimator.
    Parameters
    ----------
    * `base_estimator` ["GP", "RF", "ET", "GBRT", "DUMMY"
                        or sklearn regressor, default="GP"]:
        Should inherit from `sklearn.base.RegressorMixin`.
        In addition the `predict` method should have an optional `return_std`
        argument, which returns `std(Y | x)`` along with `E[Y | x]`.
        If base_estimator is one of ["GP", "RF", "ET", "GBRT", "DUMMY"], a
        surrogate model corresponding to the relevant `X_minimize` function
        is created.
    * `space` [Space instance]:
        Has to be provided if the base_estimator is a gaussian process.
        Ignored otherwise.
    * `kwargs` [dict]:
        Extra parameters provided to the base_estimator at init time.
    """
    if isinstance(base_estimator, str):
        base_estimator = base_estimator.upper()
        if base_estimator not in ["GP", "ET", "RF", "GBRT", "DUMMY", "GPM32", "GPM1", "RBF", "RQ"]:
            raise ValueError("Valid strings for the base_estimator parameter "
                             " are: 'RF', 'ET', 'GP', 'GBRT' or 'DUMMY' not "
                             "%s." % base_estimator)
    elif not is_regressor(base_estimator):
        raise ValueError("base_estimator has to be a regressor.")

    if base_estimator == "GP":
        if space is not None:
            space = Space(space)
            space = Space(normalize_dimensions(space.dimensions))
            n_dims = space.transformed_n_dims
            is_cat = space.is_categorical

        else:
            raise ValueError("Expected a Space instance, not None.")

        cov_amplitude = ConstantKernel(1.0, (0.01, 1000.0))
        # only special if *all* dimensions are categorical
        if is_cat:
            other_kernel = HammingKernel(length_scale=np.ones(n_dims))
        else:
            other_kernel = Matern(
                length_scale=np.ones(n_dims),
                length_scale_bounds=[(0.01, 100)] * n_dims, nu=2.5)

        base_estimator = GaussianProcessRegressor(
            kernel=cov_amplitude * other_kernel,
            normalize_y=True, noise="gaussian",
            n_restarts_optimizer=2)

    elif base_estimator == "GPM32":
        if space is not None:
            space = Space(space)
            space = Space(normalize_dimensions(space.dimensions))
            n_dims = space.transformed_n_dims
            is_cat = space.is_categorical

        else:
            raise ValueError("Expected a Space instance, not None.")

        cov_amplitude = ConstantKernel(1.0, (0.01, 1000.0))
        # only special if *all* dimensions are categorical
        if is_cat:
            other_kernel = HammingKernel(length_scale=np.ones(n_dims))
        else:
            other_kernel = Matern(
                length_scale=np.ones(n_dims),
                length_scale_bounds=[(0.01, 100)] * n_dims, nu=1.5)

        base_estimator = GaussianProcessRegressor(
            kernel=cov_amplitude * other_kernel,
            normalize_y=True, noise="gaussian",
            n_restarts_optimizer=2)

    elif base_estimator == "GPM1":
        if space is not None:
            space = Space(space)
            space = Space(normalize_dimensions(space.dimensions))
            n_dims = space.transformed_n_dims
            is_cat = space.is_categorical

        else:
            raise ValueError("Expected a Space instance, not None.")

        cov_amplitude = ConstantKernel(1.0, (0.01, 1000.0))
        # only special if *all* dimensions are categorical
        if is_cat:
            other_kernel = HammingKernel(length_scale=np.ones(n_dims))
        else:
            other_kernel = Matern(
                length_scale=np.ones(n_dims),
                length_scale_bounds=[(0.01, 100)] * n_dims, nu=1.5)

        base_estimator = GaussianProcessRegressor(
            kernel=cov_amplitude * other_kernel,
            normalize_y=True, noise="gaussian",
            n_restarts_optimizer=2)

    elif base_estimator == "RBF":
        if space is not None:
            space = Space(space)
            space = Space(normalize_dimensions(space.dimensions))
            n_dims = space.transformed_n_dims
        cov_amplitude = ConstantKernel(1.0, (0.01, 1000.0))
        other_kernel = RBF(length_scale=np.ones(n_dims))

        base_estimator = GaussianProcessRegressor(
            kernel=cov_amplitude * other_kernel,
            normalize_y=True, noise="gaussian",
            n_restarts_optimizer=2)

    elif base_estimator == "RQ":
        if space is not None:
            space = Space(space)
            space = Space(normalize_dimensions(space.dimensions))
            n_dims = space.transformed_n_dims
        cov_amplitude = ConstantKernel(1.0, (0.01, 1000.0))
        other_kernel = RationalQuadratic(length_scale=np.ones(n_dims), alpha=0.1)

        base_estimator = GaussianProcessRegressor(
            kernel=cov_amplitude * other_kernel,
            normalize_y=True, noise="gaussian",
            n_restarts_optimizer=2)

    elif base_estimator == "RF":
        base_estimator = RandomForestRegressor(n_estimators=100,
                                               min_samples_leaf=3)
    elif base_estimator == "ET":
        base_estimator = ExtraTreesRegressor(n_estimators=100,
                                             min_samples_leaf=3)
    elif base_estimator == "GBRT":
        gbrt = GradientBoostingRegressor(n_estimators=30, loss="quantile")
        base_estimator = GradientBoostingQuantileRegressor(base_estimator=gbrt)

    elif base_estimator == "DUMMY":
        return None

    base_estimator.set_params(**kwargs)
    return base_estimator
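A hypothetical call, to illustrate how the string shortcuts and `kwargs` interact (the search-space bounds below are made up):

from skopt.space import Real

# Two-dimensional continuous search space (illustrative bounds).
space = [Real(0.0, 1.0), Real(-5.0, 5.0)]

# Matern-5/2 GP surrogate; extra kwargs are forwarded via set_params.
gp = cook_estimator("GP", space=space, random_state=0)

# RBF-kernel GP surrogate over the same space.
rbf_gp = cook_estimator("RBF", space=space)

# "DUMMY" means pure random sampling, so no estimator is returned.
assert cook_estimator("DUMMY") is None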
Example 14
def minimal_gp():
    kernel = (ConstantKernel(constant_value=1**2,
                             constant_value_bounds=(0.01**2, 1**2)) *
              RBF(length_scale=1.0, length_scale_bounds=(0.5, 1.5)))
    gp = BayesGPR(random_state=1, normalize_y=False, kernel=kernel)
    return gp
Example 15
from utils import black_box
from utils import plot_space

import numpy as np
from skopt.optimizer import Optimizer
from skopt.learning import GaussianProcessRegressor
from skopt.learning.gaussian_process.kernels import RBF
from skopt.benchmarks import branin

# Search from 0.0 to 6.0
dimensions = ((0.0, 6.0),)

# Initialize estimator.
gpr = GaussianProcessRegressor(kernel=RBF(), noise=0.0)
optimizer = Optimizer(
    dimensions=dimensions,
    base_estimator=gpr,
    n_random_starts=0,
    acq_func="LCB",
    random_state=0)

# Tell some points to the optimizer and ask for the next point.
X = np.reshape(np.linspace(4, 6, 10), (-1, 1)).tolist()
y = [black_box(xi) for xi in X]
optimizer.tell(X, y)
x_cand = optimizer.ask()
y_cand = black_box(x_cand)
plot = plot_space(X, y, optimizer.models[-1], x_cand)
plot.show()

# Tell and ask again.
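# A plausible continuation (not part of the original snippet): feed the
# evaluated candidate back in and ask for the next point.
optimizer.tell(x_cand, y_cand)
x_cand = optimizer.ask()
y_cand = black_box(x_cand)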
Example 16
    def fit(self, X, y):
        """Fit Gaussian process regression model.

        Parameters
        ----------
        X : array-like, shape = (n_samples, n_features)
            Training data

        y : array-like, shape = (n_samples, [n_output_dims])
            Target values

        Returns
        -------
        self
            Returns an instance of self.
        """

        if self.kernel is None:
            self.kernel = ConstantKernel(1.0, constant_value_bounds="fixed") \
                          * RBF(1.0, length_scale_bounds="fixed")
        if self.noise and not _param_for_white_kernel_in_Sum(self.kernel)[0]:
            if self.noise == "gaussian":
                self.kernel = self.kernel + WhiteKernel()
            else:
                self.kernel = self.kernel + WhiteKernel(
                    noise_level=self.noise, noise_level_bounds="fixed")
        super(GaussianProcessRegressor, self).fit(X, y)

        self.noise_ = None

        if self.noise:
            # The noise component of the kernel must be set to zero while
            # estimating K(X_test, X_test). The term K(X, X) should still
            # include the noise, but (K(X, X))^-1 y has already been
            # precomputed as the attribute `alpha_` (note the underscore),
            # so zeroing the white kernel here is safe. See Eq. 2.24 of
            # http://www.gaussianprocess.org/gpml/chapters/RW2.pdf
            if isinstance(self.kernel_, WhiteKernel):
                self.kernel_.set_params(noise_level=0.0)

            else:
                white_present, white_param = _param_for_white_kernel_in_Sum(
                    self.kernel_)

                # This should always be true. Just in case.
                if white_present:
                    noise_kernel = self.kernel_.get_params()[white_param]
                    self.noise_ = noise_kernel.noise_level
                    self.kernel_.set_params(
                        **{white_param: WhiteKernel(noise_level=0.0)})

        # Precompute arrays needed at prediction
        L_inv = solve_triangular(self.L_.T, np.eye(self.L_.shape[0]))
        self.K_inv_ = L_inv.dot(L_inv.T)

        # Fix deprecation warning #462; parse the version as a tuple so
        # that sklearn releases >= 1.0 are handled as well.
        sklearn_version = tuple(int(v) for v in sklearn.__version__.split(".")[:2])
        if sklearn_version >= (0, 23):
            self.y_train_std_ = self._y_train_std
            self.y_train_mean_ = self._y_train_mean
        elif sklearn_version >= (0, 19):
            self.y_train_mean_ = self._y_train_mean
            self.y_train_std_ = 1
        else:
            self.y_train_mean_ = self.y_train_mean
            self.y_train_std_ = 1

        return self
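A minimal sketch of how this `fit` behaves in practice, assuming skopt's `GaussianProcessRegressor` with `noise="gaussian"` (the data below is made up): the `WhiteKernel` appended by `fit` absorbs the noise level, which is then exposed as `noise_` while the fitted kernel's white component is zeroed out for prediction.

import numpy as np
from skopt.learning import GaussianProcessRegressor

rng = np.random.RandomState(0)
X = rng.rand(20, 1)
y = np.sin(6 * X).ravel() + 0.1 * rng.randn(20)

# noise="gaussian" makes fit() append a WhiteKernel to the default kernel.
gpr = GaussianProcessRegressor(noise="gaussian").fit(X, y)

print(gpr.noise_)   # estimated noise level recovered from the WhiteKernel
print(gpr.kernel_)  # white component reset to noise_level=0.0 for prediction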