Example #1
0
def test_gp_regressor():
    """A noise-free GP with a HammingKernel should interpolate its training targets."""
    rng = np.random.RandomState(0)
    X = np.asarray([
        ["ham", "spam", "ted"],
        ["ham", "ted", "ted"],
        ["ham", "spam", "spam"],
    ])
    y = rng.randn(3)
    kernel = HammingKernel(length_scale=[1.0] * 3)

    model = GaussianProcessRegressor(kernel)
    model.fit(X, y)
    # Predictions at the training points must reproduce the targets.
    assert_array_almost_equal(model.predict(X), y)
    assert_array_almost_equal(model.predict(X[:2]), y[:2])
Example #2
0
def test_gp_regressor():
    """Check that GP regression over categorical rows recovers the fitted targets."""
    rng = np.random.RandomState(0)
    rows = [["ham", "spam", "ted"],
            ["ham", "ted", "ted"],
            ["ham", "spam", "spam"]]
    X = np.asarray(rows)
    y = rng.randn(3)

    gpr = GaussianProcessRegressor(HammingKernel(length_scale=[1.0, 1.0, 1.0]))
    gpr.fit(X, y)

    # Exact interpolation is expected on the training set and any subset of it.
    for subset in (slice(None), slice(None, 2)):
        assert_array_almost_equal(gpr.predict(X[subset]), y[subset])
Example #3
0
class GPSurrogate(BaseSurrogate):
    """Gaussian-process surrogate over a ``num_issues``-dimensional input space.

    Wraps a ``GaussianProcessRegressor`` with a ``ConstantKernel * Matern(nu=2.5)``
    kernel, mirroring the kernel configuration used by the Bayesian-optimization
    baseline so results are directly comparable.
    """

    # The underlying fitted regressor; set in __init__, refit via update_surrogate().
    gp = None

    def __init__(self, num_issues, X, y):
        """Build the GP regressor.

        Parameters
        ----------
        num_issues : int
            Dimensionality of the input space (one length scale per issue).
        X, y : array-like
            Initial training data, forwarded to ``BaseSurrogate``.
        """
        BaseSurrogate.__init__(self, num_issues, X, y)
        # TODO. A question we need to investigate is what kernel we should be using?
        # For now, to have a valid comparison, use the same skopt kernels (with the
        # same parameters) as the Bayesian-optimization baseline.
        cov_amplitude = ConstantKernel(1.0, (0.01, 1000.0))
        other_kernel = Matern(length_scale=np.ones(num_issues),
                              length_scale_bounds=[(0.01, 100)] * num_issues,
                              nu=2.5)
        self.gp = GaussianProcessRegressor(
            kernel=cov_amplitude * other_kernel,
            normalize_y=True,
            n_restarts_optimizer=2,
            noise=1e-9,  # tiny jitter for numerical stability
            # Seed from NumPy's global RandomState via the public API
            # (the original reached into the private np.random.mtrand._rand).
            random_state=np.random.randint(0, np.iinfo(np.int32).max))
        # Another option is the regressor's default kernel, i.e. RBF:
        # self.gp = GaussianProcessRegressor(kernel=None, n_restarts_optimizer=9)

    def update_surrogate(self):
        """Refit the GP on the current training data held by the base class."""
        self.gp.fit(self.X, self.y)

    def query(self, x):
        """Predict the surrogate value at a single point ``x`` (any 1-D array-like)."""
        # reshape(1, -1) already yields a 2-D row vector, so the original's extra
        # np.atleast_2d call was redundant and has been dropped.
        return self.gp.predict(np.array(x).reshape(1, -1))

    def getGP(self):
        """Return the underlying GaussianProcessRegressor (kept for API compatibility)."""
        return self.gp
Example #4
0
def test_gp_regressor():
    """GP regression must interpolate the training targets, with categorical
    inputs fed either raw or through an OrdinalEncoder (per UseOrdinalEncoder)."""
    rng = np.random.RandomState(0)
    X = np.asarray([["ham", "spam", "ted"], ["ham", "ted", "ted"],
                    ["ham", "spam", "spam"]])
    y = rng.randn(3)
    kernel = HammingKernel(length_scale=[1.0, 1.0, 1.0])

    if UseOrdinalEncoder:
        encoder = OrdinalEncoder()
        encoder.fit(X)
        transform = encoder.transform
    else:
        # Identity: feed the categorical matrix straight to the kernel.
        transform = lambda data: data

    gpr = GaussianProcessRegressor(kernel)
    gpr.fit(transform(X), y)
    # Exact recovery on the full training set and on a prefix of it.
    assert_array_almost_equal(gpr.predict(transform(X)), y)
    assert_array_almost_equal(gpr.predict(transform(X[:2])), y[:2])