"""

print(__doc__)

import numpy as np
import pylab

from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, WhiteKernel, \
    ConstantKernel as C
from gp_extras.kernels import ManifoldKernel

np.random.seed(1)

# Specify the Gaussian Process: the ManifoldKernel maps the 1-D input through
# a tanh MLP onto a 2-D manifold (architecture ((1, 2),): 1 input unit, 2
# output units, no hidden layer), where an RBF kernel is applied.
kernel = C(1.0, (0.01, 100)) \
    * ManifoldKernel.construct(base_kernel=RBF(0.1), architecture=((1, 2),),
                               transfer_fct="tanh", max_nn_weight=1)
gp = GaussianProcessRegressor(kernel=kernel, alpha=1e-5,
                              n_restarts_optimizer=10)

X_ = np.linspace(-7.5, 7.5, 100)
y_ = np.sin(X_) + (X_ > 0)  # discontinuous target: sine plus a unit step at 0

# Visualization of prior
pylab.figure(0, figsize=(10, 8))
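# k2 is the ManifoldKernel factor of the product kernel; _project_manifold
# maps the inputs through its (randomly initialized) MLP onto the manifold.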
X_nn = gp.kernel.k2._project_manifold(X_[:, None])
pylab.subplot(3, 2, 1)
for i in range(X_nn.shape[1]):
    pylab.plot(X_, X_nn[:, i], label="Manifold-dim %d" % i)
pylab.legend(loc="best")
pylab.xlim(-7.5, 7.5)
pylab.title("Prior mapping to manifold")
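
# ---------------------------------------------------------------------------
# Dimensionality-reduction example: the observed data X live in an
# n_features-dimensional space but are a linear embedding (via the matrix A)
# of a lower-dimensional manifold, and the targets depend only on the
# manifold coordinates. The original definitions of the problem sizes, the
# target function f, and the embedding A are missing here; the ones below
# are illustrative assumptions chosen so the code runs.
n_samples = 100
n_features = 5
n_dim_manifold = 2
n_hidden = 3


def f(X_manifold):
    # Assumed target function, defined on the manifold coordinates only
    return np.sqrt(np.abs(X_manifold[:, 0] * X_manifold[:, 1]))


X_ = np.random.uniform(-5, 5, (n_samples, n_dim_manifold))  # manifold samples
A = np.random.random_sample((n_dim_manifold, n_features))   # linear embedding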
X = X_.dot(A)  # X are the observed values
y = f(X_)  # Generate target values by applying function to manifold

# Gaussian Process with anisotropic RBF kernel
kernel = C(1.0, (1e-10, 100)) * RBF([1] * n_features,
                                    [(0.1, 100.0)] * n_features) \
    + WhiteKernel(1e-3, (1e-10, 1e-1))
gp = GaussianProcessRegressor(kernel=kernel, alpha=0, n_restarts_optimizer=3)

# Gaussian Process with Manifold kernel (using an isotropic RBF kernel on
# manifold for learning the target function)
# Use an MLP with one hidden-layer for the mapping from data space to manifold
architecture = ((n_features, n_hidden, n_dim_manifold), )
kernel_nn = C(1.0, (1e-10, 100)) \
    * ManifoldKernel.construct(base_kernel=RBF(0.1, (1.0, 100.0)),
                               architecture=architecture,
                               transfer_fct="tanh", max_nn_weight=1.0) \
    + WhiteKernel(1e-3, (1e-10, 1e-1))
gp_nn = GaussianProcessRegressor(kernel=kernel_nn,
                                 alpha=0,
                                 n_restarts_optimizer=3)

# Fit GPs and create scatter plot on test data
gp.fit(X, y)
gp_nn.fit(X, y)
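
# For comparison: the plain anisotropic-RBF GP's learned kernel and its
# log-marginal-likelihood (the manifold GP's values are printed below).
print("Anisotropic RBF learned kernel: %s" % gp.kernel_)
print("Log-marginal-likelihood: %s"
      % gp.log_marginal_likelihood(gp.kernel_.theta))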

print "Initial kernel: %s" % gp_nn.kernel
print "Log-marginal-likelihood: %s" \
    % gp_nn.log_marginal_likelihood(gp_nn.kernel.theta)

print "Learned kernel: %s" % gp_nn.kernel_