Example #1
'''
The all-important kernels and their hyperparameters.

Created on Dec 11, 2014

@author: ntraft
'''
from __future__ import division
import numpy as np

from com.ntraft.gp import ParametricGPModel
import com.ntraft.covariance as cov

# Hyperparameters from seq_eth #175
model1 = ParametricGPModel(
    cov.summed_kernel(cov.matern_kernel(np.exp(3.5128), np.exp(2 * 5.3844)),
                      cov.linear_kernel(np.exp(-2 * -2.8770)),
                      cov.noise_kernel(np.exp(2 * -0.3170))),
    cov.summed_kernel(cov.matern_kernel(np.exp(2.2839), np.exp(2 * 2.5229)),
                      cov.linear_kernel(np.exp(-2 * -4.8792)),
                      cov.noise_kernel(np.exp(2 * -0.2407))))

# Hyperparameters from seq_eth #48 - HUGE variance and smooth
model2 = ParametricGPModel(
    cov.summed_kernel(cov.matern_kernel(np.exp(2.0194), np.exp(2 * 2.7259)),
                      cov.linear_kernel(np.exp(-2 * -3.2502)),
                      cov.noise_kernel(np.exp(2 * -1.1128))),
    cov.summed_kernel(cov.matern_kernel(np.exp(3.5181), np.exp(2 * 5.4197)),
                      cov.linear_kernel(np.exp(-2 * -0.8087)),
                      cov.noise_kernel(np.exp(2 * -0.5089))))
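
# The constants above follow a GPML-style log-hyperparameter convention: each raw kernel
# parameter is recovered by exponentiating a stored log value (scaled by 2 or -2 for the
# variance-like terms). The helper below is only an illustrative sketch of that transform;
# the function name, argument names, and parameter interpretations are assumptions, not
# part of the com.ntraft API.
def kernel_params_from_log(log_len, log_sig, log_lin, log_noise):
    """Map the stored log hyperparameters to the raw values handed to the kernels."""
    matern_len = np.exp(log_len)        # e.g. exp(3.5128)       ~= 33.54
    matern_var = np.exp(2 * log_sig)    # e.g. exp(2 * 5.3844)   ~= 47517
    linear_par = np.exp(-2 * log_lin)   # e.g. exp(-2 * -2.8770) ~= 315.5
    noise_var = np.exp(2 * log_noise)   # e.g. exp(2 * -0.3170)  ~= 0.53
    return matern_len, matern_var, linear_par, noise_var

# For instance, kernel_params_from_log(3.5128, 5.3844, -2.8770, -0.3170) recovers the
# raw values behind model1's x kernel.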

# Hyperparameters from seq_eth #201 - pretty squirrely
Example #2
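# NOTE (assumed context): this snippet is a mid-file excerpt. It relies on `numpy as np`,
# `com.ntraft.covariance as cov`, a `GaussianProcess` class from the same project, and
# training/test arrays T, x, y, Ttest that are defined earlier in the original script.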
# ykernel = cov.summed_kernel(cov.sq_exp_kernel(2.5, 1), cov.noise_kernel(0.01))
# Cafeteria Hyperparams (pre-evaluated: the same values as the active block below,
# with the exp() transforms already applied)
# xkernel = cov.summed_kernel(
# 	cov.matern_kernel(33.542, 47517),
# 	cov.linear_kernel(315.46),
# 	cov.noise_kernel(0.53043)
# )
# ykernel = cov.summed_kernel(
# 	cov.matern_kernel(9.8147, 155.36),
# 	cov.linear_kernel(17299),
# 	cov.noise_kernel(0.61790)
# )
# Cafeteria Hyperparams
xkernel = cov.summed_kernel(
	cov.matern_kernel(np.exp(3.5128), np.exp(2*5.3844)),
	cov.linear_kernel(np.exp(-2*-2.8770)),
	cov.noise_kernel(np.exp(2*-0.3170))
)
ykernel = cov.summed_kernel(
	cov.matern_kernel(np.exp(2.2839), np.exp(2*2.5229)),
	cov.linear_kernel(np.exp(-2*-4.8792)),
	cov.noise_kernel(np.exp(2*-0.2407))
)
xgp = GaussianProcess(T, x, Ttest, xkernel)
ygp = GaussianProcess(T, y, Ttest, ykernel)

# PLOTS:

# draw samples from the prior at our test points.
xs = xgp.sample_prior(10)
ys = ygp.sample_prior(10)
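
# For reference, a minimal NumPy-only sketch of what "drawing samples from the prior"
# amounts to, assuming a kernel callable k(a, b) that returns the covariance matrix
# between two arrays of time stamps (that callable and its signature are assumptions,
# not the com.ntraft.covariance interface): evaluate K at the test points, then push
# standard normal draws through its Cholesky factor.
def sample_gp_prior(k, t_test, n_samples, jitter=1e-9):
    K = k(t_test, t_test)                        # prior covariance at the test points
    L = np.linalg.cholesky(K + jitter * np.eye(len(t_test)))  # jitter keeps K positive definite
    z = np.random.randn(len(t_test), n_samples)  # independent standard normal draws
    return np.dot(L, z)                          # each column is one sampled prior path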
Example #3
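# NOTE (assumed context): like the previous snippet, this is a mid-file excerpt. It relies
# on `numpy as np`, `com.ntraft.covariance as cov`, a `GaussianProcess` class from the same
# project, a plotting module imported as `pl` (presumably matplotlib.pyplot), and arrays
# T, x, y, Ttest defined earlier in the original script.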
# xkernel = cov.summed_kernel(cov.sq_exp_kernel(2.5, 1), cov.noise_kernel(0.01))
# ykernel = cov.summed_kernel(cov.sq_exp_kernel(2.5, 1), cov.noise_kernel(0.01))
# Cafeteria Hyperparams (pre-evaluated: the same values as the active block below,
# with the exp() transforms already applied)
# xkernel = cov.summed_kernel(
# 	cov.matern_kernel(33.542, 47517),
# 	cov.linear_kernel(315.46),
# 	cov.noise_kernel(0.53043)
# )
# ykernel = cov.summed_kernel(
# 	cov.matern_kernel(9.8147, 155.36),
# 	cov.linear_kernel(17299),
# 	cov.noise_kernel(0.61790)
# )
# Cafeteria Hyperparams
xkernel = cov.summed_kernel(
    cov.matern_kernel(np.exp(3.5128), np.exp(2 * 5.3844)),
    cov.linear_kernel(np.exp(-2 * -2.8770)),
    cov.noise_kernel(np.exp(2 * -0.3170)))
ykernel = cov.summed_kernel(
    cov.matern_kernel(np.exp(2.2839), np.exp(2 * 2.5229)),
    cov.linear_kernel(np.exp(-2 * -4.8792)),
    cov.noise_kernel(np.exp(2 * -0.2407)))
xgp = GaussianProcess(T, x, Ttest, xkernel)
ygp = GaussianProcess(T, y, Ttest, ykernel)

# PLOTS:

# draw samples from the prior at our test points.
xs = xgp.sample_prior(10)
ys = ygp.sample_prior(10)
pl.figure(1)
pl.plot(xs, ys)
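
# The commented-out alternative at the top of this snippet builds its kernels from
# cov.sq_exp_kernel(2.5, 1) plus a small noise term. As a NumPy-only illustration of that
# choice (the parameter roles below -- length scale 2.5, unit signal variance -- are an
# assumption about the library's argument order, not taken from its source):
def sq_exp_cov(a, b, length_scale=2.5, signal_var=1.0):
    """Squared-exponential (RBF) covariance between two 1-D input arrays."""
    d2 = (np.asarray(a)[:, None] - np.asarray(b)[None, :]) ** 2  # pairwise squared distances
    return signal_var * np.exp(-0.5 * d2 / length_scale ** 2)

# A noise_kernel(0.01) term would then amount to adding 0.01 * np.eye(len(a)) when a and b
# are the same set of points.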