# Import paths assumed from the infpy GP examples; adjust for your version.
import numpy as np
from infpy.gp import GaussianProcess
from infpy.gp import SquaredExponentialKernel as SE
from infpy.gp import noise_kernel as noise

def Rwstar_matrix(X, ww, Smooth_lengths, npc):
    """Fit one GP per principal component and collect the fitted weight vectors."""
    ndim = len(Smooth_lengths)
    npts = X.shape[0]
    Rw_stars = np.matrix(np.zeros((npts, npc)))
    for i in range(npc):                  # xrange is Python 2 only
        # With a single component, ww is already a vector; otherwise take row i.
        if npc == 1:
            Y = ww
        else:
            Y = ww[i, :npts]
        kernel = SE(params=Smooth_lengths, dimensions=ndim) + noise([0.15])
        #kernel = SoftenedSquaredExponentialKernel(Smooth_lengths, 1)
        gpnow = GaussianProcess(X, Y, kernel)
        # _alpha: the GP's fitted weight vector (K^-1 y in standard GP regression)
        Rw_stars[:, i] = np.array(gpnow._alpha)
    return Rw_stars
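#
# A minimal usage sketch (not in the original source); the shapes and values
# below are hypothetical: ww holds one weight vector per component and
# Smooth_lengths one length-scale per input dimension.
#
X = np.random.randn(50, 3)            # 50 training points in 3 dimensions
ww = np.random.randn(4, 50)           # 4 components, one weight vector each
Smooth_lengths = [1., 1., 1.]
Rw_stars = Rwstar_matrix(X, ww, Smooth_lengths, npc=4)
print(Rw_stars.shape)                 # (50, 4)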
Example 2
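#
# Shared setup assumed by the plotting examples (reconstructed from the infpy
# GP tutorial, not part of this snippet; import paths and the exact values of
# X and support may differ in your infpy version).
#
from pylab import figure, plot, xlabel, ylabel, title, savefig
from infpy.gp import GaussianProcess, gp_1D_X_range, gp_plot_prediction
from infpy.gp import gp_learn_hyperparameters
from infpy.gp import SquaredExponentialKernel as SE
from infpy.gp import noise_kernel as noise

X = [[-5.], [-2.], [3.], [3.5]]              # 1-D training inputs
Y = [2.5, 2., -.5, 0.]                       # training targets
support = gp_1D_X_range(-10.0, 10.0, .125)   # inputs to predict at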
def plot_gp(gp, fig_title, filename):
  """Plot the training data (global X, Y) and the GP's prediction over support."""
  figure()
  plot([x[0] for x in X], Y, 'ks')        # training points as black squares
  mean, sigma, LL = gp.predict(support)   # predictive mean, stddev and log-likelihood
  gp_plot_prediction(support, mean, sigma)
  xlabel('x')
  ylabel('f(x)')
  title(fig_title)
  savefig('%s.png' % filename)
  savefig('%s.eps' % filename)
  
#
# Create a kernel with reasonable parameters and plot the GP predictions
#
kernel = SE([1.]) + noise(1.)
gp = GaussianProcess(X, Y, kernel)
plot_gp(
  gp=gp,
  fig_title='Initial parameters: kernel = SE([1]) + noise(1)',
  filename='learning_first_guess'
)

#
# Learn the covariance function's parameters (gp_learn_hyperparameters
# maximises the GP's log-likelihood) and replot
#
gp_learn_hyperparameters(gp)
plot_gp(
  gp=gp,
  fig_title='Learnt parameters: kernel = SE([%.2f]) + noise(%.2f)' % (
    kernel.k1.params[0],
    kernel.k2.params[0]       # the snippet was truncated here; noise parameter assumed
  ),
  filename='learning_learnt'  # filename also assumed, to keep the call runnable
)
Example 3
Y = [2.5, 2, -.5, 0.]

def plot_for_kernel(kernel, fig_title, filename):
  """Fit a GP with the given kernel to the (X, Y) data and plot its prediction."""
  figure()
  plot([x[0] for x in X], Y, 'ks')
  gp = GaussianProcess(X, Y, kernel)
  mean, sigma, LL = gp.predict(support)
  gp_plot_prediction(support, mean, sigma)
  xlabel('x')
  ylabel('f(x)')
  title(fig_title)
  savefig('%s.png' % filename)
  savefig('%s.eps' % filename)
  
plot_for_kernel(
  kernel=SE([1.]) + noise(.1),
  fig_title='k = SE + noise(.1)',
  filename='noise_mid'
)

plot_for_kernel(
  kernel=SE([1.]) + noise(1.),
  fig_title='k = SE + noise(1)',
  filename='noise_high'
)

plot_for_kernel(
  kernel=SE([1.]) + noise(.0001),
  fig_title='k = SE + noise(.0001)',
  filename='noise_low'
)
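
#
# Taken together, the three plots show the noise variance acting as a
# smoothing knob: with noise(.0001) the GP essentially interpolates the
# training points, while noise(1.) attributes much of the signal to noise
# and lets the predictive mean pull away from the observations.
#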