verbose=False)

# Fit the GP model to the data
gp.fit(X, y)
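
# Deactivate maximum likelihood estimation for the cross-validation loop:
# keep the correlation parameters fixed at their fitted (MLE) values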
gp.theta0 = gp.theta
gp.thetaL = None
gp.thetaU = None
gp.verbose = False

# Estimate the leave-one-out predictions using the cross_val module
n_jobs = 2  # number of CPUs available for distributing the computation
y_pred = y + cross_val.cross_val_score(gp, X, y=y,
                                       cv=cross_val.LeaveOneOut(y.size),
                                       n_jobs=n_jobs).ravel()

# Compute the empirical explained variance
Q2 = metrics.explained_variance_score(y, y_pred)

# Goodness-of-fit plot
pl.figure()
pl.title('Goodness-of-fit plot (Q2 = %1.2e)' % Q2)
pl.plot(y, y_pred, 'r.', label='Leave-one-out')
pl.plot(y, gp.predict(X), 'k.', label='Whole dataset (nugget=1e-2)')
pl.plot([y.min(), y.max()], [y.min(), y.max()], 'k--')
pl.xlabel('Observations')
pl.ylabel('Predictions')
pl.legend(loc='upper left')
pl.axis('tight')
pl.show()
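
# One-dimensional regression example: interpolation of the noise-free
# function f(x) = x * sin(x) (assumes f and the observation points X are
# defined above)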
# Observations
y = f(X).ravel()

# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
x = np.atleast_2d(np.linspace(0, 10, 1000)).T

# Instantiate a Gaussian Process model
gp = GaussianProcess(corr='cubic', theta0=1e-2, thetaL=1e-4, thetaU=1e-1,
                     random_start=100)

# Fit to data using Maximum Likelihood Estimation of the parameters
gp.fit(X, y)

# Make the prediction on the meshed x-axis (ask for MSE as well)
y_pred, MSE = gp.predict(x, eval_MSE=True)
sigma = np.sqrt(MSE)

# Plot the function, the prediction and the 95% confidence interval based on
# the MSE
fig = pl.figure()
pl.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
pl.plot(X, y, 'r.', markersize=10, label=u'Observations')
pl.plot(x, y_pred, 'b-', label=u'Prediction')
pl.fill(np.concatenate([x, x[::-1]]),
        np.concatenate([y_pred - 1.9600 * sigma,
                        (y_pred + 1.9600 * sigma)[::-1]]),
        alpha=.5, fc='b', ec='None', label='95% confidence interval')
pl.xlabel('$x$')
pl.ylabel('$f(x)$')
pl.ylim(-10, 20)
pl.legend(loc='upper left')
pl.show()
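
# Two-dimensional example: probabilistic classification derived from the
# GP regression of g (assumes the function g, the design points X and the
# plot limit lim are defined above)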
y = g(X)

# Instantiate and fit a Gaussian Process model
gp = GaussianProcess(theta0=5e-1)

# Don't perform MLE or you'll get a perfect prediction for this simple example!
gp.fit(X, y)

# Evaluate real function, the prediction and its MSE on a grid
res = 50
x1, x2 = np.meshgrid(np.linspace(-lim, lim, res),
                     np.linspace(-lim, lim, res))
xx = np.vstack([x1.reshape(x1.size), x2.reshape(x2.size)]).T

y_true = g(xx)
y_pred, MSE = gp.predict(xx, eval_MSE=True)
sigma = np.sqrt(MSE)
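
# Reshape the results back onto the 2D evaluation grid for plotting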
y_true = y_true.reshape((res, res))
y_pred = y_pred.reshape((res, res))
sigma = sigma.reshape((res, res))
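
# 97.5% quantile used for the two-sided 95% confidence bound (PHIinv is
# assumed to be the inverse CDF of the standard normal, defined above)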
k = PHIinv(.975)

# Plot the probabilistic classification iso-values using the Gaussian property
# of the prediction
fig = pl.figure(1)
ax = fig.add_subplot(111)
ax.axes.set_aspect('equal')
pl.xticks([])
pl.yticks([])
ax.set_xticklabels([])
ax.set_yticklabels([])