build_K(xtrain, xtrain, hyp, K)      # fills the preallocated matrix K in place
Ky = K + hyp[-1] * np.eye(ntrain)    # add the noise variance to the diagonal
Kyinv = invert(Ky, 4, 1e-6)          # using gp_functions.invert

ntest = 300
xtest = np.linspace(0, 1, ntest)
ftest = f(xtest)

Ks = np.empty((ntrain, ntest))       # train-test covariance
Kss = np.empty((ntest, ntest))       # test-test covariance
build_K(xtrain, xtest, hyp, Ks)
build_K(xtest, xtest, hyp, Kss)

fmean = Ks.T.dot(Kyinv.dot(ytrain))  # predictive mean

# Posterior mean and variance
Ef, varf = predict_f(hyp, xtrain.reshape(-1, 1), ytrain.reshape(-1, 1),
                     xtest.reshape(-1, 1), neig=8)
varf = np.diag(varf)  # keep only the diagonal: the variances; the off-diagonal terms are covariances

plt.figure()
plt.plot(xtrain, ytrain, 'kx')   # training observations
plt.plot(xtest, ftest, 'm-')     # true function
plt.plot(xtest, fmean, 'r--')    # predictive mean
axes = plt.gca()
axes.set_ylim([-1.5, 1])
plt.title('Random Gaussian Process with ' + str(ntrain) + ' observation(s), hyp = [0.1, 1e-4]')
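# For reference, a minimal sketch of what build_K and invert might look like.
# Both live in gp_functions and are not shown in this section, so the versions
# below are assumptions, not the library code: a squared-exponential kernel
# with hyp[0] read as the length scale (hyp[-1], the noise variance, is added
# to the diagonal separately above), and invert(K, neig, tol) read as a
# pseudo-inverse built from a truncated eigendecomposition. numpy is already
# imported as np above; the _sketch suffix avoids shadowing the real imports.

def build_K_sketch(xa, xb, hyp, K):
    # Fill the preallocated matrix K in place with k(xa[i], xb[j]).
    for i in range(len(xa)):
        for j in range(len(xb)):
            K[i, j] = np.exp(-0.5 * ((xa[i] - xb[j]) / hyp[0]) ** 2)

def invert_sketch(K, neig, tol):
    # Pseudo-inverse from the neig largest eigenpairs; eigenvalues below
    # tol are dropped for numerical stability.
    w, Q = np.linalg.eigh(K)                      # ascending eigenvalues
    w, Q = w[::-1][:neig], Q[:, ::-1][:, :neig]   # keep the neig largest
    keep = w > tol
    return (Q[:, keep] / w[keep]).dot(Q[:, keep].T)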
# Compute the joint covariance matrix C of training and test points
C = np.block([[K + (sigma_n / sigma_f) ** 2 * np.eye(n), np.transpose(K_star)],
              [K_star, K_star2]])

# Plot C
plt.figure()
plt.imshow(C)
plt.colorbar()

# Plot the function f(x)
plt.figure()
plt.plot(x, f_x, 'r')

# Plot n_prior samples from the prior distribution (100 samples):
# mean zero, covariance given by K_star2
n_prior = 100
for i in range(n_prior):
    f_star = np.random.multivariate_normal(np.zeros(n_star), K_star2)
    plt.plot(x_star, f_star, 'b', linewidth=0.5)

# Compute the posterior mean and covariance, timing the call
t = time.time()
f_bar_star, cov_f_star = predict_f(a, x, y, x_star, RBF, return_full_cov=True, neig=8)
elapsed = time.time() - t
print('predict_f time:', elapsed, 's')

# Plot the posterior covariance matrix
plt.figure()
plt.imshow(cov_f_star)

# Plot n_prior samples from the posterior distribution and the true function,
# with a +/- 2 sigma band around the posterior mean
plt.figure()
for i in range(n_prior):
    f_posterior = np.random.multivariate_normal(f_bar_star[:, 0], cov_f_star)
    plt.plot(x_star, f_posterior)
plt.plot(x, f_x, 'r')  # true function
plt.fill_between(x_star.flatten(),
                 f_bar_star.flatten() + 2 * np.sqrt(np.diag(cov_f_star)),
                 f_bar_star.flatten() - 2 * np.sqrt(np.diag(cov_f_star)))
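# A hedged cross-check of predict_f against the closed-form GP posterior,
#   f_bar  = K_star (K + (sigma_n/sigma_f)^2 I)^-1 y
#   cov    = K_star2 - K_star (K + (sigma_n/sigma_f)^2 I)^-1 K_star^T,
# using the same shapes as in the block matrix C above (K_star is (n_star, n)).
# This sketch is not part of gp_functions; since predict_f truncates to
# neig=8 eigenvalues, expect agreement only up to that approximation.
L_chol = np.linalg.cholesky(K + (sigma_n / sigma_f) ** 2 * np.eye(n))
alpha = np.linalg.solve(L_chol.T, np.linalg.solve(L_chol, y))  # (K + noise*I)^-1 y
f_bar_check = K_star.dot(alpha)                                # posterior mean
V = np.linalg.solve(L_chol, K_star.T)                          # L^-1 K_star^T
cov_check = K_star2 - V.T.dot(V)                               # posterior covariance
print(np.max(np.abs(f_bar_check.flatten() - f_bar_star.flatten())))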