# Two-level NARGP demo: train an ordinary GP on the low-fidelity surface E0,
# condition a NARGP level on its posterior, then predict both levels on E1.
# NOTE(review): assumes E0/E1 are pre-loaded 2-column (x, y) arrays and that
# `np`, `ordinary`, `nargp` and `matrix` are imported earlier — confirm.

# Level 0 training set: Nt0 random rows of E0
# NOTE(review): randint may draw repeated indices — confirm that is acceptable.
Nt0 = 36
i0 = np.random.randint(0, len(E0), size=Nt0)
T0 = np.array([E0[i] for i in i0])
X0, Y0 = np.split(T0, 2, axis=1)

# Level 1 training set: a subsample of the level-0 indices, evaluated on E1
# so the two fidelities are nested.
# NOTE(review): np.random.choice samples WITH replacement here, so duplicate
# training points are possible — verify that is intended.
Nt1 = 10
i1 = np.random.choice(i0, size=Nt1)
T1 = np.array([E1[i] for i in i1])
X1, Y1 = np.split(T1, 2, axis=1)

# Test set
# NOTE(review): `matrix.Col` is an opaque helper — presumably reshapes the
# first column of E1 into a column vector; confirm against its definition.
Xtest = matrix.Col(E1[:, 0])

# Optimize level 1
m0 = ordinary.optimize(X0, Y0)

# Optimize level 2: condition the NARGP level on the level-1 posterior at X1
mu0, C0 = ordinary.predict(m0, X1, full_cov=True)
m1 = nargp.optimize(mu0, X1, Y1)

# Predict level 1 (mu0/C0 are deliberately re-bound to the test-set posterior)
mu0, C0 = ordinary.predict(m0, Xtest, full_cov=True)
S0 = np.sqrt(np.diag(C0))

# Predict Level 2
mu1, C1 = nargp.predict(m1, mu0, C0, Xtest)
S1 = np.sqrt(C1)

# Row vectors for plotting
mu0, S0 = np.ravel(mu0), np.ravel(S0)
# Dimensions of system dim = 3 active_dimensions=np.arange(0, dim) f = open("rmse.dat", "a") for N1, N2 in zip(Nt1, Nt2): # Level 1 training set train, test = data_sampler.smart_random(E1, N1, n_test=None) X1, Y1 = np.split(E1[train], [dim], axis=1) # Train level 1 k1 = GPy.kern.RBF(dim, ARD=True) m1 = ordinary.optimize(X1, Y1, k1, normalize=True, restarts=12) mu1, v1 = ordinary.predict(m1, Xtest, full_cov=True) # Level 2 training set train2, test2 = data_sampler.smart_random2(E2, N2, train, test) X2, Y2 = np.split(E2[train2], [dim], axis=1) # Predict level 1 at X2 mu1_, v1_ = ordinary.predict(m1, X2, full_cov=True) # Train level 2 XX = np.hstack((X2, mu1_)) k2 = GPy.kern.RBF(1, active_dims = [dim]) * GPy.kern.RBF(dim, active_dims = active_dimensions, ARD = True) \ + GPy.kern.RBF(dim, active_dims = active_dimensions, ARD = True) m2 = GPy.models.GPRegression(X=XX, Y=Y2, kernel=k2, normalizer=True) m2.optimize(max_iters=1000)
# Ordinary-GP demo on the 2-D `my_high` test function, followed by a 3-D
# surface plot of the exact response.
# NOTE(review): Xtrain, idx, N1, dim, my_high, scale_range, GPy, np, plt and
# ml (matplotlib.mlab) must be in scope from earlier in the file — confirm.

# High-fidelity training set: the first N1 pre-shuffled rows of Xtrain
X1 = Xtrain[idx[0:N1], :]
Y1 = np.array([my_high(*i) for i in X1])[:, np.newaxis]

# Domain bounds and the 50x50 plotting grid axes
lb = np.array([-1.5, -0.5])
ub = np.array([0.9, 0.75])
x1 = np.linspace(lb[0], ub[0], 50)
x2 = np.linspace(lb[1], ub[1], 50)

# 1000 uniform random test points scaled into the [lb, ub] box
# NOTE(review): scale_range is called as (points, ub, lb) — verify that
# helper's expected argument order.
tmp = np.random.rand(1000,2)
Xtest = scale_range(tmp,ub,lb)

active_dimensions = np.arange(0,dim)

# Ordinary GP with an ARD RBF kernel
kernel = GPy.kern.RBF(dim, ARD=True)
model = ordinary.optimize(X1, Y1, kernel, normalize=False)
mu, v = ordinary.predict(model, Xtest)

# Calculate error: relative L2 norm of the residual against the exact values
Exact = np.array([my_high(*i) for i in Xtest])
Exact = Exact[:, np.newaxis]
ogp_error = np.linalg.norm(Exact - mu)/np.linalg.norm(Exact)
print("error: ", ogp_error)

# Exact plot
# NOTE(review): mlab.griddata was removed in matplotlib >= 3.0, so this
# requires an old matplotlib (or a port to scipy.interpolate.griddata).
X, Y = np.meshgrid(x1, x2)
Exactplot = ml.griddata(Xtest[:,0], Xtest[:,1], Exact[:, 0], X, Y, interp='linear')
fig = plt.figure(1)
ax1 = fig.add_subplot(111, projection='3d')
ax1.plot_surface(X, Y, Exactplot, color = '#377eb8', rstride=2, cstride=2, linewidth=0, antialiased=True, shade = True, alpha = 0.6)
"""Single-fidelity GP regression on the CCSD(T)/5Z surface.

Loads the ab initio surface, shifts the energies by a fixed reference
value, trains an ordinary ARD-RBF GP on a smart-random training subset,
and reports the RMSE over the full surface in milli-Hartree.
"""
import GPy
import numpy as np

from ARGP import ordinary
from ARGP import data_sampler

# Load the surface; last column holds the energy, shifted to a reference.
E = np.loadtxt("surfaces/ccsd-t-5z.dat", delimiter=',', skiprows=1)
E[:, -1] += 76.203896662997

# Every geometry in the file doubles as a test point.
Xtest = E[:, :-1]
dim = Xtest.shape[1]

# Draw the training rows with the project's smart-random sampler.
Nt = 120
train, test = data_sampler.smart_random(E, Nt, n_test=None)
T = E[train]
X1, Y1 = np.split(T, [dim], axis=1)

# Fit an ordinary GP with an ARD RBF kernel, then predict everywhere.
kernel = GPy.kern.RBF(dim, ARD=True)
model = ordinary.optimize(X1, Y1, kernel, normalize=True, restarts=10)
mu, v = ordinary.predict(model, Xtest)

# RMSE against the exact energies, reported in mEh.
exact = E[:, -1].reshape(-1, 1)
error = 1000 * np.sqrt(np.mean((mu - exact)**2))
print("Test error: {:>5.3} mEh".format(error))
np.random.seed(0) # Load ab initio surface E = np.loadtxt("surfaces/mrci-pcv5z.tab") E[:, 1] += 109.15851906 # Training set Nt = 10 index = np.random.randint(0, len(E), size=Nt) X, Y = np.split(E[index], 2, axis=1) # Test set Xtest = E[:, 0].reshape(-1, 1) # Train ordinary model m = ordinary.optimize(X, Y, normalize=True) mu, C = m.predict(Xtest, full_cov=True) S = np.sqrt(np.diag(C)) mu, S = np.ravel(mu), np.ravel(S) rmse = 1000 * np.sqrt(((mu - E[:, 1])**2).mean()) print("Prediction Error: {:>9.4f} mEh".format(rmse)) if __name__ == '__main__': # Size of confidence interval ns = 3 # Plotting plt.xlim(0.8, 2.35) plt.ylim(-0.4, 0.6)