def gpmake(X, Y, name):
    #Fit a GP regression with an RBF kernel to (X, Y) and store it in the
    #module-level Models dict under the given name
    print("Making {}".format(name))
    kernel = GPy.kern.RBF(M)
    noise = 0.1
    normalizer = False
    gp = reg(X, Y, kernel, noise_var=noise, normalizer=normalizer)
    gp.optimize()
    Models[name] = gp
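
#A minimal usage sketch (an assumption, not part of the original source): the
#module-level names M, Models and reg that gpmake relies on are stand-ins
#defined here only so the call below can run on its own, with illustrative data.
import numpy as np
import GPy
from GPy.models.gp_regression import GPRegression as reg

M = 1                          #assumed input dimension
Models = {}                    #store that gpmake writes into
Xdemo = np.linspace(-1, 1, 20).reshape((20, M))
Ydemo = Xdemo**3 + 0.05 * np.random.normal(size=(20, 1))
gpmake(Xdemo, Ydemo, 'demo')   #fits and stores the emulator as Models['demo']
print(Models['demo'])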
Example #2
def gpmake(X, Y, name):
    #Fit GP emulators and store them in the module-level Models dict. The ENKI
    #training data gets a single emulator; any other data set gets one emulator
    #trained on that data alone ('E500') and a second trained on the data
    #appended to the ENKI inputs/outputs ('E2100').
    print("Making {}".format(name))
    kernel = GPy.kern.RBF(M)
    noise = 0.1
    normalizer = False
    if name == 'ENKI':
        gp = reg(X, Y, kernel, noise_var=noise, normalizer=normalizer)
        gp.optimize()
        Models[name] = gp
    else:
        gp = reg(X, Y, kernel, noise_var=noise, normalizer=normalizer)
        gp.optimize()
        Models[name + 'E500'] = gp
        del gp
        gp2 = reg(np.append(XENKI, X, axis=0),
                  np.append(YENKI, Y, axis=0),
                  kernel,
                  noise_var=noise,
                  normalizer=normalizer)
        gp2.optimize()
        Models[name + 'E2100'] = gp2
        del gp2
def gptest(p, i, v):
    #Fit a GP with the p-th kernel from kerns, noise variance i and normalizer
    #setting v (0 -> False, 1 -> True, anything else -> None), then report the
    #mean squared prediction error on the test set and plot the fit
    if v == 0:
        V = False
    elif v == 1:
        V = True
    else:
        V = None
    gp = reg(xtrain, ytrain, kerns[p], noise_var=i, normalizer=V)
    gp.optimize()
    ymodel = gp.predict(xtest)
    meansqdiff = np.mean((ytest - ymodel[0])**2)
    print(meansqdiff)
    fig = gp.plot()
    GPy.plotting.show(fig)
    plt.scatter(xtest, ytest)
    plt.show()
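
#A hedged usage sketch (not from the original source): the module-level names
#xtrain, ytrain, xtest, ytest and kerns that gptest reads are stand-ins defined
#here only so a single call can run end to end.
import numpy as np
import GPy
from GPy.models.gp_regression import GPRegression as reg
import matplotlib.pyplot as plt

M = 1
xtrain = np.linspace(-5, 5, 40).reshape((40, 1))
ytrain = xtrain**2 + np.random.normal(size=(40, 1))
xtest = np.linspace(-5, 5, 15).reshape((15, 1))
ytest = xtest**2 + np.random.normal(size=(15, 1))
kerns = [GPy.kern.RBF(M), GPy.kern.Matern32(M)]

gptest(0, 0.1, 1)   #RBF kernel, noise variance 0.1, normalizer=True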
        print("reject")
        return z
    else:
        print("accept")
        return zstar
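
#The lines above are only the tail of an accept/reject step. A self-contained
#sketch of a Metropolis-style step with the same reject/accept behaviour (an
#assumption, since the head of the original function is not shown) could be:
import numpy as np

def metropolis_step(z, zstar, logpost):
    #Accept the proposal zstar with probability min(1, p(zstar)/p(z));
    #otherwise keep the current state z
    log_alpha = logpost(zstar) - logpost(z)
    if np.log(np.random.uniform()) > log_alpha:
        print("reject")
        return z
    else:
        print("accept")
        return zstar

#Example with a standard normal target
logpost = lambda z: -0.5 * np.sum(z**2)
z = np.zeros(1)
zstar = z + np.random.normal(scale=0.5, size=1)
z = metropolis_step(z, zstar, logpost)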


#First stage is to train the Gaussian Emulator
#Train on one parameter initially
print("Train GP")
Tsize = 100  #Size of training set
X = np.linspace(-100, 100, Tsize).reshape((Tsize, 1))
Y = X**3 + 20 * np.random.normal(loc=0.0, scale=1.0, size=Tsize).reshape(
    (Tsize, 1))
print("Gaussian Process")
gp = reg(X, Y, noise_var=0.1, normalizer=True)
gp.optimize()
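
#A hedged illustration (not part of the original script): the trained emulator
#can be queried for a predictive mean and variance at new inputs
xq = np.array([[5.0]])
mu, var = gp.predict(xq)
print("emulator prediction at x=5:", mu, var)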

#Obtain the long run statistics (true data)
obs = np.zeros((100))
for i in range(0, 100):
    obs[i] = ztrue[:M]**3 + np.random.normal(loc=0.0, scale=1.0)

ztrue[M:] = np.mean(obs)

#Iterate
print("initial positions")
samples = np.zeros((ITER, chains, M))
#Start the initial Markov chains (In future possibly consider starting these in different places)
for i in range(0, chains):
    samples[0, i, :] = z_ENKI + np.random.normal(loc=0.0, scale=10.0)
kerns = [
    GPy.kern.RBF(M),
    GPy.kern.Exponential(M),
    GPy.kern.Matern32(M),
    GPy.kern.Matern52(M),
    GPy.kern.Brownian(M),
    GPy.kern.Bias(M),
    GPy.kern.Linear(M),
    GPy.kern.PeriodicExponential(M),
    GPy.kern.White(M)
]
for p in range(0, 9):
    k = kerns[p]
    for i in [0.0, 0.1, 0.5, 1, 10, 100]:
        for V in [True, False, None]:
            gp = reg(xtrain, ytrain, k, noise_var=i, normalizer=V)
            gp.optimize()
            #print(M,k,i,V)
            #print(gp)
            ymodel = gp.predict(xtest)
            #meandiff=np.abs(np.mean(ytest-ymodel[0]))
            #meansqdiff=np.mean((ytest-ymodel[0])**2)
            #meanstddiff=np.abs(np.mean((ytest-ymodel[0])/ymodel[1]))
            #meanstdsqdiff=np.mean(((ytest-ymodel[0])/ymodel[1])**2)
            #pdf=np.prod(st.norm.pdf(ytest,loc=ymodel[0],scale=ymodel[1]))
            gpcont = ()
            B = 0
            while True:
                try:
                    gpcont = np.append(gpcont, gp[B])
                    B += 1
                except Exception:
                    #assumed completion: stop at the first index that cannot
                    #be read from the model
                    break
        #rejected: keep the current state and return an accept flag of 0
        return z, 0
    else:
        #print("accept")
        #accepted: move to the proposal and return an accept flag of 1
        return zstar, 1
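
#A hedged illustration (an assumption, not from the original source): the 0/1
#flag returned above can be accumulated into an acceptance rate over a chain.
import numpy as np

def dummy_step(z):
    #stand-in for the accept/reject step, accepting roughly 30% of proposals
    zstar = z + np.random.normal(scale=0.5, size=z.shape)
    return (zstar, 1) if np.random.uniform() < 0.3 else (z, 0)

z = np.zeros(1)
accepts = 0
nsteps = 1000
for t in range(nsteps):
    z, flag = dummy_step(z)
    accepts += flag
print("acceptance rate:", accepts / nsteps)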


#First stage is to train the Gaussian Emulator
#Train on one parameter initially
print("Train GP")
X = np.load(
    "/home/jprosser/Documents/WorkPlacements/Caltech/Data/parameters.npy"
)[:1000, :]
Y = np.load("/home/jprosser/Documents/WorkPlacements/Caltech/Data/Output.npy"
            )[:1000, :]
print("Gaussian Process")
gp = reg(X, Y)
gp.optimize()
#fig=gp.plot()
#GPy.plotting.show(fig)
#plt.show()

#Obtain the long run statistics (true data)
ztrue = np.load(
    "/home/jprosser/Documents/WorkPlacements/Caltech/Data/ztrue.npy")
print(ztrue)

#Iterate
print("initial positions")
samples = np.zeros((ITER + burnin, chains, M))
#Start the initial Markov chains (In future possibly consider starting these in different places)
for i in range(0, chains):
import GPy
import numpy as np
from GPy.models.gp_regression import GPRegression as reg
import matplotlib.pyplot as plt

#Define input data (observations)
xmax = 10
xmin = -10
N = 50
X = np.linspace(xmin, xmax, N).reshape((N, 1))
Y = X**2 + np.random.normal(size=N).reshape((N, 1))

#Create the GP regression with a Brownian-motion kernel (the hyperparameters
#are left at their initial values; no optimization step is run here)

k = GPy.kern.Brownian(input_dim=1)

gp = reg(X, Y, k, noise_var=1, normalizer=True)

#Plot the resulting GP fit

fig = gp.plot()

GPy.plotting.show(fig)
plt.show()
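
#A possible follow-up (an assumption, not part of the original script): optimize
#the kernel and noise hyperparameters before plotting, and compare the Brownian
#kernel against an RBF kernel on the same data
gp.optimize()
print(gp)

gp_rbf = reg(X, Y, GPy.kern.RBF(input_dim=1), noise_var=1, normalizer=True)
gp_rbf.optimize()
fig = gp_rbf.plot()
GPy.plotting.show(fig)
plt.show()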