Example #1
import numpy as np
from scipy.optimize import minimize

from profit.util.halton import halton
# RBF and negative_log_likelihood_cholesky are assumed to come from proFit's
# GP backend; the exact module path depends on the proFit version.


def test_optim():
    # Optimize the kernel hyperparameters by minimizing the negative
    # log-likelihood over log10-scaled hyperparameters.
    ntrain = 128
    xtrain = halton(ntrain, 2)
    l = np.array([0.5, 0.5])
    sigf = 1.0
    sign = 1e-4
    hyp = np.hstack([l, sigf, sign])  # length scales, signal and noise parameters

    ytrain = np.sin(2 * xtrain[:, 0]) + np.cos(3 * xtrain[:, 1])

    loghyp0 = np.log10(hyp)

    def cost(loghyp):
        h = 10**loghyp
        val, jac = negative_log_likelihood_cholesky(h,
                                                    xtrain,
                                                    ytrain,
                                                    RBF,
                                                    eval_gradient=True,
                                                    log_scale_hyp=True)
        return val, h * np.log(10.0) * jac

    opt_res = minimize(cost,
                       loghyp0,
                       method='L-BFGS-B',
                       jac=True,
                       bounds=((-2, 2), (-2, 2), (-2, 2), (-4, 0)))  # bounds in log10 scale
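A possible follow-up, not part of the original snippet: since the optimizer works on log10-scaled hyperparameters, the fitted values would be recovered by undoing that transform.

    hyp_opt = 10**opt_res.x  # back-transform the optimized hyperparameters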
Example #2
import numpy as np

from profit.util.halton import halton
# RBF and solve_cholesky are assumed to come from proFit's GP backend;
# the exact module path depends on the proFit version.


def test_fit_manual():
    # Fit a GP with fixed hyperparameters "by hand" and check the posterior
    # mean on a regular test grid.
    ntrain = 128
    xtrain = halton(ntrain, 2)
    l = np.array([0.5, 0.5])
    sigf = 1.0
    sign = 1e-4
    hyp = np.hstack([l, sigf, sign])

    ytrain = np.sin(2 * xtrain[:, 0]) + np.cos(3 * xtrain[:, 1])
    Ky = RBF(xtrain, xtrain, hyp[:-2], *hyp[-2:])  # train-train covariance incl. noise
    L = np.linalg.cholesky(Ky)
    alpha = solve_cholesky(L, ytrain)  # alpha = Ky^{-1} ytrain

    n1test = 20
    n2test = 20
    ntest = n1test * n2test
    Xtest = np.mgrid[0:1:1j * n1test, 0:1:1j * n2test]
    xtest = Xtest.reshape([2, ntest]).T

    KstarT = RBF(xtest, xtrain, l, 1, 0)  # test-train cross-covariance, no noise
    ymean = KstarT @ alpha  # GP posterior mean
    yref = np.sin(2 * xtest[:, 0]) + np.cos(3 * xtest[:, 1])
    assert np.allclose(ymean, yref, rtol=1e-3, atol=1e-3)
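The test only checks the posterior mean. A sketch of the corresponding posterior variance, using the standard Cholesky identity and assuming RBF returns dense covariance matrices (not part of the original test):

    v = np.linalg.solve(L, KstarT.T)  # L^{-1} K(X, X*)
    yvar = RBF(xtest, xtest, l, 1, 0).diagonal() - np.sum(v**2, axis=0)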
Example #3
import numpy as np

from profit.sur.backend.kernels import kern_sqexp
from profit.sur.backend.gp_functions import invert  # assumed import path; the comment below refers to gp_functions.invert
from profit.util.halton import halton

def f(x): return x*np.cos(10*x)

# Custom function to build GP matrix
def build_K(xa, xb, hyp, K):
    for i in np.arange(len(xa)):
        for j in np.arange(len(xb)):
            K[i, j] = kern_sqexp(xa[i], xb[j], hyp[0])

noise_train = 0.01

#ntrain = 20
for ntrain in range(1, 31):
    xtrain = halton(ntrain, 1)  # ntrain samples in one dimension
    ftrain = f(xtrain)
    np.random.seed(0)
    ytrain = ftrain + noise_train*np.random.randn(ntrain, 1)

    # GP regression with fixed kernel hyperparameters
    hyp = [0.1, 1e-4]  # l and sig_noise**2

    K = np.empty((ntrain, ntrain))   # train-train
    build_K(xtrain, xtrain, hyp, K)  # writes inside K
    Ky = K + hyp[-1]*np.eye(ntrain)
    Kyinv = invert(Ky, 4, 1e-6)       # using gp_functions.invert

    ntest = 300
    xtest = np.linspace(0, 1, ntest)
    ftest = f(xtest)
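A sketch of the prediction step that would typically follow (not part of the captured snippet), reusing build_K and the precomputed Kyinv:

    Ks = np.empty((ntest, ntrain))  # test-train covariances
    build_K(xtest, xtrain, hyp, Ks)
    fmean = Ks @ (Kyinv @ ytrain)   # GP posterior mean at the test points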
Example #4
import numpy as np
from func import (integrate_pendulum)
from param import Nm, N, dtsymp, qmin, qmax, pmin, pmax
from profit.util.halton import halton

# specify initial conditions (q0, p0)
method = 'all'
# samples = bluenoise(2, N, method, qmin, qmax, pmin, pmax)
samples = halton(N, 2) * np.array([qmax - qmin, pmax - pmin]) + np.array(
    [qmin, pmin])

q = samples[:, 0]
p = samples[:, 1] * 1.5

t0 = 0.0  # starting time
t1 = dtsymp * Nm  # end time
t = np.linspace(t0, t1, Nm)  # integration points in time

# integrate pendulum to provide data
ysint = integrate_pendulum(q, p, t)

#%%
# extract the end point (Q, P) of each integrated trajectory
P = np.empty(N)
Q = np.empty(N)
for ik in range(N):
    P[ik] = ysint[ik][1, -1]
    Q[ik] = ysint[ik][0, -1]

# training targets: position increment Q - q and momentum decrement p - P
zqtrain = Q - q
zptrain = p - P
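These residuals would serve as training targets for a GP surrogate of the pendulum map. A minimal numpy-only sketch of such a fit, with an assumed RBF kernel and hypothetical length-scale and jitter values (not part of the original script):

    xtrain = np.column_stack([q, p])  # 2-D training inputs (q, p)
    d2 = np.sum((xtrain[:, None, :] - xtrain[None, :, :])**2, axis=-1)
    Kq = np.exp(-d2 / (2 * 0.5**2)) + 1e-6 * np.eye(N)  # RBF kernel, length scale 0.5 (assumed)
    alpha_q = np.linalg.solve(Kq, zqtrain)  # weights for predicting zq at new (q, p)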
Example #5
import numpy as np

from profit.util import halton
from profit.sur.backend.gpfunc import *

ndim = 1
ntrain = 10

xtrain = np.asfortranarray(halton.halton(ntrain, ndim))
K = np.empty((ntrain, ntrain), order='F')

print(K.shape)
print(xtrain.shape)
print(ntrain)

gpfunc.build_k_sqexp(xtrain, xtrain, K)  # fills K in place with the squared-exponential kernel
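A possible continuation, not part of the original snippet, following the same pattern as the pure-Python examples above; ytrain is a hypothetical target and K is assumed to be a valid kernel matrix:

    ytrain = np.sin(2 * np.pi * xtrain[:, 0])  # hypothetical training targets
    Ky = K + 1e-4 * np.eye(ntrain)             # add a noise/jitter term on the diagonal
    L = np.linalg.cholesky(Ky)
    alpha = np.linalg.solve(L.T, np.linalg.solve(L, ytrain))  # Ky^{-1} ytrain via the Cholesky factor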