# Example 1
"""
Basic demo showing how to instantiate a simple GP model, add data to it, and
optimize its hyperparameters.
"""

import os
import numpy as np
import matplotlib.pyplot as pl

import pygp
import pygp.plotting as pp


if __name__ == '__main__':
    # locate and load the demo data stored alongside this script.
    here = os.path.abspath(os.path.dirname(__file__))
    archive = np.load(os.path.join(here, 'xy.npz'))
    inputs = archive['X']
    targets = archive['y']

    # build a basic GP, attach the observations, and fit the
    # hyperparameters by maximizing the marginal likelihood.
    model = pygp.BasicGP(sn=.1, sf=1, ell=.1, mu=0)
    model.add_data(inputs, targets)
    pygp.optimize(model)

    # visualize the resulting posterior.
    pl.figure(1)
    pl.clf()
    pp.plot_posterior(model)
    pl.legend(loc=2)
    pl.draw()
    pl.show()
# Example 2
    # create a prior structure over the hyperparameters so the
    # sample-based meta-models below can draw from it.
    priors = {
        'sn': pygp.priors.Uniform(0.01, 1.0),
        'sf': pygp.priors.Uniform(0.01, 5.0),
        'ell': pygp.priors.Uniform(0.01, 1.0),
        'mu': pygp.priors.Uniform(-2, 2)}

    # create sample-based models.
    mcmc = pygp.meta.MCMC(model, priors, n=200, burn=100)
    smc = pygp.meta.SMC(model, priors, n=200)

    pl.figure(1)
    pl.clf()

    # panel 1: the type-II ML point estimate.
    pl.subplot(131)
    pp.plot_posterior(model)
    pl.title('Type-II ML')
    pl.legend(loc='best')

    # remember these limits so all three panels share the same axes.
    axis = pl.axis()

    # panel 2: MCMC-marginalized posterior.
    pl.subplot(132)
    pp.plot_posterior(mcmc)
    pl.axis(axis)
    pl.title('MCMC')

    # panel 3: SMC-marginalized posterior. FIX: this previously plotted
    # the MCMC model again, so the smc model was never shown.
    pl.subplot(133)
    pp.plot_posterior(smc)
    pl.axis(axis)
    pl.title('SMC')
    pl.draw()
# Example 3
    # create the sparse GPs: FITC and DTC approximations of the dense
    # model gp1 (defined earlier, outside this excerpt).
    U = np.linspace(-1.3, 2, 10)[:, None]  # 10x1 column of pseudo-input locations
    gp2 = pygp.inference.FITC.from_gp(gp1, U)
    gp3 = pygp.inference.DTC.from_gp(gp1, U)

    # find the ML parameters for each model independently.
    pygp.optimize(gp1)
    pygp.optimize(gp2)
    pygp.optimize(gp3)

    # plot the dense gp in the first of three panels.
    pl.figure(1)
    pl.clf()
    pl.subplot(131)
    pp.plot_posterior(gp1)
    pl.title("Full GP")

    # grab the axis limits so the sparse panels can share them.
    axis = pl.axis()

    # plot the FITC sparse gp (pseudo-inputs marked on the plot).
    pl.subplot(132)
    pp.plot_posterior(gp2, pseudoinputs=True)
    pl.title("Sparse GP (FITC)")
    pl.axis(axis)
    pl.draw()

    # plot the DTC sparse gp. NOTE(review): the excerpt is truncated here;
    # the matching pl.axis(axis)/pl.draw() calls presumably follow.
    pl.subplot(133)
    pp.plot_posterior(gp3, pseudoinputs=True)
# Example 4
def test_plot_posterior():
    """Smoke tests for pg.plot_posterior on a sparse (FITC) GP model."""
    # wrap a basic GP into a FITC approximation over a 10-point grid.
    gp = pygp.BasicGP(1, 1, 1)
    gp = pygp.inference.FITC.from_gp(gp, mr.grid((0, 1), 10))

    # with no data added, calling plot_posterior without explicit bounds
    # must raise ValueError (presumably because the plot limits cannot be
    # inferred -- confirm against the plotting module).
    nt.assert_raises(ValueError, pg.plot_posterior, gp)

    # exercise the main keyword options with explicit bounds (0, 1); these
    # only need to run without raising.
    pg.plot_posterior(gp, 0, 1)
    pg.plot_posterior(gp, 0, 1, error=False)
    pg.plot_posterior(gp, 0, 1, mean=False)
    pg.plot_posterior(gp, 0, 1, data=False)
    pg.plot_posterior(gp, 0, 1, pseudoinputs=False)
    pg.plot_posterior(gp, 0, 1, color='b')
    pg.plot_posterior(gp, 0, 1, lw=3)
    pg.plot_posterior(gp, 0, 1, ls='.')

    # once data is attached, bounds should be inferred automatically.
    X = mr.grid((0, 1), 10)
    y = np.random.rand(10)

    gp.add_data(X, y)
    pg.plot_posterior(gp)
# Example 5

if __name__ == "__main__":
    # read the series that sits beside this script and drop the censored
    # entries (values at or below -99 mark missing data).
    base = os.path.abspath(os.path.dirname(__file__))
    raw = np.loadtxt(os.path.join(base, "maunaloa.txt")).flatten()
    data = np.array([(i, v) for i, v in enumerate(raw) if v > -99])

    # rescale to keep the ranges reasonable: monthly sample index
    # becomes a fractional year starting at 1958.
    X = data[:, 0, None] / 12.0 + 1958
    y = data[:, 1]

    # kernel hyperparameters near those called for in Rasmussen and
    # Williams; they give reasonable results so the fit step is skipped.
    kernel = (pk.SE(67, 66) +
              pk.SE(2.4, 90) * pk.Periodic(1, 1, 1) +
              pk.RQ(1.2, 0.66, 0.78) +
              pk.SE(0.15, 0.15))

    # Gaussian likelihood with this standard deviation.
    likelihood = pygp.likelihoods.Gaussian(sigma=0.2)

    # assemble the exact-inference model around the data mean and attach
    # the observations.
    gp = pygp.inference.ExactGP(likelihood, kernel, y.mean())
    gp.add_data(X, y)

    # render the posterior.
    pl.figure(1)
    pl.clf()
    pp.plot_posterior(gp, mean=False, xmax=2020, marker=".")
    pl.legend(loc="upper left")
    pl.draw()
    pl.show()
# Example 6
    # prior structure over the hyperparameters for the sample-based
    # meta-models below.
    priors = {
        'sn': pygp.priors.Uniform(0.01, 1.0),
        'sf': pygp.priors.Uniform(0.01, 5.0),
        'ell': pygp.priors.Uniform(0.01, 1.0),
        'mu': pygp.priors.Uniform(-2, 2)
    }

    # create sample-based models.
    mcmc = pygp.meta.MCMC(model, priors, n=200, burn=100)
    smc = pygp.meta.SMC(model, priors, n=200)

    pl.figure(1)
    pl.clf()

    # panel 1: the type-II ML point estimate.
    pl.subplot(131)
    pp.plot_posterior(model)
    pl.title('Type-II ML')
    pl.legend(loc='best')

    # remember these limits so all three panels share the same axes.
    axis = pl.axis()

    # panel 2: MCMC-marginalized posterior.
    pl.subplot(132)
    pp.plot_posterior(mcmc)
    pl.axis(axis)
    pl.title('MCMC')

    # panel 3: SMC-marginalized posterior. FIX: this previously plotted
    # the MCMC model again, so the smc model was never shown.
    pl.subplot(133)
    pp.plot_posterior(smc)
    pl.axis(axis)
    pl.title('SMC')
    pl.draw()
# Example 7
"""
Basic demo showing how to instantiate a simple GP model, add data to it, and
optimize its hyperparameters.
"""

import os
import numpy as np
import matplotlib.pyplot as pl

import pygp
import pygp.plotting as pp

if __name__ == '__main__':
    # load the data: an npz archive of inputs 'X' and targets 'y' stored
    # next to this script.
    cdir = os.path.abspath(os.path.dirname(__file__))
    data = np.load(os.path.join(cdir, 'xy.npz'))
    X = data['X']
    y = data['y']

    # create the model, add data, and optimize its hyperparameters
    # (sn: noise, sf: signal variance, ell: length scale, mu: mean).
    gp = pygp.BasicGP(sn=.1, sf=1, ell=.1, mu=0)
    gp.add_data(X, y)
    pygp.optimize(gp)

    # plot the posterior.
    pl.figure(1)
    pl.clf()
    pp.plot_posterior(gp)
    pl.legend(loc=2)  # loc=2 puts the legend in the upper-left corner
    pl.draw()
    pl.show()