import os

import pymcmc as pm

def main(options):
    """
    Sample the model specified by the command-line options using
    Metropolis-Hastings with a MALA proposal, storing the chain in
    an HDF5 database.
    """
    # NativeModel and initialize_native_model come from the enclosing
    # script (not shown here).
    model = NativeModel(initialize_native_model(options.model))
    proposal = pm.MALAProposal(dt=options.dt)
    db_filename = options.db_filename
    if db_filename is None:
        db_filename = os.path.splitext(os.path.abspath(
            options.model))[0] + '_mcmc.h5'
    mcmc = pm.MetropolisHastings(model,
                                 proposal=proposal,
                                 db_filename=db_filename)
    mcmc.sample(options.num_sample,
                num_thin=options.num_thin,
                num_burn=options.num_burn,
                verbose=True,
                stop_tuning_after=0)
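# NOTE: a minimal sketch of how `main` might be driven from the command line.
# The option names below are assumptions inferred from the attributes used
# above (model, dt, db_filename, num_sample, num_thin, num_burn); the original
# script's argument parser is not shown.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='Sample a native model with MALA-based Metropolis-Hastings.')
    parser.add_argument('--model', required=True,
                        help='path to the native model file')
    parser.add_argument('--dt', type=float, default=1e-3,
                        help='MALA proposal step size')
    parser.add_argument('--db-filename', default=None,
                        help='HDF5 database for the chain (default: <model>_mcmc.h5)')
    parser.add_argument('--num-sample', type=int, default=100000,
                        help='number of MCMC steps')
    parser.add_argument('--num-thin', type=int, default=100,
                        help='keep one sample every this many steps')
    parser.add_argument('--num-burn', type=int, default=1000,
                        help='number of initial steps to discard')
    main(parser.parse_args())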
Example #2
import GPy
import pymcmc as pm
import numpy as np
import matplotlib.pyplot as plt

# Construct a GPy model (any GPy model will do; here we use a regression
# example)
model = GPy.examples.regression.olympic_marathon_men(optimize=False,
                                                     plot=False)
# Look at the model before it is trained:
print 'Model before training:'
print str(model)
# Pick a proposal for MCMC (here we pick a Metropolized Langevin proposal)
proposal = pm.MALAProposal(dt=1.)
# Construct a Metropolis Hastings object
mcmc = pm.MetropolisHastings(
    model,  # The model you want to train
    proposal=proposal,  # The proposal you want to use
    db_filename='demo_1_db.h5')  # The HDF5 database to write the results to
# Look at the model now: We have automatically added uninformative priors
# by looking at the constraints of the parameters
print 'Model after adding priors:'
print str(model)
# Now we can sample it:
mcmc.sample(
    100000,  # Number of MCMC steps
    num_thin=100,  # Keep one sample every 100 steps (thinning)
    num_burn=1000,  # Number of initial steps to discard (burn-in)
    verbose=True)  # Print progress while sampling
# Here is the model at the last MCMC step:
print 'Model after training:'
print str(model)
# Let's plot the results:
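# NOTE: the demo continues with plotting; a minimal sketch, assuming GPy's
# standard model.plot() interface is available for this regression example:
model.plot()
plt.title('Olympic marathon model after MCMC')
plt.show()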
Example #3
"""
Unit tests for the GPyModel class.
"""

import sys
import os

sys.path.insert(0, os.path.abspath(os.path.split(__file__)[0]))
import GPy
import pymcmc as pm
import numpy as np
import matplotlib.pyplot as plt

if __name__ == '__main__':
    model = GPy.examples.regression.olympic_marathon_men(optimize=False,
                                                         plot=False)
    noise_prior = pm.UninformativeScalePrior()
    variance_prior = pm.UninformativeScalePrior()
    length_scale_prior = pm.UninformativeScalePrior()
    model.set_prior('rbf_variance', variance_prior)
    model.set_prior('noise_variance', noise_prior)
    model.set_prior('rbf_lengthscale', length_scale_prior)
    #print str(model)
    #model.optimize()
    print str(model)
    mcmc_model = pm.GPyModel(model, compute_grad=True)
    mcmc = pm.MetropolisHastings(mcmc_model, db_filename='test_db.h5')
    mcmc.sample(1000)
#    model_state, prop_state = mcmc.db.get_states(-1, -1)
#    mcmc.sample(1000, num_thin=100, init_model_state=model_state,
#                      init_proposal_state=prop_state)
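    # NOTE: a minimal sketch of inspecting the HDF5 chain database written
    # above. The group/dataset layout pymcmc uses inside 'test_db.h5' is not
    # shown in this snippet, so we simply print whatever the file contains.
    import h5py

    def _show(name, obj):
        print(name + (': ' + str(obj.shape) if hasattr(obj, 'shape') else ''))

    with h5py.File('test_db.h5', 'r') as db:
        db.visititems(_show)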
Example #4
# x, X, Y, input_dim and poly_basis are defined earlier in the original demo
# (not shown here).
y = np.sin(x) / x - 0.1 * x + 0.1 * x**3
plt.plot(x, y, 'r', linewidth=2)
plt.legend(['Mean of GP', '5% percentile of GP', '95% percentile of GP',
            'Observations', 'Real Underlying Function'],
           loc='best')
plt.title('Model trained by maximizing the likelihood')
plt.show()
a = raw_input('press enter to continue...')
# Or you might want to do it using MCMC:
new_mean = pm.MeanFunction(input_dim, poly_basis, ARD=True)
new_kernel = GPy.kern.RBF(input_dim)
new_model = GPy.models.GPRegression(X, Y, kernel=new_mean + new_kernel)
proposal = pm.MALAProposal(dt=0.1)
mcmc = pm.MetropolisHastings(new_model, proposal=proposal)
mcmc.sample(50000, num_thin=100, num_burn=1000, verbose=True)
print 'Model trained with MCMC:'
print str(new_model)
print new_model.add.mean.variance
# Plot everything for this too:
new_model.plot(plot_limits=(-10., 15.))
# Let us also plot the full function
plt.plot(x, y, 'r', linewidth=2)
plt.legend(['Mean of GP', '5% percentile of GP', '95% percentile of GP',
            'Observations', 'Real Underlying Function'],
           loc='best')
plt.title('Model trained by MCMC')
plt.show()
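# NOTE: a sketch of drawing the predictive curves from the legend above
# explicitly, assuming this GPy version provides predict_quantiles() on GP
# regression models; Xs is a test grid chosen to match the plot limits used
# above.
Xs = np.linspace(-10., 15., 200)[:, None]
mu, _ = new_model.predict(Xs)
lower, upper = new_model.predict_quantiles(Xs, quantiles=(5., 95.))
plt.plot(Xs[:, 0], mu[:, 0], 'b', label='Mean of GP')
plt.plot(Xs[:, 0], lower[:, 0], 'b--', label='5% percentile of GP')
plt.plot(Xs[:, 0], upper[:, 0], 'b--', label='95% percentile of GP')
plt.plot(x, y, 'r', linewidth=2, label='Real Underlying Function')
plt.legend(loc='best')
plt.show()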