def main(options):
    """
    The main function.
    """
    model = NativeModel(initialize_native_model(options.model))
    proposal = pm.MALAProposal(dt=options.dt)
    db_filename = options.db_filename
    if db_filename is None:
        db_filename = os.path.splitext(os.path.abspath(options.model))[0] \
                      + '_mcmc.h5'
    mcmc = pm.MetropolisHastings(model,
                                 proposal=proposal,
                                 db_filename=db_filename)
    mcmc.sample(options.num_sample,
                num_thin=options.num_thin,
                num_burn=options.num_burn,
                verbose=True,
                stop_tuning_after=0)
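
# A minimal sketch of a command-line wrapper that would supply the
# ``options`` object ``main()`` expects (model, dt, db_filename,
# num_sample, num_thin, num_burn). The flag names and defaults below are
# illustrative assumptions, not part of pymcmc.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='Sample a native model with Metropolis-Hastings.')
    parser.add_argument('model', help='path to the native model file')
    parser.add_argument('--dt', type=float, default=1.,
                        help='MALA step size (assumed default)')
    parser.add_argument('--db-filename', default=None,
                        help='HDF5 output file (defaults to <model>_mcmc.h5)')
    parser.add_argument('--num-sample', type=int, default=100000)
    parser.add_argument('--num-thin', type=int, default=100)
    parser.add_argument('--num-burn', type=int, default=1000)
    main(parser.parse_args())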
""" import GPy import pymcmc as pm import numpy as np import matplotlib.pyplot as plt # Construct a GPy Model (anything really..., here we are using a regression # example) model = GPy.examples.regression.olympic_marathon_men(optimize=False, plot=False) # Look at the model before it is trained: print 'Model before training:' print str(model) # Pick a proposal for MCMC (here we pick a Metropolized Langevin Proposal proposal = pm.MALAProposal(dt=1.) # Construct a Metropolis Hastings object mcmc = pm.MetropolisHastings( model, # The model you want to train proposal=proposal, # The proposal you want to use db_filename='demo_1_db.h5') # The HDF5 database to write the results # Look at the model now: We have automatically added uninformative priors # by looking at the constraints of the parameters print 'Model after adding priors:' print str(model) # Now we can sample it: mcmc.sample( 100000, # Number of MCMC steps num_thin=100, # Number of steps to skip num_burn=1000, # Number of steps to burn initially verbose=True) # Be verbose or not
x = np.linspace(-10, 15, 100)[:, None]
y = np.sin(x) / x - 0.1 * x + 0.1 * x ** 3
plt.plot(x, y, 'r', linewidth=2)
plt.legend(['Mean of GP', '5% percentile of GP', '95% percentile of GP',
            'Observations', 'Real Underlying Function'], loc='best')
plt.title('Model trained by maximizing the likelihood')
plt.show()
a = raw_input('press enter to continue...')

# Or you might want to do it using MCMC:
new_mean = pm.MeanFunction(input_dim, poly_basis, ARD=True)
new_kernel = GPy.kern.RBF(input_dim)
new_model = GPy.models.GPRegression(X, Y, kernel=new_mean + new_kernel)
proposal = pm.MALAProposal(dt=0.1)
mcmc = pm.MetropolisHastings(new_model, proposal=proposal)
mcmc.sample(50000, num_thin=100, num_burn=1000, verbose=True)
print 'Model trained with MCMC:'
print str(new_model)
print new_model.add.mean.variance
# Plot everything for this too:
new_model.plot(plot_limits=(-10., 15.))
# Let us also plot the full function
plt.plot(x, y, 'r', linewidth=2)
plt.legend(['Mean of GP', '5% percentile of GP', '95% percentile of GP',
            'Observations', 'Real Underlying Function'], loc='best')
plt.title('Model trained by MCMC')
plt.show()
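
# For reference, ``input_dim``, ``X``, ``Y`` and ``poly_basis`` are defined
# earlier in the full demo. A minimal sketch of the kind of object
# ``poly_basis`` can be: a callable mapping an n x 1 input array to an
# n x m design matrix. The ``num_output`` attribute is an assumption about
# what pm.MeanFunction inspects, not a documented contract.
class PolynomialBasis(object):
    """A simple polynomial basis: [1, x, x^2, ..., x^degree]."""
    def __init__(self, degree):
        self.degree = degree
        self.num_output = degree + 1   # number of basis functions
    def __call__(self, X):
        return np.hstack([X ** i for i in range(self.degree + 1)])

poly_basis = PolynomialBasis(3)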
""" Unit tests for the GPyModel class. """ import sys import os sys.path.insert(0, os.path.abspath(os.path.split(__file__)[0])) import GPy import pymcmc if __name__ == '__main__': model = GPy.examples.regression.olympic_marathon_men(optimize=True, plot=False) mcmc_model = pymcmc.GPyModel(model, compute_grad=True) print str(mcmc_model) print str(model) quit() print mcmc_model.log_likelihood print mcmc_model.log_prior print mcmc_model.num_params print mcmc_model.params mcmc_model.params = mcmc_model.params print mcmc_model.param_names print mcmc_model.grad_log_likelihood print mcmc_model.grad_log_prior proposal = pymcmc.MALAProposal() print str(mcmc_model) new_state, log_p = proposal.propose(mcmc_model) print new_state print log_p