def setUpClass(cls):
    """ Prepare a shared logistic-model inference problem for all tests. """
    # Toy logistic model with known ground-truth parameters
    cls.model = toy.LogisticModel()
    cls.real_parameters = [0.015, 500]
    cls.times = np.linspace(0, 1000, 1000)
    cls.values = cls.model.simulate(cls.real_parameters, cls.times)

    # Corrupt the simulated data with gaussian noise; the noise level
    # becomes a third parameter to be inferred
    cls.noise = 10
    cls.values += np.random.normal(0, cls.noise, cls.values.shape)
    cls.real_parameters.append(cls.noise)
    cls.real_parameters = np.array(cls.real_parameters)

    # Wrap model and data in a problem object
    cls.problem = pints.SingleOutputProblem(
        cls.model, cls.times, cls.values)

    # Uniform prior over the model parameters and the noise level
    lower = [0.01, 400, cls.noise * 0.1]
    upper = [0.02, 600, cls.noise * 100]
    cls.log_prior = pints.UniformLogPrior(lower, upper)

    # Gaussian log-likelihood with unknown sigma
    cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

    # Un-normalised log-posterior (log-likelihood + log-prior)
    cls.log_posterior = pints.LogPosterior(
        cls.log_likelihood, cls.log_prior)
def __init__(self, name):
    super(TestCMAES, self).__init__(name)

    # Toy logistic model and simulated (noise-free) data
    self.model = toy.LogisticModel()
    self.real_parameters = [0.015, 500]
    self.times = np.linspace(0, 1000, 1000)
    self.values = self.model.simulate(self.real_parameters, self.times)

    # Problem linking model and time series
    self.problem = pints.SingleSeriesProblem(
        self.model, self.times, self.values)

    # Error measure to minimise
    self.score = pints.SumOfSquaresError(self.problem)

    # Rectangular boundaries on the search space
    self.boundaries = pints.Boundaries([0, 400], [0.03, 600])

    # Starting point, close to the true parameters
    self.x0 = 0.014, 499

    # Guessed spread around the starting point (in both directions)
    self.sigma0 = 0.01

    # Score value below which a run counts as converged
    self.cutoff = 1e-9

    # Number of attempts before the test is marked as failed
    self.max_tries = 3
def test_scaled_log_likelihood(self):
    import pints
    import pints.toy as toy
    import numpy as np

    # Noise-free logistic time series
    model = toy.LogisticModel()
    real_parameters = [0.015, 500]
    test_parameters = [0.014, 501]
    sigma = 0.001
    times = np.linspace(0, 1000, 100)
    values = model.simulate(real_parameters, times)

    # Problem linking model and time series
    problem = pints.SingleSeriesProblem(model, times, values)

    # Plain likelihood and its scaled (per data point) counterpart
    unscaled = pints.KnownNoiseLogLikelihood(problem, sigma)
    scaled = pints.ScaledLogLikelihood(unscaled)

    raw = unscaled(test_parameters)
    per_point = scaled(test_parameters)

    # Known reference value for the unscaled evaluation
    self.assertEqual(int(raw), -20959169232)
    # Scaling divides the log-likelihood by the number of data points
    self.assertAlmostEqual(per_point * len(times), raw)
def __init__(self, name):
    super(TestAdaptiveCovarianceMCMC, self).__init__(name)

    # Toy logistic model and simulated data
    self.model = toy.LogisticModel()
    self.real_parameters = [0.015, 500]
    self.times = np.linspace(0, 1000, 1000)
    self.values = self.model.simulate(self.real_parameters, self.times)

    # Add gaussian noise; the noise level becomes a third parameter
    sigma = 10
    self.values += np.random.normal(0, sigma, self.values.shape)
    self.real_parameters.append(sigma)

    # Problem linking model and time series
    self.problem = pints.SingleSeriesProblem(
        self.model, self.times, self.values)

    # Uniform prior over the model parameters and the noise level
    self.prior = pints.UniformPrior(
        [0.01, 400, sigma * 0.1],
        [0.02, 600, sigma * 100]
    )

    # Un-normalised log-posterior (prior * likelihood).
    # NOTE(review): despite its name, this attribute holds the posterior,
    # not just the likelihood — consider renaming in a follow-up.
    self.log_likelihood = pints.LogPosterior(
        self.prior, pints.UnknownNoiseLogLikelihood(self.problem))

    # Initial point (perturbed truth) and per-dimension scale guesses
    self.x0 = np.array(self.real_parameters) * 1.1
    self.sigma0 = [0.005, 100, 0.5 * sigma]
def setUpClass(cls):
    """ Set up a logistic-model posterior and draw short MCMC chains. """
    # Toy logistic model with known ground truth
    cls.model = toy.LogisticModel()
    cls.real_parameters = [0.015, 500]
    cls.times = np.linspace(0, 1000, 1000)
    cls.values = cls.model.simulate(cls.real_parameters, cls.times)

    # Corrupt data with gaussian noise; sigma joins the parameter vector
    cls.noise = 10
    cls.values += np.random.normal(0, cls.noise, cls.values.shape)
    cls.real_parameters.append(cls.noise)
    cls.real_parameters = np.array(cls.real_parameters)

    # Problem definition
    cls.problem = pints.SingleOutputProblem(
        cls.model, cls.times, cls.values)

    # Uniform prior over the two model parameters plus the noise level
    cls.log_prior = pints.UniformLogPrior(
        [0.01, 400, cls.noise * 0.1],
        [0.02, 600, cls.noise * 100])

    # Likelihood and un-normalised log-posterior
    cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)
    cls.log_posterior = pints.LogPosterior(
        cls.log_likelihood, cls.log_prior)

    # Three perturbed starting points for the sampler
    xs = [
        cls.real_parameters * 1.1,
        cls.real_parameters * 0.9,
        cls.real_parameters * 1.15,
    ]
    mcmc = pints.MCMCController(
        cls.log_posterior, 3, xs, method=pints.HaarioBardenetACMC)
    mcmc.set_max_iterations(200)
    mcmc.set_initial_phase_iterations(50)
    mcmc.set_log_to_screen(False)

    # Time the sampling run and keep the chains for the tests
    start = time.time()
    cls.chains = mcmc.run()
    end = time.time()
    cls.time = end - start
def test_known_unknown_log_likelihood(self):
    import pints
    import pints.toy as toy
    import numpy as np

    # Noise-free logistic data
    model = toy.LogisticModel()
    parameters = [0.015, 500]
    sigma = 0.1
    times = np.linspace(0, 1000, 100)
    values = model.simulate(parameters, times)
    problem = pints.SingleSeriesProblem(model, times, values)

    # Fixing sigma up-front or treating it as a parameter should agree
    # when the latter is evaluated at that same sigma
    fixed = pints.KnownNoiseLogLikelihood(problem, sigma)
    free = pints.UnknownNoiseLogLikelihood(problem)
    self.assertAlmostEqual(fixed(parameters), free(parameters + [sigma]))
def setUpClass(cls):
    # --- Single-output optimisation fixture (logistic model) ---
    cls.model1 = toy.LogisticModel()
    cls.real_parameters1 = [0.015, 500]
    cls.times1 = np.linspace(0, 1000, 100)
    cls.values1 = cls.model1.simulate(cls.real_parameters1, cls.times1)

    # Add gaussian noise to the data
    cls.noise1 = 50
    cls.values1 += np.random.normal(0, cls.noise1, cls.values1.shape)

    # Optimisation problem
    cls.problem1 = pints.SingleOutputProblem(
        cls.model1, cls.times1, cls.values1)

    # Fixed stand-in result instead of actually running an optimisation
    cls.found_parameters1 = np.array([0.0149, 494.6])

    # --- Multi-output MCMC fixture (Lotka-Volterra model) ---
    cls.model2 = toy.LotkaVolterraModel()
    cls.real_parameters2 = cls.model2.suggested_parameters()
    # Downsample the times for speed
    cls.times2 = cls.model2.suggested_times()[::10]
    cls.values2 = cls.model2.simulate(cls.real_parameters2, cls.times2)

    # Add gaussian noise to the data
    cls.noise2 = 0.05
    cls.values2 += np.random.normal(0, cls.noise2, cls.values2.shape)

    # 2-output MCMC problem
    cls.problem2 = pints.MultiOutputProblem(
        cls.model2, cls.times2, cls.values2)

    # Instead of running MCMC, fabricate three chains of independent
    # draws centred near the true values (much faster than sampling)
    samples = np.zeros((3, 50, 4))
    for c in range(3):
        for p in range(4):
            centre = 3.01 if p in (0, 2) else 1.98
            samples[c, :, p] = np.random.normal(centre, .2, 50)
    cls.samples2 = samples
def __init__(self, name):
    super(TestPlot, self).__init__(name)

    # --- Toy model (single output) ---
    self.model = toy.LogisticModel()
    self.real_parameters = [0.015, 500]
    self.times = np.linspace(0, 1000, 100)  # small problem
    self.values = self.model.simulate(self.real_parameters, self.times)

    # Add gaussian noise; sigma becomes a third inferred parameter
    self.noise = 10
    self.values += np.random.normal(0, self.noise, self.values.shape)
    self.real_parameters.append(self.noise)
    self.real_parameters = np.array(self.real_parameters)

    # Problem definition
    self.problem = pints.SingleOutputProblem(
        self.model, self.times, self.values)

    # Uniform prior over the parameters and the noise level
    self.lower = [0.01, 400, self.noise * 0.1]
    self.upper = [0.02, 600, self.noise * 100]
    self.log_prior = pints.UniformLogPrior(self.lower, self.upper)

    # Likelihood and un-normalised log-posterior
    self.log_likelihood = pints.GaussianLogLikelihood(self.problem)
    self.log_posterior = pints.LogPosterior(
        self.log_likelihood, self.log_prior)

    # Short MCMC run from three perturbed starting points
    self.x0 = [
        self.real_parameters * 1.1,
        self.real_parameters * 0.9,
        self.real_parameters * 1.05
    ]
    mcmc = pints.MCMCController(self.log_posterior, 3, self.x0)
    mcmc.set_max_iterations(300)  # make it as small as possible
    mcmc.set_log_to_screen(False)
    self.samples = mcmc.run()

    # --- Toy model (multi-output) ---
    self.model2 = toy.LotkaVolterraModel()
    self.real_parameters2 = self.model2.suggested_parameters()
    self.times2 = self.model2.suggested_times()[::10]  # down sample it
    self.values2 = self.model2.simulate(
        self.real_parameters2, self.times2)

    # Add gaussian noise
    self.noise2 = 0.05
    self.values2 += np.random.normal(0, self.noise2, self.values2.shape)

    # Problem definition
    self.problem2 = pints.MultiOutputProblem(
        self.model2, self.times2, self.values2)

    # Uniform prior, known-sigma likelihood, un-normalised posterior
    self.log_prior2 = pints.UniformLogPrior([1, 1, 1, 1], [6, 6, 6, 6])
    self.log_likelihood2 = pints.GaussianKnownSigmaLogLikelihood(
        self.problem2, self.noise2)
    self.log_posterior2 = pints.LogPosterior(
        self.log_likelihood2, self.log_prior2)

    # Short MCMC run
    self.x02 = [
        self.real_parameters2 * 1.1,
        self.real_parameters2 * 0.9,
        self.real_parameters2 * 1.05
    ]
    mcmc = pints.MCMCController(self.log_posterior2, 3, self.x02)
    mcmc.set_max_iterations(300)  # make it as small as possible
    mcmc.set_log_to_screen(False)
    self.samples2 = mcmc.run()

    # --- Toy model (single-output, single-parameter) ---
    self.real_parameters3 = [0]
    self.log_posterior3 = toy.GaussianLogPDF(self.real_parameters3, [1])
    self.lower3 = [-3]
    self.upper3 = [3]

    # Short MCMC run
    self.x03 = [[1], [-2], [3]]
    mcmc = pints.MCMCController(self.log_posterior3, 3, self.x03)
    mcmc.set_max_iterations(300)  # make it as small as possible
    mcmc.set_log_to_screen(False)
    self.samples3 = mcmc.run()
def __init__(self):
    # Hold a toy logistic model instance for use by the methods below.
    self.model = toy.LogisticModel()
chains = mcmc.run() print('Done!') s = sample_size//4+1 #HMC: s = 1 b = False while s < sample_size: chains_cut = chains[:,sample_size//4:s+1] rhat = pints.rhat(chains_cut) s+=1 if rhat[0] < 1.05: b = True break print(s) return chains[0][s:][:, 0] model = toy.LogisticModel() real_parameters = [0.016, 500] chain1 = run(model, real_parameters, 10,pints.UniformLogPrior([0.015, 400],[0.017, 600])) chain2 = run(model, real_parameters, 1,pints.UniformLogPrior([0.015, 400],[0.017, 600])) current_date_and_time_string = datetime.now() bins=np.histogram(np.hstack((chain1,chain2)), bins=40)[1] plt.figure(figsize=(12,4),dpi=500) plt.xlabel('r') plt.hist(chain1,bins, alpha=0.5, label=r'posterior samples generated with correct likelihood $N(f(t),10^2)$') plt.hist(chain2,bins, alpha=0.5, label=r'posterior samples generated with incorrect likelihood $N(f(t),1^2)$') plt.title(r'Compare posterior samples generated with correct likelihood $N(f(t),10^2)$ and with incorrect likelihood $N(f(t),1^2)$ (Time series of length 50)') plt.legend(loc='upper right') plt.savefig('compare'+str(current_date_and_time_string)+'.png') '''model = Model()
def setUpClass(cls):
    """ Prepare single-output, multi-output and single-parameter
    sampling fixtures, and run a short MCMC chain on each. """
    # Number of samples: Make this as small as possible to speed up
    # testing
    n_samples = 300

    # --- Toy model (single output) ---
    cls.model = toy.LogisticModel()
    cls.real_parameters = [0.015, 500]
    cls.times = np.linspace(0, 1000, 100)  # small problem
    cls.values = cls.model.simulate(cls.real_parameters, cls.times)

    # Add gaussian noise; sigma becomes a third inferred parameter
    cls.noise = 10
    cls.values += np.random.normal(0, cls.noise, cls.values.shape)
    cls.real_parameters.append(cls.noise)
    cls.real_parameters = np.array(cls.real_parameters)

    # Problem definition
    cls.problem = pints.SingleOutputProblem(
        cls.model, cls.times, cls.values)

    # Uniform prior over the parameters and the noise level
    cls.lower = [0.01, 400, cls.noise * 0.1]
    cls.upper = [0.02, 600, cls.noise * 100]
    cls.log_prior = pints.UniformLogPrior(cls.lower, cls.upper)

    # Likelihood and un-normalised log-posterior
    cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)
    cls.log_posterior = pints.LogPosterior(
        cls.log_likelihood, cls.log_prior)

    # Short MCMC run from three perturbed starting points
    cls.x0 = [
        cls.real_parameters * 1.1,
        cls.real_parameters * 0.9,
        cls.real_parameters * 1.05
    ]
    mcmc = pints.MCMCController(cls.log_posterior, 3, cls.x0)
    mcmc.set_max_iterations(n_samples)
    mcmc.set_log_to_screen(False)
    cls.samples = mcmc.run()

    # --- Toy model (multi-output) ---
    cls.model2 = toy.LotkaVolterraModel()
    cls.real_parameters2 = cls.model2.suggested_parameters()
    cls.times2 = cls.model2.suggested_times()[::10]  # downsample it
    cls.values2 = cls.model2.simulate(cls.real_parameters2, cls.times2)

    # Add gaussian noise
    cls.noise2 = 0.05
    cls.values2 += np.random.normal(0, cls.noise2, cls.values2.shape)

    # BUG FIX: pass the noisy data through unchanged. Previously
    # np.log() was applied to cls.values2 here, which disagrees with
    # the untransformed model output the likelihood compares against,
    # and yields NaNs wherever the additive noise pushes a value below
    # zero. The equivalent single-output setup above (and the matching
    # fixture elsewhere in this file) uses the raw values.
    cls.problem2 = pints.MultiOutputProblem(
        cls.model2, cls.times2, cls.values2)

    # Uniform prior, known-sigma likelihood, un-normalised posterior
    cls.log_prior2 = pints.UniformLogPrior([0, 0, 0, 0], [6, 6, 6, 6])
    cls.log_likelihood2 = pints.GaussianKnownSigmaLogLikelihood(
        cls.problem2, cls.noise2)
    cls.log_posterior2 = pints.LogPosterior(
        cls.log_likelihood2, cls.log_prior2)

    # Short MCMC run
    cls.x02 = [
        cls.real_parameters2 * 1.1,
        cls.real_parameters2 * 0.9,
        cls.real_parameters2 * 1.05
    ]
    mcmc = pints.MCMCController(cls.log_posterior2, 3, cls.x02)
    mcmc.set_max_iterations(n_samples)
    mcmc.set_log_to_screen(False)
    cls.samples2 = mcmc.run()

    # --- Toy model (single-output, single-parameter) ---
    cls.real_parameters3 = [0]
    cls.log_posterior3 = toy.GaussianLogPDF(cls.real_parameters3, [1])
    cls.lower3 = [-3]
    cls.upper3 = [3]

    # Short MCMC run
    cls.x03 = [[1], [-2], [3]]
    mcmc = pints.MCMCController(cls.log_posterior3, 3, cls.x03)
    mcmc.set_max_iterations(n_samples)
    mcmc.set_log_to_screen(False)
    cls.samples3 = mcmc.run()