@classmethod
def setUpClass(cls):
    # Create a single-output optimisation toy model
    cls.model1 = toy.LogisticModel()
    cls.real_parameters1 = [0.015, 500]
    cls.times1 = np.linspace(0, 1000, 100)
    cls.values1 = cls.model1.simulate(cls.real_parameters1, cls.times1)

    # Add noise
    cls.noise1 = 50
    cls.values1 += np.random.normal(0, cls.noise1, cls.values1.shape)

    # Set up optimisation problem
    cls.problem1 = pints.SingleOutputProblem(
        cls.model1, cls.times1, cls.values1)

    # Instead of running the optimisation, choose fixed values to serve
    # as the results
    cls.found_parameters1 = np.array([0.0149, 494.6])

    # Create a multiple-output MCMC toy model
    cls.model2 = toy.LotkaVolterraModel()
    cls.real_parameters2 = cls.model2.suggested_parameters()

    # Downsample the times for speed
    cls.times2 = cls.model2.suggested_times()[::10]
    cls.values2 = cls.model2.simulate(cls.real_parameters2, cls.times2)

    # Add noise
    cls.noise2 = 0.05
    cls.values2 += np.random.normal(0, cls.noise2, cls.values2.shape)

    # Set up 2-output MCMC problem
    cls.problem2 = pints.MultiOutputProblem(
        cls.model2, cls.times2, cls.values2)

    # Instead of running MCMC, generate three chains which actually
    # contain independent samples near the true values (faster than MCMC)
    samples = np.zeros((3, 50, 4))
    for chain_idx in range(3):
        for parameter_idx in range(4):
            if parameter_idx == 0 or parameter_idx == 2:
                chain = np.random.normal(3.01, 0.2, 50)
            else:
                chain = np.random.normal(1.98, 0.2, 50)
            samples[chain_idx, :, parameter_idx] = chain
    cls.samples2 = samples
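# A hypothetical usage sketch (not part of the original suite): the fixtures
# above feed PINTS's plotting helpers, so a test body along these lines could
# exercise them. pints.plot.series and pints.plot.pairwise are the real PINTS
# API; the method name below is invented for illustration.
def test_sketch_optimisation_fixtures(self):
    import matplotlib
    matplotlib.use('Agg')  # assumption: headless backend wanted for CI
    import matplotlib.pyplot as plt
    import pints.plot

    # Predicted series for the fixed "optimisation result" over the data
    fig, axes = pints.plot.series(
        np.array([self.found_parameters1]), self.problem1)
    plt.close(fig)

    # Pairwise scatter for the first synthetic chain
    fig, axes = pints.plot.pairwise(self.samples2[0])
    plt.close(fig)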
def __init__(self, name):
    super(TestPlot, self).__init__(name)

    # Create toy model (single output)
    self.model = toy.LogisticModel()
    self.real_parameters = [0.015, 500]
    self.times = np.linspace(0, 1000, 100)  # small problem
    self.values = self.model.simulate(self.real_parameters, self.times)

    # Add noise
    self.noise = 10
    self.values += np.random.normal(0, self.noise, self.values.shape)
    self.real_parameters.append(self.noise)
    self.real_parameters = np.array(self.real_parameters)

    # Create an object with links to the model and time series
    self.problem = pints.SingleOutputProblem(
        self.model, self.times, self.values)

    # Create a uniform prior over both the parameters and the new noise
    # variable
    self.lower = [0.01, 400, self.noise * 0.1]
    self.upper = [0.02, 600, self.noise * 100]
    self.log_prior = pints.UniformLogPrior(self.lower, self.upper)

    # Create a log likelihood
    self.log_likelihood = pints.GaussianLogLikelihood(self.problem)

    # Create an un-normalised log-posterior (log-likelihood + log-prior)
    self.log_posterior = pints.LogPosterior(
        self.log_likelihood, self.log_prior)

    # Run MCMC
    self.x0 = [
        self.real_parameters * 1.1,
        self.real_parameters * 0.9,
        self.real_parameters * 1.05,
    ]
    mcmc = pints.MCMCController(self.log_posterior, 3, self.x0)
    mcmc.set_max_iterations(300)  # make it as small as possible
    mcmc.set_log_to_screen(False)
    self.samples = mcmc.run()

    # Create toy model (multi-output)
    self.model2 = toy.LotkaVolterraModel()
    self.real_parameters2 = self.model2.suggested_parameters()
    self.times2 = self.model2.suggested_times()[::10]  # downsample it
    self.values2 = self.model2.simulate(self.real_parameters2, self.times2)

    # Add noise
    self.noise2 = 0.05
    self.values2 += np.random.normal(0, self.noise2, self.values2.shape)

    # Create an object with links to the model and time series
    self.problem2 = pints.MultiOutputProblem(
        self.model2, self.times2, self.values2)

    # Create a uniform prior over both the parameters and the new noise
    # variable
    self.log_prior2 = pints.UniformLogPrior([1, 1, 1, 1], [6, 6, 6, 6])

    # Create a log likelihood
    self.log_likelihood2 = pints.GaussianKnownSigmaLogLikelihood(
        self.problem2, self.noise2)

    # Create an un-normalised log-posterior (log-likelihood + log-prior)
    self.log_posterior2 = pints.LogPosterior(
        self.log_likelihood2, self.log_prior2)

    # Run MCMC
    self.x02 = [
        self.real_parameters2 * 1.1,
        self.real_parameters2 * 0.9,
        self.real_parameters2 * 1.05,
    ]
    mcmc = pints.MCMCController(self.log_posterior2, 3, self.x02)
    mcmc.set_max_iterations(300)  # make it as small as possible
    mcmc.set_log_to_screen(False)
    self.samples2 = mcmc.run()

    # Create toy model (single-output, single-parameter)
    self.real_parameters3 = [0]
    self.log_posterior3 = toy.GaussianLogPDF(self.real_parameters3, [1])
    self.lower3 = [-3]
    self.upper3 = [3]

    # Run MCMC
    self.x03 = [[1], [-2], [3]]
    mcmc = pints.MCMCController(self.log_posterior3, 3, self.x03)
    mcmc.set_max_iterations(300)  # make it as small as possible
    mcmc.set_log_to_screen(False)
    self.samples3 = mcmc.run()
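# A hypothetical sketch (not in the original file) of how the chains built
# above might be handed to the plotting code under test, using the real
# pints.plot.trace and pints.plot.histogram functions; the method name is
# invented.
def test_sketch_mcmc_fixtures(self):
    import matplotlib.pyplot as plt
    import pints.plot

    # One trace + histogram panel per parameter, one line per chain
    fig, axes = pints.plot.trace(self.samples)
    plt.close(fig)

    # Marginal histograms for the single-parameter chains
    fig, axes = pints.plot.histogram(self.samples3)
    plt.close(fig)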
@classmethod
def setUpClass(cls):
    # Number of samples: make this as small as possible to speed up
    # testing
    n_samples = 300

    # Create toy model (single output)
    cls.model = toy.LogisticModel()
    cls.real_parameters = [0.015, 500]
    cls.times = np.linspace(0, 1000, 100)  # small problem
    cls.values = cls.model.simulate(cls.real_parameters, cls.times)

    # Add noise
    cls.noise = 10
    cls.values += np.random.normal(0, cls.noise, cls.values.shape)
    cls.real_parameters.append(cls.noise)
    cls.real_parameters = np.array(cls.real_parameters)

    # Create an object with links to the model and time series
    cls.problem = pints.SingleOutputProblem(
        cls.model, cls.times, cls.values)

    # Create a uniform prior over both the parameters and the new noise
    # variable
    cls.lower = [0.01, 400, cls.noise * 0.1]
    cls.upper = [0.02, 600, cls.noise * 100]
    cls.log_prior = pints.UniformLogPrior(cls.lower, cls.upper)

    # Create a log likelihood
    cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

    # Create an un-normalised log-posterior (log-likelihood + log-prior)
    cls.log_posterior = pints.LogPosterior(
        cls.log_likelihood, cls.log_prior)

    # Run MCMC
    cls.x0 = [
        cls.real_parameters * 1.1,
        cls.real_parameters * 0.9,
        cls.real_parameters * 1.05,
    ]
    mcmc = pints.MCMCController(cls.log_posterior, 3, cls.x0)
    mcmc.set_max_iterations(n_samples)
    mcmc.set_log_to_screen(False)
    cls.samples = mcmc.run()

    # Create toy model (multi-output)
    cls.model2 = toy.LotkaVolterraModel()
    cls.real_parameters2 = cls.model2.suggested_parameters()
    cls.times2 = cls.model2.suggested_times()[::10]  # downsample it
    cls.values2 = cls.model2.simulate(cls.real_parameters2, cls.times2)

    # Add noise
    cls.noise2 = 0.05
    cls.values2 += np.random.normal(0, cls.noise2, cls.values2.shape)

    # Create an object with links to the model and time series
    cls.problem2 = pints.MultiOutputProblem(
        cls.model2, cls.times2, np.log(cls.values2))

    # Create a uniform prior over both the parameters and the new noise
    # variable
    cls.log_prior2 = pints.UniformLogPrior([0, 0, 0, 0], [6, 6, 6, 6])

    # Create a log likelihood
    cls.log_likelihood2 = pints.GaussianKnownSigmaLogLikelihood(
        cls.problem2, cls.noise2)

    # Create an un-normalised log-posterior (log-likelihood + log-prior)
    cls.log_posterior2 = pints.LogPosterior(
        cls.log_likelihood2, cls.log_prior2)

    # Run MCMC
    cls.x02 = [
        cls.real_parameters2 * 1.1,
        cls.real_parameters2 * 0.9,
        cls.real_parameters2 * 1.05,
    ]
    mcmc = pints.MCMCController(cls.log_posterior2, 3, cls.x02)
    mcmc.set_max_iterations(n_samples)
    mcmc.set_log_to_screen(False)
    cls.samples2 = mcmc.run()

    # Create toy model (single-output, single-parameter)
    cls.real_parameters3 = [0]
    cls.log_posterior3 = toy.GaussianLogPDF(cls.real_parameters3, [1])
    cls.lower3 = [-3]
    cls.upper3 = [3]

    # Run MCMC
    cls.x03 = [[1], [-2], [3]]
    mcmc = pints.MCMCController(cls.log_posterior3, 3, cls.x03)
    mcmc.set_max_iterations(n_samples)
    mcmc.set_log_to_screen(False)
    cls.samples3 = mcmc.run()
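# Another hypothetical sketch (not in the original file): the class-level
# fixtures above could drive PINTS's chain diagnostics plots. The
# ref_parameters keyword is taken from the documented pints.plot.pairwise
# signature; the method name is invented.
def test_sketch_diagnostics(self):
    import matplotlib.pyplot as plt
    import pints.plot

    # Autocorrelation within the first multi-output chain
    fig, axes = pints.plot.autocorrelation(self.samples2[0])
    plt.close(fig)

    # Pairwise scatter with the true values marked
    fig, axes = pints.plot.pairwise(
        self.samples[0], ref_parameters=self.real_parameters)
    plt.close(fig)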