def test_run_optimize(self):
    gp_noise_process = flexnoise.GPNoiseProcess(
        self.problem, self.kernel, [2.0], self.gp_times)
    y = gp_noise_process.run_optimize(num_restarts=3, iprint=False)
    self.assertEqual(
        len(y), self.problem.n_parameters() + 2 * len(self.gp_times))

def test_run_mcmc(self):
    gp_noise_process = flexnoise.GPNoiseProcess(
        self.problem, self.kernel, [2.0], self.gp_times)
    iters = 300
    chain = gp_noise_process.run_mcmc(iters, 3, iprint=False)
    self.assertEqual(
        chain.shape,
        (iters // 2, self.problem.n_parameters() + 2 * len(self.gp_times)))

def test_set_gp_beta(self):
    gp_noise_process = flexnoise.GPNoiseProcess(
        self.problem, self.kernel, [2.0], self.gp_times)
    dt = self.times[1] - self.times[0]
    limit = 0.01
    beta = gp_noise_process.set_gp_beta(100, dt, limit=limit)
    expected = 100 * dt / math.sqrt(-2 * math.log(limit))
    self.assertAlmostEqual(beta, expected)

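# Note on the `expected` value above (a derivation sketch, not taken from the
# flexnoise source): if set_gp_beta chooses the length-scale beta of a
# Gaussian-shaped correlation decay, exp(-d ** 2 / (2 * beta ** 2)), so that
# the correlation has fallen to `limit` at a distance of num_points * dt, then
#
#     limit = exp(-(num_points * dt) ** 2 / (2 * beta ** 2))
#
# which rearranges to
#
#     beta = num_points * dt / sqrt(-2 * log(limit))
#
# matching the formula checked by the assertion.
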
def test_set_gp_hyperparameters(self):
    gp_noise_process = flexnoise.GPNoiseProcess(
        self.problem, self.kernel, [2.0], self.gp_times)
    gp_noise_process.set_gp_hyperparameters(mu=1.0, alpha=10.0, beta=2.0)
    self.assertEqual(gp_noise_process.mu, 1.0)
    self.assertEqual(gp_noise_process.alpha, 10.0)
    self.assertEqual(gp_noise_process.beta, 2.0)

def run_figureS2(num_runs=3, output_dir='./'):
    """Run the Gaussian process on block noise data.

    This function runs the simulations and saves the results to pickle.
    """
    random.seed(12345)
    np.random.seed(12345)

    all_fits = []
    for run in range(num_runs):
        # Make a synthetic time series
        times, values, data = generate_time_series(
            model='logistic', noise='blocks', n_times=625)

        # Make Pints model and problem
        model = pints.toy.LogisticModel()
        problem = pints.SingleOutputProblem(model, times, data)

        # Initial conditions for model parameters
        model_starting_point = [0.08, 50]

        # Infer the nonstationary kernel fit
        # Run an optimization assuming IID noise
        log_prior = pints.UniformLogPrior([0] * 3, [1e6] * 3)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        opt = pints.OptimisationController(
            log_posterior, model_starting_point + [2])
        xbest, fbest = opt.run()

        # Run the GP fit, using the best fit for initialization
        gp_times = times[::25]
        kernel = flexnoise.kernels.GPLaplacianKernel
        gnp = flexnoise.GPNoiseProcess(problem, kernel, xbest[:2], gp_times)
        gnp.set_gp_hyperparameters(mu=0.0, alpha=1.0, beta_num_points=200)
        x = gnp.run_optimize(num_restarts=100, parallel=True, maxiter=150)
        all_fits.append(x)

    # Save all results to pickle
    kernel = kernel(None, gp_times)
    results = [all_fits, times, data, values, model, problem, kernel]
    fname = os.path.join(output_dir, 'figS2_data.pkl')
    with open(fname, 'wb') as f:
        pickle.dump(results, f)

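# A small convenience sketch (not part of the original script; the name
# load_figureS2_results is hypothetical): reload the pickle written by
# run_figureS2 above. The tuple order mirrors the `results` list saved there.
def load_figureS2_results(output_dir='./'):
    fname = os.path.join(output_dir, 'figS2_data.pkl')
    with open(fname, 'rb') as f:
        all_fits, times, data, values, model, problem, kernel = pickle.load(f)
    return all_fits, times, data, values, model, problem, kernel
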
def test_construct(self):
    flexnoise.GPNoiseProcess(
        self.problem, self.kernel, [2.0], self.gp_times)

def run_figure2(num_mcmc_samples=20000,
                num_mcmc_chains=3,
                num_runs=8,
                output_dir='./'):
    """Run the Gaussian process on multiplicative data.

    This function runs the simulations and saves the results to pickle.
    """
    random.seed(123)
    np.random.seed(123)

    all_fits = []
    iid_runs = []
    sigmas = []
    mult_runs = []
    gp_runs = []
    for run in range(num_runs):
        # Make a synthetic time series
        times, values, data = generate_time_series(
            model='logistic', noise='multiplicative', n_times=251)

        # Make Pints model and problem
        model = pints.toy.LogisticModel()
        problem = pints.SingleOutputProblem(model, times, data)

        # Initial conditions for model parameters
        model_starting_point = [0.08, 50]

        # Run MCMC for IID posterior
        likelihood = pints.GaussianLogLikelihood
        x0 = model_starting_point + [2]
        posterior_iid = run_pints(problem, likelihood, x0, num_mcmc_samples)
        iid_runs.append(posterior_iid)

        # Save standard deviations from IID runs
        sigma = np.median(posterior_iid[:, 2])
        sigmas.append(sigma)

        # Run MCMC for multiplicative noise posterior
        likelihood = pints.MultiplicativeGaussianLogLikelihood
        x0 = model_starting_point + [0.5, 0.5]
        posterior_mult = run_pints(problem, likelihood, x0, num_mcmc_samples)
        mult_runs.append(posterior_mult)

        # Infer the nonstationary kernel fit
        # Run an optimization assuming IID noise
        log_prior = pints.UniformLogPrior([0] * 3, [1e6] * 3)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        opt = pints.OptimisationController(
            log_posterior, model_starting_point + [2])
        xbest, fbest = opt.run()

        # Run the GP fit, using the best fit for initialization
        gp_times = times[::10]
        kernel = flexnoise.kernels.GPLaplacianKernel
        gnp = flexnoise.GPNoiseProcess(problem, kernel, xbest[:2], gp_times)
        gnp.set_gp_hyperparameters(mu=0.0, alpha=1.0, beta_num_points=200)
        x = gnp.run_optimize(num_restarts=100, parallel=True, maxiter=150)
        all_fits.append(x)

        # Run MCMC for multivariate normal noise
        kernel = flexnoise.kernels.GPLaplacianKernel(None, gp_times)
        kernel.parameters = x[2:]
        cov = kernel.get_matrix(times)
        likelihood = flexnoise.CovarianceLogLikelihood
        x0 = model_starting_point
        posterior_gp = run_pints(
            problem, likelihood, x0, num_mcmc_samples, likelihood_args=[cov])
        gp_runs.append(posterior_gp)

    # Save all results to pickle
    results = [
        iid_runs, mult_runs, all_fits, gp_runs, times, data, values, model,
        problem, kernel, sigmas
    ]
    fname = os.path.join(output_dir, 'fig2_data.pkl')
    with open(fname, 'wb') as f:
        pickle.dump(results, f)
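
# run_figure2 calls a run_pints helper that is not defined in this section.
# The sketch below is a hypothetical reconstruction (its name, prior bounds,
# and half-chain burn-in are assumptions, not the authors' code), showing one
# plausible way to build the posterior and run Pints MCMC with extra
# likelihood arguments such as a fixed covariance matrix.
def run_pints_sketch(problem, likelihood_class, x0, num_samples,
                     likelihood_args=None, num_chains=3):
    likelihood_args = likelihood_args or []
    log_likelihood = likelihood_class(problem, *likelihood_args)
    n_params = log_likelihood.n_parameters()
    log_prior = pints.UniformLogPrior([0] * n_params, [1e6] * n_params)
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)
    mcmc = pints.MCMCController(log_posterior, num_chains, [x0] * num_chains)
    mcmc.set_max_iterations(num_samples)
    chains = mcmc.run()
    # Discard the first half of each chain as burn-in and stack the remainder
    return np.vstack([chain[num_samples // 2:] for chain in chains])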