def test_user_provided_invalid_kernel():
    """
    A kernel argument that is neither a list of names nor a kernel
    object (here a plain int) should raise ValueError at construction.
    """
    params, test = pop_elements(get_uniform_params(2, 6), 10, 12)
    ensemble = get_1d_two_param_cube(params)
    with pytest.raises(ValueError):
        # No need to bind the result - construction itself should raise.
        gp_model(params, ensemble, kernel=5)
def test_sample():
    """sample() returns an array of shape (n_samples, n_params)."""
    train_X = get_uniform_params(2)
    train_Y = get_1d_two_param_cube(train_X)
    model = gp_model(train_X, train_Y)
    model.train()

    # Perturbing the obs by one standard deviation should lead to an
    # implausibility of one.
    sd = train_Y.data.std(axis=0)
    observation = train_Y[10].copy() + sd

    sampler = MCMCSampler(
        model, observation,
        obs_uncertainty=sd / observation.data,
        interann_uncertainty=0.,
        repres_uncertainty=0.,
        struct_uncertainty=0.,
    )

    # Generate only valid samples; skip burn-in entirely.
    samples = sampler.sample(n_samples=10,
                             mcmc_kwargs=dict(num_burnin_steps=0))

    # Only the shape matters here - the actual probabilities are tested
    # elsewhere and the tf MCMC internals are not ours to test.
    assert samples.shape == (10, 2)
def test_user_specified_single_kernel():
    """
    Simple 1D two-parameter case: a single kernel given by name
    (as a one-element list) is accepted and trains cleanly.
    """
    params, _held_out = pop_elements(get_uniform_params(2, 6), 10, 12)
    cube = get_1d_two_param_cube(params)
    model = gp_model(params, cube, kernel=['RBF'])
    model.train()
def test_user_specified_kernel():
    """
    Simple 1D two-parameter case: several kernels given by name are
    combined and the resulting model trains cleanly.
    """
    params, _held_out = pop_elements(get_uniform_params(2, 6), 10, 12)
    cube = get_1d_two_param_cube(params)
    model = gp_model(params, cube,
                     kernel=['Bias', 'Polynomial', 'Linear', 'RBF'])
    model.train()
def test_user_specified_invalid_op():
    """
    An unrecognised kernel_op ('Blah') should raise ValueError at
    construction.
    """
    params, test = pop_elements(get_uniform_params(2, 6), 10, 12)
    ensemble = get_1d_two_param_cube(params)
    with pytest.raises(ValueError):
        # No need to bind the result - construction itself should raise.
        gp_model(params, ensemble, kernel=['RBF', 'White'], kernel_op='Blah')
def setup_class(cls) -> None:
    """Train a three-parameter GP model shared by all tests in this class."""
    train_params, held_out = pop_elements(get_uniform_params(3), 50)
    cube = get_three_param_cube(train_params)
    model = gp_model(train_params, cube)
    model.train()

    cls.model = model
    cls.params = train_params
    cls.test_params = held_out
    cls.eval_fn = eval_cube
def setup_class(cls) -> None:
    """Train a three-parameter GP model on a float32 (lower precision) ensemble."""
    train_params, held_out = pop_elements(get_uniform_params(3), 50)
    cube = get_three_param_cube(train_params)
    # Re-create the ensemble at lower precision.
    cube = cube.copy(data=cube.data.astype('float32'))
    model = gp_model(train_params, cube)
    model.train()

    cls.model = model
    cls.params = train_params
    cls.test_params = held_out
    cls.eval_fn = eval_cube
def setup_class(cls) -> None:
    """Train a 1D two-parameter GP model with a user-specified kernel list."""
    train_params, held_out = pop_elements(get_uniform_params(2, 6), 10, 12)
    cube = get_1d_two_param_cube(train_params)
    model = gp_model(train_params, cube,
                     kernel=['Bias', 'Polynomial', 'Linear', 'RBF'])
    model.train()

    cls.model = model
    cls.params = train_params
    cls.test_params = held_out
    cls.eval_fn = eval_1d_cube
def setup_class(cls) -> None:
    """Train a 1D two-parameter GP model with the default kernel."""
    train_params, held_out = pop_elements(get_uniform_params(2, 6), 10, 12)
    cube = get_1d_two_param_cube(train_params)
    model = gp_model(train_params, cube)
    model.train()

    cls.model = model
    cls.params = train_params
    cls.test_params = held_out
    cls.eval_fn = eval_1d_cube
def test_user_provided_kernel():
    """
    Simple 1D two-parameter case: a ready-made gpflow kernel object is
    accepted directly and the model trains cleanly.
    """
    import gpflow

    composite = (gpflow.kernels.RBF(lengthscales=[0.5] * 2, variance=0.01)
                 + gpflow.kernels.Linear(variance=[1.] * 2)
                 + gpflow.kernels.Polynomial(variance=[1.] * 2)
                 + gpflow.kernels.Bias())

    params, _held_out = pop_elements(get_uniform_params(2, 6), 10, 12)
    cube = get_1d_two_param_cube(params)
    model = gp_model(params, cube, kernel=composite)
    model.train()
def setup_class(cls) -> None:
    """Train a 1D two-parameter GP model using a user-provided gpflow kernel."""
    import gpflow

    composite = (gpflow.kernels.RBF(lengthscales=[0.5] * 2, variance=0.01)
                 + gpflow.kernels.Linear(variance=[1.] * 2)
                 + gpflow.kernels.Polynomial(variance=[1.] * 2)
                 + gpflow.kernels.Bias())

    train_params, held_out = pop_elements(get_uniform_params(2, 6), 10, 12)
    cube = get_1d_two_param_cube(train_params)
    model = gp_model(train_params, cube, kernel=composite)
    model.train()

    cls.model = model
    cls.params = train_params
    cls.test_params = held_out
    cls.eval_fn = eval_1d_cube
def test_simple_sample():
    """
    Sampling against an observation of 2.0 yields parameters whose
    model output averages about 2 (within 10%).
    """
    from iris.cube import Cube

    X = get_uniform_params(2)
    z = simple_polynomial_fn_two_param(*X.T)
    model = gp_model(X, z)
    model.train()

    sampler = MCMCSampler(model, Cube(np.asarray([2.])),
                          obs_uncertainty=0.1,
                          interann_uncertainty=0.,
                          repres_uncertainty=0.,
                          struct_uncertainty=0.)

    # Use as few burn-in steps as we can get away with to keep the test fast.
    samples = sampler.sample(n_samples=500,
                             mcmc_kwargs=dict(num_burnin_steps=50))

    predicted = simple_polynomial_fn_two_param(*samples.T)
    assert_allclose(predicted.mean(), 2., rtol=0.1)
def setup_method(self):
    """Build and train a fresh three-parameter model before each test."""
    params = get_uniform_params(3)
    ensemble = get_three_param_cube(params)
    model = gp_model(params, ensemble)
    model.train()

    self.training_params = params
    self.training_ensemble = ensemble
    self.m = model