from typing import Callable

import jax.numpy as jnp
import jax.random as jr
import pytest
from tensorflow_probability.substrates.jax import distributions as tfd

# NOTE: this import block is reconstructed from the names used in the tests below;
# the exact module paths are assumed and may need adjusting to the package layout.
from gpjax import (
    RBF,
    Bernoulli,
    Dataset,
    Gaussian,
    Prior,
    build_all_transforms,
    complete,
    get_defaults,
    initialise,
    marginal_ll,
    random_variable,
    sample,
    to_spectral,
    variance,
)


def test_prior_mll():
    """Test that the MLL evaluation works with priors attached to the parameter values."""
    key = jr.PRNGKey(123)
    x = jnp.sort(jr.uniform(key, minval=-5.0, maxval=5.0, shape=(100, 1)), axis=0)
    f = lambda x: jnp.sin(jnp.pi * x) / (jnp.pi * x)
    y = f(x) + jr.normal(key, shape=x.shape) * 0.1
    D = Dataset(X=x, y=y)

    posterior = Prior(kernel=RBF()) * Gaussian()
    params = initialise(posterior)
    config = get_defaults()
    constrainer, unconstrainer = build_all_transforms(params.keys(), config)
    params = unconstrainer(params)

    mll = marginal_ll(posterior, transform=constrainer)
    priors = {
        "lengthscale": tfd.Gamma(1.0, 1.0),
        "variance": tfd.Gamma(2.0, 2.0),
        "obs_noise": tfd.Gamma(2.0, 2.0),
    }
    mll_eval = mll(params, D)
    mll_eval_priors = mll(params, D, priors)

    assert pytest.approx(mll_eval) == jnp.array(-103.28180663)
    assert pytest.approx(mll_eval_priors) == jnp.array(-105.509218857)

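# Added for illustration (not part of the original suite): the gap between the two
# expected values in `test_prior_mll` is consistent with the priors entering the
# objective as an additive sum of log-densities evaluated at a default constrained
# hyperparameter value of 1.0. Both the default value and the additive form are
# assumptions; this check exercises only tfd.Gamma arithmetic, not the library.
def test_prior_mll_gap_matches_prior_log_density():
    priors = {
        "lengthscale": tfd.Gamma(1.0, 1.0),
        "variance": tfd.Gamma(2.0, 2.0),
        "obs_noise": tfd.Gamma(2.0, 2.0),
    }
    log_prior = sum(float(p.log_prob(1.0)) for p in priors.values())
    expected_gap = -105.509218857 - (-103.28180663)
    assert log_prior == pytest.approx(expected_gap, abs=1e-4)
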
@pytest.mark.parametrize("n", [1, 10])  # parametrisation values assumed; originals not shown
def test_prior_random_variable(n):
    f = Prior(kernel=RBF())
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)
    D = Dataset(X=sample_points)
    params = initialise(RBF())
    rv = random_variable(f, params, D)
    assert isinstance(rv, tfd.MultivariateNormalFullCovariance)

@pytest.mark.parametrize("n", [1, 10])  # parametrisation values assumed; originals not shown
@pytest.mark.parametrize("n_sample", [1, 5])  # parametrisation values assumed; originals not shown
def test_prior_sample_array(n, n_sample):
    key = jr.PRNGKey(123)
    f = Prior(kernel=RBF())
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)
    D = Dataset(X=sample_points)
    params = initialise(RBF())
    samples = sample(key, f, params, D, n_samples=n_sample)
    assert samples.shape == (n_sample, sample_points.shape[0])

@pytest.mark.parametrize("n", [1, 10])  # parametrisation values assumed; originals not shown
@pytest.mark.parametrize("n_sample", [1, 5])  # parametrisation values assumed; originals not shown
def test_posterior_sample(n, n_sample):
    key = jr.PRNGKey(123)
    f = Prior(kernel=RBF()) * Gaussian()
    x = jnp.linspace(-1.0, 1.0, 10).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)

    params = initialise(f)
    rv = random_variable(f, params, D)(sample_points)
    samples = sample(key, rv, n_samples=n_sample)
    assert samples.shape == (n_sample, sample_points.shape[0])

@pytest.mark.parametrize("n", [1, 10])  # parametrisation values assumed; originals not shown
def test_posterior_random_variable(n):
    f = Prior(kernel=RBF()) * Gaussian()
    x = jnp.linspace(-1.0, 1.0, 10).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)

    params = initialise(f)
    rv = random_variable(f, params, D)
    assert isinstance(rv, Callable)
    fstar = rv(sample_points)
    assert isinstance(fstar, tfd.MultivariateNormalFullCovariance)

def test_non_conjugate_variance():
    key = jr.PRNGKey(123)
    x = jnp.sort(jr.uniform(key, shape=(10, 1), minval=-1.0, maxval=1.0), axis=0)
    y = 0.5 * jnp.sign(jnp.cos(3 * x + jr.normal(key, shape=x.shape) * 0.05)) + 0.5
    D = Dataset(X=x, y=y)
    xtest = jnp.linspace(-1.05, 1.05, 50).reshape(-1, 1)

    posterior = Prior(kernel=RBF()) * Bernoulli()
    params = initialise(posterior, x.shape[0])

    varf = variance(posterior, params, D)
    sigma = varf(xtest)
    assert sigma.shape == (xtest.shape[0],)

def test_conjugate_variance():
    key = jr.PRNGKey(123)
    x = jr.uniform(key, shape=(20, 1), minval=-3.0, maxval=3.0)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)
    xtest = jnp.linspace(-3.0, 3.0, 30).reshape(-1, 1)

    posterior = Prior(kernel=RBF()) * Gaussian()
    params = initialise(posterior)

    varf = variance(posterior, params, D)
    sigma = varf(xtest)
    assert sigma.shape == (xtest.shape[0], xtest.shape[0])

def test_non_conjugate():
    posterior = Prior(kernel=RBF()) * Bernoulli()
    n = 20
    x = jnp.linspace(-1.0, 1.0, n).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)

    params = initialise(posterior, n)
    config = get_defaults()
    unconstrainer, constrainer = build_all_transforms(params.keys(), config)
    params = unconstrainer(params)

    mll = marginal_ll(posterior, transform=constrainer)
    assert isinstance(mll, Callable)
    neg_mll = marginal_ll(posterior, transform=constrainer, negative=True)
    assert neg_mll(params, D) == jnp.array(-1.0) * mll(params, D)

def test_spectral_sample():
    key = jr.PRNGKey(123)
    M = 10
    x = jnp.linspace(-1.0, 1.0, 20).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)
    sample_points = jnp.linspace(-1.0, 1.0, num=50).reshape(-1, 1)

    kernel = to_spectral(RBF(), M)
    post = Prior(kernel=kernel) * Gaussian()
    params = initialise(key, post)

    # The sampled spectral basis functions are held fixed rather than trained,
    # so they are split off and passed separately as static parameters.
    sparams = {"basis_fns": params["basis_fns"]}
    del params["basis_fns"]

    posterior_rv = random_variable(post, params, D, static_params=sparams)(sample_points)
    assert isinstance(posterior_rv, tfd.Distribution)
    assert isinstance(posterior_rv, tfd.MultivariateNormalFullCovariance)

@pytest.mark.parametrize("n", [1, 10])  # parametrisation values assumed; originals not shown
def test_non_conjugate_rv(n):
    key = jr.PRNGKey(123)
    posterior = Prior(kernel=RBF()) * Bernoulli()
    x = jnp.sort(jr.uniform(key, shape=(n, 1), minval=-1.0, maxval=1.0), axis=0)
    y = 0.5 * jnp.sign(jnp.cos(3 * x + jr.normal(key, shape=x.shape) * 0.05)) + 0.5
    D = Dataset(X=x, y=y)
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)

    # `complete` fills in the remaining parameters required by the non-conjugate
    # posterior for the given number of data points.
    hyperparams = {"lengthscale": jnp.array([1.0]), "variance": jnp.array([1.0])}
    params = complete(hyperparams, posterior, x.shape[0])

    rv = random_variable(posterior, params, D)
    assert isinstance(rv, Callable)
    fstar = rv(sample_points)
    assert isinstance(fstar, tfd.ProbitBernoulli)

def test_spectral():
    key = jr.PRNGKey(123)
    kern = to_spectral(RBF(), 10)
    posterior = Prior(kernel=kern) * Gaussian()
    x = jnp.linspace(-1.0, 1.0, 20).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)

    params = initialise(key, posterior)
    config = get_defaults()
    unconstrainer, constrainer = build_all_transforms(params.keys(), config)
    params = unconstrainer(params)

    mll = marginal_ll(posterior, transform=constrainer)
    assert isinstance(mll, Callable)
    neg_mll = marginal_ll(posterior, transform=constrainer, negative=True)
    assert neg_mll(params, D) == jnp.array(-1.0) * mll(params, D)
    nmll = neg_mll(params, D)
    assert nmll.shape == ()