Example 1
def test_prior_sample(n, n_sample):
    key = jr.PRNGKey(123)
    f = Prior(kernel=RBF())
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)
    params = initialise(RBF())
    samples = sample(key, f, params, sample_points, n_samples=n_sample)
    assert samples.shape == (n_sample, sample_points.shape[0])
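These snippets all omit their imports; a plausible shared preamble is sketched below. The exact GPJax module paths are assumptions and may differ between versions.

import pytest
import jax.numpy as jnp
import jax.random as jr
from tensorflow_probability.substrates import jax as tfp

tfd = tfp.distributions

# Assumed top-level re-exports; some GPJax versions may require importing
# from submodules such as gpjax.kernels or gpjax.parameters instead.
from gpjax import Prior, RBF, Gaussian, Bernoulli, Dataset, initialise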
Example 2
def _get_conjugate_posterior_params() -> tuple:
    kernel = RBF()
    prior = Prior(kernel=kernel)
    lik = Gaussian()
    posterior = prior * lik
    params = initialise(posterior)
    return params, posterior
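A hypothetical call site, only to illustrate the tuple return:

params, posterior = _get_conjugate_posterior_params()
assert isinstance(params, dict)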
Example 3
def test_prior_random_variable(n):
    f = Prior(kernel=RBF())
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)
    D = Dataset(X=sample_points)
    params = initialise(RBF())
    rv = random_variable(f, params, D)
    assert isinstance(rv, tfd.MultivariateNormalFullCovariance)
Example 4
def test_prior_mll():
    """
    Test that the MLL evaluation works with priors attached to the parameter values.
    """
    key = jr.PRNGKey(123)
    x = jnp.sort(jr.uniform(key, minval=-5.0, maxval=5.0, shape=(100, 1)),
                 axis=0)
    f = lambda x: jnp.sin(jnp.pi * x) / (jnp.pi * x)
    y = f(x) + jr.normal(key, shape=x.shape) * 0.1
    posterior = Prior(kernel=RBF()) * Gaussian()

    params = initialise(posterior)
    config = get_defaults()
    constrainer, unconstrainer = build_all_transforms(params.keys(), config)
    params = unconstrainer(params)

    mll = marginal_ll(posterior, transform=constrainer)

    priors = {
        "lengthscale": tfd.Gamma(1.0, 1.0),
        "variance": tfd.Gamma(2.0, 2.0),
        "obs_noise": tfd.Gamma(2.0, 2.0),
    }
    mll_eval = mll(params, x, y)
    mll_eval_priors = mll(params, x, y, priors)

    assert pytest.approx(mll_eval) == jnp.array(-103.28180663)
    assert pytest.approx(mll_eval_priors) == jnp.array(-105.509218857)
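Because marginal_ll returns an ordinary function of the parameter dictionary, gradients come straight from JAX. A minimal sketch reusing the objects defined in the test above (only jax.grad is assumed):

import jax

# One gradient entry per parameter in the dictionary.
grads = jax.grad(mll)(params, x, y)
print(grads.keys())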
Example 5
def test_posterior_random_variable(n):
    f = Prior(kernel=RBF()) * Gaussian()
    x = jnp.linspace(-1.0, 1.0, 10).reshape(-1, 1)
    y = jnp.sin(x)
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)
    params = initialise(f)
    rv = random_variable(f, params, sample_points, x, y)
    assert isinstance(rv, tfd.MultivariateNormalFullCovariance)
Example 6
def test_constrain(likelihood):
    posterior = Prior(kernel=RBF()) * likelihood()
    params = initialise(posterior, 10)
    config = get_defaults()
    transform_map = build_constrain(params.keys(), config)
    transformed_params = transform_map(params)
    assert transformed_params.keys() == params.keys()
    for u, v in zip(transformed_params.values(), params.values()):
        assert u.dtype == v.dtype
Example 7
def test_posterior_sample(n, n_sample):
    key = jr.PRNGKey(123)
    f = Prior(kernel=RBF()) * Gaussian()
    x = jnp.linspace(-1.0, 1.0, 10).reshape(-1, 1)
    y = jnp.sin(x)
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)
    params = initialise(f)
    rv = random_variable(f, params, sample_points, x, y)
    samples = sample(key, rv, n_samples=n_sample)
    assert samples.shape == (n_sample, sample_points.shape[0])
Example 8
def test_posterior_random_variable(n):
    f = Prior(kernel=RBF()) * Gaussian()
    x = jnp.linspace(-1.0, 1.0, 10).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)
    sample_points = jnp.linspace(-1.0, 1.0, num=n).reshape(-1, 1)
    params = initialise(f)
    rv = random_variable(f, params, D)
    assert isinstance(rv, Callable)
    fstar = rv(sample_points)
    assert isinstance(fstar, tfd.MultivariateNormalFullCovariance)
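Since fstar is a TensorFlow Probability distribution, the standard tfd API applies to it. A small sketch, using tfp's JAX-substrate seed convention:

key = jr.PRNGKey(0)
draws = fstar.sample(sample_shape=(5,), seed=key)  # five posterior function draws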
Example 9
def test_conjugate_variance():
    key = jr.PRNGKey(123)
    x = jr.uniform(key, shape=(20, 1), minval=-3.0, maxval=3.0)
    y = jnp.sin(x)

    posterior = Prior(kernel=RBF()) * Gaussian()
    params = initialise(posterior)

    xtest = jnp.linspace(-3.0, 3.0, 30).reshape(-1, 1)
    sigma = variance(posterior, params, xtest, x, y)
    assert sigma.shape == (xtest.shape[0], xtest.shape[0])
Example 10
def test_non_conjugate():
    posterior = Prior(kernel=RBF()) * Bernoulli()
    n = 20
    x = jnp.linspace(-1.0, 1.0, n).reshape(-1, 1)
    y = jnp.sin(x)
    params = initialise(posterior, 20)
    config = get_defaults()
    unconstrainer, constrainer = build_all_transforms(params.keys(), config)
    params = unconstrainer(params)
    mll = marginal_ll(posterior, transform=constrainer)
    assert isinstance(mll, Callable)
    neg_mll = marginal_ll(posterior, transform=constrainer, negative=True)
    assert neg_mll(params, x, y) == jnp.array(-1.0) * mll(params, x, y)
Example 11
def test_conjugate_mean():
    key = jr.PRNGKey(123)
    x = jr.uniform(key, shape=(20, 1), minval=-3.0, maxval=3.0)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)

    posterior = Prior(kernel=RBF()) * Gaussian()
    params = initialise(posterior)

    xtest = jnp.linspace(-3.0, 3.0, 30).reshape(-1, 1)
    meanf = mean(posterior, params, D)
    mu = meanf(xtest)
    assert mu.shape == (xtest.shape[0], y.shape[1])
Example 12
def test_non_conjugate_variance():
    key = jr.PRNGKey(123)
    x = jnp.sort(jr.uniform(key, shape=(10, 1), minval=-1.0, maxval=1.0), axis=0)
    y = 0.5 * jnp.sign(jnp.cos(3 * x + jr.normal(key, shape=x.shape) * 0.05)) + 0.5
    D = Dataset(X=x, y=y)
    xtest = jnp.linspace(-1.05, 1.05, 50).reshape(-1, 1)

    posterior = Prior(kernel=RBF()) * Bernoulli()
    params = initialise(posterior, x.shape[0])

    varf = variance(posterior, params, D)
    sigma = varf(xtest)
    assert sigma.shape == (xtest.shape[0],)
Example 13
def test_non_conjugate_mean():
    key = jr.PRNGKey(123)
    x = jnp.sort(jr.uniform(key, shape=(10, 1), minval=-1.0, maxval=1.0),
                 axis=0)
    y = 0.5 * jnp.sign(
        jnp.cos(3 * x + jr.normal(key, shape=x.shape) * 0.05)) + 0.5
    xtest = jnp.linspace(-1.05, 1.05, 50).reshape(-1, 1)

    posterior = Prior(kernel=RBF()) * Bernoulli()
    params = initialise(posterior, x.shape[0])

    mu = mean(posterior, params, xtest, x, y)
    assert mu.shape == (xtest.shape[0],)
Example 14
def test_conjugate():
    posterior = Prior(kernel=RBF()) * Gaussian()

    x = jnp.linspace(-1.0, 1.0, 20).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)
    params = initialise(posterior)
    config = get_defaults()
    unconstrainer, constrainer = build_all_transforms(params.keys(), config)
    params = unconstrainer(params)
    mll = marginal_ll(posterior, transform=constrainer)
    assert isinstance(mll, Callable)
    neg_mll = marginal_ll(posterior, transform=constrainer, negative=True)
    assert neg_mll(params, D) == jnp.array(-1.0) * mll(params, D)
Example 15
def test_spectral_sample():
    key = jr.PRNGKey(123)
    M = 10
    x = jnp.linspace(-1.0, 1.0, 20).reshape(-1, 1)
    y = jnp.sin(x)
    D = Dataset(X=x, y=y)
    sample_points = jnp.linspace(-1.0, 1.0, num=50).reshape(-1, 1)
    kernel = to_spectral(RBF(), M)
    post = Prior(kernel=kernel) * Gaussian()
    params = initialise(key, post)
    sparams = {"basis_fns": params["basis_fns"]}
    del params["basis_fns"]
    posterior_rv = random_variable(post, params, D, static_params=sparams)(sample_points)
    assert isinstance(posterior_rv, tfd.Distribution)
    assert isinstance(posterior_rv, tfd.MultivariateNormalFullCovariance)
Example 16
def test_conjugate():
    key = jr.PRNGKey(123)
    kern = to_spectral(RBF(), 10)
    posterior = Prior(kernel=kern) * Gaussian()
    x = jnp.linspace(-1.0, 1.0, 20).reshape(-1, 1)
    y = jnp.sin(x)
    params = initialise(key, posterior)
    config = get_defaults()
    unconstrainer, constrainer = build_all_transforms(params.keys(), config)
    params = unconstrainer(params)
    mll = marginal_ll(posterior, transform=constrainer)
    assert isinstance(mll, Callable)
    neg_mll = marginal_ll(posterior, transform=constrainer, negative=True)
    assert neg_mll(params, x, y) == jnp.array(-1.0) * mll(params, x, y)
    nmll = neg_mll(params, x, y)
    assert nmll.shape == ()
Example 17
def test_build_all_transforms(likelihood):
    posterior = Prior(kernel=RBF()) * likelihood()
    params = initialise(posterior, 10)
    config = get_defaults()
    t1, t2 = build_all_transforms(params.keys(), config)
    constrainer = build_constrain(params.keys(), config)
    constrained = t1(params)
    constrained2 = constrainer(params)
    assert constrained.keys() == constrained2.keys()
    for u, v in zip(constrained.values(), constrained2.values()):
        assert_array_equal(u, v)
        assert u.dtype == v.dtype
    unconstrained = t2(params)
    unconstrainer = build_unconstrain(params.keys(), config)
    unconstrained2 = unconstrainer(params)
    for u, v in zip(unconstrained.values(), unconstrained2.values()):
        assert_array_equal(u, v)
        assert u.dtype == v.dtype
Example 18
def plot(kernel: Kernel, X: Array, params: dict = None, ax=None):
    """
    Plot the kernel's Gram matrix.

    :param kernel: The kernel function that generates the Gram matrix
    :param X: The data points on which the Gram matrix is computed.
    :param params: A dictionary containing the kernel parameters
    :param ax: An optional matplotlib axes
    :return:
    """
    if params is None:
        params = initialise(kernel)

    cols = get_cmap()
    if ax is None:
        fig, ax = plt.subplots()

    K = gpjax.kernels.gram(kernel, X, params)
    ax.matshow(K, cmap=cols)
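A sketch of a call to this helper; the inputs are arbitrary choices for illustration, not from the original:

import matplotlib.pyplot as plt

X = jnp.linspace(-3.0, 3.0, num=50).reshape(-1, 1)
plot(RBF(), X)  # Gram matrix under default initialised parameters
plt.show()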
Example 19
def fit(posterior, nits, data, configs):
    params = initialise(posterior)
    constrainer, unconstrainer = build_all_transforms(params.keys(), configs)
    # Optimisation runs in unconstrained space; the constrainer is applied
    # inside the objective through the transform argument below.
    params = unconstrainer(params)

    # Negative marginal log-likelihood, jitted for speed.
    mll = jit(marginal_ll(posterior, transform=constrainer, negative=True))

    opt_init, opt_update, get_params = optimizers.adam(step_size=0.05)
    opt_state = opt_init(params)

    def step(i, opt_state):
        p = get_params(opt_state)
        v, g = value_and_grad(mll)(p, data)
        return opt_update(i, g, opt_state), v

    for i in range(nits):
        opt_state, mll_estimate = step(i, opt_state)
    print(f"{posterior.prior.kernel.name} GP's negative marginal log-likelihood: {mll_estimate: .2f}")

    # Map the optimised values back onto their constrained support.
    final_params = constrainer(get_params(opt_state))
    return final_params
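A sketch of how fit might be driven, assuming the Dataset-based marginal_ll signature used in test_conjugate above; the data is made up for illustration:

x = jnp.linspace(-1.0, 1.0, 20).reshape(-1, 1)
y = jnp.sin(x)
D = Dataset(X=x, y=y)
posterior = Prior(kernel=RBF()) * Gaussian()
learned_params = fit(posterior, nits=100, data=D, configs=get_defaults())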
Example 20
def plot(kernel: Kernel, X: Array, Y: Array, params: dict = None, ax=None):
    """
    Plot the kernel's cross-covariance matrix.

    :param kernel: The kernel function that generates the covariance matrix
    :param X: The first set of data points on which the covariance matrix is computed.
    :param Y: The second set of data points on which the covariance matrix is computed.
    :param params: A dictionary containing the kernel parameters
    :param ax: An optional matplotlib axes
    :return:
    """
    if params is None:
        params = initialise(kernel)

    cols = get_cmap()
    if ax is None:
        fig, ax = plt.subplots()
        fig.set_tight_layout(False)

    K = gpjax.kernels.cross_covariance(kernel, X, Y, params)
    ax.matshow(K, cmap=cols)
Example 21
def plot(kernel: Kernel, params: dict = None, ax=None, xrange: Tuple[float, float] = (-10.0, 10.0)):
    """
    Plot the kernel's shape.

    :param kernel: The kernel function
    :param params: A dictionary containing the kernel parameters
    :param ax: An optional matplotlib axes
    :param xrange: The lower and upper bounds between which the kernel is evaluated.
    :return:
    """
    if params is None:
        params = initialise(kernel)

    cols = get_colours()
    if ax is None:
        fig, ax = plt.subplots()

    X = jnp.linspace(xrange[0], xrange[1], num=200).reshape(-1, 1)
    x1 = jnp.array([[0.0]])
    K = gpjax.kernels.cross_covariance(kernel, X, x1, params)
    ax.plot(X, K.T, color=cols['base'])
    mplcyberpunk.add_underglow(ax=ax)
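And a corresponding usage sketch (the kernel and range are arbitrary):

plot(RBF(), xrange=(-5.0, 5.0))
plt.show()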
Example 22
def test_output(transformation, likelihood):
    posterior = Prior(kernel=RBF()) * likelihood()
    params = initialise(posterior, 10)
    config = get_defaults()
    transform_map = transformation(params.keys(), config)
    assert isinstance(transform_map, Callable)
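The transformation and likelihood arguments suggest this test is parametrised. A sketch of what that parametrisation might look like; the decorator arguments are assumptions, not taken from the original source:

@pytest.mark.parametrize("transformation", [build_constrain, build_unconstrain])
@pytest.mark.parametrize("likelihood", [Gaussian, Bernoulli])
def test_output(transformation, likelihood):
    ...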