def test_variational_complete():
    """End-to-end test: check NELBO, LOO loss and predictions against reference values."""
    # construct objects
    train_inputs, train_outputs, test_inputs, num_train, inducing_inputs = construct_input()
    input_dim = 1
    output_dim = 1
    args = dict(num_samples=5000000,
                num_components=1,
                optimize_inducing=False,
                use_loo=True,
                diag_post=False,
                sn=1.0,
                length_scale=0.5,
                sf=1.0,
                iso=False,
                cov='SquaredExponential')
    vi = inference.Variational(args, 'LikelihoodGaussian', output_dim,
                               num_train, inducing_inputs)
    vi.build((num_train, input_dim))

    # compute losses and predictions
    losses = vi.inference(train_inputs, train_outputs, True)
    nelbo = losses['NELBO']
    loo = losses['LOO_VARIATIONAL']
    pred_mean, pred_var = vi.prediction(test_inputs)

    # check results
    np.testing.assert_allclose(nelbo.numpy(), 3.844, rtol=1e-3)
    np.testing.assert_allclose(loo.numpy(), 4.45, rtol=1e-2)  # test with a relative tolerance of 1%
    np.testing.assert_allclose(pred_mean.numpy(), 0.0, rtol=RTOL)
    np.testing.assert_allclose(tf.squeeze(pred_var).numpy(), 2.0, rtol=1e-2)
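
These snippets reference module-level fixtures (construct_input, PARAMS, RTOL) that are defined elsewhere in the test module. The sketch below is only an illustration of what they could look like: the names come from the snippets, but every concrete value here is an assumption, and the real fixture values are what produce the NELBO/LOO numbers asserted above.

import numpy as np

# Assumed tolerance constant for the assertions above (illustrative value only).
RTOL = 1e-2

# Assumed shared defaults for the constructor helpers below, mirroring the args of
# test_variational_complete; API versions that take explicit kernel/likelihood
# objects would only read the inference-related keys.
PARAMS = dict(num_samples=5000000,
              num_components=1,
              optimize_inducing=False,
              use_loo=True,
              diag_post=False,
              sn=1.0,
              length_scale=1.0,
              sf=1.0,
              iso=False,
              cov='SquaredExponential')


def construct_input():
    """Toy 1-D regression data; shapes follow the snippets, values are made up."""
    train_inputs = np.array([[-1.0], [1.0]], dtype=np.float32)
    train_outputs = np.array([[-1.0], [1.0]], dtype=np.float32)
    test_inputs = np.array([[0.0]], dtype=np.float32)
    num_train = len(train_inputs)
    inducing_inputs = train_inputs.copy()
    return train_inputs, train_outputs, test_inputs, num_train, inducing_inputs
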
def construct_simple_full():
    """Single-output Variational inference with a full posterior, config-dict API."""
    input_dim = 1
    output_dim = 1
    num_train = 1
    inf = inference.Variational(PARAMS, 'LikelihoodGaussian', output_dim,
                                num_train, num_train)
    inf.build((num_train, input_dim))
    return inf
Example #3
def construct_simple_full():
    """Full-posterior Variational inference from explicit kernel and likelihood objects."""
    likelihood = lik.LikelihoodGaussian({'sn': 1.0})
    kernel = [
        cov.SquaredExponential(input_dim=1,
                               args=dict(length_scale=1.0, sf=1.0, iso=False))
    ]
    # In most of our unit tests, we replace this value with something else.
    return inference.Variational(kernel, likelihood, 1, 1, PARAMS)
def construct_simple_diag():
    """Variational inference with a diagonal posterior (diag_post=True), config-dict API."""
    input_dim = 1
    output_dim = 1
    num_train = 1
    inf = inference.Variational({
        **PARAMS, 'diag_post': True
    }, 'LikelihoodGaussian', output_dim, num_train, num_train)
    inf.build((num_train, input_dim))
    return inf
Example #5
def construct_simple_diag():
    """Diagonal-posterior Variational inference from explicit kernel and likelihood objects."""
    likelihood = lik.LikelihoodGaussian({'sn': 1.0})
    kernel = [
        cov.SquaredExponential(input_dim=1,
                               args=dict(length_scale=1.0, sf=1.0, iso=False))
    ]
    return inference.Variational(kernel, likelihood, 1, 1, {
        **PARAMS, 'diag_post': True
    })
Example #6
def construct_multi_full():
    """Two-component Variational inference with a softmax likelihood and two 2-D kernels."""
    likelihood = lik.LikelihoodSoftmax({'num_samples_pred': 100})
    kernels = [
        cov.SquaredExponential(input_dim=2,
                               args=dict(length_scale=1.0, sf=1.0, iso=False))
        for _ in range(2)
    ]
    return inference.Variational(kernels, likelihood, 1, 1, {
        **PARAMS, 'num_components': 2
    })
def construct_multi_full():
    """Two-output, two-component Variational inference with a softmax likelihood, config-dict API."""
    input_dim = 2
    output_dim = 2
    num_train = 1
    inf = inference.Variational(
        {
            **PARAMS, 'num_samples_pred': 100,
            'num_components': 2
        }, 'LikelihoodSoftmax', output_dim, num_train, num_train)
    inf.build((num_train, input_dim))
    return inf
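
For context, a helper such as construct_multi_full (the config-dict version directly above) would typically be exercised along the following lines. This is a hypothetical smoke test: the 2-D inputs and one-hot labels are assumptions about what the softmax likelihood expects, and no reference values are checked.

def test_multi_full_inference_runs():
    """Hypothetical smoke test: inference on the two-component softmax model returns a NELBO."""
    inf_obj = construct_multi_full()
    train_inputs = np.array([[0.0, 1.0]], dtype=np.float32)   # a single 2-D point
    train_outputs = np.array([[1.0, 0.0]], dtype=np.float32)  # assumed one-hot label
    losses = inf_obj.inference(train_inputs, train_outputs, True)
    assert 'NELBO' in losses
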
Example #8
def test_variational_complete():
    """End-to-end test: check NELBO, LOO loss and predictions against reference values."""
    # construct objects
    train_inputs, train_outputs, test_inputs, num_train, inducing_inputs = construct_input()
    likelihood = lik.LikelihoodGaussian({'sn': 1.0})
    kernel = [cov.SquaredExponential(input_dim=1, args=dict(length_scale=0.5, sf=1.0, iso=False))]
    vi = inference.Variational(kernel, likelihood, num_train, inducing_inputs,
                               {'num_samples': 5000000, 'num_components': 1, 'optimize_inducing': False,
                                'use_loo': True, 'diag_post': False})

    # compute losses and predictions
    losses, _ = vi.inference(train_inputs, train_outputs, True)
    nelbo = losses['NELBO']
    loo = losses['LOO_VARIATIONAL']
    pred_mean, pred_var = vi.predict(test_inputs)

    # check results
    np.testing.assert_allclose(nelbo.numpy(), 3.844, rtol=1e-3)
    np.testing.assert_allclose(loo.numpy(), 4.45, rtol=1e-2)  # test with a relative tolerance of 1%
    np.testing.assert_allclose(pred_mean.numpy(), 0.0, rtol=RTOL)
    np.testing.assert_allclose(tf.squeeze(pred_var).numpy(), 2.0, rtol=1e-2)
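
Likewise, the single-output helpers are pure constructors; a hypothetical smoke test around construct_simple_diag (config-dict API) could look like this, again with made-up data, the inference/prediction calls copied from the snippets above, and no reference values.

def test_simple_diag_inference_runs():
    """Hypothetical smoke test for the diagonal-posterior helper."""
    inf_obj = construct_simple_diag()
    train_inputs = np.array([[0.5]], dtype=np.float32)
    train_outputs = np.array([[1.0]], dtype=np.float32)
    losses = inf_obj.inference(train_inputs, train_outputs, True)
    pred_mean, _ = inf_obj.prediction(np.array([[0.0]], dtype=np.float32))
    assert 'NELBO' in losses
    assert np.all(np.isfinite(pred_mean.numpy()))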