import numpy as np
import pytest
import tensorflow as tf

import gpflow
from gpflow.inducing_variables import InducingPoints
from gpflow.likelihoods import StudentT, SwitchedLikelihood


# The parametrization values below are assumed so the test runs standalone;
# `num_latent_gps` is supplied by pytest.
@pytest.mark.parametrize("num_latent_gps", [1, 2])
def test_switched_likelihood_regression_valid_num_latent_gps(num_latent_gps):
    """
    A Regression test when using Switched likelihood: the number of latent
    functions in a GP model must be equal to the number of columns in Y minus
    one. The final column of Y is used to index the switch. If the number of
    latent functions does not match, an exception will be raised.
    """
    x = np.random.randn(100, 1)
    y = np.hstack((np.random.randn(100, 1), np.random.randint(0, 3, (100, 1))))
    data = x, y

    Z = InducingPoints(np.random.randn(num_latent_gps, 1))
    likelihoods = [StudentT()] * 3
    switched_likelihood = SwitchedLikelihood(likelihoods)
    m = gpflow.models.SVGP(
        kernel=gpflow.kernels.Matern12(),
        inducing_variable=Z,
        likelihood=switched_likelihood,
        num_latent_gps=num_latent_gps,
    )
    if num_latent_gps == 1:
        _ = m.training_loss(data)
    else:
        with pytest.raises(tf.errors.InvalidArgumentError):
            _ = m.training_loss(data)
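    # Illustrative follow-up, not part of the original test: the last column of
    # Y holds the integer switch index into the likelihood list, and the
    # remaining columns are outputs, so the only valid choice above is
    # Y.shape[1] - 1 == 1 latent GP.
    assert y.shape[1] - 1 == 1
    assert set(np.unique(y[:, -1])) <= {0, 1, 2}  # indices into the 3 likelihoods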


def test_switched_likelihood_with_vgp():
    """
    Reproduces the bug in https://github.com/GPflow/GPflow/issues/951
    """
    X = np.random.randn(12 + 15, 1)
    Y = np.random.randn(12 + 15, 1)
    idx = np.array([0] * 12 + [1] * 15)
    Y_aug = np.c_[Y, idx]
    assert Y_aug.shape == (12 + 15, 2)

    kernel = gpflow.kernels.Matern32()
    likelihood = gpflow.likelihoods.SwitchedLikelihood([StudentT(), StudentT()])
    model = gpflow.models.VGP((X, Y_aug), kernel=kernel, likelihood=likelihood)
    # without bugfix, optimization errors out
    opt = gpflow.optimizers.Scipy()
    opt.minimize(model.training_loss, model.trainable_variables, options=dict(maxiter=1))
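    # A short follow-up sketch, not part of the original test: the fitted model
    # can be queried as usual. predict_f only needs input locations; the switch
    # index lives in the extra column of Y, not in X.
    X_new = np.linspace(-2.0, 2.0, 5)[:, None]
    f_mean, f_var = model.predict_f(X_new)
    assert f_mean.shape == (5, 1) and f_var.shape == (5, 1)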
Example #3
def test_switched_likelihood_regression_valid_num_latent(X, Y, num_latent):
    """
    A Regression test when using Switched likelihood: the number of latent
    functions in a GP model must be equal to the number of columns in Y minus
    one. The final column of Y is used to index the switch. If the number of
    latent functions does not match, an exception will be raised.
    """

    Z = InducingPoints(np.random.randn(num_latent, 1))
    likelihoods = [StudentT()] * 3
    switched_likelihood = SwitchedLikelihood(likelihoods)
    m = gpflow.models.SVGP(kernel=gpflow.kernels.Matern12(),
                           inducing_variable=Z,
                           likelihood=switched_likelihood,
                           num_latent=num_latent)
    if num_latent == 1:
        m.log_likelihood(X, Y)
    else:
        with pytest.raises(tf.errors.InvalidArgumentError):
            m.log_likelihood(X, Y)
Example #4
class LikelihoodSetup(object):
    def __init__(self, likelihood, Y=Datum.Y, rtol=1e-06, atol=0.):
        self.likelihood = likelihood
        self.Y = Y
        self.rtol = rtol
        self.atol = atol

    def __repr__(self):
        name = self.likelihood.__class__.__name__
        return f"{name}-rtol={self.rtol}-atol={self.atol}"


likelihood_setups = [
    LikelihoodSetup(Gaussian()),
    LikelihoodSetup(StudentT()),
    LikelihoodSetup(Beta(),
                    Y=tf.random.uniform(Datum.Yshape, dtype=default_float())),
    LikelihoodSetup(Ordinal(np.array([-1, 1])),
                    Y=tf.random.uniform(Datum.Yshape,
                                        0,
                                        3,
                                        dtype=default_int())),
    LikelihoodSetup(Poisson(invlink=tf.square),
                    Y=tf.random.poisson(Datum.Yshape,
                                        1.0,
                                        dtype=default_float())),
    LikelihoodSetup(Exponential(invlink=tf.square),
                    Y=tf.random.uniform(Datum.Yshape, dtype=default_float())),
    LikelihoodSetup(Gamma(invlink=tf.square),
                    Y=tf.random.uniform(Datum.Yshape, dtype=default_float())),