def test_switched_likelihood_regression_valid_num_latent_gps(num_latent_gps):
    """
    Regression test for SwitchedLikelihood: the number of latent GPs in a
    model must equal the number of columns of Y minus one, because the last
    column of Y is the switch index selecting the sub-likelihood. Any other
    count must raise an error when the training loss is evaluated.
    """
    inputs = np.random.randn(100, 1)
    # The last column of the targets indexes the switch (labels in {0, 1, 2}).
    targets = np.hstack((np.random.randn(100, 1), np.random.randint(0, 3, (100, 1))))
    data = inputs, targets
    inducing = InducingPoints(np.random.randn(num_latent_gps, 1))
    switched = SwitchedLikelihood([StudentT()] * 3)
    model = gpflow.models.SVGP(
        kernel=gpflow.kernels.Matern12(),
        inducing_variable=inducing,
        likelihood=switched,
        num_latent_gps=num_latent_gps,
    )
    if num_latent_gps != 1:
        # Mismatched latent-GP count: evaluation must fail inside TensorFlow.
        with pytest.raises(tf.errors.InvalidArgumentError):
            _ = model.training_loss(data)
    else:
        # Y has two columns, so exactly one latent GP is the valid setting.
        _ = model.training_loss(data)
def test_optimize(self):
    """
    Smoke/regression test: a short Adam run on the multitask deep GP must
    not decrease the model's log likelihood.
    """
    with defer_build():
        layers = MultitaskSequential([
            InputLayer(input_dim=1, output_dim=1, num_inducing=self.M,
                       kernel=RBF(1) + White(1), multitask=True),
            OutputLayer(input_dim=1, output_dim=1, num_inducing=self.M,
                        kernel=RBF(1) + White(1), multitask=True),
        ])
        model = MultitaskDSDGP(X=self.X, Y=self.Y, Z=self.Z, layers=layers,
                               likelihood=SwitchedLikelihood([Gaussian(), Gaussian()]),
                               num_latent=1)
    model.compile()
    before = model.compute_log_likelihood()
    gpflow.train.AdamOptimizer(0.01).minimize(model, maxiter=100)
    after = model.compute_log_likelihood()
    self.assertGreaterEqual(after, before)
def test_switched_likelihood_variational_expectations(Y_list, F_list, Fvar_list, Y_label):
    """
    SwitchedLikelihood.variational_expectations must equal evaluating each
    sub-likelihood on its own partition of the data and re-stitching the
    results back into the (shuffled) row order.

    The stacked rows are shuffled so the partition/stitch logic is exercised
    with interleaved labels rather than contiguous groups.
    """
    # Shuffled row order for the 3 + 4 + 5 stacked observations.
    Y_perm = list(range(3 + 4 + 5))
    np.random.shuffle(Y_perm)
    # Targets carry the switch label in their last column; the ``:3`` slice is
    # a no-op on this two-column array but kept for parity with the original.
    Y_sw = np.hstack([np.concatenate(Y_list), np.concatenate(Y_label)])[Y_perm, :3]
    F_sw = np.concatenate(F_list)[Y_perm, :]
    Fvar_sw = np.concatenate(Fvar_list)[Y_perm, :]
    # Fix: build three *distinct* Gaussian likelihoods. The previous
    # ``[Gaussian()] * 3`` aliased a single instance, so the per-likelihood
    # variance assignments below all overwrote the same parameter, and with
    # identical sub-likelihoods the test could not detect rows being routed
    # to the wrong sub-likelihood.
    likelihoods = [Gaussian() for _ in range(3)]
    for lik in likelihoods:
        lik.variance = np.exp(np.random.randn(1)).squeeze().astype(np.float32)
    switched_likelihood = SwitchedLikelihood(likelihoods)
    switched_results = switched_likelihood.variational_expectations(F_sw, Fvar_sw, Y_sw)
    # Reference: apply each sub-likelihood to its own (unshuffled) partition.
    results = [
        lik.variational_expectations(f, fvar, y)
        for lik, y, f, fvar in zip(likelihoods, Y_list, F_list, Fvar_list)
    ]
    assert_allclose(switched_results, np.concatenate(results)[Y_perm])
def test_switched_likelihood_log_prob(Y_list, F_list, Fvar_list, Y_label):
    """
    SwitchedLikelihood.log_prob must equal evaluating each sub-likelihood on
    its own partition of the data and re-stitching the results back into the
    (shuffled) row order — i.e. the partition-stitch works correctly.
    """
    # Shuffled row order for the 3 + 4 + 5 stacked observations.
    Y_perm = list(range(3 + 4 + 5))
    np.random.shuffle(Y_perm)
    # Targets carry the switch label in their last column; the ``:3`` slice is
    # a no-op on this two-column array but kept for parity with the original.
    Y_sw = np.hstack([np.concatenate(Y_list), np.concatenate(Y_label)])[Y_perm, :3]
    F_sw = np.concatenate(F_list)[Y_perm, :]
    # Fix: build three *distinct* Gaussian likelihoods. The previous
    # ``[Gaussian()] * 3`` aliased a single instance, so the per-likelihood
    # variance assignments below all overwrote the same parameter, and with
    # identical sub-likelihoods the test could not detect rows being routed
    # to the wrong sub-likelihood.
    likelihoods = [Gaussian() for _ in range(3)]
    for lik in likelihoods:
        lik.variance = np.exp(np.random.randn(1)).squeeze().astype(np.float32)
    switched_likelihood = SwitchedLikelihood(likelihoods)
    switched_results = switched_likelihood.log_prob(F_sw, Y_sw)
    # Reference: apply each sub-likelihood to its own (unshuffled) partition.
    results = [lik.log_prob(f, y) for lik, y, f in zip(likelihoods, Y_list, F_list)]
    assert_allclose(switched_results, np.concatenate(results)[Y_perm])
def prepare(self):
    """
    Build a small two-task multitask deep GP fixture.

    Inputs, targets, and inducing points each get a task-index column
    appended (values in {0, 1}) so the SwitchedLikelihood can route rows.
    Returns a compiled model and a set of test points ``Xs``.
    """
    num_data, num_inducing = 100, 10
    rng = np.random.RandomState(42)
    # NOTE: the draw order below is preserved so the seeded fixture is
    # identical to the original.
    features = rng.randn(num_data, 2)
    targets = rng.randn(num_data, 1)
    inducing = rng.randn(num_inducing, 2)
    task_of_x = rng.randint(0, 2, (num_data, 1))
    task_of_z = rng.randint(0, 2, (num_inducing, 1))
    # Append the task index as the final column of each array.
    X = np.hstack([features, task_of_x])
    Y = np.hstack([targets, task_of_x])
    Z = np.hstack([inducing, task_of_z])
    test_points = rng.randn(num_inducing, 2)
    test_tasks = rng.randint(0, 2, (num_inducing, 1))
    Xs = np.hstack([test_points, test_tasks])
    with defer_build():
        likelihood = SwitchedLikelihood([Gaussian(), Gaussian()])
        net = MultitaskSequential([
            InputLayer(input_dim=2, output_dim=1, num_inducing=num_inducing,
                       kernel=RBF(2) + White(2),
                       mean_function=Linear(A=np.ones((3, 1))), multitask=True),
            OutputLayer(input_dim=1, output_dim=1, num_inducing=num_inducing,
                        kernel=RBF(1) + White(1), multitask=True),
        ])
        model = MultitaskDSDGP(X=X, Y=Y, Z=Z, layers=net,
                               likelihood=likelihood, num_latent=1)
    model.compile()
    return model, Xs
def test_switched_likelihood_regression_valid_num_latent(X, Y, num_latent):
    """
    Regression test for SwitchedLikelihood: the number of latent functions
    in a GP model must equal the number of columns of Y minus one, because
    the final column of Y is the switch index. Any other count must raise
    an error when the log likelihood is evaluated.
    """
    inducing = InducingPoints(np.random.randn(num_latent, 1))
    switched = SwitchedLikelihood([StudentT()] * 3)
    model = gpflow.models.SVGP(
        kernel=gpflow.kernels.Matern12(),
        inducing_variable=inducing,
        likelihood=switched,
        num_latent=num_latent,
    )
    if num_latent != 1:
        # Mismatched latent-function count: evaluation must fail in TF.
        with pytest.raises(tf.errors.InvalidArgumentError):
            model.log_likelihood(X, Y)
    else:
        # Y has two columns, so exactly one latent function is valid.
        model.log_likelihood(X, Y)