Example #1
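This test builds a BayesianGPLVM with quadrature-based compositional kernels (kernels) and another with their analytic-expectation counterparts (ekernels), then checks that both give the same likelihood and predictions. A minimal sketch of the imports and fixture attributes it assumes, following the legacy GPflow 0.x module layout (exact import paths and the surrounding TestCase are not shown in the source and may differ):

import unittest
import numpy as np
import GPflow
import GPflow.gplvm  # ensure the gplvm submodule is importable as GPflow.gplvm
from GPflow import kernels, ekernels
# The method below belongs to a unittest.TestCase subclass whose setUp (not shown)
# defines the self.rng, self.Y, self.N and self.M attributes used throughout the test.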
 def test_kernelsActiveDims(self):
     ''' Test sum and product compositional kernels '''
     Q = 2  # latent dimensions
     X_mean = GPflow.gplvm.PCA_reduce(self.Y, Q)
     kernsQuadratu = [kernels.RBF(1, active_dims=[0])+kernels.Linear(1, active_dims=[1]),
                      kernels.RBF(1, active_dims=[0])+kernels.PeriodicKernel(1, active_dims=[1]),
                      kernels.RBF(1, active_dims=[0])*kernels.Linear(1, active_dims=[1]),
                      kernels.RBF(Q)+kernels.Linear(Q)]  # non-overlapping
     kernsAnalytic = [ekernels.Add([ekernels.RBF(1, active_dims=[0]), ekernels.Linear(1, active_dims=[1])]),
                      ekernels.Add([ekernels.RBF(1, active_dims=[0]), kernels.PeriodicKernel(1, active_dims=[1])]),
                      ekernels.Prod([ekernels.RBF(1, active_dims=[0]), ekernels.Linear(1, active_dims=[1])]),
                      ekernels.Add([ekernels.RBF(Q), ekernels.Linear(Q)])]
     fOnSeparateDims = [True, True, True, False]
     Z = np.random.permutation(X_mean.copy())[:self.M]
     # Also check that the default N(0, 1) prior is used: mq gets explicit prior
     # mean/variance below while ma relies on the default, and both should agree.
     X_prior_mean = np.zeros((self.N, Q))
     X_prior_var = np.ones((self.N, Q))
     Xtest = self.rng.randn(10, Q)
     for kq, ka, sepDims in zip(kernsQuadratu, kernsAnalytic, fOnSeparateDims):
         kq.num_gauss_hermite_points = 20  # speed up quadrature for tests
         ka.kern_list[0].num_gauss_hermite_points = 0  # RBF should throw error if quadrature is used
         if sepDims:
             self.assertTrue(ka.on_separate_dimensions, 'analytic kernel must not use quadrature')
         mq = GPflow.gplvm.BayesianGPLVM(X_mean=X_mean, X_var=np.ones((self.N, Q)), Y=self.Y,
                                         kern=kq, M=self.M, Z=Z, X_prior_mean=X_prior_mean, X_prior_var=X_prior_var)
         ma = GPflow.gplvm.BayesianGPLVM(X_mean=X_mean, X_var=np.ones((self.N, Q)), Y=self.Y,
                                         kern=ka, M=self.M, Z=Z)
         mq._compile()
         ma._compile()
         ql = mq.compute_log_likelihood()
         al = ma.compute_log_likelihood()
         self.assertTrue(np.allclose(ql, al, atol=1e-2), 'Likelihood not equal %f<>%f' % (ql, al))
         mu_f_a, var_f_a = ma.predict_f(Xtest)
         mu_f_q, var_f_q = mq.predict_f(Xtest)
         self.assertTrue(np.allclose(mu_f_a, mu_f_q, atol=1e-4), ('Posterior means different', mu_f_a-mu_f_q))
         self.assertTrue(np.allclose(var_f_a, var_f_q, atol=1e-4), ('Posterior vars different', var_f_a-var_f_q))
Example #2
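This fixture pairs a quadrature-based product kernel (kernels.Prod) with its analytic counterpart (ekernels.Prod) on non-overlapping active dimensions, so that expectation computations can be compared between the two. A minimal sketch of the scaffolding it assumes, again following the legacy GPflow 0.x layout (the surrounding TestCase class is not shown in the source):

import unittest
import numpy as np
from GPflow import kernels, ekernels
# setUp below is a method of a unittest.TestCase subclass (class definition not shown).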
    def setUp(self):
        self._threshold = 0.5
        self.rng = np.random.RandomState(0)
        self.N = 4
        self.D = 2

        # Test a product of kernels acting on non-overlapping active dimensions:
        # an RBF on dimension 1 times a Linear kernel on dimension 0.
        rbfvariance = 0.3 + self.rng.rand()
        rbfard = [self.rng.rand() + 0.5]
        linvariance = 0.3 + self.rng.rand()

        self.kernel = kernels.Prod([
            kernels.RBF(1, rbfvariance, rbfard, [1], False),
            kernels.Linear(1, linvariance, [0])
        ])

        self.ekernel = ekernels.Prod([
            ekernels.RBF(1, rbfvariance, rbfard, [1], False),
            ekernels.Linear(1, linvariance, [0])
        ])

        self.Xmu = self.rng.rand(self.N, self.D)   # input means, one row per data point
        self.Xcov = self.rng.rand(self.N, self.D)  # per-point diagonal input covariances
        self.Z = self.rng.rand(2, self.D)          # inducing inputs
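For context, a sketch of how such a fixture is typically exercised: the closed-form expectation from the analytic kernel is compared against the quadrature fallback of the plain kernel. The compute_eKdiag name follows the GPflow 0.x kernel-expectation API, and interpreting self._threshold as a maximum percent difference is an assumption about the fixture's intent; this is illustrative, not the repository's actual test body.

    def test_eKdiag(self):
        # Expectation of Kdiag under the input distribution, via Gauss-Hermite quadrature ...
        quad = self.kernel.compute_eKdiag(self.Xmu, self.Xcov)
        # ... and the same quantity computed analytically by the ekernels implementation.
        analytic = self.ekernel.compute_eKdiag(self.Xmu, self.Xcov)
        # Quadrature is approximate, so compare with a percent-difference tolerance.
        pd = np.max(np.abs(quad - analytic) / np.abs(analytic) * 100.0)
        self.assertTrue(pd < self._threshold,
                        'Percent difference %f exceeds threshold %f' % (pd, self._threshold))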