Example #1
    def setUp(self):
        self.N = 4
        self.D = 2
        self.Xmu = rnd.rand(self.N, self.D)  # rnd: module-level numpy.random alias in the original test file
        self.Z = rnd.rand(3, self.D)
        unconstrained = rnd.randn(self.N, 2 * self.D, self.D)
        t = TriDiagonalBlockRep()
        self.Xcov = t.forward(unconstrained)

        variance = 0.3 + rnd.rand()

        k1 = ekernels.RBF(1, variance, active_dims=[0])
        k2 = ekernels.RBF(1, variance, active_dims=[1])
        klin = ekernels.Linear(1, variance, active_dims=[1])
        self.ekernels = [k1, k2, klin]  # closed-form expectation kernels, with active_dims slicing

        k1 = ekernels.RBF(2, variance)
        k2 = ekernels.RBF(2, variance)
        klin = ekernels.Linear(2, variance)
        self.pekernels = [k1, k2, klin]  # closed-form expectation kernels, no slicing

        k1 = kernels.RBF(1, variance, active_dims=[0])
        klin = kernels.Linear(1, variance, active_dims=[1])
        self.kernels = [k1, klin]  # plain kernels (expectations via quadrature), with slicing

        k1 = kernels.RBF(2, variance)
        klin = kernels.Linear(2, variance)
        self.pkernels = [k1, klin]  # plain kernels, no slicing
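
The ekernels above evaluate kernel expectations under a Gaussian input distribution in closed form, where the plain kernels fall back to quadrature. As a self-contained illustration (plain numpy, not GPflow code), this is the standard closed form for the RBF case, checked against Monte Carlo:

import numpy as np

def eKxz_rbf(z, mu, S, variance=1.0, lengthscale=1.0):
    # E_{x ~ N(mu, S)}[k(x, z)] for k(x, z) = variance * exp(-|x - z|^2 / (2 l^2)):
    #   variance * sqrt(|L| / |L + S|) * exp(-0.5 (mu - z)^T (L + S)^{-1} (mu - z)),
    # with L = l^2 * I.
    D = mu.shape[0]
    L = lengthscale ** 2 * np.eye(D)
    A = L + S
    diff = mu - z
    return (variance * np.sqrt(np.linalg.det(L) / np.linalg.det(A))
            * np.exp(-0.5 * diff @ np.linalg.solve(A, diff)))

rng = np.random.RandomState(0)
mu, z = rng.rand(2), rng.rand(2)
S = np.diag(0.05 + rng.rand(2))
closed = eKxz_rbf(z, mu, S, variance=0.3)
samples = rng.multivariate_normal(mu, S, size=200000)
mc = (0.3 * np.exp(-0.5 * ((samples - z) ** 2).sum(axis=1))).mean()
assert np.isclose(closed, mc, atol=1e-3)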
Example #2
    def setUp(self):
        self.rng = np.random.RandomState(0)
        self.N = 4
        self.D = 2
        self.Xmu = self.rng.rand(self.N, self.D)
        self.Z = self.rng.rand(2, self.D)

        # Diagonal input covariance: stored as N stacked D x D diagonal blocks
        self.Xcov_diag = 0.05 + self.rng.rand(self.N, self.D)
        self.Xcov = np.zeros((self.Xcov_diag.shape[0], self.Xcov_diag.shape[1],
                              self.Xcov_diag.shape[1]))
        self.Xcov[(np.s_[:], ) +
                  np.diag_indices(self.Xcov_diag.shape[1])] = self.Xcov_diag

        # Set up "normal" kernels
        ekernel_classes = [ekernels.RBF, ekernels.Linear]
        kernel_classes = [kernels.RBF, kernels.Linear]
        # RBF args: (input_dim, variance, lengthscales, active_dims, ARD);
        # Linear args: (input_dim, variance, active_dims)
        params = [(self.D, 0.3 + self.rng.rand(),
                   self.rng.rand(2) + [0.5, 1.5], None, True),
                  (self.D, 0.3 + self.rng.rand(), None)]
        self.ekernels = [c(*p) for c, p in zip(ekernel_classes, params)]
        self.kernels = [c(*p) for c, p in zip(kernel_classes, params)]

        # Test summed kernels, non-overlapping
        rbfvariance = 0.3 + self.rng.rand()
        rbfard = [self.rng.rand() + 0.5]
        linvariance = 0.3 + self.rng.rand()
        self.kernels.append(
            kernels.Add([
                kernels.RBF(1, rbfvariance, rbfard, [1], False),
                kernels.Linear(1, linvariance, [0])
            ]))
        self.kernels[-1].input_size = self.kernels[-1].input_dim
        for k in self.kernels[-1].kern_list:
            k.input_size = self.kernels[-1].input_size
        self.ekernels.append(
            ekernels.Add([
                ekernels.RBF(1, rbfvariance, rbfard, [1], False),
                ekernels.Linear(1, linvariance, [0])
            ]))
        self.ekernels[-1].input_size = self.ekernels[-1].input_dim
        for k in self.ekernels[-1].kern_list:
            k.input_size = self.ekernels[-1].input_size

        # Test summed kernels, overlapping
        rbfvariance = 0.3 + self.rng.rand()
        rbfard = [self.rng.rand() + 0.5]
        linvariance = 0.3 + self.rng.rand()
        self.kernels.append(
            kernels.Add([
                kernels.RBF(self.D, rbfvariance, rbfard),
                kernels.Linear(self.D, linvariance)
            ]))
        self.ekernels.append(
            ekernels.Add([
                ekernels.RBF(self.D, rbfvariance, rbfard),
                ekernels.Linear(self.D, linvariance)
            ]))

        self.assertTrue(self.ekernels[-2].on_separate_dimensions)
        self.assertFalse(self.ekernels[-1].on_separate_dimensions)
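
The fancy-indexed assignment above packs each row of Xcov_diag onto the diagonal of one D x D covariance block. A small numpy sketch confirming an equivalent broadcasting construction:

import numpy as np

rng = np.random.RandomState(0)
N, D = 4, 2
Xcov_diag = 0.05 + rng.rand(N, D)

# Fancy-index construction, as in setUp above
Xcov = np.zeros((N, D, D))
Xcov[(np.s_[:],) + np.diag_indices(D)] = Xcov_diag

# Equivalent: broadcast each row of diagonals onto an identity matrix
assert np.allclose(Xcov, Xcov_diag[:, :, None] * np.eye(D))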
Example #3
    def setUp(self):
        self.N = 4
        self.D = 2
        self.Xmu = rnd.rand(self.N, self.D)
        self.Z = rnd.rand(3, self.D)
        unconstrained = rnd.randn(self.N, 2 * self.D, self.D)
        t = TriDiagonalBlockRep()
        self.Xcov = t.forward(unconstrained)

        variance = 0.3 + rnd.rand()

        k1 = ekernels.RBF(1, variance, active_dims=[0])
        k2 = ekernels.RBF(1, variance, active_dims=[1])
        klin = ekernels.Linear(1, variance, active_dims=[1])
        # Kernels doing the expectation in closed form, with input slicing
        self.ekernels = [k1, k2, klin]

        k1 = ekernels.RBF(1, variance)
        k2 = ekernels.RBF(1, variance)
        klin = ekernels.Linear(1, variance)
        # Kernels doing the expectation in closed form, without slicing
        self.pekernels = [k1, k2, klin]

        k1 = kernels.RBF(1, variance, active_dims=[0])
        klin = kernels.Linear(1, variance, active_dims=[1])
        self.kernels = [k1, klin]  # plain kernels (expectations via quadrature), with slicing

        k1 = kernels.RBF(1, variance)
        klin = kernels.Linear(1, variance)
        self.pkernels = [k1, klin]  # plain kernels, no slicing
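
To make the slicing distinction concrete: a kernel built with active_dims=[0] only ever sees column 0 of the input. A toy numpy sketch (the rbf and linear helpers are illustrative stand-ins, not GPflow code):

import numpy as np

def rbf(X, X2, variance=1.0, lengthscale=1.0):
    # Dense RBF kernel matrix between the rows of X and X2.
    d2 = ((X[:, None, :] - X2[None, :, :]) ** 2).sum(-1) / lengthscale ** 2
    return variance * np.exp(-0.5 * d2)

def linear(X, X2, variance=1.0):
    return variance * X @ X2.T

X = np.random.rand(4, 2)
# RBF(1, active_dims=[0]) + Linear(1, active_dims=[1]): each summand sees
# only its own column, and the two Gram matrices are added.
K_sum = rbf(X[:, [0]], X[:, [0]]) + linear(X[:, [1]], X[:, [1]])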
Example #4
    def setUp(self):
        self.rng = np.random.RandomState(1)  # this seed works with 60 GH points
        self.N = 4
        self.D = 2
        self.Xmu = self.rng.rand(self.N, self.D)
        self.Z = self.rng.rand(2, self.D)

        unconstrained = self.rng.randn(self.N, 2 * self.D, self.D)
        t = TriDiagonalBlockRep()
        self.Xcov = t.forward(unconstrained)

        # Set up "normal" kernels
        ekernel_classes = [ekernels.RBF, ekernels.Linear]
        kernel_classes = [kernels.RBF, kernels.Linear]
        params = [(self.D, 0.3 + self.rng.rand(),
                   self.rng.rand(2) + [0.5, 1.5], None, True),
                  (self.D, 0.3 + self.rng.rand(), None)]
        self.ekernels = [c(*p) for c, p in zip(ekernel_classes, params)]
        self.kernels = [c(*p) for c, p in zip(kernel_classes, params)]

        # Test summed kernels, non-overlapping
        rbfvariance = 0.3 + self.rng.rand()
        rbfard = [self.rng.rand() + 0.5]
        linvariance = 0.3 + self.rng.rand()
        self.kernels.append(
            kernels.Add([
                kernels.RBF(1, rbfvariance, rbfard, [1], False),
                kernels.Linear(1, linvariance, [0])
            ]))
        self.kernels[-1].input_size = self.kernels[-1].input_dim
        for k in self.kernels[-1].kern_list:
            k.input_size = self.kernels[-1].input_size
        self.ekernels.append(
            ekernels.Add([
                ekernels.RBF(1, rbfvariance, rbfard, [1], False),
                ekernels.Linear(1, linvariance, [0])
            ]))
        self.ekernels[-1].input_size = self.ekernels[-1].input_dim
        for k in self.ekernels[-1].kern_list:
            k.input_size = self.ekernels[-1].input_size

        # Test summed kernels, overlapping
        rbfvariance = 0.3 + self.rng.rand()
        rbfard = [self.rng.rand() + 0.5]
        linvariance = 0.3 + self.rng.rand()
        self.kernels.append(
            kernels.Add([
                kernels.RBF(self.D, rbfvariance, rbfard, active_dims=[0, 1]),
                kernels.Linear(self.D, linvariance, active_dims=[0, 1])
            ]))
        self.ekernels.append(
            ekernels.Add([
                ekernels.RBF(self.D, rbfvariance, rbfard, active_dims=[0, 1]),
                ekernels.Linear(self.D, linvariance, active_dims=[0, 1])
            ]))

        self.assertTrue(self.ekernels[-2].on_separate_dimensions)
        self.assertFalse(self.ekernels[-1].on_separate_dimensions)
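
The seed comment refers to Gauss-Hermite quadrature, which the plain kernels use for expectations that have no closed form (Example #7 below sets num_gauss_hermite_points explicitly). A minimal numpy sketch of the 1-D rule under the usual change of variables:

import numpy as np

def gh_expectation(f, mu, var, num_points=60):
    # E_{x ~ N(mu, var)}[f(x)]: substituting x = mu + sqrt(2 * var) * t turns the
    # Gaussian integral into the Gauss-Hermite form (1/sqrt(pi)) * sum_i w_i f(x_i).
    t, w = np.polynomial.hermite.hermgauss(num_points)
    return (w * f(mu + np.sqrt(2.0 * var) * t)).sum() / np.sqrt(np.pi)

# E[exp(-x^2 / 2)] under N(0, 1) is exactly 1/sqrt(2):
assert np.isclose(gh_expectation(lambda x: np.exp(-0.5 * x ** 2), 0.0, 1.0),
                  1.0 / np.sqrt(2.0))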
Example #5
    def __init__(self,
                 Y,
                 latent_dim,
                 X_mean=None,
                 kern=None,
                 mean_function=Zero()):
        """
        Y is a data matrix, size N x R
        Z is a matrix of pseudo inputs, size M x D
        X_mean is a matrix, size N x Q, for the initialisation of the latent space.
        kern, mean_function are appropriate GPflow objects

        This method only works with a Gaussian likelihood.

        """
        if kern is None:
            kern = kernels.RBF(latent_dim, ARD=True)
        if X_mean is None:
            X_mean = PCA_reduce(Y, latent_dim)
        assert X_mean.shape[1] == latent_dim, \
            'Passed-in latent dimension %i does not match initial X dimension %i' \
            % (latent_dim, X_mean.shape[1])
        self.num_latent = X_mean.shape[1]
        assert Y.shape[1] >= self.num_latent, \
            'More latent dimensions than observed.'
        GPR.__init__(self, X_mean, Y, kern, mean_function=mean_function)
        del self.X  # in GPLVM this is a Param
        self.X = Param(X_mean)
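
PCA_reduce initialises the latent positions from the leading principal components of Y. A minimal numpy stand-in (GPflow's own implementation may differ in details such as scaling):

import numpy as np

def pca_reduce_np(Y, Q):
    # Project Y (N x D) onto its top-Q principal components, giving an N x Q init.
    Yc = Y - Y.mean(axis=0)
    evals, evecs = np.linalg.eigh(np.cov(Yc.T))
    top = np.argsort(evals)[::-1][:Q]  # indices of the Q largest eigenvalues
    return Yc @ evecs[:, top]

X_init = pca_reduce_np(np.random.randn(20, 5), Q=2)  # shape (20, 2)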
Example #6
    def __init__(self,
                 Y,
                 latent_dim,
                 X_mean=None,
                 kern=None,
                 back_kern=None,
                 mean_function=Zero()):
        """
        Initialise GPLVM object. This method only works with a Gaussian likelihood.
        :param Y: data matrix (N x D)
        :param X_mean: latent positions (N x Q), by default initialized using PCA.
        :param kern: kernel specification, by default RBF
        :param mean_function: mean function, by default None.
        """

        # define kernel functions; default both to RBF so a user-supplied
        # kern without a back_kern still gets a valid back constraint
        if kern is None:
            kern = kernels.RBF(latent_dim)
        if back_kern is None:
            back_kern = kernels.RBF(latent_dim)

        # initialize latent_positions
        if X_mean is None:
            X_mean = PCA_reduce(Y, latent_dim)

        # initialize variables
        self.num_latent = X_mean.shape[1]

        # initialize likelihood and data holders
        likelihood = likelihoods.Gaussian()
        Y = DataHolder(Y, on_shape_change='pass')
        X = DataHolder(X_mean, on_shape_change='pass')

        # initialize parent GPModel
        GPModel.__init__(self, X, Y, kern, likelihood, mean_function)

        # initialize back constraint model
        self.back_kern = back_kern
        self.back_mean_function = Zero()
        self.back_likelihood = likelihoods.Gaussian()

        # set latent positions as model param
        del self.X
        self.X = Param(X_mean)
Example #7
 def test_kernelsActiveDims(self):
     ''' Test sum and product compositional kernels '''
     Q = 2  # latent dimensions
     X_mean = GPflow.gplvm.PCA_reduce(self.Y, Q)
     kernsQuadratu = [kernels.RBF(1, active_dims=[0])+kernels.Linear(1, active_dims=[1]),
                      kernels.RBF(1, active_dims=[0])+kernels.PeriodicKernel(1, active_dims=[1]),
                      kernels.RBF(1, active_dims=[0])*kernels.Linear(1, active_dims=[1]),
                      kernels.RBF(Q)+kernels.Linear(Q)]  # non-overlapping
     kernsAnalytic = [ekernels.Add([ekernels.RBF(1, active_dims=[0]), ekernels.Linear(1, active_dims=[1])]),
                      ekernels.Add([ekernels.RBF(1, active_dims=[0]), kernels.PeriodicKernel(1, active_dims=[1])]),
                      ekernels.Prod([ekernels.RBF(1, active_dims=[0]), ekernels.Linear(1, active_dims=[1])]),
                      ekernels.Add([ekernels.RBF(Q), ekernels.Linear(Q)])]
     fOnSeparateDims = [True, True, True, False]
     Z = np.random.permutation(X_mean.copy())[:self.M]
     # Also test that the default N(0, 1) prior is used
     X_prior_mean = np.zeros((self.N, Q))
     X_prior_var = np.ones((self.N, Q))
     Xtest = self.rng.randn(10, Q)
     for kq, ka, sepDims in zip(kernsQuadratu, kernsAnalytic, fOnSeparateDims):
         kq.num_gauss_hermite_points = 20  # speed up quadrature for tests
         ka.kern_list[0].num_gauss_hermite_points = 0  # RBF should throw error if quadrature is used
         if sepDims:
             self.assertTrue(ka.on_separate_dimensions, 'analytic kernel must not use quadrature')
         mq = GPflow.gplvm.BayesianGPLVM(X_mean=X_mean, X_var=np.ones((self.N, Q)), Y=self.Y,
                                         kern=kq, M=self.M, Z=Z, X_prior_mean=X_prior_mean, X_prior_var=X_prior_var)
         ma = GPflow.gplvm.BayesianGPLVM(X_mean=X_mean, X_var=np.ones((self.N, Q)), Y=self.Y,
                                         kern=ka, M=self.M, Z=Z)
         mq._compile()
         ma._compile()
         ql = mq.compute_log_likelihood()
         al = ma.compute_log_likelihood()
         self.assertTrue(np.allclose(ql, al, atol=1e-2), 'Likelihood not equal %f<>%f' % (ql, al))
         mu_f_a, var_f_a = ma.predict_f(Xtest)
         mu_f_q, var_f_q = mq.predict_f(Xtest)
         self.assertTrue(np.allclose(mu_f_a, mu_f_q, atol=1e-4), ('Posterior means different', mu_f_a - mu_f_q))
         self.assertTrue(np.allclose(var_f_a, var_f_q, atol=1e-4), ('Posterior vars different', var_f_a - var_f_q))
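
The on_separate_dimensions checks are what license the analytic path here: when the summands or factors act on disjoint input dimensions and the input covariance is diagonal across them, the expectation of a product factorises into a product of expectations. A numpy Monte Carlo check of that identity:

import numpy as np

rng = np.random.RandomState(0)
mu = rng.rand(2)
S = np.diag(0.1 + rng.rand(2))            # diagonal: the two dims are independent
x = rng.multivariate_normal(mu, S, size=500000)

k1 = np.exp(-0.5 * (x[:, 0] - 0.3) ** 2)  # RBF-type factor on dim 0
k2 = 0.7 * x[:, 1]                        # Linear-type factor on dim 1

assert np.isclose((k1 * k2).mean(), k1.mean() * k2.mean(), atol=5e-3)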
Example #8
    def setUp(self):
        self._threshold = 0.5
        self.rng = np.random.RandomState(0)
        self.N = 4
        self.D = 2

        # Test product kernels, non-overlapping dimensions
        rbfvariance = 0.3 + self.rng.rand()
        rbfard = [self.rng.rand() + 0.5]
        linvariance = 0.3 + self.rng.rand()

        self.kernel = kernels.Prod([
            kernels.RBF(1, rbfvariance, rbfard, [1], False),
            kernels.Linear(1, linvariance, [0])
        ])

        self.ekernel = ekernels.Prod([
            ekernels.RBF(1, rbfvariance, rbfard, [1], False),
            ekernels.Linear(1, linvariance, [0])
        ])

        self.Xmu = self.rng.rand(self.N, self.D)
        self.Xcov = self.rng.rand(self.N, self.D)  # diagonal of the input covariance, one row per point
        self.Z = self.rng.rand(2, self.D)
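
For the Linear factor, no quadrature is needed at all: k(x, z) = variance * x^T z is linear in x, so its Gaussian expectation is variance * mu^T z whatever Xcov is. A quick numpy check:

import numpy as np

rng = np.random.RandomState(0)
mu = rng.rand(2)
S = np.diag(0.05 + rng.rand(2))
z = rng.rand(2)
variance = 0.5

closed = variance * mu @ z  # linearity of expectation
samples = rng.multivariate_normal(mu, S, size=100000)
assert np.isclose(closed, (variance * samples @ z).mean(), atol=1e-2)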