Code example #1
File: svconvgp.py  Project: yimingzhang521/convgp
    def __init__(self,
                 X,
                 Y,
                 kern,
                 likelihood,
                 Z,
                 colour_channels,
                 mean_function=GPflow.mean_functions.Zero(),
                 num_latent=None,
                 q_diag=False,
                 whiten=True,
                 minibatch_size=None):
        assert not q_diag
        super(SVColourConvGP,
              self).__init__(X, Y, kern, likelihood, Z, mean_function,
                             num_latent, q_diag, whiten, minibatch_size)

        # init variational parameters
        self.q_mu = Param(
            np.zeros((self.num_inducing, self.num_latent * colour_channels)))
        # one identity Cholesky factor per latent function and colour channel,
        # stacked along the last axis: shape (M, M, num_latent * colour_channels)
        q_sqrt = np.array([
            np.eye(self.num_inducing)
            for _ in range(self.num_latent * colour_channels)
        ]).swapaxes(0, 2).reshape(self.num_inducing, self.num_inducing,
                                  self.num_latent * colour_channels)
        self.q_sqrt = Param(
            q_sqrt
        )  # , transforms.LowerTriangular(q_sqrt.shape[2]))  # Temp remove transform

        self.colour_channels = colour_channels
        self.wc = GPflow.param.Param(np.ones(colour_channels))
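
The q_sqrt initialiser above builds one identity Cholesky factor per latent function and colour channel and stacks them along the last axis. A minimal pure-NumPy sketch of that shape manipulation (the sizes M, L, C are made up for illustration):

    import numpy as np

    M, L, C = 3, 2, 2  # hypothetical sizes: inducing points, latents, colour channels
    q_sqrt = np.array([np.eye(M) for _ in range(L * C)]) \
        .swapaxes(0, 2).reshape(M, M, L * C)
    assert q_sqrt.shape == (M, M, L * C)
    assert np.allclose(q_sqrt[:, :, 0], np.eye(M))  # each slice is an identity factor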
Code example #2
File: gplvm.py  Project: markvdw/GPLVMTry
    def __init__(self,
                 X_mean,
                 X_var,
                 Y,
                 kern,
                 M,
                 Z=None,
                 X_prior_mean=None,
                 X_prior_var=None):
        """
        X_mean is a data matrix, size N x D
        X_var is a data matrix, size N x D (X_var > 0)
        Y is a data matrix, size N x R
        M is the number of inducing points
        Z is a matrix of pseudo inputs, size M x D
        kern, mean_function are appropriate GPflow objects

        This method only works with a Gaussian likelihood.

        """
        GPModel.__init__(self,
                         X_mean,
                         Y,
                         kern,
                         likelihood=likelihoods.Gaussian(),
                         mean_function=Zero())
        del self.X
        self.X_mean = Param(X_mean)
        self.X_var = Param(X_var, transforms.positive)
        self.num_data = X_mean.shape[0]
        self.output_dim = Y.shape[1]

        assert X_mean.shape == X_var.shape
        assert X_mean.shape[0] == Y.shape[0], 'X_mean and Y must have the same number of rows.'
        assert X_var.shape[0] == Y.shape[0], 'X_var and Y must have the same number of rows.'

        # inducing points
        if Z is None:
            # By default we initialize by subset of initial latent points
            Z = np.random.permutation(X_mean.copy())[:M]
        else:
            assert Z.shape[0] == M
        self.Z = Param(Z)
        self.num_latent = Z.shape[1]
        assert X_mean.shape[1] == self.num_latent

        # deal with parameters for the prior mean variance of X
        if X_prior_mean is None:
            X_prior_mean = np.zeros((self.num_data, self.num_latent))
        self.X_prior_mean = X_prior_mean
        if X_prior_var is None:
            X_prior_var = np.ones((self.num_data, self.num_latent))
        self.X_prior_var = X_prior_var

        assert X_prior_mean.shape[0] == self.num_data
        assert X_prior_mean.shape[1] == self.num_latent
        assert X_prior_var.shape[0] == self.num_data
        assert X_prior_var.shape[1] == self.num_latent
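
A hypothetical usage sketch of the constructor above, assuming the enclosing class is named BayesianGPLVM (as in GPflow 0.x's gplvm.py) and a GPflow 0.x environment; the data and sizes are made up:

    import numpy as np
    import GPflow

    N, Q, R, M = 100, 2, 5, 20        # data points, latent dims, outputs, inducing points
    Y = np.random.randn(N, R)
    X_mean = np.random.randn(N, Q)    # any sensible initialisation, e.g. PCA of Y
    X_var = 0.1 * np.ones((N, Q))     # initial variance of the latent points
    kern = GPflow.kernels.RBF(Q, ARD=True)
    m = BayesianGPLVM(X_mean, X_var, Y, kern, M)
    m.optimize()                      # fit variational and kernel parameters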
Code example #3
File: kernels.py  Project: yincheng/GGP
    def __init__(self, denseAdjMat, denseFeatureMat, X_tr, degree=3.0, variance=1.0, offset=1.0):
        GPflow.kernels.Kern.__init__(self, 1)
        self.degree = degree
        self.offset = Param(offset, transform=transforms.positive)
        self.variance = Param(variance, transform=transforms.positive)
        # add self-loops before building the propagation matrix
        denseAdjMat[np.diag_indices(len(denseAdjMat))] = 1.
        self.tr_features, self.tr_masks, self.tr_masks_counts = self._diag_tr_helper(
            denseFeatureMat, denseAdjMat, X_tr)
        # row-normalised adjacency, stored sparse
        self.sparse_P = SparseDataHolder(sparse_to_tuple(sparse.csr_matrix(
            denseAdjMat / np.sum(denseAdjMat, 1, keepdims=True))))
        self.sparseFeatureMat = SparseDataHolder(sparse_to_tuple(sparse.csr_matrix(denseFeatureMat)))
        self.denseFeatureMat = DataHolder(denseFeatureMat)
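
A minimal NumPy sketch of the propagation matrix built above: self-loops are added to the adjacency, which is then row-normalised so each row of P sums to one (the adjacency here is made up):

    import numpy as np

    A = np.array([[0., 1., 0.],
                  [1., 0., 1.],
                  [0., 1., 0.]])
    A[np.diag_indices(len(A))] = 1.        # self-loops
    P = A / np.sum(A, 1, keepdims=True)    # row-normalise
    assert np.allclose(P.sum(1), 1.)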
Code example #4
File: flowkern.py  Project: zhouyonglong/gpbo
    def __init__(self, input_dim, **kwargs):
        gpf.kernels.Kern.__init__(self, 2 * input_dim, **kwargs)
        self.latent_input_dim = input_dim
        if 'lengthscales' not in kwargs:
            lengthscales = np.ones(input_dim, np_float_type)
        else:
            # accepts float or array:
            lengthscales = kwargs['lengthscales'] * np.ones(
                input_dim, np_float_type)
        self.lengthscales = Param(lengthscales, transforms.positive)

        if 'variance' not in kwargs:
            variance = 1.
        else:
            # original code left variance undefined on this branch (NameError)
            variance = kwargs['variance']
        self.variance = Param(variance, transforms.positive)
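
A small sketch of the lengthscale handling above: multiplying the keyword value by np.ones(input_dim) broadcasts a scalar to a per-dimension array and passes an array of length input_dim through unchanged (np_float_type is the project's float alias and is omitted here):

    import numpy as np

    input_dim = 3
    print(0.5 * np.ones(input_dim))                     # scalar -> [0.5 0.5 0.5]
    print(np.array([1., 2., 3.]) * np.ones(input_dim))  # array passes through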
Code example #5
File: gplvm.py  Project: markvdw/GPLVMTry
    def __init__(self,
                 Y,
                 latent_dim,
                 X_mean=None,
                 kern=None,
                 mean_function=Zero()):
        """
        Y is a data matrix, size N x R
        Z is a matrix of pseudo inputs, size M x D
        X_mean is a matrix, size N x Q, for the initialisation of the latent space.
        kern, mean_function are appropriate GPflow objects

        This method only works with a Gaussian likelihood.

        """
        if kern is None:
            kern = kernels.RBF(latent_dim, ARD=True)
        if X_mean is None:
            X_mean = PCA_reduce(Y, latent_dim)
        assert X_mean.shape[1] == latent_dim, \
            'Passed-in latent dimension %d does not match initial X (%d columns).' \
            % (latent_dim, X_mean.shape[1])
        self.num_latent = X_mean.shape[1]
        assert Y.shape[1] >= self.num_latent, 'More latent dimensions than observed.'
        GPR.__init__(self, X_mean, Y, kern, mean_function=mean_function)
        del self.X  # in GPLVM this is a Param
        self.X = Param(X_mean)
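
A hypothetical usage sketch, assuming the enclosing class is named GPLVM (as in GPflow 0.x's gplvm.py); the data and sizes are made up:

    import numpy as np

    Y = np.random.randn(50, 10)   # N x R observations
    m = GPLVM(Y, latent_dim=2)    # latent X initialised by PCA inside __init__
    m.optimize()                  # jointly fits kernel parameters and latent X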
Code example #6
    def __init__(self,
                 Y,
                 latent_dim,
                 X_mean=None,
                 kern=None,
                 back_kern=None,
                 mean_function=Zero()):
        """
        Initialise GPLVM object. This method only works with a Gaussian likelihood.
        :param Y: data matrix (N x D)
        :param X_mean: latent positions (N x Q), by default initialized using PCA.
        :param kern: kernel specification, by default RBF
        :param mean_function: mean function, by default None.
        """

        # define kernel functions (originally back_kern was only set when kern
        # was None, leaving it None if only kern was passed)
        if kern is None:
            kern = kernels.RBF(latent_dim)
        if back_kern is None:
            back_kern = kernels.RBF(latent_dim)

        # initialize latent_positions
        if X_mean is None:
            X_mean = PCA_reduce(Y, latent_dim)

        # initialize variables
        self.num_latent = X_mean.shape[1]

        # wrap the data in DataHolders
        likelihood = likelihoods.Gaussian()
        Y = DataHolder(Y, on_shape_change='pass')
        X = DataHolder(X_mean, on_shape_change='pass')

        # initialize parent GPModel
        GPModel.__init__(self, X, Y, kern, likelihood, mean_function)

        # initialize back constraint model
        self.back_kern = back_kern
        self.back_mean_function = Zero()
        self.back_likelihood = likelihoods.Gaussian()

        # set latent positions as model param
        del self.X
        self.X = Param(X_mean)
Code example #7
    def __init__(self, kern, q_mu, q_sqrt, Z, mean_function):
        Parameterized.__init__(self)
        # variational parameters q_mu, q_sqrt and inducing inputs Z are trainable
        self.q_mu, self.q_sqrt, self.Z = Param(q_mu), Param(q_sqrt), Param(Z)
        self.kern = kern
        self.mean_function = mean_function
Code example #8
    def __init__(self, output_dim, c=None):
        MeanFunction.__init__(self, output_dim)
        # constant value returned for every input row; defaults to a vector of ones
        if c is None:
            c = np.ones(output_dim, np_float_type)
        self.c = Param(c)
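
The __call__ of this mean function is not shown; in GPflow-style mean functions a constant mean is typically evaluated by tiling c across the N input rows. A minimal NumPy sketch of that assumed convention:

    import numpy as np

    c = np.ones(2)                        # output_dim = 2
    X = np.random.randn(5, 3)             # 5 inputs
    mean = np.tile(c, (X.shape[0], 1))    # shape (5, 2): c repeated per row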