Example 1
    def full_K(self):
        """

        Returns: full prior covariance

        """
        return kron_list(self.Ks)
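
Every example on this page goes through kron_list. A minimal sketch of what it presumably computes, assuming it simply folds np.kron over a list of matrices (the actual gp3 helper may differ):

import functools
import numpy as np

def kron_list_sketch(Ks):
    # Kronecker product of a list of matrices, folded left to right
    return functools.reduce(np.kron, Ks)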
Example 2
    def full_S(self):
        """
        Composes full variational covariance from its Kronecker decomposition
        Returns: full variational covariance

        """
        return kron_list([R.T.dot(R) for R in self.Rs])
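
This works because of the mixed-product property of the Kronecker product: the Kronecker product of the per-dimension Gram matrices R.T.dot(R) equals the Gram matrix of the Kronecker product of the factors. A quick numerical check with hypothetical small factors standing in for self.Rs:

import numpy as np

Rs = [np.triu(np.random.rand(3, 3)) for _ in range(2)]
S = np.kron(Rs[0].T.dot(Rs[0]), Rs[1].T.dot(Rs[1]))
R_full = np.kron(Rs[0], Rs[1])
assert np.allclose(S, R_full.T.dot(R_full))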
Example 3
File: base.py Project: as4529/gp3
    def full_K(self):
        """
        Composes full kernel matrix
        Returns: K

        """

        return kron_list(self.Ks)
Example 4
File: data.py Project: as4529/gp3
def sim_f_kron(X, kernels, mu=0):
    # Simulate a draw from a GP prior on a Kronecker-structured grid
    K = kron_list([
        kernels[d].eval(kernels[d].params, np.unique(X[:, d]))
        for d in range(X.shape[1])
    ])
    K_chol = np.linalg.cholesky(K)
    eps = np.random.normal(size=K.shape[0])
    return mu + np.dot(K_chol, eps)
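
The last two lines use the standard Cholesky sampling identity: if K = L L.T and eps is standard normal, then mu + L eps has mean mu and covariance K. A standalone sketch of just that step, without the kernel machinery (the function name is hypothetical):

import numpy as np

def sample_mvn(K, mu=0.0, rng=np.random.default_rng(0)):
    # Draw one sample from N(mu, K) via the Cholesky factor of K
    L = np.linalg.cholesky(K)
    eps = rng.standard_normal(K.shape[0])
    return mu + L.dot(eps)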
Example 5
    def predict_mean(self, x_new):
        """
        Predicts posterior mean at new points x_new
        Returns: predicted mean

        """
        k_dims = [self.kernels[d].eval(self.kernels[d].params,
                                   np.expand_dims(np.unique(self.X[:, d]), 1),
                                   np.expand_dims(x_new[:, d], 1))
                  for d in range(self.X.shape[1])]
        kx = np.squeeze(kron_list(k_dims))
        mean = np.sum(np.multiply(kx, self.alpha)) + self.mu[0]

        return mean
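
The trailing multiply-and-sum is just a dot product, so the predictive mean is k_*.T alpha + mu, with k_* assembled dimension by dimension as a Kronecker product. An equivalent vectorized final line, assuming kx and self.alpha are 1-D arrays of the same length:

        mean = kx.dot(self.alpha) + self.mu[0]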
Example 6
    def marginal(self, kernel):
        """
        calculates marginal likelihood
        Args:
            Ks_new: new covariance if needed
        Returns: np.array for marginal likelihood

        """

        if kernel.params is not None:
            self.Ks = self.construct_Ks()
            self.alpha = np.zeros([self.X.shape[0]])
            self.W = np.zeros([self.X.shape[0]])
            self.grads = np.zeros([self.X.shape[0]])
            self.f = self.mu
            self.f_pred = self.f
            self.run(10)

        Ks = self.Ks
        eigs = [np.expand_dims(np.linalg.eig(K)[0], 1) for K in Ks]
        eig_K = np.squeeze(kron_list(eigs))
        self.eig_K = eig_K

        if self.obs_idx is not None:
            f_lim = self.f[self.obs_idx]
            alpha_lim = self.alpha[self.obs_idx]
            mu_lim = self.mu[self.obs_idx]
            W_lim = self.W[self.obs_idx]
            eig_k_lim = eig_K[self.obs_idx]

            pen = -0.5 * np.sum(np.multiply(alpha_lim,
                                       f_lim - mu_lim))
            pen = np.where(np.isnan(pen), np.zeros_like(pen), pen)
            # log-determinant penalty of the Laplace approximation
            eigs = -0.5 * np.sum(np.log(1 + np.multiply(eig_k_lim,
                                       W_lim)))
            eigs = np.where(np.isnan(eigs), np.zeros_like(eigs), eigs)
            like = np.sum(self.likelihood.log_like(f_lim, self.y))
            like = np.where(np.isnan(like), np.zeros_like(like), like)

            return -(pen+eigs+like)

        pen = -0.5 * np.sum(np.multiply(self.alpha,
                                   self.f - self.mu))
        eigs = - 0.5*np.sum(np.log(1 +
                                   np.multiply(eig_K, self.W)))
        like = np.sum(self.likelihood.log_like(self.f, self.y))

        return -(pen+eigs+like)
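
The eigenvalue bookkeeping above relies on the fact that the eigenvalues of a Kronecker product are the Kronecker product of the factor eigenvalues, so log-determinants of the full covariance never require forming it. A small numerical check of that identity (using functools.reduce(np.kron, ...) in place of kron_list):

import functools
import numpy as np

rng = np.random.default_rng(0)
Ks = []
for n in (3, 4):
    A = rng.standard_normal((n, n))
    Ks.append(A.dot(A.T) + n * np.eye(n))   # symmetric positive definite factors

K_full = functools.reduce(np.kron, Ks)
eig_K = functools.reduce(np.kron, [np.linalg.eigvalsh(K) for K in Ks])
assert np.isclose(np.linalg.slogdet(K_full)[1], np.sum(np.log(eig_K)))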
Example 7
    def variance_exact(self):
        """
        Exact computation of variance
        Returns: exact variance

        """
        K_uu = kron_list(self.Ks)
        K_xx = K_uu
        K_ux = K_uu
        if self.obs_idx is not None:
            K_ux = K_uu[:, self.obs_idx]
            K_xx = K_uu[self.obs_idx, :][:, self.obs_idx]

        A = K_xx + np.diag(np.ones(self.n) * self.noise)
        A_inv = np.linalg.inv(A)
        return np.diag(K_uu - np.dot(K_ux, A_inv).dot(K_ux.T)) + self.noise
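
The explicit np.linalg.inv works, but the same diagonal can be computed with a Cholesky solve, which is better conditioned. A hypothetical free-function variant (not part of gp3), taking the covariance blocks directly:

import numpy as np
from scipy.linalg import cho_factor, cho_solve

def variance_exact_chol(K_uu, K_ux, K_xx, noise):
    # diag(K_uu - K_ux (K_xx + noise * I)^-1 K_ux.T) + noise, via Cholesky
    A_chol = cho_factor(K_xx + noise * np.eye(K_xx.shape[0]))
    return np.diag(K_uu - K_ux.dot(cho_solve(A_chol, K_ux.T))) + noise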
Example 8
    def sqrt_eig(self):
        """
        Calculates square root of kernel matrix using
         fast kronecker eigendecomp.
        This is used in stochastic approximations
         of the predictive variance.

        Returns: Square root of kernel matrix

        """
        res = []

        for e, v in self.K_eigs:
            e_root_diag = np.sqrt(e)
            e_root = np.diag(np.real(np.nan_to_num(e_root_diag)))
            res.append(np.real(np.dot(np.dot(v, e_root), np.transpose(v))))

        res = np.squeeze(kron_list(res))
        self.root_eigdecomp = res

        return res
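
Each factor appended to res is the symmetric square root V sqrt(E) V.T of one kernel factor; squaring it recovers the factor, and by the mixed-product property the Kronecker product of the roots is a square root of the full kernel. A quick check of the per-factor identity:

import numpy as np

rng = np.random.default_rng(0)
A = rng.standard_normal((4, 4))
K = A.dot(A.T) + 4 * np.eye(4)               # symmetric positive definite

e, v = np.linalg.eigh(K)
root = v.dot(np.diag(np.sqrt(e))).dot(v.T)   # symmetric square root, as in sqrt_eig
assert np.allclose(root.dot(root), K)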