Example #1
    def _build_conditional(self, Xnew, pred_noise, diag, X, Xu, y, sigma,
                           cov_total, mean_total):
        sigma2 = at.square(sigma)
        # Covariances involving the inducing points Xu and the training inputs X
        Kuu = cov_total(Xu)
        Kuf = cov_total(Xu, X)
        Luu = cholesky(stabilize(Kuu))
        A = solve_lower(Luu, Kuf)
        Qffd = at.sum(A * A, 0)  # diag of Qff = Kfu Kuu^-1 Kuf
        if self.approx == "FITC":
            # FITC keeps the exact marginal variances: Lamd = diag(Kff - Qff) + sigma^2
            Kffd = cov_total(X, diag=True)
            Lamd = at.clip(Kffd - Qffd, 0.0, np.inf) + sigma2
        else:  # VFE or DTC use a constant noise term instead
            Lamd = at.ones_like(Qffd) * sigma2
        A_l = A / Lamd
        # B = I + A Lamd^-1 A^T, factored once and reused for mean and covariance
        L_B = cholesky(at.eye(Xu.shape[0]) + at.dot(A_l, at.transpose(A)))
        r = y - mean_total(X)
        r_l = r / Lamd
        c = solve_lower(L_B, at.dot(A, r_l))
        Kus = self.cov_func(Xu, Xnew)
        As = solve_lower(Luu, Kus)
        mu = self.mean_func(Xnew) + at.dot(at.transpose(As),
                                           solve_upper(at.transpose(L_B), c))
        C = solve_lower(L_B, As)
        if diag:
            # Only the predictive variances are needed
            Kss = self.cov_func(Xnew, diag=True)
            var = Kss - at.sum(at.square(As), 0) + at.sum(at.square(C), 0)
            if pred_noise:
                var += sigma2
            return mu, var
        else:
            # Full predictive covariance matrix
            cov = self.cov_func(Xnew) - at.dot(at.transpose(As), As) + at.dot(
                at.transpose(C), C)
            if pred_noise:
                cov += sigma2 * at.identity_like(cov)
            return mu, cov if pred_noise else stabilize(cov)
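
What the method above computes is the standard FITC/VFE sparse approximation: Qff = Kfu Kuu^-1 Kuf is the low-rank (Nystrom) approximation of the training covariance, and Lamd is either the FITC diagonal correction diag(Kff - Qff) + sigma^2 or the constant sigma^2 for VFE/DTC. Below is a minimal NumPy sketch of those first few quantities; the kernel `rbf` and the inputs `X`, `Xu` are made up for illustration and are not part of the snippet.

import numpy as np

def rbf(A, B, ell=1.0):
    """Squared-exponential kernel; stands in for cov_total/cov_func above."""
    d2 = ((A[:, None, :] - B[None, :, :]) ** 2).sum(-1)
    return np.exp(-0.5 * d2 / ell**2)

rng = np.random.default_rng(0)
X = rng.uniform(0, 10, (50, 1))        # training inputs
Xu = np.linspace(0, 10, 10)[:, None]   # inducing points
sigma2 = 0.1 ** 2

Kuu = rbf(Xu, Xu) + 1e-6 * np.eye(len(Xu))   # stabilize(Kuu)
Kuf = rbf(Xu, X)
Luu = np.linalg.cholesky(Kuu)
A = np.linalg.solve(Luu, Kuf)                # solve_lower(Luu, Kuf)
Qffd = (A * A).sum(0)                        # diag of Kfu Kuu^-1 Kuf

Lamd_fitc = np.clip(np.diag(rbf(X, X)) - Qffd, 0.0, np.inf) + sigma2  # FITC
Lamd_vfe = np.full_like(Qffd, sigma2)                                 # VFE / DTC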
Example #2
def stabilize(K, jitter=JITTER_DEFAULT):
    R"""
    Add a small diagonal term to a covariance matrix.

    Matrices calculated from covariance functions, `K = cov_func(X)`, are often
    not numerically positive semi-definite. Adding a small correction, `jitter`,
    to the diagonal is usually enough to fix this.

    Parameters
    ----------
    K: array-like
        A square covariance or kernel matrix.
    jitter: float
        A small constant added to the diagonal.
    """
    return K + jitter * at.identity_like(K)
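
A quick illustration of why the jitter matters: a kernel matrix built from duplicate or near-duplicate inputs is only positive semi-definite, so its Cholesky factorization can fail until a small diagonal term is added. The toy kernel below is plain NumPy and only mirrors what `stabilize` does; it is not how PyMC calls it.

import numpy as np

# Two of the three inputs coincide, so the kernel matrix is exactly singular (rank 2).
X = np.array([[0.0], [1.0], [1.0]])
K = np.exp(-0.5 * (X - X.T) ** 2)           # toy RBF kernel standing in for cov_func(X)

print(np.linalg.eigvalsh(K).min())          # ~0, may even come out slightly negative in floating point
K_stable = K + 1e-6 * np.eye(K.shape[0])    # what stabilize(K) returns with a 1e-6 jitter
print(np.linalg.eigvalsh(K_stable).min())   # shifted up by the jitter, safely positive
L = np.linalg.cholesky(K_stable)            # Cholesky now succeeds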
Example #3
    def _build_conditional(self, Xnew, pred_noise, diag):
        Xs, y, sigma = self.Xs, self.y, self.sigma

        # Old points
        X = cartesian(*Xs)
        delta = y - self.mean_func(X)
        Kns = [f(x) for f, x in zip(self.cov_funcs, Xs)]
        eigs_sep, Qs = zip(*map(eigh, Kns))  # Unzip
        QTs = list(map(at.transpose, Qs))
        eigs = kron_diag(*eigs_sep)  # Combine separate eigs
        if sigma is not None:
            eigs += sigma**2

        # New points
        Km = self.cov_func(Xnew, diag=diag)
        Knm = self.cov_func(X, Xnew)
        Kmn = Knm.T

        # Build conditional mu
        alpha = kron_dot(QTs, delta)
        alpha = alpha / eigs[:, None]
        alpha = kron_dot(Qs, alpha)
        mu = at.dot(Kmn, alpha).ravel() + self.mean_func(Xnew)

        # Build conditional cov
        A = kron_dot(QTs, Knm)
        A = A / at.sqrt(eigs[:, None])
        if diag:
            Asq = at.sum(at.square(A), 0)
            cov = Km - Asq
            if pred_noise:
                cov += sigma
        else:
            Asq = at.dot(A.T, A)
            cov = Km - Asq
            if pred_noise:
                cov += sigma * at.identity_like(cov)
        return mu, cov
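
The conditional above exploits the Kronecker structure of the prior: with K_i = Q_i Lambda_i Q_i^T, the identity (K_1 kron K_2 + sigma^2 I)^-1 = (Q_1 kron Q_2)(Lambda_1 kron Lambda_2 + sigma^2 I)^-1 (Q_1 kron Q_2)^T lets alpha be computed from the small per-dimension factors, which is what the kron_dot / kron_diag calls do. A self-contained NumPy sketch of that identity follows; the kron_dot helper here is a standalone reimplementation written just for this sketch, and the matrix sizes are arbitrary.

import numpy as np

def kron_dot(mats, v):
    """Multiply (mats[0] kron mats[1] kron ...) @ v using only the small factors."""
    out = v.reshape([m.shape[1] for m in mats])
    for axis, m in enumerate(mats):
        out = np.moveaxis(np.tensordot(m, out, axes=([1], [axis])), 0, axis)
    return out.reshape(-1, 1)

rng = np.random.default_rng(1)
K1 = np.cov(rng.normal(size=(4, 20)))   # small SPD factor, 4 x 4
K2 = np.cov(rng.normal(size=(3, 20)))   # small SPD factor, 3 x 3
sigma2 = 0.25
y = rng.normal(size=(12, 1))            # 12 = 4 * 3 "observations"

# Eigendecompose each factor separately (the `eigh` step above).
(e1, Q1), (e2, Q2) = np.linalg.eigh(K1), np.linalg.eigh(K2)
eigs = np.kron(e1, e2) + sigma2         # kron_diag(e1, e2) plus the noise variance

# alpha = (K1 kron K2 + sigma2*I)^-1 y, computed without forming the 12 x 12 matrix
alpha = kron_dot([Q1.T, Q2.T], y)
alpha = alpha / eigs[:, None]
alpha = kron_dot([Q1, Q2], alpha)

# Same result as the dense solve
K_full = np.kron(K1, K2) + sigma2 * np.eye(12)
assert np.allclose(alpha, np.linalg.solve(K_full, y))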
Example #4
def stabilize(K):
    """Add a small diagonal term to a covariance matrix."""
    # aet is the aesara.tensor module (aliased as `at` in the newer examples above)
    return K + 1e-6 * aet.identity_like(K)