Example #1
0
 def _build_marginal_likelihood_logp(self, y, X, Xu, sigma):
     """Build the symbolic approximate log marginal likelihood of ``y``
     under a sparse GP approximation (FITC, VFE, or DTC) with inducing
     points ``Xu``.

     Parameters
     ----------
     y : symbolic vector of observed values (length X.shape[0])
     X : symbolic matrix of input locations for the observations
     Xu : symbolic matrix of inducing point locations
     sigma : symbolic scalar, observation noise standard deviation

     Returns
     -------
     Symbolic scalar: the approximate log marginal likelihood.
     """
     sigma2 = tt.square(sigma)
     Kuu = self.cov_func(Xu)
     Kuf = self.cov_func(Xu, X)
     Luu = cholesky(stabilize(Kuu))
     # A = Luu^-1 Kuf, so the Nystrom approximation Qff = Kfu Kuu^-1 Kuf
     # equals A^T A; Qffd is its diagonal.
     A = solve_lower(Luu, Kuf)
     Qffd = tt.sum(A * A, 0)
     if self.approx == "FITC":
         # FITC keeps the exact prior diagonal; clip guards against small
         # negative values caused by floating-point error.
         Kffd = self.cov_func(X, diag=True)
         Lamd = tt.clip(Kffd - Qffd, 0.0, np.inf) + sigma2
         trace = 0.0
     elif self.approx == "VFE":
         Lamd = tt.ones_like(Qffd) * sigma2
         # VFE correction term: (1 / 2*sigma^2) * tr(Kff - Qff).
         # Reuse Qffd instead of recomputing tt.sum(A * A, 0).
         trace = ((1.0 / (2.0 * sigma2)) *
                  (tt.sum(self.cov_func(X, diag=True)) -
                   tt.sum(Qffd)))
     else:  # DTC: constant diagonal, no trace correction
         Lamd = tt.ones_like(Qffd) * sigma2
         trace = 0.0
     A_l = A / Lamd
     L_B = cholesky(tt.eye(Xu.shape[0]) + tt.dot(A_l, tt.transpose(A)))
     r = y - self.mean_func(X)
     r_l = r / Lamd
     c = solve_lower(L_B, tt.dot(A, r_l))
     # log N(y | mean, Qff + diag(Lamd)) evaluated via the Woodbury
     # identity and the matrix determinant lemma.
     constant = 0.5 * X.shape[0] * tt.log(2.0 * np.pi)
     logdet = 0.5 * tt.sum(tt.log(Lamd)) + tt.sum(tt.log(tt.diag(L_B)))
     quadratic = 0.5 * (tt.dot(r, r_l) - tt.dot(c, c))
     return -1.0 * (constant + logdet + quadratic + trace)
Example #2
0
 def _build_conditional(self, Xnew, pred_noise, diag, X, Xu, y, sigma, cov_total, mean_total):
     """Build the sparse-GP predictive distribution at ``Xnew``.

     Returns ``(mu, var)`` when ``diag`` is true, else ``(mu, cov)`` with
     the full covariance passed through ``stabilize``. When ``pred_noise``
     is true the observation noise variance is added to the prediction.
     """
     noise_var = tt.square(sigma)
     Luu = cholesky(stabilize(cov_total(Xu)))
     A = solve_lower(Luu, cov_total(Xu, X))
     Qff_diag = tt.sum(A * A, 0)
     if self.approx == "FITC":
         # FITC keeps the exact prior diagonal, clipped to non-negative.
         Lamd = tt.clip(cov_total(X, diag=True) - Qff_diag, 0.0, np.inf) + noise_var
     else:  # VFE or DTC: constant diagonal
         Lamd = tt.ones_like(Qff_diag) * noise_var
     L_B = cholesky(tt.eye(Xu.shape[0]) + tt.dot(A / Lamd, tt.transpose(A)))
     resid = y - mean_total(X)
     c = solve_lower(L_B, tt.dot(A, resid / Lamd))
     As = solve_lower(Luu, self.cov_func(Xu, Xnew))
     mu = self.mean_func(Xnew) + tt.dot(tt.transpose(As),
                                        solve_upper(tt.transpose(L_B), c))
     C = solve_lower(L_B, As)
     if diag:
         var = (self.cov_func(Xnew, diag=True)
                - tt.sum(tt.square(As), 0)
                + tt.sum(tt.square(C), 0))
         if pred_noise:
             var += noise_var
         return mu, var
     cov = (self.cov_func(Xnew)
            - tt.dot(tt.transpose(As), As)
            + tt.dot(tt.transpose(C), C))
     if pred_noise:
         cov += noise_var * tt.identity_like(cov)
     return mu, stabilize(cov)
Example #3
0
 def _build_conditional(self, Xnew, pred_noise, diag, X, Xu, y, sigma,
                        cov_total, mean_total):
     """Build the sparse-GP predictive distribution at ``Xnew``.

     Returns ``(mu, var)`` when ``diag`` is true, else ``(mu, cov)``.
     With ``pred_noise`` the observation noise is added and the full
     covariance is returned as-is; otherwise it is stabilized.
     """
     noise_var = tt.square(sigma)
     Luu = cholesky(stabilize(cov_total(Xu)))
     A = solve_lower(Luu, cov_total(Xu, X))
     Qff_diag = tt.sum(A * A, 0)
     if self.approx == "FITC":
         # FITC keeps the exact prior diagonal, clipped to non-negative.
         Lamd = tt.clip(cov_total(X, diag=True) - Qff_diag, 0.0, np.inf) + noise_var
     else:  # VFE or DTC: constant diagonal
         Lamd = tt.ones_like(Qff_diag) * noise_var
     L_B = cholesky(tt.eye(Xu.shape[0]) + tt.dot(A / Lamd, tt.transpose(A)))
     resid = y - mean_total(X)
     c = solve_lower(L_B, tt.dot(A, resid / Lamd))
     As = solve_lower(Luu, self.cov_func(Xu, Xnew))
     mu = self.mean_func(Xnew) + tt.dot(tt.transpose(As),
                                        solve_upper(tt.transpose(L_B), c))
     C = solve_lower(L_B, As)
     if diag:
         var = (self.cov_func(Xnew, diag=True)
                - tt.sum(tt.square(As), 0)
                + tt.sum(tt.square(C), 0))
         if pred_noise:
             var += noise_var
         return mu, var
     cov = (self.cov_func(Xnew)
            - tt.dot(tt.transpose(As), As)
            + tt.dot(tt.transpose(C), C))
     if pred_noise:
         # Noise on the diagonal already regularizes, so no jitter here.
         return mu, cov + noise_var * tt.identity_like(cov)
     return mu, stabilize(cov)
Example #4
0
 def _build_marginal_likelihood_logp(self, y, X, Xu, sigma):
     """Build the symbolic approximate log marginal likelihood of ``y``
     under a sparse GP approximation (FITC, VFE, or DTC) with inducing
     points ``Xu``.

     Parameters
     ----------
     y : symbolic vector of observed values (length X.shape[0])
     X : symbolic matrix of input locations for the observations
     Xu : symbolic matrix of inducing point locations
     sigma : symbolic scalar, observation noise standard deviation

     Returns
     -------
     Symbolic scalar: the approximate log marginal likelihood.
     """
     sigma2 = tt.square(sigma)
     Kuu = self.cov_func(Xu)
     Kuf = self.cov_func(Xu, X)
     Luu = cholesky(stabilize(Kuu))
     # A = Luu^-1 Kuf, so Qff = Kfu Kuu^-1 Kuf = A^T A; Qffd is its diagonal.
     A = solve_lower(Luu, Kuf)
     Qffd = tt.sum(A * A, 0)
     if self.approx == "FITC":
         # FITC keeps the exact prior diagonal; clip guards against small
         # negative values caused by floating-point error.
         Kffd = self.cov_func(X, diag=True)
         Lamd = tt.clip(Kffd - Qffd, 0.0, np.inf) + sigma2
         trace = 0.0
     elif self.approx == "VFE":
         Lamd = tt.ones_like(Qffd) * sigma2
         # VFE correction: (1 / 2*sigma^2) * tr(Kff - Qff).
         # Reuse Qffd rather than recomputing tt.sum(A * A, 0).
         trace = ((1.0 / (2.0 * sigma2)) *
                  (tt.sum(self.cov_func(X, diag=True)) -
                   tt.sum(Qffd)))
     else:  # DTC: constant diagonal, no trace correction
         Lamd = tt.ones_like(Qffd) * sigma2
         trace = 0.0
     A_l = A / Lamd
     L_B = cholesky(tt.eye(Xu.shape[0]) + tt.dot(A_l, tt.transpose(A)))
     r = y - self.mean_func(X)
     r_l = r / Lamd
     c = solve_lower(L_B, tt.dot(A, r_l))
     # log N(y | mean, Qff + diag(Lamd)) via Woodbury identity and the
     # matrix determinant lemma.
     constant = 0.5 * X.shape[0] * tt.log(2.0 * np.pi)
     logdet = 0.5 * tt.sum(tt.log(Lamd)) + tt.sum(tt.log(tt.diag(L_B)))
     quadratic = 0.5 * (tt.dot(r, r_l) - tt.dot(c, c))
     return -1.0 * (constant + logdet + quadratic + trace)
Example #5
0
 def _build_conditional(self, Xnew, X, f, cov_total, mean_total):
     """Noise-free GP conditional at ``Xnew`` given latent values ``f``
     observed at ``X``. Returns the predictive mean and full covariance.
     """
     chol = cholesky(stabilize(cov_total(X)))
     # Two triangular solves: one against the cross-covariance, one
     # against the de-meaned latent values.
     cross = solve_lower(chol, self.cov_func(X, Xnew))
     resid = solve_lower(chol, f - mean_total(X))
     mu = self.mean_func(Xnew) + tt.dot(tt.transpose(cross), resid)
     cov = self.cov_func(Xnew) - tt.dot(tt.transpose(cross), cross)
     return mu, cov
Example #6
0
 def _build_conditional(self, Xnew, X, f, cov_total, mean_total):
     """Return the noise-free GP predictive mean and covariance at
     ``Xnew``, conditioned on latent values ``f`` at inputs ``X``."""
     Kxx = cov_total(X)
     Kxs = self.cov_func(X, Xnew)
     Kss = self.cov_func(Xnew)
     root = cholesky(stabilize(Kxx))
     alpha = solve_lower(root, Kxs)
     beta = solve_lower(root, f - mean_total(X))
     # Standard GP conditioning: mean shift plus covariance reduction.
     predictive_mean = self.mean_func(Xnew) + tt.dot(tt.transpose(alpha), beta)
     predictive_cov = Kss - tt.dot(tt.transpose(alpha), alpha)
     return predictive_mean, predictive_cov
Example #7
0
def get_mean_var1(choices, actions, rewards, kern):
    """Zero-mean GP posterior at ``choices`` conditioned on ``rewards``
    observed at ``actions``, under kernel ``kern``.

    Returns the predictive mean and the per-point (diagonal) variance.
    """
    root = cholesky(stabilize(kern(actions)))
    proj = solve_lower(root, kern(actions, choices))
    weights = solve_lower(root, rewards)
    mean = tt.dot(tt.transpose(proj), weights)
    # Diagonal-only variance: sum of squares instead of a full product.
    variance = kern(choices, diag=True) - tt.sum(tt.square(proj), 0)
    return mean, variance
Example #8
0
 def _build_conditional(self, Xnew, X, f):
     """Student-T process conditional at ``Xnew`` given latent values
     ``f`` at ``X``. Returns ``(nu2, mu, covT)``: the updated degrees of
     freedom, the predictive mean, and the rescaled covariance.
     """
     chol = cholesky(stabilize(self.cov_func(X)))
     cross = solve_lower(chol, self.cov_func(X, Xnew))
     resid = solve_lower(chol, f - self.mean_func(X))
     mu = self.mean_func(Xnew) + tt.dot(tt.transpose(cross), resid)
     cov = self.cov_func(Xnew) - tt.dot(tt.transpose(cross), cross)
     # Quadratic form of the residuals scales the covariance, per the
     # multivariate Student-T conditioning formula.
     beta = tt.dot(resid, resid)
     nu2 = self.nu + X.shape[0]
     covT = (self.nu + beta - 2) / (nu2 - 2) * cov
     return nu2, mu, covT
Example #9
0
 def _build_conditional(self, Xnew, X, f):
     """Condition the Student-T process on latent values ``f`` at ``X``
     and return ``(dof, mean, covariance)`` at the new inputs ``Xnew``."""
     Kxx = self.cov_func(X)
     Kxs = self.cov_func(X, Xnew)
     Kss = self.cov_func(Xnew)
     L = cholesky(stabilize(Kxx))
     A = solve_lower(L, Kxs)
     v = solve_lower(L, f - self.mean_func(X))
     predictive_mean = self.mean_func(Xnew) + tt.dot(tt.transpose(A), v)
     predictive_cov = Kss - tt.dot(tt.transpose(A), A)
     # Degrees of freedom grow with the number of conditioning points;
     # the covariance is rescaled by the residual quadratic form.
     dof = self.nu + X.shape[0]
     scale = (self.nu + tt.dot(v, v) - 2) / (dof - 2)
     return dof, predictive_mean, scale * predictive_cov
Example #10
0
 def _build_conditional(self, Xnew, pred_noise, diag, X, y, noise, cov_total, mean_total):
     """Exact GP predictive at ``Xnew`` given noisy observations ``y``.

     Returns ``(mu, var)`` when ``diag`` is true, else ``(mu, cov)``.
     With ``pred_noise`` the noise covariance is added to the prediction;
     the full noiseless covariance is stabilized before returning.
     """
     # Cholesky of the noisy prior covariance over the training inputs.
     chol = cholesky(stabilize(cov_total(X)) + noise(X))
     cross = solve_lower(chol, self.cov_func(X, Xnew))
     resid = solve_lower(chol, y - mean_total(X))
     mu = self.mean_func(Xnew) + tt.dot(tt.transpose(cross), resid)
     if diag:
         var = self.cov_func(Xnew, diag=True) - tt.sum(tt.square(cross), 0)
         if pred_noise:
             var += noise(Xnew, diag=True)
         return mu, var
     cov = self.cov_func(Xnew) - tt.dot(tt.transpose(cross), cross)
     if pred_noise:
         # Added noise already regularizes the diagonal; no jitter.
         return mu, cov + noise(Xnew)
     return mu, stabilize(cov)
Example #11
0
 def _build_conditional(self, Xnew, pred_noise, diag, X, y, noise,
                        cov_total, mean_total):
     """Exact GP predictive at ``Xnew`` given noisy observations ``y``.

     Returns ``(mu, var)`` when ``diag`` is true, else ``(mu, cov)`` with
     the full covariance passed through ``stabilize``. With ``pred_noise``
     the noise covariance is added to the prediction.
     """
     chol = cholesky(stabilize(cov_total(X)) + noise(X))
     cross = solve_lower(chol, self.cov_func(X, Xnew))
     resid = solve_lower(chol, y - mean_total(X))
     mu = self.mean_func(Xnew) + tt.dot(tt.transpose(cross), resid)
     if diag:
         var = self.cov_func(Xnew, diag=True) - tt.sum(tt.square(cross), 0)
         return (mu, var + noise(Xnew, diag=True)) if pred_noise else (mu, var)
     full = self.cov_func(Xnew) - tt.dot(tt.transpose(cross), cross)
     if pred_noise:
         full = full + noise(Xnew)
     return mu, stabilize(full)