Example #1
 def setup_method(self):
     self.Xs = [
         np.linspace(0, 1, 7)[:, None],
         np.linspace(0, 1, 5)[:, None],
         np.linspace(0, 1, 6)[:, None],
     ]
     self.X = cartesian(*self.Xs)
     self.N = np.prod([len(X) for X in self.Xs])
     self.y = np.random.randn(self.N) * 0.1
     self.Xnews = (np.random.randn(5, 1), np.random.randn(5, 1), np.random.randn(5, 1))
     self.Xnew = np.concatenate(self.Xnews, axis=1)
     self.sigma = 0.2
     self.pnew = np.random.randn(len(self.Xnew)) * 0.01
     ls = 0.2
     with pm.Model() as model:
         self.cov_funcs = [
             pm.gp.cov.ExpQuad(1, ls),
             pm.gp.cov.ExpQuad(1, ls),
             pm.gp.cov.ExpQuad(1, ls),
         ]
         cov_func = pm.gp.cov.Kron(self.cov_funcs)
         self.mean = pm.gp.mean.Constant(0.5)
         gp = pm.gp.Marginal(mean_func=self.mean, cov_func=cov_func)
         f = gp.marginal_likelihood("f", self.X, self.y, noise=self.sigma)
         p = gp.conditional("p", self.Xnew)
         self.mu, self.cov = gp.predict(self.Xnew)
     self.logp = model.logp({"p": self.pnew})
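The setup above builds the full Kronecker covariance with pm.gp.Marginal over cartesian(*Xs). A minimal sketch of the structure-exploiting alternative, assuming pymc3's MarginalKron API (per-dimension cov_funcs, Xs passed as a list, noise passed as sigma; Examples #10 and #11 below show its internals):

import numpy as np
import pymc3 as pm

Xs = [
    np.linspace(0, 1, 7)[:, None],
    np.linspace(0, 1, 5)[:, None],
    np.linspace(0, 1, 6)[:, None],
]
y = np.random.randn(7 * 5 * 6) * 0.1

with pm.Model() as model:
    cov_funcs = [pm.gp.cov.ExpQuad(1, 0.2) for _ in range(3)]
    gp = pm.gp.MarginalKron(mean_func=pm.gp.mean.Constant(0.5), cov_funcs=cov_funcs)
    # marginal_likelihood takes the per-dimension inputs Xs, not cartesian(*Xs)
    f = gp.marginal_likelihood("f", Xs=Xs, y=y, sigma=0.2)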
Example #2
 def setup_method(self):
     self.Xs = [
         np.linspace(0, 1, 7)[:, None],
         np.linspace(0, 1, 5)[:, None],
         np.linspace(0, 1, 6)[:, None],
     ]
     self.X = cartesian(*self.Xs)
     self.N = np.prod([len(X) for X in self.Xs])
     self.y = np.random.randn(self.N) * 0.1
     self.Xnews = (np.random.randn(5, 1), np.random.randn(5, 1), np.random.randn(5, 1))
     self.Xnew = np.concatenate(self.Xnews, axis=1)
     self.pnew = np.random.randn(len(self.Xnew)) * 0.01
     ls = 0.2
     with pm.Model() as latent_model:
         self.cov_funcs = (
             pm.gp.cov.ExpQuad(1, ls),
             pm.gp.cov.ExpQuad(1, ls),
             pm.gp.cov.ExpQuad(1, ls),
         )
         cov_func = pm.gp.cov.Kron(self.cov_funcs)
         self.mean = pm.gp.mean.Constant(0.5)
         gp = pm.gp.Latent(mean_func=self.mean, cov_func=cov_func)
         f = gp.prior("f", self.X)
         p = gp.conditional("p", self.Xnew)
     chol = np.linalg.cholesky(cov_func(self.X).eval())
     self.y_rotated = np.linalg.solve(chol, self.y - 0.5)
     self.logp = latent_model.logp({"f_rotated_": self.y_rotated, "p": self.pnew})
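The chol / y_rotated lines above reproduce by hand the non-centered ("whitened") parameterisation behind the f_rotated_ variable. A minimal usage sketch, assuming pymc3's LatentKron API (prior takes the list Xs; Examples #4 and #7 below show the internals it relies on):

import numpy as np
import pymc3 as pm

Xs = [
    np.linspace(0, 1, 7)[:, None],
    np.linspace(0, 1, 5)[:, None],
    np.linspace(0, 1, 6)[:, None],
]
Xnew = np.random.randn(5, 3)

with pm.Model() as latent_model:
    cov_funcs = [pm.gp.cov.ExpQuad(1, 0.2) for _ in range(3)]
    gp = pm.gp.LatentKron(mean_func=pm.gp.mean.Constant(0.5), cov_funcs=cov_funcs)
    f = gp.prior("f", Xs=Xs)        # creates "f_rotated_" internally
    p = gp.conditional("p", Xnew)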
Example #3
 def test_multiops(self):
     X1 = np.linspace(0, 1, 3)[:, None]
     X21 = np.linspace(0, 1, 5)[:, None]
     X22 = np.linspace(0, 1, 4)[:, None]
     X2 = cartesian(X21, X22)
     X = cartesian(X1, X21, X22)
     with pm.Model() as model:
         cov1 = (
             3
             + pm.gp.cov.ExpQuad(1, 0.1)
             + pm.gp.cov.ExpQuad(1, 0.1) * pm.gp.cov.ExpQuad(1, 0.1)
         )
         cov2 = pm.gp.cov.ExpQuad(1, 0.1) * pm.gp.cov.ExpQuad(2, 0.1)
         cov = pm.gp.cov.Kron([cov1, cov2])
     K_true = kronecker(theano.function([], cov1(X1))(), theano.function([], cov2(X2))()).eval()
     K = theano.function([], cov(X))()
     npt.assert_allclose(K_true, K)
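A numpy-only sketch (an illustration, not the pymc3 implementation) of the identity test_multiops checks, using plain ExpQuad factors: a Kron kernel on cartesian(X1, X2) is the per-block product k1(x1, x1') * k2(x2, x2'), which equals np.kron(K1, K2) when the rightmost input varies fastest.

import numpy as np

def expquad(XA, XB, ls):
    # exp(-0.5 * squared distance / ls**2), the same form as pm.gp.cov.ExpQuad
    d = XA[:, None, :] - XB[None, :, :]
    return np.exp(-0.5 * np.sum(d ** 2, axis=-1) / ls ** 2)

X1 = np.linspace(0, 1, 3)[:, None]
X2 = np.linspace(0, 1, 4)[:, None]
X = np.array([[a, b] for a in X1.ravel() for b in X2.ravel()])  # cartesian(X1, X2)

K1 = expquad(X1, X1, 0.1)
K2 = expquad(X2, X2, 0.1)
K_full = expquad(X[:, :1], X[:, :1], 0.1) * expquad(X[:, 1:], X[:, 1:], 0.1)
assert np.allclose(np.kron(K1, K2), K_full)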
Example #4
File: gp.py Project: gdupret/pymc3
 def _build_prior(self, name, Xs, **kwargs):
     self.N = np.prod([len(X) for X in Xs])
     mu = self.mean_func(cartesian(*Xs))
     chols = [cholesky(stabilize(cov(X))) for cov, X in zip(self.cov_funcs, Xs)]
     # remove reparameterization option
     v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, shape=self.N, **kwargs)
     f = pm.Deterministic(name, mu + tt.flatten(kron_dot(chols, v)))
     return f
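kron_dot(chols, v) above multiplies v by the Kronecker product of the per-dimension Choleskys without ever forming it. A numpy-only sketch of that identity (the standard reshape/transpose algorithm as an illustration, not pymc3.math.kron_dot itself):

import numpy as np

def kron_mvm(mats, v):
    # (A1 kron A2 kron ...) @ v, applied one factor at a time
    x = np.asarray(v).ravel()
    for A in mats:
        x = (A @ x.reshape(A.shape[1], -1)).T.ravel()
    return x

rng = np.random.default_rng(0)
Ls = [np.tril(rng.normal(size=(n, n))) for n in (3, 4, 2)]
v = rng.normal(size=3 * 4 * 2)
full = np.kron(np.kron(Ls[0], Ls[1]), Ls[2])
assert np.allclose(kron_mvm(Ls, v), full @ v)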
Example #5
def test_cartesian_2d():
    np.random.seed(1)
    a = [[1, 2], [3, 4]]
    b = [5, 6]
    c = [0]
    manual_cartesian = np.array([
        [1, 2, 5, 0],
        [1, 2, 6, 0],
        [3, 4, 5, 0],
        [3, 4, 6, 0],
    ])
    auto_cart = cartesian(a, b, c)
    np.testing.assert_array_equal(manual_cartesian, auto_cart)
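A self-contained sketch of the behaviour these cartesian tests pin down (an illustration, not pymc3.gp.util.cartesian): each output row concatenates one row from every input, with the rightmost input varying fastest.

import itertools
import numpy as np

def cartesian_rows(*arrays):
    # 1-D inputs become column vectors; then take every combination of rows
    arrays = [np.asarray(a) for a in arrays]
    arrays = [a[:, None] if a.ndim == 1 else a for a in arrays]
    return np.array([np.concatenate(rows) for rows in itertools.product(*arrays)])

print(cartesian_rows([[1, 2], [3, 4]], [5, 6], [0]))   # matches manual_cartesian above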
Example #6
 def test_symprod_cov(self):
     X1 = np.linspace(0, 1, 10)[:, None]
     X2 = np.linspace(0, 1, 10)[:, None]
     X = cartesian(X1, X2)
     with pm.Model() as model:
         cov1 = pm.gp.cov.ExpQuad(1, 0.1)
         cov2 = pm.gp.cov.ExpQuad(1, 0.1)
         cov = pm.gp.cov.Kron([cov1, cov2])
     K = theano.function([], cov(X))()
     npt.assert_allclose(K[0, 1], 1 * 0.53940, atol=1e-3)
     npt.assert_allclose(K[0, 11], 0.53940 * 0.53940, atol=1e-3)
     # check diagonal
     Kd = theano.function([], cov(X, diag=True))()
     npt.assert_allclose(np.diag(K), Kd, atol=1e-5)
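A quick numpy check of where the 0.53940 reference value comes from (an aside, not part of the test): adjacent points on linspace(0, 1, 10) are 1/9 apart, and ExpQuad with ls = 0.1 evaluates to exp(-0.5 * d**2 / ls**2) there.

import numpy as np
print(np.exp(-0.5 * (1 / 9) ** 2 / 0.1 ** 2))   # ~0.53940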
Example #7
 def _build_conditional(self, Xnew):
     Xs, f = self.Xs, self.f
     X = cartesian(*Xs)
     delta = f - self.mean_func(X)
     covs = [stabilize(cov(Xi)) for cov, Xi in zip(self.cov_funcs, Xs)]
     chols = [cholesky(cov) for cov in covs]
     cholTs = [at.transpose(chol) for chol in chols]
     Kss = self.cov_func(Xnew)
     Kxs = self.cov_func(X, Xnew)
     Ksx = at.transpose(Kxs)
     alpha = kron_solve_lower(chols, delta)
     alpha = kron_solve_upper(cholTs, alpha)
     mu = at.dot(Ksx, alpha).ravel() + self.mean_func(Xnew)
     A = kron_solve_lower(chols, Kxs)
     cov = stabilize(Kss - at.dot(at.transpose(A), A))
     return mu, cov
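kron_solve_lower(chols, delta) above solves (L1 kron L2 kron ...) x = delta without forming the full matrix, using the same reshape pattern as the kron_dot sketch after Example #4 but with triangular solves. A scipy-based sketch of that identity (an illustration under the usual ordering conventions, not the pymc3 implementation):

import numpy as np
from scipy.linalg import solve_triangular

def kron_solve_lower(chols, b):
    x = np.asarray(b).ravel()
    for L in chols:
        # solve against one lower-triangular factor at a time
        x = solve_triangular(L, x.reshape(L.shape[0], -1), lower=True).T.ravel()
    return x

rng = np.random.default_rng(1)
def rand_chol(n):
    A = rng.normal(size=(n, n))
    return np.linalg.cholesky(A @ A.T + n * np.eye(n))

Ls = [rand_chol(3), rand_chol(4)]
b = rng.normal(size=12)
assert np.allclose(np.kron(Ls[0], Ls[1]) @ kron_solve_lower(Ls, b), b)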
Example #8
def test_cartesian():
    np.random.seed(1)
    a = [1, 2, 3]
    b = [0, 2]
    c = [5, 6]
    manual_cartesian = np.array([
        [1, 0, 5],
        [1, 0, 6],
        [1, 2, 5],
        [1, 2, 6],
        [2, 0, 5],
        [2, 0, 6],
        [2, 2, 5],
        [2, 2, 6],
        [3, 0, 5],
        [3, 0, 6],
        [3, 2, 5],
        [3, 2, 6],
    ])
    auto_cart = cartesian(a, b, c)
    np.testing.assert_array_equal(manual_cartesian, auto_cart)
Example #9
def test_cartesian():
    np.random.seed(1)
    a = [1, 2, 3]
    b = [0, 2]
    c = [5, 6]
    manual_cartesian = np.array(
        [[1, 0, 5],
         [1, 0, 6],
         [1, 2, 5],
         [1, 2, 6],
         [2, 0, 5],
         [2, 0, 6],
         [2, 2, 5],
         [2, 2, 6],
         [3, 0, 5],
         [3, 0, 6],
         [3, 2, 5],
         [3, 2, 6],
         ]
        )
    auto_cart = cartesian(a, b, c)
    np.testing.assert_array_almost_equal(manual_cartesian, auto_cart)
Example #10
    def _build_conditional(self, Xnew, pred_noise, diag):
        Xs, y, sigma = self.Xs, self.y, self.sigma

        # Old points
        X = cartesian(*Xs)
        delta = y - self.mean_func(X)
        Kns = [f(x) for f, x in zip(self.cov_funcs, Xs)]
        eigs_sep, Qs = zip(*map(eigh, Kns))  # Unzip
        QTs = list(map(at.transpose, Qs))
        eigs = kron_diag(*eigs_sep)  # Combine separate eigs
        if sigma is not None:
            eigs += sigma**2

        # New points
        Km = self.cov_func(Xnew, diag=diag)
        Knm = self.cov_func(X, Xnew)
        Kmn = Knm.T

        # Build conditional mu
        alpha = kron_dot(QTs, delta)
        alpha = alpha / eigs[:, None]
        alpha = kron_dot(Qs, alpha)
        mu = at.dot(Kmn, alpha).ravel() + self.mean_func(Xnew)

        # Build conditional cov
        A = kron_dot(QTs, Knm)
        A = A / at.sqrt(eigs[:, None])
        if diag:
            Asq = at.sum(at.square(A), 0)
            cov = Km - Asq
            if pred_noise:
                cov += sigma
        else:
            Asq = at.dot(A.T, A)
            cov = Km - Asq
            if pred_noise:
                cov += sigma * at.identity_like(cov)
        return mu, cov
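A numpy sketch of the eigendecomposition identity this conditional relies on (an illustration, not pymc3 code): with K = K1 kron K2 and Ki = Qi diag(li) Qi^T, the noisy inverse (K + sigma**2 I)^-1 equals (Q1 kron Q2) diag(1 / (kron(l1, l2) + sigma**2)) (Q1 kron Q2)^T, which is why the code only needs the per-dimension eigendecompositions and kron_diag of the eigenvalues.

import numpy as np

rng = np.random.default_rng(2)
def rand_psd(n):
    A = rng.normal(size=(n, n))
    return A @ A.T + n * np.eye(n)

K1, K2, sigma = rand_psd(3), rand_psd(4), 0.2
l1, Q1 = np.linalg.eigh(K1)
l2, Q2 = np.linalg.eigh(K2)
Q = np.kron(Q1, Q2)
eigs = np.kron(l1, l2) + sigma ** 2          # kron_diag of the separate eigenvalues, plus noise
lhs = np.linalg.inv(np.kron(K1, K2) + sigma ** 2 * np.eye(12))
rhs = Q @ np.diag(1.0 / eigs) @ Q.T
assert np.allclose(lhs, rhs)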
Example #11
 def _build_marginal_likelihood(self, Xs):
     self.X = cartesian(*Xs)
     mu = self.mean_func(self.X)
     covs = [f(X) for f, X in zip(self.cov_funcs, Xs)]
     return mu, covs