Example #1
from numpy import eye
from numpy.random import RandomState
from numpy.testing import assert_allclose

from glimix_core.cov import EyeCov, GivenCov, SumCov
from glimix_core.gp import GP
from glimix_core.mean import LinearMean


def test_gp_gradient2():
    random = RandomState(94584)
    N = 10
    X = random.randn(N, 2)

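    # Build two random symmetric positive definite matrices; the small
    # diagonal jitter keeps them numerically well conditioned.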
    K0 = random.randn(N, N + 1)
    K0 = K0 @ K0.T + eye(N) * 1e-5

    K1 = random.randn(N, N + 1)
    K1 = K1 @ K1.T + eye(N) * 1e-5

    mean = LinearMean(X)
    mean.effsizes = [-1.2, +0.9]

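    # Sum three covariance terms, each carrying its own scale parameter.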
    cov0 = GivenCov(K0)
    cov1 = GivenCov(K1)
    cov2 = EyeCov(N)
    cov = SumCov([cov0, cov1, cov2])
    cov0.scale = 0.1
    cov1.scale = 2.3
    cov2.scale = 0.3

    y = random.randn(N)

    gp = GP(y, mean, cov)

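    # The analytical gradient should agree with its numerical approximation.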
    assert_allclose(gp._check_grad(), 0, atol=1e-5)
Example #2
from numpy import sqrt
from numpy.random import RandomState

from glimix_core.cov import EyeCov, LinearCov, SumCov
from glimix_core.lik import BernoulliProdLik
from glimix_core.link import LogitLink
from glimix_core.mean import OffsetMean
from glimix_core.random import GGPSampler


def _get_data():
    random = RandomState(0)
    N = 10
    X = random.randn(N, N + 1)
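    # Column-standardize X and rescale so diag(X @ X.T) is approximately one.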
    X -= X.mean(0)
    X /= X.std(0)
    X /= sqrt(X.shape[1])
    offset = 1.0

    mean = OffsetMean(N)
    mean.offset = offset

    cov_left = LinearCov(X)
    cov_left.scale = 1.5

    cov_right = EyeCov(N)
    cov_right.scale = 1.5

    cov = SumCov([cov_left, cov_right])

    lik = BernoulliProdLik(LogitLink())

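    # Draw a Bernoulli outcome from the generalized Gaussian process prior.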
    y = GGPSampler(lik, mean, cov).sample(random)

    return dict(mean=mean,
                cov=cov,
                lik=lik,
                y=y,
                cov_left=cov_left,
                cov_right=cov_right)
Example #3
def _fit_glmm(self, verbose):
    from glimix_core.cov import SumCov
    from glimix_core.ggp import ExpFamGP
    from numpy import asarray

    # Fit an exponential-family GP over the summed covariance and cache it.
    y = asarray(self._y, float).ravel()
    gp = ExpFamGP(y, self._lik, self._mean, SumCov(self._covariance))
    gp.fit(verbose=verbose)
    self._glmm = gp
Example #4
import numpy as np
import pytest
from numpy.testing import assert_allclose

from glimix_core.cov import GivenCov, LinearCov, SumCov


def test_sumcov():
    random = np.random.RandomState(0)
    X = random.randn(3, 2)
    cov_left = LinearCov(X)

    K = random.randn(3, 3)
    K = K @ K.T
    cov_right = GivenCov(K)

    cov = SumCov([cov_left, cov_right])
    assert_allclose(cov.value(), cov_left.value() + cov_right.value())
    assert_allclose(cov._check_grad(), 0, atol=1e-5)
    cov_left.scale = 0.1
    assert_allclose(cov._check_grad(), 0, atol=1e-5)

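    # A plain random matrix is not symmetric, so GivenCov must reject it.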
    with pytest.raises(ValueError):
        K = random.randn(3, 3)
        GivenCov(K)
Example #5
from glimix_core.cov import EyeCov, LinearCov, SumCov
from glimix_core.lik import DeltaProdLik
from glimix_core.mean import OffsetMean
from glimix_core.random import GGPSampler


def _outcome_sample(random, offset, X):
    n = X.shape[0]
    mean = OffsetMean(n)
    mean.offset = offset

    cov_left = LinearCov(X)
    cov_left.scale = 1.5

    cov_right = EyeCov(n)
    cov_right.scale = 1.5

    cov = SumCov([cov_left, cov_right])

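    # The delta likelihood passes the latent process through unchanged.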
    lik = DeltaProdLik()

    return GGPSampler(lik, mean, cov).sample(random)
Example #6
def linear_eye_cov():
    from numpy import sqrt
    from numpy.random import RandomState
    from glimix_core.cov import EyeCov, LinearCov, SumCov

    random = RandomState(458)
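    # _nsamples() is a helper defined elsewhere that returns the sample size.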
    X = random.randn(_nsamples(), _nsamples() + 1)
    X -= X.mean(0)
    X /= X.std(0)
    X /= sqrt(X.shape[1])

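    # Linear kernel plus identity noise; with both scales at 1.0 the summed
    # kernel is K = X @ X.T + I.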
    cov_left = LinearCov(X)
    cov_left.scale = 1.0

    cov_right = EyeCov(_nsamples())
    cov_right.scale = 1.0

    return SumCov([cov_left, cov_right])
Example #7
from numpy import corrcoef, dot, ones, sqrt
from numpy.random import RandomState
from numpy.testing import assert_allclose
from numpy_sugar.linalg import economic_qs_linear

from glimix_core.cov import EyeCov, LinearCov, SumCov
from glimix_core.lik import DeltaProdLik
from glimix_core.lmm import LMM, LMMPredict
from glimix_core.mean import OffsetMean
from glimix_core.random import GGPSampler


def test_lmm_predict():
    random = RandomState(9458)
    n = 30

    X = random.randn(n, n + 1)
    X -= X.mean(0)
    X /= X.std(0)
    X /= sqrt(X.shape[1])

    offset = 1.0

    mean = OffsetMean(n)
    mean.offset = offset

    cov_left = LinearCov(X)
    cov_left.scale = 1.5

    cov_right = EyeCov(n)
    cov_right.scale = 1.5

    cov = SumCov([cov_left, cov_right])

    lik = DeltaProdLik()

    y = GGPSampler(lik, mean, cov).sample(random)

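    # Economic eigendecomposition of X @ X.T, as required by LMM.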
    QS = economic_qs_linear(X)

    lmm = LMM(y, ones((n, 1)), QS)

    lmm.fit(verbose=False)

    plmm = LMMPredict(y, lmm.beta, lmm.v0, lmm.v1, lmm.mean(),
                      lmm.covariance())

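    # Predict on the training covariance and check the correlation with y.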
    K = dot(X, X.T)
    pm = plmm.predictive_mean(ones((n, 1)), K, K.diagonal())
    assert_allclose(corrcoef(y, pm)[0, 1], 0.8358820971891354)
Example #8
    def _setup_cov(self):
        from glimix_core.cov import SumCov

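        # Build the summed covariance once, caching both the core object and
        # its user-facing wrapper.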
        if self._cov is None:
            cov = SumCov(self._covariance_matrices["impl"])
            self._cov = {"impl": cov, "user": user_cov.SumCov(cov)}