Example 1
# Imports needed to run this example standalone.
import pytest
from numpy import inf, nan, zeros
from numpy.random import RandomState

from glimix_core.cov import LinearCov
from glimix_core.gp import GP
from glimix_core.mean import OffsetMean


def test_gp_nonfinite_phenotype():
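    """GP must reject a phenotype containing NaN or ±inf with a ValueError."""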
    random = RandomState(94584)
    N = 50
    X = random.randn(N, 100)
    offset = 0.5

    mean = OffsetMean(N)
    mean.offset = offset
    mean.fix_offset()

    cov = LinearCov(X)
    cov.scale = 1.0

    y = zeros(N)

    y[0] = nan
    with pytest.raises(ValueError):
        GP(y, mean, cov)

    y[0] = -inf
    with pytest.raises(ValueError):
        GP(y, mean, cov)

    y[0] = +inf
    with pytest.raises(ValueError):
        GP(y, mean, cov)
Example 2
def offset_mean():
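    """Return an OffsetMean over _nsamples() samples with its offset set to 0.5."""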
    from glimix_core.mean import OffsetMean

    mean = OffsetMean(_nsamples())
    mean.offset = 0.5

    return mean
Example 3
from numpy import sqrt
from numpy.random import RandomState

from glimix_core.cov import EyeCov, LinearCov, SumCov
from glimix_core.lik import BernoulliProdLik
from glimix_core.link import LogitLink
from glimix_core.mean import OffsetMean
from glimix_core.random import GGPSampler


def _get_data():
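    """Simulate a Bernoulli outcome under an offset mean and a linear-plus-identity covariance."""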
    random = RandomState(0)
    N = 10
    X = random.randn(N, N + 1)
    X -= X.mean(0)
    X /= X.std(0)
    X /= sqrt(X.shape[1])
    offset = 1.0

    mean = OffsetMean(N)
    mean.offset = offset

    cov_left = LinearCov(X)
    cov_left.scale = 1.5

    cov_right = EyeCov(N)
    cov_right.scale = 1.5

    cov = SumCov([cov_left, cov_right])

    lik = BernoulliProdLik(LogitLink())

    y = GGPSampler(lik, mean, cov).sample(random)

    return dict(mean=mean,
                cov=cov,
                lik=lik,
                y=y,
                cov_left=cov_left,
                cov_right=cov_right)
Example 4
    def append_offset(self):
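        """Append an offset term to the model's fixed effects and invalidate the cached mean."""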
        from glimix_core.mean import OffsetMean

        mean = OffsetMean()
        mean.set_data(self._sample_idx)
        self._fixed_effects["impl"].append(mean)
        self._fixed_effects["user"].append(user_mean.OffsetMean(mean))
        self._fixed_effects["user"][-1].name = "offset"
        self._mean = None
Example 5
from numpy.testing import assert_allclose

from glimix_core.mean import LinearMean, OffsetMean, SumMean


def test_mean_sum():
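    """Sum of a LinearMean and an OffsetMean: check its value and gradient."""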
    X = [[5.1, 1.0], [2.1, -0.2]]

    mean0 = LinearMean(X)
    mean0.effsizes = [-1.0, 0.5]

    mean1 = OffsetMean(2)
    mean1.offset = 2.0

    mean = SumMean([mean0, mean1])

    assert_allclose(mean.value(), [-2.6, -0.2])
    assert_allclose(mean._check_grad(), 0, atol=1e-5)
Example 6
from glimix_core.cov import EyeCov, LinearCov, SumCov
from glimix_core.lik import DeltaProdLik
from glimix_core.mean import OffsetMean
from glimix_core.random import GGPSampler


def _outcome_sample(random, offset, X):
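    """Sample an outcome under a delta likelihood, offset mean, and linear-plus-identity covariance."""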
    n = X.shape[0]
    mean = OffsetMean(n)
    mean.offset = offset

    cov_left = LinearCov(X)
    cov_left.scale = 1.5

    cov_right = EyeCov(n)
    cov_right.scale = 1.5

    cov = SumCov([cov_left, cov_right])

    lik = DeltaProdLik()

    return GGPSampler(lik, mean, cov).sample(random)
Example 7
from numpy.random import RandomState
from numpy.testing import assert_allclose

from glimix_core.cov import LinearCov
from glimix_core.gp import GP
from glimix_core.mean import OffsetMean


def test_gp_value_1():
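    """Check GP.value() for a fixed offset mean and a linear covariance."""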
    random = RandomState(94584)
    N = 50
    X = random.randn(N, 100)
    offset = 0.5

    mean = OffsetMean(N)
    mean.offset = offset
    mean.fix_offset()

    cov = LinearCov(X)
    cov.scale = 1.0

    y = random.randn(N)

    gp = GP(y, mean, cov)
    assert_allclose(gp.value(), -153.623791551399108)
Example 8
from numpy.random import RandomState
from numpy.testing import assert_allclose

from glimix_core.cov import LinearCov
from glimix_core.gp import GP
from glimix_core.mean import OffsetMean


def test_gp_gradient():
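    """Gradient check for a GP with a fixed offset mean and a linear covariance."""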
    random = RandomState(94584)
    N = 50
    X = random.randn(N, 100)
    offset = 0.5

    mean = OffsetMean(N)
    mean.offset = offset
    mean.fix_offset()

    cov = LinearCov(X)
    cov.scale = 1.0

    y = random.randn(N)

    gp = GP(y, mean, cov)

    assert_allclose(gp._check_grad(), 0, atol=1e-5)
Example 9
from numpy.random import RandomState
from numpy.testing import assert_allclose

from glimix_core.cov import LinearCov
from glimix_core.gp import GP
from glimix_core.mean import OffsetMean


def test_gp_maximize():
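    """Fit the GP and check its value before and after optimization."""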
    random = RandomState(94584)
    N = 50
    X = random.randn(N, 100)
    offset = 0.5

    mean = OffsetMean(N)
    mean.offset = offset
    mean.fix_offset()

    cov = LinearCov(X)
    cov.scale = 1.0

    y = random.randn(N)

    gp = GP(y, mean, cov)

    assert_allclose(gp.value(), -153.623791551)
    gp.fit(verbose=False)
    assert_allclose(gp.value(), -79.8992122415)
Example 10
from numpy import corrcoef, dot, ones, sqrt
from numpy.random import RandomState
from numpy.testing import assert_allclose
from numpy_sugar.linalg import economic_qs_linear

from glimix_core.cov import EyeCov, LinearCov, SumCov
from glimix_core.lik import DeltaProdLik
from glimix_core.lmm import LMM, LMMPredict
from glimix_core.mean import OffsetMean
from glimix_core.random import GGPSampler


def test_lmm_predict():
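    """Fit an LMM and check how well its predictive means correlate with the outcome."""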
    random = RandomState(9458)
    n = 30

    X = random.randn(n, n + 1)
    X -= X.mean(0)
    X /= X.std(0)
    X /= sqrt(X.shape[1])

    offset = 1.0

    mean = OffsetMean(n)
    mean.offset = offset

    cov_left = LinearCov(X)
    cov_left.scale = 1.5

    cov_right = EyeCov(n)
    cov_right.scale = 1.5

    cov = SumCov([cov_left, cov_right])

    lik = DeltaProdLik()

    y = GGPSampler(lik, mean, cov).sample(random)

    QS = economic_qs_linear(X)

    lmm = LMM(y, ones((n, 1)), QS)

    lmm.fit(verbose=False)

    plmm = LMMPredict(y, lmm.beta, lmm.v0, lmm.v1, lmm.mean(),
                      lmm.covariance())

    K = dot(X, X.T)
    pm = plmm.predictive_mean(ones((n, 1)), K, K.diagonal())
    assert_allclose(corrcoef(y, pm)[0, 1], 0.8358820971891354)
Example 11
from numpy import matmul
from numpy.random import RandomState
from numpy.testing import assert_allclose

from glimix_core.cov import EyeCov
from glimix_core.ggp import ExpFamGP
from glimix_core.lik import BinomialProdLik
from glimix_core.link import LogitLink
from glimix_core.mean import OffsetMean
from glimix_core.random import GGPSampler


def test_ggp_expfam_tobi():
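    """ExpFamGP with a binomial likelihood: check the LML before and after fitting."""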
    random = RandomState(2)

    n = 30

    ntrials = random.randint(30, size=n)
    K = random.randn(n, n)
    K = matmul(K, K.T)

    lik = BinomialProdLik(ntrials=ntrials, link=LogitLink())

    mean = OffsetMean(n)

    cov2 = EyeCov(n)

    y = GGPSampler(lik, mean, cov2).sample(random)

    ggp = ExpFamGP(y, ("binomial", ntrials), mean, cov2)
    assert_allclose(ggp.lml(), -67.84095700542488)

    ggp.fit(verbose=False)
    assert_allclose(ggp.lml(), -64.26701904994792)
Example 12
from numpy.testing import assert_allclose
from glimix_core.mean import OffsetMean


def test_mean_offset():
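    """OffsetMean basics: value, offset attribute, and gradient check."""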
    mean = OffsetMean(2)
    mean.offset = 1.5
    assert_allclose(mean.value(), [1.5, 1.5])
    assert_allclose(mean.offset, 1.5)
    assert_allclose(mean._check_grad(), 0, atol=1e-5)