Example #1
0
def test_bernoulli_optimize():
    """Fit an EP Bernoulli model on simulated data and check lml, heritability and beta."""
    rng = RandomState(139)
    nsamples = 100
    nfeatures = nsamples + 10

    # Column-normalized random design matrix.
    G = rng.randn(nsamples, nfeatures) / sqrt(nfeatures)
    M = ones((nsamples, 1))

    effects = rng.randn(nfeatures)
    latent = 0.1 + dot(G, effects) + 0.5 * rng.randn(nsamples)

    # Dichotomize the latent variable into a 0/1 outcome.
    y = (latent > 0).astype(float)

    (Q, S0) = economic_qs_linear(G)

    lik = BernoulliProdLik(LogitLink())
    lik.outcome = y

    ep = ExpFamEP(lik, M, Q[0], Q[1], S0)
    ep.learn(progress=False)

    assert_allclose(ep.lml(), -67.67727582268618, rtol=1e-5)
    assert_allclose(ep.heritability, 0.6243068813130619, rtol=1e-5)
    assert_allclose(ep.beta[0], -0.2561108097463372, rtol=1e-5)
Example #2
0
def test_binomial_optimize():
    """Fit an EP binomial model on simulated counts and check the marginal likelihood."""
    random = RandomState(139)
    nsamples = 30
    nfeatures = 31

    G = random.randn(nsamples, nfeatures) / sqrt(nfeatures)

    u = random.randn(nfeatures)

    z = 0.1 + 2 * dot(G, u) + random.randn(nsamples)

    ntrials = random.randint(10, 500, size=nsamples)

    # Successes per sample: count how many logistic draws z[i] pushes above zero.
    # enumerate instead of range(len(...)); RNG consumption order is unchanged.
    y = zeros(nsamples)
    for i, nt in enumerate(ntrials):
        y[i] = sum(z[i] + random.logistic(scale=pi / sqrt(3), size=nt) > 0)
    (Q, S0) = economic_qs_linear(G)

    M = ones((nsamples, 1))
    lik = BinomialProdLik(ntrials, LogitLink())
    lik.nsuccesses = y
    ep = ExpFamEP(lik, M, Q[0], Q[1], S0)
    ep.learn(progress=False)

    assert_allclose(ep.lml(), -144.2381842202486, rtol=1e-3)
Example #3
0
def test_binomial_gradient_over_delta():
    """Check ExpFamEP._gradient_over_delta against a finite-difference estimate."""
    nsamples = 3
    covariates = ones((nsamples, 1)) * 1.
    G = array([[1.2, 3.4], [-.1, 1.2], [0.0, .2]])
    Q, S0 = economic_qs_linear(G)

    lik = BinomialProdLik(array([1., 1., 1.]), LogitLink())
    lik.nsuccesses = array([1., 0., 1.])

    ep = ExpFamEP(lik, covariates, Q[0], Q[1], S0 + 1.0)
    ep.beta = array([1.])
    assert_allclose(ep.beta, array([1.]))
    ep.v = 1.
    ep.delta = 0.5

    exact = ep._gradient_over_delta()

    # Forward finite difference of the log marginal likelihood w.r.t. delta.
    step = 1e-5
    base = ep.lml()
    ep.delta = ep.delta + step
    approx = (ep.lml() - base) / step

    assert_allclose(approx, exact, rtol=1e-4)
Example #4
0
def test_binomial_lml():
    """Evaluate the binomial EP log marginal likelihood at fixed hyperparameters."""
    covariates = ones((3, 1)) * 1.
    G = array([[1.2, 3.4], [-.1, 1.2], [0.0, .2]])
    Q, S0 = economic_qs_linear(G)

    lik = BinomialProdLik(array([1., 1., 1.]), LogitLink())
    lik.nsuccesses = array([1., 0., 1.])

    ep = ExpFamEP(lik, covariates, Q[0], Q[1], S0 + 1)
    ep.beta = array([1.])
    assert_allclose(ep.beta, array([1.]))
    ep.v = 1.
    ep.delta = 0

    assert_allclose(ep.lml(), -2.3202659215368935)
Example #5
0
def test_bernoulli_lml():
    """Evaluate the Bernoulli EP lml and variance decomposition at fixed hyperparameters."""
    covariates = ones((3, 1)) * 1.
    G = array([[1.2, 3.4], [-.1, 1.2], [0.0, .2]])
    Q, S0 = economic_qs_linear(G)

    lik = BernoulliProdLik(LogitLink())
    lik.outcome = array([1., 0., 1.])

    ep = ExpFamEP(lik, covariates, Q[0], Q[1], S0 + 1.0)
    ep.beta = array([1.])
    assert_almost_equal(ep.beta, array([1.]))
    ep.v = 1.
    ep.delta = 0.

    assert_almost_equal(ep.lml(), -2.3202659215368935)
    # With delta fixed at zero all variance goes to the genetic component.
    assert_almost_equal(ep.sigma2_epsilon, 0)
    assert_almost_equal(ep.sigma2_b, 1)
Example #6
0
def test_binomial_extreme():
    """Run EP inference on a stored 'extreme' binomial dataset and check the lml."""
    import os
    import numpy as np
    from limix_inference.glmm import ExpFamEP
    from limix_inference.lik import BinomialProdLik
    from limix_inference.link import LogitLink

    # The fixture lives next to this test module.
    here = os.path.dirname(os.path.realpath(__file__))
    data = np.load(os.path.join(here, 'data', 'extreme.npz'))

    lik = BinomialProdLik(data['ntrials'], LogitLink())
    lik.nsuccesses = data['nsuccesses']

    ep = ExpFamEP(lik,
                  data['covariates'],
                  Q0=data['Q0'],
                  Q1=data['Q1'],
                  S0=data['S0'])
    ep.learn(progress=False)

    assert_allclose(ep.lml(), -627.8816497398326, rtol=1e-3)
Example #7
0
def test_GLMMSampler_binomial():
    """Sample binomial outcomes from a GLMM under several mean/covariance setups.

    The expected values are pinned to the RandomState stream, so the order of
    RNG-consuming calls must not change.
    """
    rng = RandomState(4503)
    X = rng.randn(10, 15)
    link = LogitLink()
    lik = BinomialLik(5, link)

    # Linear covariance with an offset mean of 1.2.
    mean = OffsetMean()
    mean.offset = 1.2
    mean.set_data(10, 'sample')
    cov = LinearCov()
    cov.set_data((X, X), 'sample')
    sampler = GLMMSampler(lik, mean, cov)
    assert_equal(sampler.sample(rng), [0, 5, 0, 5, 1, 1, 5, 0, 5, 5])

    # Same sampler, zero offset.
    mean.offset = 0.
    assert_equal(sampler.sample(rng), [5, 4, 1, 0, 0, 1, 4, 5, 5, 0])

    # Sum of a linear and an identity covariance, both with tiny scales.
    mean = OffsetMean()
    mean.offset = 0.0
    mean.set_data(10, 'sample')

    cov_lin = LinearCov()
    cov_lin.set_data((X, X), 'sample')

    cov_eye = EyeCov()
    idx = arange(10)
    cov_eye.set_data((idx, idx), 'sample')

    cov_lin.scale = 1e-4
    cov_eye.scale = 1e-4

    cov = SumCov([cov_lin, cov_eye])

    lik = BinomialLik(100, link)
    sampler = GLMMSampler(lik, mean, cov)
    assert_equal(
        sampler.sample(rng), [56, 56, 55, 51, 59, 45, 47, 43, 51, 38])

    # A large noise scale pushes the success counts to the extremes.
    cov_eye.scale = 100.
    sampler = GLMMSampler(lik, mean, cov)
    assert_equal(
        sampler.sample(rng), [99, 93, 99, 75, 77, 0, 0, 100, 99, 12])
Example #8
0
def test_binomial_get_normal_likelihood_trick():
    """Check the fast_scan lml values from the normal-likelihood approximation trick."""
    random = RandomState(139)
    nsamples = 30
    nfeatures = 31

    G = random.randn(nsamples, nfeatures) / sqrt(nfeatures)

    u = random.randn(nfeatures)

    z = 0.1 + 2 * dot(G, u) + random.randn(nsamples)

    ntrials = random.randint(10, 500, size=nsamples)

    # Successes per sample: count how many logistic draws z[i] pushes above zero.
    # enumerate instead of range(len(...)); RNG consumption order is unchanged.
    y = zeros(nsamples)
    for i, nt in enumerate(ntrials):
        y[i] = sum(z[i] + random.logistic(scale=pi / sqrt(3), size=nt) > 0)
    (Q, S0) = economic_qs_linear(G)

    M = ones((nsamples, 1))
    lik = BinomialProdLik(ntrials, LogitLink())
    lik.nsuccesses = y
    ep = ExpFamEP(lik, M, Q[0], Q[1], S0)
    ep.learn(progress=False)

    # Scan each column of G as a candidate fixed effect via the trick.
    nlt = ep.get_normal_likelihood_trick()
    assert_allclose(nlt.fast_scan(G)[0], [
        -143.48903288, -144.32031587, -144.03889888, -144.31806561,
        -143.90248659, -144.303103, -144.47854112, -144.44469341, -144.285027,
        -144.31240175, -143.11590263, -142.81623878, -141.67554141,
        -144.4780024, -144.47780285, -144.10317082, -142.10043322,
        -143.0813298, -143.99841663, -143.345783, -144.45458683, -144.37877612,
        -142.56846859, -144.32923028, -144.44116855, -144.45082936,
        -144.40932741, -143.0212886, -144.47902176, -143.94188634,
        -143.72765373
    ],
                    rtol=1e-5)
Example #9
0
def test_bernoulli_gradient_over_v():
    """Check ExpFamEP._gradient_over_v against a finite-difference estimate."""
    nsamples = 3
    covariates = ones((nsamples, 1)) * 1.
    G = array([[1.2, 3.4], [-.1, 1.2], [0.0, .2]])
    Q, S0 = economic_qs_linear(G)

    lik = BernoulliProdLik(LogitLink())
    lik.outcome = array([1., 0., 1.])

    ep = ExpFamEP(lik, covariates, Q[0], Q[1], S0 + 1.0)
    ep.beta = array([1.])
    assert_almost_equal(ep.beta, array([1.]))
    ep.v = 1.
    ep.delta = 0.

    exact = ep._gradient_over_v()

    # Forward finite difference of the log marginal likelihood w.r.t. v.
    step = 1e-5
    base = ep.lml()
    ep.v = ep.v + step
    approx = (ep.lml() - base) / step

    assert_almost_equal(approx, exact, decimal=4)
Example #10
0
def test_binomial_sampler():
    """A 12-trial binomial likelihood sampled at offset 0 yields 7 successes."""
    rng = RandomState(4503)
    binom = BinomialLik(12, LogitLink())
    assert_equal(binom.sample(0, rng), 7)