import numpy as np
import pandas as pd
from scipy import sparse
from numpy.testing import assert_allclose, assert_equal

from statsmodels.genmod.bayes_mixed_glm import (BinomialBayesMixedGLM,
                                                PoissonBayesMixedGLM)
from statsmodels.tools.numdiff import approx_fprime

# gen_simple_poisson, gen_crossed_poisson, gen_simple_logit and
# gen_crossed_logit are data-generating helpers defined alongside these tests
# in statsmodels' test_bayes_mixed_glm.py; they are not reproduced here.


def test_simple_poisson_vb():

    y, exog_fe, exog_vc, ident = gen_simple_poisson(10, 10, 1)
    exog_vc = sparse.csr_matrix(exog_vc)

    glmm1 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt1 = glmm1.fit_map()

    glmm2 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt2 = glmm2.fit_vb(rslt1.params)

    rslt1.summary()
    rslt2.summary()

    assert_allclose(rslt1.params[0:5], np.r_[
        -0.07233493, -0.06706505, -0.47159649,  1.12575122, -1.02442201],
                    rtol=1e-4, atol=1e-4)

    assert_allclose(rslt1.cov_params().flat[0:5], np.r_[
        0.00790914, 0.00080666, -0.00050719, 0.00022648, 0.00046235],
                    rtol=1e-4, atol=1e-4)

    assert_allclose(rslt2.params[0:5], np.r_[
        -0.07088814, -0.06373107, -0.22770786,  1.12923746, -1.26161339],
                    rtol=1e-4, atol=1e-4)

    assert_allclose(rslt2.cov_params()[0:5], np.r_[
        0.00747782, 0.0092554, 0.04508904, 0.02934488, 0.20312746],
                    rtol=1e-4, atol=1e-4)
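

# The data-generating helpers (gen_simple_poisson, gen_crossed_poisson,
# gen_simple_logit, gen_crossed_logit) are defined in statsmodels'
# test_bayes_mixed_glm.py and are not shown here.  Purely for orientation, a
# comparable generator for the simple Poisson case could look like the sketch
# below; the exact design, coefficients and seeding used by statsmodels may
# differ, so treat this as an illustration rather than the real helper.
def _gen_simple_poisson_sketch(ngroup, nrep, scale):
    # nrep observations in each of ngroup groups, one random intercept per
    # group drawn with standard deviation `scale`.
    nobs = ngroup * nrep
    exog_fe = np.random.normal(size=(nobs, 2))              # fixed effects
    exog_vc = np.kron(np.eye(ngroup), np.ones((nrep, 1)))   # group indicators
    re = scale * np.random.normal(size=ngroup)              # random effects
    lin_pred = exog_fe.sum(1) + exog_vc.dot(re)             # log mean
    y = np.random.poisson(np.exp(lin_pred))
    ident = np.zeros(ngroup, dtype=int)  # all columns share one variance
    return y, exog_fe, exog_vc, ident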
def test_crossed_poisson_vb():

    y, exog_fe, exog_vc, ident = gen_crossed_poisson(10, 10, 1, 0.5)

    glmm1 = PoissonBayesMixedGLM(y,
                                 exog_fe,
                                 exog_vc,
                                 ident,
                                 vcp_p=0.5,
                                 fe_p=0.5)
    rslt1 = glmm1.fit_map()

    glmm2 = PoissonBayesMixedGLM(y,
                                 exog_fe,
                                 exog_vc,
                                 ident,
                                 vcp_p=0.5,
                                 fe_p=0.5)
    rslt2 = glmm2.fit_vb(mean=rslt1.params)

    rslt1.summary()
    rslt2.summary()

    assert_allclose(rslt1.params[0:5],
                    np.r_[-0.54855281, 0.10458834, -0.68777741, -0.01699925,
                          0.77200546],
                    rtol=1e-4,
                    atol=1e-4)

    assert_allclose(rslt2.params[0:5],
                    np.r_[-0.54691502, 0.22297158, -0.52673802, -0.06218684,
                          0.74385237],
                    rtol=1e-4,
                    atol=1e-4)
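
# A note on the `ident` array used above (and throughout these tests): each
# column of exog_vc is a random-effect indicator column, and ident assigns
# every column to a variance component.  In the crossed designs, the columns
# of the first grouping factor carry ident == 0 and those of the second
# factor carry ident == 1, which is the structure the formula-based tests
# below rebuild with C(z1) and C(z2).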
def test_simple_poisson_map():

    y, exog_fe, exog_vc, ident = gen_simple_poisson(10, 10, 0.2)
    exog_vc = sparse.csr_matrix(exog_vc)

    glmm1 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt1 = glmm1.fit_map()
    assert_allclose(glmm1.logposterior_grad(rslt1.params),
                    np.zeros_like(rslt1.params),
                    atol=1e-3)

    # This should give the same answer as above
    glmm2 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt2 = glmm2.fit_map()
    assert_allclose(rslt1.params, rslt2.params, atol=1e-4)

    # Test the predict method
    for linear in False, True:
        for exog in None, exog_fe:
            pr1 = rslt1.predict(linear=linear, exog=exog)
            pr2 = rslt2.predict(linear=linear, exog=exog)
            pr3 = glmm1.predict(rslt1.params, linear=linear, exog=exog)
            pr4 = glmm2.predict(rslt2.params, linear=linear, exog=exog)
            assert_allclose(pr1, pr2, rtol=1e-5)
            assert_allclose(pr2, pr3, rtol=1e-5)
            assert_allclose(pr3, pr4, rtol=1e-5)
            if not linear:
                assert_equal(pr1.min() >= 0, True)
                assert_equal(pr2.min() >= 0, True)
                assert_equal(pr3.min() >= 0, True)
def test_simple_poisson_map():

    y, exog_fe, exog_vc, ident = gen_simple_poisson(10, 10, 0.2)
    exog_vc = sparse.csr_matrix(exog_vc)

    glmm1 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt1 = glmm1.fit_map()
    assert_allclose(glmm1.logposterior_grad(rslt1.params),
                    np.zeros_like(rslt1.params),
                    atol=1e-3)

    # This should give the same answer as above
    glmm2 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt2 = glmm2.fit_map()
    assert_allclose(rslt1.params, rslt2.params, atol=1e-4)
def test_poisson_formula():

    y, exog_fe, exog_vc, ident = gen_crossed_poisson(10, 10, 1, 0.5)

    for vb in False, True:

        glmm1 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident)
        if vb:
            rslt1 = glmm1.fit_vb()
        else:
            rslt1 = glmm1.fit_map()

        # Build categorical variables that match exog_vc
        df = pd.DataFrame({"y": y, "x1": exog_fe[:, 0]})
        z1 = np.zeros(len(y))
        for j, k in enumerate(np.flatnonzero(ident == 0)):
            z1[exog_vc[:, k] == 1] = j
        df["z1"] = z1
        z2 = np.zeros(len(y))
        for j, k in enumerate(np.flatnonzero(ident == 1)):
            z2[exog_vc[:, k] == 1] = j
        df["z2"] = z2

        fml = "y ~ 0 + x1"
        from collections import OrderedDict
        vc_fml = OrderedDict()
        vc_fml["z1"] = "0 + C(z1)"
        vc_fml["z2"] = "0 + C(z2)"
        glmm2 = PoissonBayesMixedGLM.from_formula(fml, vc_fml, df)
        if vb:
            rslt2 = glmm2.fit_vb()
        else:
            rslt2 = glmm2.fit_map()

        assert_allclose(rslt1.params, rslt2.params, rtol=1e-5)
def test_simple_poisson_vb():

    y, exog_fe, exog_vc, ident = gen_simple_poisson(10, 10, 1)
    exog_vc = sparse.csr_matrix(exog_vc)

    glmm1 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt1 = glmm1.fit_map()

    glmm2 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt2 = glmm2.fit_vb(rslt1.params)

    rslt1.summary()
    rslt2.summary()

    assert_allclose(rslt1.params[0:5],
                    np.r_[-0.07233493, -0.06706505, -0.47159649, 1.12575122,
                          -1.02442201],
                    rtol=1e-4,
                    atol=1e-4)

    assert_allclose(rslt1.cov_params().flat[0:5],
                    np.r_[0.00790914, 0.00080666, -0.00050719, 0.00022648,
                          0.00046235],
                    rtol=1e-4,
                    atol=1e-4)

    assert_allclose(rslt2.params[0:5],
                    np.r_[-0.07088814, -0.06373107, -0.22770786, 1.12923746,
                          -1.26161339],
                    rtol=1e-4,
                    atol=1e-4)

    assert_allclose(rslt2.cov_params()[0:5],
                    np.r_[0.00747782, 0.0092554, 0.04508904, 0.02934488,
                          0.20312746],
                    rtol=1e-4,
                    atol=1e-4)
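
    # fit_map uses a Laplace approximation, so its cov_params() is a full
    # p x p posterior covariance matrix; fit_vb is a factorized (mean-field)
    # approximation, so its cov_params() is just a vector of posterior
    # variances.  The shape and positivity checks below reflect this.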

    for rslt in rslt1, rslt2:
        cp = rslt.cov_params()
        p = len(rslt.params)
        if rslt is rslt1:
            assert_equal(cp.shape, np.r_[p, p])
            np.linalg.cholesky(cp)
        else:
            assert_equal(cp.shape, np.r_[p, ])
            assert_equal(cp > 0, True * np.ones(p))
def test_elbo_grad():

    for f in range(2):
        for j in range(2):

            if f == 0:
                if j == 0:
                    y, exog_fe, exog_vc, ident = gen_simple_logit(10, 10, 2)
                else:
                    y, exog_fe, exog_vc, ident = gen_crossed_logit(
                        10, 10, 1, 2)
            elif f == 1:
                if j == 0:
                    y, exog_fe, exog_vc, ident = gen_simple_poisson(
                        10, 10, 0.5)
                else:
                    y, exog_fe, exog_vc, ident = gen_crossed_poisson(
                        10, 10, 1, 0.5)

            exog_vc = sparse.csr_matrix(exog_vc)

            if f == 0:
                glmm1 = BinomialBayesMixedGLM(y, exog_fe, exog_vc, ident,
                                              vcp_p=0.5)
            else:
                glmm1 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident,
                                             vcp_p=0.5)

            rslt1 = glmm1.fit_map()

            for k in range(3):

                if k == 0:
                    vb_mean = rslt1.params
                    vb_sd = np.ones_like(vb_mean)
                elif k == 1:
                    # vb_mean still holds the MAP estimate from the k == 0
                    # pass, so this keeps the same parameter dimension.
                    vb_mean = np.zeros(len(vb_mean))
                    vb_sd = np.ones_like(vb_mean)
                else:
                    vb_mean = np.random.normal(size=len(vb_mean))
                    vb_sd = np.random.uniform(1, 2, size=len(vb_mean))

                mean_grad, sd_grad = glmm1.vb_elbo_grad(vb_mean, vb_sd)

                def elbo(vec):
                    n = len(vec) // 2
                    return glmm1.vb_elbo(vec[:n], vec[n:])

                x = np.concatenate((vb_mean, vb_sd))
                g1 = approx_fprime(x, elbo, 1e-5)
                n = len(x) // 2

                mean_grad_n = g1[:n]
                sd_grad_n = g1[n:]

                assert_allclose(mean_grad, mean_grad_n, atol=1e-2,
                                rtol=1e-2)
                assert_allclose(sd_grad, sd_grad_n, atol=1e-2,
                                rtol=1e-2)
def test_crossed_poisson_vb():

    y, exog_fe, exog_vc, ident = gen_crossed_poisson(10, 10, 1, 0.5)

    glmm1 = PoissonBayesMixedGLM(y,
                                 exog_fe,
                                 exog_vc,
                                 ident,
                                 vcp_p=0.5,
                                 fe_p=0.5)
    rslt1 = glmm1.fit_map()

    glmm2 = PoissonBayesMixedGLM(y,
                                 exog_fe,
                                 exog_vc,
                                 ident,
                                 vcp_p=0.5,
                                 fe_p=0.5)
    rslt2 = glmm2.fit_vb(mean=rslt1.params)

    rslt1.summary()
    rslt2.summary()

    assert_allclose(rslt1.params[0:5],
                    np.r_[-0.54855281, 0.10458834, -0.68777741, -0.01699925,
                          0.77200546],
                    rtol=1e-4,
                    atol=1e-4)

    assert_allclose(rslt2.params[0:5],
                    np.r_[-0.54691502, 0.22297158, -0.52673802, -0.06218684,
                          0.74385237],
                    rtol=1e-4,
                    atol=1e-4)

    for rslt in rslt1, rslt2:
        cp = rslt.cov_params()
        p = len(rslt.params)
        if rslt is rslt1:
            assert_equal(cp.shape, np.r_[p, p])
            np.linalg.cholesky(cp)
        else:
            assert_equal(cp.shape, np.r_[p, ])
            assert_equal(cp > 0, True * np.ones(p))
def test_crossed_poisson_map():

    y, exog_fe, exog_vc, ident = gen_crossed_poisson(10, 10, 1, 1)
    exog_vc = sparse.csr_matrix(exog_vc)

    glmm = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt = glmm.fit_map()

    assert_allclose(glmm.logposterior_grad(rslt.params),
                    np.zeros_like(rslt.params),
                    atol=1e-4)
def test_simple_poisson_map():

    y, exog_fe, exog_vc, ident = gen_simple_poisson(10, 10, 0.2)
    exog_vc = sparse.csr_matrix(exog_vc)

    glmm1 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt1 = glmm1.fit_map()
    assert_allclose(
        glmm1.logposterior_grad(rslt1.params),
        np.zeros_like(rslt1.params),
        atol=1e-3)

    # This should give the same answer as above
    glmm2 = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt2 = glmm2.fit_map()
    assert_allclose(rslt1.params, rslt2.params, atol=1e-4)

    # Test the predict method
    for linear in False, True:
        for exog in None, exog_fe:
            pr1 = rslt1.predict(linear=linear, exog=exog)
            pr2 = rslt2.predict(linear=linear, exog=exog)
            pr3 = glmm1.predict(rslt1.params, linear=linear, exog=exog)
            pr4 = glmm2.predict(rslt2.params, linear=linear, exog=exog)
            assert_allclose(pr1, pr2, rtol=1e-5)
            assert_allclose(pr2, pr3, rtol=1e-5)
            assert_allclose(pr3, pr4, rtol=1e-5)
            if not linear:
                assert_equal(pr1.min() >= 0, True)
                assert_equal(pr2.min() >= 0, True)
                assert_equal(pr3.min() >= 0, True)

    # Check dimensions and PSD status of cov_params
    for rslt in rslt1, rslt2:
        cp = rslt.cov_params()
        p = len(rslt.params)
        assert_equal(cp.shape, np.r_[p, p])
        np.linalg.cholesky(cp)
def test_crossed_poisson_map():

    y, exog_fe, exog_vc, ident = gen_crossed_poisson(10, 10, 1, 1)
    exog_vc = sparse.csr_matrix(exog_vc)

    glmm = PoissonBayesMixedGLM(y, exog_fe, exog_vc, ident, vcp_p=0.5)
    rslt = glmm.fit_map()

    assert_allclose(glmm.logposterior_grad(rslt.params),
                    np.zeros_like(rslt.params),
                    atol=1e-4)

    # Check dimensions and PSD status of cov_params
    cp = rslt.cov_params()
    p = len(rslt.params)
    assert_equal(cp.shape, np.r_[p, p])
    np.linalg.cholesky(cp)
def test_poisson_formula():

    y, exog_fe, exog_vc, ident = gen_crossed_poisson(10, 10, 1, 0.5)

    for vb in False, True:

        glmm1 = PoissonBayesMixedGLM(
            y, exog_fe, exog_vc, ident)
        if vb:
            rslt1 = glmm1.fit_vb()
        else:
            rslt1 = glmm1.fit_map()

        # Build categorical variables that match exog_vc
        df = pd.DataFrame({"y": y, "x1": exog_fe[:, 0]})
        z1 = np.zeros(len(y))
        for j, k in enumerate(np.flatnonzero(ident == 0)):
            z1[exog_vc[:, k] == 1] = j
        df["z1"] = z1
        z2 = np.zeros(len(y))
        for j, k in enumerate(np.flatnonzero(ident == 1)):
            z2[exog_vc[:, k] == 1] = j
        df["z2"] = z2

        fml = "y ~ 0 + x1"
        vc_fml = {}
        vc_fml["z1"] = "0 + C(z1)"
        vc_fml["z2"] = "0 + C(z2)"
        glmm2 = PoissonBayesMixedGLM.from_formula(fml, vc_fml, df)
        if vb:
            rslt2 = glmm2.fit_vb()
        else:
            rslt2 = glmm2.fit_map()

        assert_allclose(rslt1.params, rslt2.params, rtol=1e-5)

        for rslt in rslt1, rslt2:
            cp = rslt.cov_params()
            p = len(rslt.params)
            if vb:
                assert_equal(cp.shape, np.r_[p, ])
                assert_equal(cp > 0, True * np.ones(p))
            else:
                assert_equal(cp.shape, np.r_[p, p])
                np.linalg.cholesky(cp)
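
# These tests mirror statsmodels' Bayesian mixed GLM test suite.  In a
# statsmodels source checkout they can be run with pytest, e.g. (the path
# assumes the usual statsmodels layout):
#
#     pytest statsmodels/genmod/tests/test_bayes_mixed_glm.py -k poisson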