Example #1
    def test_vcomp_1(self):
        """
        Fit the same model using constrained random effects and variance components.
        """

        np.random.seed(4279)
        exog = np.random.normal(size=(400, 1))
        exog_re = np.random.normal(size=(400, 2))
        groups = np.kron(np.arange(100), np.ones(4))
        slopes = np.random.normal(size=(100, 2))
        slopes[:, 1] *= 2
        slopes = np.kron(slopes, np.ones((4, 1))) * exog_re
        errors = slopes.sum(1) + np.random.normal(size=400)
        endog = exog.sum(1) + errors

        free = MixedLMParams(1, 2, 0)
        free.fe_params = np.ones(1)
        free.cov_re = np.eye(2)
        free.vcomp = np.zeros(0)

        model1 = MixedLM(endog, exog, groups, exog_re=exog_re)
        result1 = model1.fit(free=free)

        exog_vc = {"a": {}, "b": {}}
        for k, group in enumerate(model1.group_labels):
            ix = model1.row_indices[group]
            exog_vc["a"][group] = exog_re[ix, 0:1]
            exog_vc["b"][group] = exog_re[ix, 1:2]
        model2 = MixedLM(endog, exog, groups, exog_vc=exog_vc)
        result2 = model2.fit()
        result2.summary()

        assert_allclose(result1.fe_params, result2.fe_params, atol=1e-4)
        assert_allclose(np.diag(result1.cov_re), result2.vcomp, atol=1e-2, rtol=1e-4)
        assert_allclose(result1.bse[[0, 1, 3]], result2.bse, atol=1e-2, rtol=1e-2)
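These test snippets omit their imports and class scaffolding. As a point of reference, here is a minimal self-contained sketch of the constrained fit above, assuming the statsmodels import paths shown below; `MixedLMParams.from_components` stands in for filling the attributes by hand:

import numpy as np
from statsmodels.regression.mixed_linear_model import MixedLM, MixedLMParams

# Simulate 100 groups of 4 observations with two independent random slopes.
np.random.seed(4279)
exog = np.random.normal(size=(400, 1))
exog_re = np.random.normal(size=(400, 2))
groups = np.kron(np.arange(100), np.ones(4))
slopes = np.random.normal(size=(100, 2))
slopes[:, 1] *= 2
re_part = (np.kron(slopes, np.ones((4, 1))) * exog_re).sum(1)
endog = exog.sum(1) + re_part + np.random.normal(size=400)

# `free` marks which parameters are estimated: all fixed effects, and only
# the diagonal of the random-effects covariance (off-diagonals stay at zero).
free = MixedLMParams.from_components(fe_params=np.ones(1), cov_re=np.eye(2))
result = MixedLM(endog, exog, groups, exog_re=exog_re).fit(free=free)
print(result.summary())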
Example #2
def do1(reml, irf, ds_ix):

    # No need to check independent random effects when there is
    # only one of them.
    if irf and ds_ix < 6:
        return

    irfs = "irf" if irf else "drf"
    meth = "reml" if reml else "ml"

    rslt = R_Results(meth, irfs, ds_ix)

    # Fit the model
    md = MixedLM(rslt.endog, rslt.exog_fe, rslt.groups, rslt.exog_re)
    if not irf:  # Free random effects covariance
        if np.any(np.diag(rslt.cov_re_r) < 1e-5):
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                mdf = md.fit(gtol=1e-7, reml=reml)
        else:
            mdf = md.fit(gtol=1e-7, reml=reml)

    else:  # Independent random effects
        k_fe = rslt.exog_fe.shape[1]
        k_re = rslt.exog_re.shape[1]
        free = MixedLMParams(k_fe, k_re, 0)
        free.fe_params = np.ones(k_fe)
        free.cov_re = np.eye(k_re)
        free.vcomp = np.array([])
        if np.any(np.diag(rslt.cov_re_r) < 1e-5):
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                mdf = md.fit(reml=reml, gtol=1e-7, free=free)
        else:
            mdf = md.fit(reml=reml, gtol=1e-7, free=free)

    assert_almost_equal(mdf.fe_params, rslt.coef, decimal=4)
    assert_almost_equal(mdf.cov_re, rslt.cov_re_r, decimal=4)
    assert_almost_equal(mdf.scale, rslt.scale_r, decimal=4)

    k_fe = md.k_fe
    assert_almost_equal(rslt.vcov_r,
                        mdf.cov_params()[0:k_fe, 0:k_fe],
                        decimal=3)

    assert_almost_equal(mdf.llf, rslt.loglike[0], decimal=2)

    # Not supported in R except for independent random effects
    if not irf:
        assert_almost_equal(mdf.random_effects[0],
                            rslt.ranef_postmean,
                            decimal=3)
        assert_almost_equal(mdf.random_effects_cov[0],
                            rslt.ranef_condvar,
                            decimal=3)
Example #3
def do1(reml, irf, ds_ix):

    # No need to check independent random effects when there is
    # only one of them.
    if irf and ds_ix < 6:
        return

    irfs = "irf" if irf else "drf"
    meth = "reml" if reml else "ml"

    rslt = R_Results(meth, irfs, ds_ix)

    # Fit the model
    md = MixedLM(rslt.endog, rslt.exog_fe, rslt.groups,
                 rslt.exog_re)
    if not irf:  # Free random effects covariance
        if np.any(np.diag(rslt.cov_re_r) < 1e-5):
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                mdf = md.fit(gtol=1e-7, reml=reml)
        else:
            mdf = md.fit(gtol=1e-7, reml=reml)

    else:  # Independent random effects
        k_fe = rslt.exog_fe.shape[1]
        k_re = rslt.exog_re.shape[1]
        free = MixedLMParams(k_fe, k_re, 0)
        free.fe_params = np.ones(k_fe)
        free.cov_re = np.eye(k_re)
        free.vcomp = np.array([])
        if np.any(np.diag(rslt.cov_re_r) < 1e-5):
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                mdf = md.fit(reml=reml, gtol=1e-7, free=free)
        else:
            mdf = md.fit(reml=reml, gtol=1e-7, free=free)

    assert_almost_equal(mdf.fe_params, rslt.coef, decimal=4)
    assert_almost_equal(mdf.cov_re, rslt.cov_re_r, decimal=4)
    assert_almost_equal(mdf.scale, rslt.scale_r, decimal=4)

    k_fe = md.k_fe
    assert_almost_equal(rslt.vcov_r, mdf.cov_params()[0:k_fe, 0:k_fe],
                        decimal=3)

    assert_almost_equal(mdf.llf, rslt.loglike[0], decimal=2)

    # Not supported in R except for independent random effects
    if not irf:
        assert_almost_equal(mdf.random_effects[0], rslt.ranef_postmean,
                            decimal=3)
        assert_almost_equal(mdf.random_effects_cov[0],
                            rslt.ranef_condvar,
                            decimal=3)
Example #4
    def test_vcomp_1(self):
        """
        Fit the same model using constrained random effects and
        variance components.
        """

        import scipy
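        # Skip on older scipy (this check parses the minor version and
        # assumes a 0.x scipy version string).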
        v = scipy.__version__.split(".")[1]
        v = int(v)
        if v < 16:
            return

        np.random.seed(4279)
        exog = np.random.normal(size=(400, 1))
        exog_re = np.random.normal(size=(400, 2))
        groups = np.kron(np.arange(100), np.ones(4))
        slopes = np.random.normal(size=(100, 2))
        slopes[:, 1] *= 2
        slopes = np.kron(slopes, np.ones((4, 1))) * exog_re
        errors = slopes.sum(1) + np.random.normal(size=400)
        endog = exog.sum(1) + errors

        free = MixedLMParams(1, 2, 0)
        free.fe_params = np.ones(1)
        free.cov_re = np.eye(2)
        free.vcomp = np.zeros(0)

        model1 = MixedLM(endog, exog, groups, exog_re=exog_re)
        result1 = model1.fit(free=free)

        exog_vc = {"a": {}, "b": {}}
        for k, group in enumerate(model1.group_labels):
            ix = model1.row_indices[group]
            exog_vc["a"][group] = exog_re[ix, 0:1]
            exog_vc["b"][group] = exog_re[ix, 1:2]
        model2 = MixedLM(endog, exog, groups, exog_vc=exog_vc)
        result2 = model2.fit()
        result2.summary()

        assert_allclose(result1.fe_params, result2.fe_params, atol=1e-4)
        assert_allclose(np.diag(result1.cov_re),
                        result2.vcomp,
                        atol=1e-2,
                        rtol=1e-4)
        assert_allclose(result1.bse[[0, 1, 3]],
                        result2.bse,
                        atol=1e-2,
                        rtol=1e-2)
Example #5
 def f(x):
     params = MixedLMParams.from_packed(x,
                                        model.k_fe,
                                        model.k_re,
                                        model.use_sqrt,
                                        has_fe=has_fe)
     return -model.loglike(params, profile_fe=profile_fe)
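Closures like `f` above are used elsewhere on this page to cross-check the analytic score against a numerical gradient. A rough self-contained sketch of that pattern, assuming the same statsmodels API as in Examples #10 and #13 and using made-up simulated data:

import numpy as np
import statsmodels.tools.numdiff as nd
from statsmodels.regression.mixed_linear_model import MixedLM, MixedLMParams


def loglike_function(model, profile_fe, has_fe):
    # Negative log-likelihood as a function of the packed parameter vector.
    def f(x):
        params = MixedLMParams.from_packed(
            x, model.k_fe, model.k_re, model.use_sqrt, has_fe=has_fe)
        return -model.loglike(params, profile_fe=profile_fe)
    return f


# Simulate 50 groups of 5 observations with a random intercept.
np.random.seed(0)
groups = np.kron(np.arange(50), np.ones(5))
exog = np.random.normal(size=(250, 2))
endog = (exog.sum(1) + np.kron(np.random.normal(size=50), np.ones(5))
         + np.random.normal(size=250))

model = MixedLM(endog, exog, groups)
rslt = model.fit()

# Analytic score versus numerical gradient at the fitted parameters.
loglike = loglike_function(model, profile_fe=False, has_fe=True)
vec = rslt.params_object.get_packed(use_sqrt=model.use_sqrt, has_fe=True)
gr = -model.score(rslt.params_object, profile_fe=False)
ngr = nd.approx_fprime(vec, loglike)
print(np.max(np.abs(gr - ngr)))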
Example #6
    def __fit__(correctors,
                correctors_re,
                groups,
                predictors,
                observations,
                sample_weight=None,
                n_jobs=-1,
                *args,
                **kwargs):

        ncols = correctors.shape[1]
        dims = (correctors.shape[0], ncols + predictors.shape[1])
        xdata = np.zeros(dims)
        xdata[:, :ncols] = correctors.view()
        xdata[:, ncols:] = predictors.view()

        # One model is fit per column of `observations`; collect the fitted
        # fixed-effects coefficients column by column.
        M = observations.shape[1]

        params = np.empty((xdata.shape[1], M))
        for it_m in range(M):
            free = MixedLMParams.from_components(
                fe_params=np.ones(xdata.shape[1]),
                cov_re=np.eye(correctors_re.shape[1]))
            model = MixedLM(endog=observations[:, it_m],
                            exog=xdata,
                            groups=groups,
                            exog_re=correctors_re)

            results = model.fit(free=free)
            params[:, it_m] = results.fe_params

        return (params[:ncols], params[ncols:])
Example #7
    def do1(self, reml, irf, ds_ix):

        # No need to check independent random effects when there is
        # only one of them.
        if irf and ds_ix < 6:
            return

        irfs = "irf" if irf else "drf"
        meth = "reml" if reml else "ml"

        rslt = R_Results(meth, irfs, ds_ix)

        # Fit the model
        md = MixedLM(rslt.endog, rslt.exog_fe, rslt.groups,
                     rslt.exog_re)
        if not irf:  # Free random effects covariance
            mdf = md.fit(gtol=1e-7, reml=reml)
        else:  # Independent random effects
            k_fe = rslt.exog_fe.shape[1]
            k_re = rslt.exog_re.shape[1]
            free = MixedLMParams(k_fe, k_re)
            free.set_fe_params(np.ones(k_fe))
            free.set_cov_re(np.eye(k_re))
            mdf = md.fit(reml=reml, gtol=1e-7, free=free)

        assert_almost_equal(mdf.fe_params, rslt.coef, decimal=4)
        assert_almost_equal(mdf.cov_re, rslt.cov_re_r, decimal=4)
        assert_almost_equal(mdf.scale, rslt.scale_r, decimal=4)

        pf = rslt.exog_fe.shape[1]
        assert_almost_equal(rslt.vcov_r, mdf.cov_params()[0:pf, 0:pf],
                            decimal=3)

        assert_almost_equal(mdf.likeval, rslt.loglike[0], decimal=2)

        # Not supported in R
        if not irf:
            assert_almost_equal(mdf.ranef()[0], rslt.ranef_postmean,
                                decimal=3)
            assert_almost_equal(mdf.ranef_cov()[0],
                                rslt.ranef_condvar,
                                decimal=3)
Example #8
    def test_vcomp_1(self):
        # Fit the same model using constrained random effects and
        # variance components.

        np.random.seed(4279)
        exog = np.random.normal(size=(400, 1))
        exog_re = np.random.normal(size=(400, 2))
        groups = np.kron(np.arange(100), np.ones(4))
        slopes = np.random.normal(size=(100, 2))
        slopes[:, 1] *= 2
        slopes = np.kron(slopes, np.ones((4, 1))) * exog_re
        errors = slopes.sum(1) + np.random.normal(size=400)
        endog = exog.sum(1) + errors

        free = MixedLMParams(1, 2, 0)
        free.fe_params = np.ones(1)
        free.cov_re = np.eye(2)
        free.vcomp = np.zeros(0)

        model1 = MixedLM(endog, exog, groups, exog_re=exog_re)
        result1 = model1.fit(free=free)

        exog_vc = {"a": {}, "b": {}}
        for k, group in enumerate(model1.group_labels):
            ix = model1.row_indices[group]
            exog_vc["a"][group] = exog_re[ix, 0:1]
            exog_vc["b"][group] = exog_re[ix, 1:2]
        with pytest.warns(UserWarning, match="Using deprecated variance"):
            model2 = MixedLM(endog, exog, groups, exog_vc=exog_vc)
        result2 = model2.fit()
        result2.summary()

        assert_allclose(result1.fe_params, result2.fe_params, atol=1e-4)
        assert_allclose(np.diag(result1.cov_re),
                        result2.vcomp,
                        atol=1e-2,
                        rtol=1e-4)
        assert_allclose(result1.bse[[0, 1, 3]],
                        result2.bse,
                        atol=1e-2,
                        rtol=1e-2)
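The dict-of-dicts `exog_vc` format used here is the older variance-components interface, which is why the deprecation warning is expected above. A minimal sketch of the formula-based alternative via `vc_formula`, with a made-up data frame standing in for the arrays:

import numpy as np
import pandas as pd
import statsmodels.api as sm

# 100 groups of 4 observations; x1 and x2 carry the two variance components.
np.random.seed(4279)
df = pd.DataFrame({
    "g": np.kron(np.arange(100), np.ones(4)),
    "x": np.random.normal(size=400),
    "x1": np.random.normal(size=400),
    "x2": np.random.normal(size=400),
})
u = np.kron(np.random.normal(size=(100, 2)), np.ones((4, 1)))
df["y"] = (df["x"] + u[:, 0] * df["x1"] + 2 * u[:, 1] * df["x2"]
           + np.random.normal(size=400))

# Each vc_formula entry defines one variance component within each group.
model = sm.MixedLM.from_formula(
    "y ~ x", groups="g",
    vc_formula={"a": "0 + x1", "b": "0 + x2"},
    data=df)
result = model.fit()
print(result.vcomp)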
Example #9
    def do1(self, reml, irf, ds_ix):

        # No need to check independent random effects when there is
        # only one of them.
        if irf and ds_ix < 6:
            return

        irfs = "irf" if irf else "drf"
        meth = "reml" if reml else "ml"

        rslt = R_Results(meth, irfs, ds_ix)

        # Fit the model
        md = MixedLM(rslt.endog, rslt.exog_fe, rslt.groups, rslt.exog_re)
        if not irf:  # Free random effects covariance
            mdf = md.fit(gtol=1e-7, reml=reml)
        else:  # Independent random effects
            k_fe = rslt.exog_fe.shape[1]
            k_re = rslt.exog_re.shape[1]
            free = MixedLMParams(k_fe, k_re)
            free.set_fe_params(np.ones(k_fe))
            free.set_cov_re(np.eye(k_re))
            mdf = md.fit(reml=reml, gtol=1e-7, free=free)

        assert_almost_equal(mdf.fe_params, rslt.coef, decimal=4)
        assert_almost_equal(mdf.cov_re, rslt.cov_re_r, decimal=4)
        assert_almost_equal(mdf.scale, rslt.scale_r, decimal=4)

        pf = rslt.exog_fe.shape[1]
        assert_almost_equal(rslt.vcov_r,
                            mdf.cov_params()[0:pf, 0:pf],
                            decimal=3)

        assert_almost_equal(mdf.llf, rslt.loglike[0], decimal=2)

        # Not supported in R
        if not irf:
            assert_almost_equal(mdf.random_effects.ix[0],
                                rslt.ranef_postmean,
                                decimal=3)
            assert_almost_equal(mdf.random_effects_cov[0],
                                rslt.ranef_condvar,
                                decimal=3)
Example #10
    def test_compare_numdiff(self):

        n_grp = 200
        grpsize = 5
        k_fe = 3
        k_re = 2

        for use_sqrt in False, True:
            for reml in False, True:
                for profile_fe in False, True:

                    np.random.seed(3558)
                    exog_fe = np.random.normal(size=(n_grp * grpsize, k_fe))
                    exog_re = np.random.normal(size=(n_grp * grpsize, k_re))
                    exog_re[:, 0] = 1
                    exog_vc = np.random.normal(size=(n_grp * grpsize, 3))
                    slopes = np.random.normal(size=(n_grp, k_re))
                    slopes[:, -1] *= 2
                    slopes = np.kron(slopes, np.ones((grpsize, 1)))
                    slopes_vc = np.random.normal(size=(n_grp, 3))
                    slopes_vc = np.kron(slopes_vc, np.ones((grpsize, 1)))
                    slopes_vc[:, -1] *= 2
                    re_values = (slopes * exog_re).sum(1)
                    vc_values = (slopes_vc * exog_vc).sum(1)
                    err = np.random.normal(size=n_grp * grpsize)
                    endog = exog_fe.sum(1) + re_values + vc_values + err
                    groups = np.kron(range(n_grp), np.ones(grpsize))

                    vc = {"a": {}, "b": {}}
                    for i in range(n_grp):
                        ix = np.flatnonzero(groups == i)
                        vc["a"][i] = exog_vc[ix, 0:2]
                        vc["b"][i] = exog_vc[ix, 2:3]

                    model = MixedLM(endog, exog_fe, groups, exog_re, exog_vc=vc, use_sqrt=use_sqrt)
                    rslt = model.fit(reml=reml)

                    loglike = loglike_function(model, profile_fe=profile_fe, has_fe=not profile_fe)

                    # Test the score at several points.
                    for kr in range(5):
                        fe_params = np.random.normal(size=k_fe)
                        cov_re = np.random.normal(size=(k_re, k_re))
                        cov_re = np.dot(cov_re.T, cov_re)
                        vcomp = np.random.normal(size=2) ** 2
                        params = MixedLMParams.from_components(fe_params, cov_re=cov_re, vcomp=vcomp)
                        params_vec = params.get_packed(has_fe=not profile_fe, use_sqrt=use_sqrt)

                        # Check scores
                        gr = -model.score(params, profile_fe=profile_fe)
                        ngr = nd.approx_fprime(params_vec, loglike)
                        assert_allclose(gr, ngr, rtol=1e-3)

                    # Check Hessian matrices at the MLE (we don't have
                    # the profile Hessian matrix and we don't care
                    # about the Hessian for the square root
                    # transformed parameter).
                    if (profile_fe is False) and (use_sqrt is False):
                        hess = -model.hessian(rslt.params_object)
                        params_vec = rslt.params_object.get_packed(use_sqrt=False, has_fe=True)
                        loglike_h = loglike_function(model, profile_fe=False, has_fe=True)
                        nhess = nd.approx_hess(params_vec, loglike_h)
                        assert_allclose(hess, nhess, rtol=1e-3)
Example #11
 def f(x):
     params = MixedLMParams.from_packed(x, model.k_fe, model.k_re, model.use_sqrt, has_fe=has_fe)
     return -model.loglike(params, profile_fe=profile_fe)
Example #12
    def test_compare_numdiff(self):

        import statsmodels.tools.numdiff as nd

        n_grp = 200
        grpsize = 5
        k_fe = 3
        k_re = 2

        for jl in 0, 1:
            for reml in False, True:
                for cov_pen_wt in 0, 10:

                    cov_pen = penalties.PSD(cov_pen_wt)

                    np.random.seed(3558)
                    exog_fe = np.random.normal(size=(n_grp * grpsize, k_fe))
                    exog_re = np.random.normal(size=(n_grp * grpsize, k_re))
                    exog_re[:, 0] = 1
                    slopes = np.random.normal(size=(n_grp, k_re))
                    slopes = np.kron(slopes, np.ones((grpsize, 1)))
                    re_values = (slopes * exog_re).sum(1)
                    err = np.random.normal(size=n_grp * grpsize)
                    endog = exog_fe.sum(1) + re_values + err
                    groups = np.kron(range(n_grp), np.ones(grpsize))

                    if jl == 0:
                        md = MixedLM(endog, exog_fe, groups, exog_re)
                        score = lambda x: -md.score_sqrt(x)
                        hessian = lambda x: -md.hessian_sqrt(x)
                    else:
                        md = MixedLM(endog,
                                     exog_fe,
                                     groups,
                                     exog_re,
                                     use_sqrt=False)
                        score = lambda x: -md.score_full(x)
                        hessian = lambda x: -md.hessian_full(x)
                    md.reml = reml
                    md.cov_pen = cov_pen
                    loglike = lambda x: -md.loglike(x)
                    rslt = md.fit()

                    # Test the score at several points.
                    for kr in range(5):
                        fe_params = np.random.normal(size=k_fe)
                        cov_re = np.random.normal(size=(k_re, k_re))
                        cov_re = np.dot(cov_re.T, cov_re)
                        params = MixedLMParams.from_components(
                            fe_params, cov_re)
                        if jl == 0:
                            params_vec = params.get_packed()
                        else:
                            params_vec = params.get_packed(use_sqrt=False)

                        # Check scores
                        gr = score(params)
                        ngr = nd.approx_fprime(params_vec, loglike)
                        assert_allclose(gr, ngr, rtol=1e-2)

                        # Hessian matrices don't agree well away from
                        # the MLE.
                        #if cov_pen_wt == 0:
                        #    hess = hessian(params)
                        #    nhess = nd.approx_hess(params_vec, loglike)
                        #    assert_allclose(hess, nhess, rtol=1e-2)

                    # Check Hessian matrices at the MLE.
                    if cov_pen_wt == 0:
                        hess = hessian(rslt.params_object)
                        params_vec = rslt.params_object.get_packed()
                        nhess = nd.approx_hess(params_vec, loglike)
                        assert_allclose(hess, nhess, rtol=1e-2)
Example #13
    def test_compare_numdiff(self, use_sqrt, reml, profile_fe):

        n_grp = 200
        grpsize = 5
        k_fe = 3
        k_re = 2

        np.random.seed(3558)
        exog_fe = np.random.normal(size=(n_grp * grpsize, k_fe))
        exog_re = np.random.normal(size=(n_grp * grpsize, k_re))
        exog_re[:, 0] = 1
        exog_vc = np.random.normal(size=(n_grp * grpsize, 3))
        slopes = np.random.normal(size=(n_grp, k_re))
        slopes[:, -1] *= 2
        slopes = np.kron(slopes, np.ones((grpsize, 1)))
        slopes_vc = np.random.normal(size=(n_grp, 3))
        slopes_vc = np.kron(slopes_vc, np.ones((grpsize, 1)))
        slopes_vc[:, -1] *= 2
        re_values = (slopes * exog_re).sum(1)
        vc_values = (slopes_vc * exog_vc).sum(1)
        err = np.random.normal(size=n_grp * grpsize)
        endog = exog_fe.sum(1) + re_values + vc_values + err
        groups = np.kron(range(n_grp), np.ones(grpsize))

        vc = {"a": {}, "b": {}}
        for i in range(n_grp):
            ix = np.flatnonzero(groups == i)
            vc["a"][i] = exog_vc[ix, 0:2]
            vc["b"][i] = exog_vc[ix, 2:3]

        model = MixedLM(
            endog,
            exog_fe,
            groups,
            exog_re,
            exog_vc=vc,
            use_sqrt=use_sqrt)
        rslt = model.fit(reml=reml)

        loglike = loglike_function(
            model, profile_fe=profile_fe, has_fe=not profile_fe)

        try:
            # Test the score at several points.
            for kr in range(5):
                fe_params = np.random.normal(size=k_fe)
                cov_re = np.random.normal(size=(k_re, k_re))
                cov_re = np.dot(cov_re.T, cov_re)
                vcomp = np.random.normal(size=2)**2
                params = MixedLMParams.from_components(
                    fe_params, cov_re=cov_re, vcomp=vcomp)
                params_vec = params.get_packed(
                    has_fe=not profile_fe, use_sqrt=use_sqrt)

                # Check scores
                gr = -model.score(params, profile_fe=profile_fe)
                ngr = nd.approx_fprime(params_vec, loglike)
                assert_allclose(gr, ngr, rtol=1e-3)

            # Check Hessian matrices at the MLE (we don't have
            # the profile Hessian matrix and we don't care
            # about the Hessian for the square root
            # transformed parameter).
            if (profile_fe is False) and (use_sqrt is False):
                hess = -model.hessian(rslt.params_object)
                params_vec = rslt.params_object.get_packed(
                    use_sqrt=False, has_fe=True)
                loglike_h = loglike_function(
                    model, profile_fe=False, has_fe=True)
                nhess = nd.approx_hess(params_vec, loglike_h)
                assert_allclose(hess, nhess, rtol=1e-3)
        except AssertionError:
            # See GH#5628; because this test fails unpredictably but only on
            #  OSX, we only xfail it there.
            if PLATFORM_OSX:
                pytest.xfail("fails on OSX due to unresolved "
                             "numerical differences")
            else:
                raise
Example #14
 def f(x):
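     # Negative score of the packed parameter vector x (compare the
     # log-likelihood closure in Example #5).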
     params = MixedLMParams.from_packed(x, model.k_fe, model.use_sqrt, has_fe=not profile_fe)
     return -model.score(params, profile_fe=profile_fe)
Example #15
 def f(x):
     params = MixedLMParams.from_packed(x,
                                        model.k_fe,
                                        model.use_sqrt,
                                        has_fe=not profile_fe)
     return -model.score(params, profile_fe=profile_fe)
Example #16
    def test_compare_numdiff(self):

        n_grp = 200
        grpsize = 5
        k_fe = 3
        k_re = 2

        for use_sqrt in False, True:
            for reml in False, True:
                for profile_fe in False, True:

                    np.random.seed(3558)
                    exog_fe = np.random.normal(size=(n_grp * grpsize, k_fe))
                    exog_re = np.random.normal(size=(n_grp * grpsize, k_re))
                    exog_re[:, 0] = 1
                    exog_vc = np.random.normal(size=(n_grp * grpsize, 3))
                    slopes = np.random.normal(size=(n_grp, k_re))
                    slopes[:, -1] *= 2
                    slopes = np.kron(slopes, np.ones((grpsize, 1)))
                    slopes_vc = np.random.normal(size=(n_grp, 3))
                    slopes_vc = np.kron(slopes_vc, np.ones((grpsize, 1)))
                    slopes_vc[:, -1] *= 2
                    re_values = (slopes * exog_re).sum(1)
                    vc_values = (slopes_vc * exog_vc).sum(1)
                    err = np.random.normal(size=n_grp * grpsize)
                    endog = exog_fe.sum(1) + re_values + vc_values + err
                    groups = np.kron(range(n_grp), np.ones(grpsize))

                    vc = {"a": {}, "b": {}}
                    for i in range(n_grp):
                        ix = np.flatnonzero(groups == i)
                        vc["a"][i] = exog_vc[ix, 0:2]
                        vc["b"][i] = exog_vc[ix, 2:3]

                    model = MixedLM(endog,
                                    exog_fe,
                                    groups,
                                    exog_re,
                                    exog_vc=vc,
                                    use_sqrt=use_sqrt)
                    rslt = model.fit(reml=reml)

                    loglike = loglike_function(model,
                                               profile_fe=profile_fe,
                                               has_fe=not profile_fe)

                    # Test the score at several points.
                    for kr in range(5):
                        fe_params = np.random.normal(size=k_fe)
                        cov_re = np.random.normal(size=(k_re, k_re))
                        cov_re = np.dot(cov_re.T, cov_re)
                        vcomp = np.random.normal(size=2)**2
                        params = MixedLMParams.from_components(fe_params,
                                                               cov_re=cov_re,
                                                               vcomp=vcomp)
                        params_vec = params.get_packed(has_fe=not profile_fe,
                                                       use_sqrt=use_sqrt)

                        # Check scores
                        gr = -model.score(params, profile_fe=profile_fe)
                        ngr = nd.approx_fprime(params_vec, loglike)
                        assert_allclose(gr, ngr, rtol=1e-3)

                    # Check Hessian matrices at the MLE (we don't have
                    # the profile Hessian matrix and we don't care
                    # about the Hessian for the square root
                    # transformed parameter).
                    if (profile_fe is False) and (use_sqrt is False):
                        hess = -model.hessian(rslt.params_object)
                        params_vec = rslt.params_object.get_packed(
                            use_sqrt=False, has_fe=True)
                        loglike_h = loglike_function(model,
                                                     profile_fe=False,
                                                     has_fe=True)
                        nhess = nd.approx_hess(params_vec, loglike_h)
                        assert_allclose(hess, nhess, rtol=1e-3)
Example #17
    def test_compare_numdiff(self, use_sqrt, reml, profile_fe):

        n_grp = 200
        grpsize = 5
        k_fe = 3
        k_re = 2

        np.random.seed(3558)
        exog_fe = np.random.normal(size=(n_grp * grpsize, k_fe))
        exog_re = np.random.normal(size=(n_grp * grpsize, k_re))
        exog_re[:, 0] = 1
        exog_vc = np.random.normal(size=(n_grp * grpsize, 3))
        slopes = np.random.normal(size=(n_grp, k_re))
        slopes[:, -1] *= 2
        slopes = np.kron(slopes, np.ones((grpsize, 1)))
        slopes_vc = np.random.normal(size=(n_grp, 3))
        slopes_vc = np.kron(slopes_vc, np.ones((grpsize, 1)))
        slopes_vc[:, -1] *= 2
        re_values = (slopes * exog_re).sum(1)
        vc_values = (slopes_vc * exog_vc).sum(1)
        err = np.random.normal(size=n_grp * grpsize)
        endog = exog_fe.sum(1) + re_values + vc_values + err
        groups = np.kron(range(n_grp), np.ones(grpsize))

        vc = {"a": {}, "b": {}}
        for i in range(n_grp):
            ix = np.flatnonzero(groups == i)
            vc["a"][i] = exog_vc[ix, 0:2]
            vc["b"][i] = exog_vc[ix, 2:3]

        model = MixedLM(endog,
                        exog_fe,
                        groups,
                        exog_re,
                        exog_vc=vc,
                        use_sqrt=use_sqrt)
        rslt = model.fit(reml=reml)

        loglike = loglike_function(model,
                                   profile_fe=profile_fe,
                                   has_fe=not profile_fe)

        try:
            # Test the score at several points.
            for kr in range(5):
                fe_params = np.random.normal(size=k_fe)
                cov_re = np.random.normal(size=(k_re, k_re))
                cov_re = np.dot(cov_re.T, cov_re)
                vcomp = np.random.normal(size=2)**2
                params = MixedLMParams.from_components(fe_params,
                                                       cov_re=cov_re,
                                                       vcomp=vcomp)
                params_vec = params.get_packed(has_fe=not profile_fe,
                                               use_sqrt=use_sqrt)

                # Check scores
                gr = -model.score(params, profile_fe=profile_fe)
                ngr = nd.approx_fprime(params_vec, loglike)
                assert_allclose(gr, ngr, rtol=1e-3)

            # Check Hessian matrices at the MLE (we do not have
            # the profile Hessian matrix and we do not care
            # about the Hessian for the square root
            # transformed parameter).
            if (profile_fe is False) and (use_sqrt is False):
                hess = -model.hessian(rslt.params_object)
                params_vec = rslt.params_object.get_packed(use_sqrt=False,
                                                           has_fe=True)
                loglike_h = loglike_function(model,
                                             profile_fe=False,
                                             has_fe=True)
                nhess = nd.approx_hess(params_vec, loglike_h)
                assert_allclose(hess, nhess, rtol=1e-3)
        except AssertionError:
            # See GH#5628; because this test fails unpredictably but only on
            #  OSX, we only xfail it there.
            if PLATFORM_OSX:
                pytest.xfail("fails on OSX due to unresolved "
                             "numerical differences")
            else:
                raise
Example #18
    def test_compare_numdiff(self):

        import statsmodels.tools.numdiff as nd

        n_grp = 200
        grpsize = 5
        k_fe = 3
        k_re = 2

        for jl in 0,1:
            for reml in False,True:
                for cov_pen_wt in 0,10:

                    cov_pen = penalties.PSD(cov_pen_wt)

                    np.random.seed(3558)
                    exog_fe = np.random.normal(size=(n_grp*grpsize, k_fe))
                    exog_re = np.random.normal(size=(n_grp*grpsize, k_re))
                    exog_re[:, 0] = 1
                    slopes = np.random.normal(size=(n_grp, k_re))
                    slopes = np.kron(slopes, np.ones((grpsize,1)))
                    re_values = (slopes * exog_re).sum(1)
                    err = np.random.normal(size=n_grp*grpsize)
                    endog = exog_fe.sum(1) + re_values + err
                    groups = np.kron(range(n_grp), np.ones(grpsize))

                    if jl == 0:
                        md = MixedLM(endog, exog_fe, groups, exog_re)
                        score = lambda x: -md.score_sqrt(x)
                        hessian = lambda x : -md.hessian_sqrt(x)
                    else:
                        md = MixedLM(endog, exog_fe, groups, exog_re, use_sqrt=False)
                        score = lambda x: -md.score_full(x)
                        hessian = lambda x: -md.hessian_full(x)
                    md.reml = reml
                    md.cov_pen = cov_pen
                    loglike = lambda x: -md.loglike(x)
                    rslt = md.fit()

                    # Test the score at several points.
                    for kr in range(5):
                        fe_params = np.random.normal(size=k_fe)
                        cov_re = np.random.normal(size=(k_re, k_re))
                        cov_re = np.dot(cov_re.T, cov_re)
                        params = MixedLMParams.from_components(fe_params, cov_re)
                        if jl == 0:
                            params_vec = params.get_packed()
                        else:
                            params_vec = params.get_packed(use_sqrt=False)

                        # Check scores
                        gr = score(params)
                        ngr = nd.approx_fprime(params_vec, loglike)
                        assert_allclose(gr, ngr, rtol=1e-2)

                        # Hessian matrices don't agree well away from
                        # the MLE.
                        #if cov_pen_wt == 0:
                        #    hess = hessian(params)
                        #    nhess = nd.approx_hess(params_vec, loglike)
                        #    assert_allclose(hess, nhess, rtol=1e-2)

                    # Check Hessian matrices at the MLE.
                    if cov_pen_wt == 0:
                        hess = hessian(rslt.params_object)
                        params_vec = rslt.params_object.get_packed()
                        nhess = nd.approx_hess(params_vec, loglike)
                        assert_allclose(hess, nhess, rtol=1e-2)
Example #19
    # priors.setD3(result.fe_params.values.reshape(mousediet.p, 1))
    # priors.setD4(pinv(result.cov_params().iloc[:mousediet.p,
    #                                            :mousediet.p].values))
    # priors.setPai(0.5*np.ones(mousediet.grp))
    # priors.setSigma2(result.scale)


    ## quadratic
    mousediet.setParams(p=3)

    data = mousediet.rawdata[mousediet.rawdata['diet'] == 99].copy()
    data['days2'] = data['days']**2
    model = sm.MixedLM.from_formula('weight ~ days + days2', data,
                                    re_formula='1 + days + days2',
                                    groups=data['id'])
    free = MixedLMParams(3, 3)
    free.set_fe_params(np.ones(3))
    free.set_cov_re(np.eye(3))
    result = model.fit(free=free)

    # uninformative prior
    priors.setD1(0.001)
    priors.setD2(0.001)

    priors.setD3(result.fe_params.values.reshape(mousediet.p, 1))
    priors.setD4(pinv(result.cov_params().iloc[:mousediet.p,
                                               :mousediet.p].values))
    priors.setPai(0.5*np.ones(mousediet.grp))
    priors.setSigma2(result.scale)

Example #20
    # priors.setD3(result.fe_params.values.reshape(mousediet.p, 1))
    # priors.setD4(pinv(result.cov_params().iloc[:mousediet.p,
    #                                            :mousediet.p].values))
    # priors.setPai(0.5*np.ones(mousediet.grp))
    # priors.setSigma2(result.scale)

    ## quadratic
    mousediet.setParams(p=3)

    data = mousediet.rawdata[mousediet.rawdata['diet'] == 99].copy()
    data['days2'] = data['days']**2
    model = sm.MixedLM.from_formula('weight ~ days + days2',
                                    data,
                                    re_formula='1 + days + days2',
                                    groups=data['id'])
    free = MixedLMParams(3, 3)
    free.set_fe_params(np.ones(3))
    free.set_cov_re(np.eye(3))
    result = model.fit(free=free)

    # uninformative prior
    priors.setD1(0.001)
    priors.setD2(0.001)

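    # Informative hyperparameters from the MixedLM fit: the fixed-effects
    # estimates and the pseudoinverse of their covariance matrix.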
    priors.setD3(result.fe_params.values.reshape(mousediet.p, 1))
    priors.setD4(
        pinv(result.cov_params().iloc[:mousediet.p, :mousediet.p].values))
    priors.setPai(0.5 * np.ones(mousediet.grp))
    priors.setSigma2(result.scale)

    mcmcrun(mousediet, priors, dirname)