def fitNB(X, Y):
    """Fit a negative binomial regression of Y on X (intercept added).

    Tries a plain maximum-likelihood fit first; if the optimizer reports
    non-convergence, falls back to a regularized fit. Returns the fitted
    parameter vector.

    NOTE(review): convergence of the fallback fit is not re-checked —
    presumably intentional best-effort behavior; confirm with callers.
    """
    design = add_constant(X)
    result = NegativeBinomial(Y, design).fit(
        disp=0, skip_hessian=True, maxiter=500)
    if not result.mle_retvals['converged']:
        # Retry with the regularized estimator on a fresh model instance.
        result = NegativeBinomial(Y, design).fit_regularized(
            disp=0,
            skip_hessian=True,
            maxiter=500)
    return result.params
# Example 2 (scraped-snippet separator; score: 0)
 def setupClass(cls):
     """Load the RandHIE dataset and fit an NB2 negative binomial model.

     ``res1`` is the Newton-fitted model; ``res2`` holds the reference
     BFGS results from the RandHIE results container.
     """
     dataset = sm.datasets.randhie.load()
     design = sm.add_constant(dataset.exog, prepend=False)
     cls.res1 = NegativeBinomial(dataset.endog, design, 'nb2').fit(
         method='newton', disp=0)
     reference = RandHIE()
     reference.negativebinomial_nb2_bfgs()
     cls.res2 = reference
# Example 3 (scraped-snippet separator; score: 0)
    def setup_class(cls):
        """Fit the negative binomial model from known-good start params.

        Stores the fitted results, their marginal effects, and the Stata
        reference values used for comparison.
        """
        # Convergence from the default start_params does not need checking
        # here; these starting values are known to work with Nelder-Mead.
        known_start = [
            13.1996, 0.8582, -2.8005, -1.5031, 2.3849, -8.5552, -2.88, 1.14
        ]
        model = NegativeBinomial(endog, exog)
        fitted = model.fit(start_params=known_start, method='nm', maxiter=2000)
        cls.res = fitted
        cls.margeff = fitted.get_margeff()

        cls.res1_slice = slice(None)
        cls.res1 = res_stata.results_negbin_margins_cont
        cls.rtol_fac = 5e1