Example #1
import numpy as np
import statsmodels.discrete.discrete_model as dm  # assumed alias for the module providing NegativeBinomial


def fit_nbinom(x):
    print("Fitting total read count to negative binomial distribution...")
    # Intercept-only NB2 fit: a column of ones as exog estimates log(mu) and alpha.
    params = dm.NegativeBinomial(x, np.ones_like(x)).fit(maxiter=200000,
                                                         disp=0).params
    mu = np.exp(params[0])  # fitted mean
    alpha = params[1]       # NB2 dispersion
    r = alpha**-1           # size parameter
    p = r / (r + mu)        # success probability (scipy/numpy convention)
    return r, p
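
The (r, p) pair returned above matches the SciPy/NumPy negative-binomial convention (mean r(1-p)/p, NB2 variance mu + mu^2/r), so one way to sanity-check the fit is to simulate counts with known parameters and refit them. A minimal sketch, assuming fit_nbinom is defined as above and scipy is available; all values are illustrative only:

import numpy as np
from scipy import stats

r_true, p_true = 5.0, 0.25
# simulate overdispersed counts with known size/probability parameters
x = stats.nbinom.rvs(r_true, p_true, size=10_000, random_state=0)

r_hat, p_hat = fit_nbinom(x)
print(r_hat, p_hat)  # should land close to (5.0, 0.25)
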
Example #2
    def setup_class(cls):
        cls.res2 = results_st.results_negbin_exposure_clu #nonrobust
        mod = smd.NegativeBinomial(endog, exog, exposure=exposure)
        cls.res1 = res1 = mod.fit(disp=False, cov_type='cluster',
                                  cov_kwds=dict(groups=group,
                                                use_correction=True,
                                                df_correction=True),  #TODO has no effect
                                  use_t=False, #True,
                                  )
        cls.bse_rob = cls.res1.bse

        cls.corr_fact = cls.get_correction_factor(cls.res1)
Example #3
    def setup_class(cls):
        cls.res2 = results_st.results_negbin_clu
        mod = smd.NegativeBinomial(endog, exog)
        cls.res1 = res1 = mod.fit(disp=False, gtol=1e-7)

        get_robustcov_results(cls.res1._results, 'cluster',
                                                  groups=group,
                                                  use_correction=True,
                                                  df_correction=True,  #TODO has no effect
                                                  use_t=False, #True,
                                                  use_self=True)
        cls.bse_rob = cls.res1.bse

        cls.corr_fact = cls.get_correction_factor(cls.res1)
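
Examples #2 through #7 appear to be snippets from a statsmodels test suite, so endog, exog, exposure, group, and results_st are module-level fixtures defined elsewhere in that file rather than in the snippets themselves. A self-contained sketch of the same cluster-robust fitting pattern with made-up data (all names and values below are hypothetical) could look like this:

import numpy as np
import statsmodels.discrete.discrete_model as smd

rng = np.random.default_rng(0)
n = 500
exog = np.column_stack([np.ones(n), rng.normal(size=n)])  # intercept + one regressor
mu = np.exp(exog @ np.array([0.5, 0.3]))
endog = rng.negative_binomial(5, 5 / (5 + mu))            # NB2-style counts with size 5
group = rng.integers(0, 25, size=n)                       # 25 clusters

mod = smd.NegativeBinomial(endog, exog)
res = mod.fit(disp=False, cov_type='cluster',
              cov_kwds=dict(groups=group, use_correction=True))
print(res.bse)  # cluster-robust standard errors
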
Example #4
    def setup_class(cls):
        cls.res2 = results_st.results_negbin_exposure_clu #nonrobust
        mod = smd.NegativeBinomial(endog, exog, exposure=exposure)
        cls.res1 = res1 = mod.fit(disp=False, cov_type='cluster',
                                  cov_kwds=dict(groups=group,
                                                use_correction=True,
                                                df_correction=True),  #TODO has no effect
                                  use_t=False, #True,
                                  )
        cls.bse_rob = cls.res1.bse

        nobs, k_vars = mod.exog.shape
        k_params = len(cls.res1.params)
        #n_groups = len(np.unique(group))
        corr_fact = (nobs-1.) / float(nobs - k_params)
        # for bse we need sqrt of correction factor
        cls.corr_fact = np.sqrt(corr_fact)
Example #5
    def setup_class(cls):
        cls.res2 = results_st.results_negbin_clu
        mod = smd.NegativeBinomial(endog, exog)
        cls.res1 = res1 = mod.fit(disp=False, gtol=1e-7)

        get_robustcov_results(cls.res1._results, 'cluster',
                                                  groups=group,
                                                  use_correction=True,
                                                  df_correction=True,  #TODO has no effect
                                                  use_t=False, #True,
                                                  use_self=True)
        cls.bse_rob = cls.res1.bse

        nobs, k_vars = mod.exog.shape
        k_params = len(cls.res1.params)
        #n_groups = len(np.unique(group))
        corr_fact = (nobs-1.) / float(nobs - k_params)
        # for bse we need sqrt of correction factor
        cls.corr_fact = np.sqrt(corr_fact)
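
The correction factor computed in Examples #4 and #5 is the small-sample degrees-of-freedom adjustment (nobs - 1) / (nobs - k_params) applied to the robust covariance matrix; the tests take its square root because bse is the square root of the covariance diagonal. A tiny numeric illustration with hypothetical values:

import numpy as np

nobs, k_params = 200, 4                       # hypothetical sample size and parameter count
corr_fact = (nobs - 1.) / float(nobs - k_params)
bse_rob = np.array([0.12, 0.08, 0.05, 0.30])  # hypothetical robust standard errors
bse_adj = np.sqrt(corr_fact) * bse_rob        # scaled values to compare against reference results
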
Example #6
    def setup_class(cls):
        cls.res2 = results_st.results_negbin_exposure_clu  #nonrobust
        mod = smd.NegativeBinomial(endog, exog, exposure=exposure)
        cls.res1 = mod.fit(disp=False)
        cls.get_robust_clu()
Example #7
    def setup_class(cls):
        cls.res2 = results_st.results_negbin_clu
        mod = smd.NegativeBinomial(endog, exog)
        cls.res1 = mod.fit(disp=False, gtol=1e-7)
        cls.get_robust_clu()
Example #8
def get_model_params_pergene(
    gene_umi, model_matrix, method="theta_ml"
):
    # Fit per-gene NB regression coefficients and the dispersion theta.
    # `method` selects the theta estimator (statsmodels NB2 fit, ML, or one of
    # the JAX / L-BFGS / autograd variants); helpers such as _process_y,
    # estimate_mu_poisson and the theta/alpha fitters, plus numpy as `npy`,
    # are assumed to be imported or defined elsewhere in the module.
    gene_umi = _process_y(gene_umi)
    if method == "sm_nb":
        model = dm.NegativeBinomial(gene_umi, model_matrix, loglike_method="nb2")
        params = model.fit(maxiter=50, tol=1e-3, disp=0).params
        theta = 1 / params[-1]
        if theta >= 1e5:
            theta = npy.inf
        params = dict(zip(model_matrix.design_info.column_names, params[:-1]))
        params["theta"] = theta
    elif method == "theta_ml":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        theta = theta_ml(y=gene_umi, mu=mu)
        if theta >= 1e5:
            theta = npy.inf
        params["theta"] = theta
    elif method == "jax_jit":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        gene_umi_jax = jax.device_put(gene_umi)
        mu_jax = jax.device_put(mu)
        theta = float(
            fit_nbinom_bfgs_jit(y=gene_umi_jax, mu=mu_jax).block_until_ready()
        )
        if theta < 0:
            # replace with moment based estimator
            theta = mu ** 2 / (npy.var(gene_umi) - mu)
            if theta < 0:
                theta = npy.inf
        params["theta"] = theta
    elif method == "jax_alpha_jit":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        gene_umi_jax = jax.device_put(gene_umi)
        mu_jax = jax.device_put(mu)
        theta = float(
            fit_nbinom_bfgs_alpha_jit(y=gene_umi_jax, mu=mu_jax).block_until_ready()
        )
        if theta < 0:
            # replace with moment based estimator
            theta = mu ** 2 / (npy.var(gene_umi) - mu)
            if theta < 0:
                theta = npy.inf
        params["theta"] = theta
    elif method == "jax_theta_ml":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        theta = jax_theta_ml(y=gene_umi, mu=mu)
        params["theta"] = theta
    elif method == "alpha_lbfgs":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        theta = alpha_lbfgs(y=gene_umi, mu=mu)
        params["theta"] = theta
    elif method == "jax_alpha_lbfgs":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        theta = jax_alpha_lbfgs(y=gene_umi, mu=mu)
        params["theta"] = theta
    elif method == "theta_lbfgs":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        theta = theta_lbfgs(y=gene_umi, mu=mu)
        params["theta"] = theta
    elif method == "autograd":
        params = estimate_mu_poisson(gene_umi, model_matrix)
        coef = params["coef"]
        mu = params["mu"]
        params = dict(zip(model_matrix.design_info.column_names, coef))
        theta = fit_nbinom_lbfgs_autograd(y=gene_umi, mu=mu)
        params["theta"] = theta
    return params
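
get_model_params_pergene reads column names from model_matrix.design_info, which suggests a patsy design matrix, and returns a dict of coefficients plus a "theta" dispersion entry. A minimal call sketch for the "sm_nb" path, with purely hypothetical data and column names:

import numpy as np
import pandas as pd
import patsy

rng = np.random.default_rng(0)
cell_meta = pd.DataFrame({"log_umi": np.log10(rng.integers(1_000, 10_000, size=500))})
model_matrix = patsy.dmatrix("~log_umi", cell_meta)  # Intercept + log_umi columns
gene_umi = rng.poisson(2.0, size=500)                # stand-in counts for one gene

params = get_model_params_pergene(gene_umi, model_matrix, method="sm_nb")
# e.g. {"Intercept": ..., "log_umi": ..., "theta": ...}
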