Example 1
    def fit(self, framework='GLM', Quasi=False):
        """
        Method that fits a particular count model using the appropriate
        estimation technique. Models include Poisson GLM, Negative Binomial GLM,
        and Quasi-Poisson; at the moment Poisson GLM and Quasi-Poisson are the
        only options.

        TODO: add zero inflated variants and hurdle variants.

        Parameters
        ----------
        framework           : string
                              estimation framework; default is 'GLM', which is
                              currently the only option
        Quasi               : boolean
                              True to fit a Quasi-Poisson GLM rather than a
                              Poisson GLM; default is False
        """
        if (framework.lower() == 'glm'):
            if not Quasi:
                results = GLM(self.y,
                              self.X,
                              family=Poisson(),
                              constant=self.constant).fit()
            else:
                results = GLM(self.y,
                              self.X,
                              family=QuasiPoisson(),
                              constant=self.constant).fit()
            return CountModelResults(results)

        else:
            raise NotImplementedError(
                'GLM is the only estimation framework currently implemented')
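
A minimal usage sketch for the fit method above. It assumes the enclosing class
(called CountModel here for illustration) stores y, X, and constant on
__init__, as the body of fit implies; the class name and constructor signature
are assumptions, not taken from the source.

    import numpy as np

    np.random.seed(0)
    X = np.random.uniform(0, 1, (100, 2))
    y = np.random.poisson(np.exp(0.5 + X.dot([0.8, -0.3]))).reshape((-1, 1))

    model = CountModel(y, X)                                # class name assumed
    poisson_results = model.fit(framework='GLM')            # Poisson GLM
    quasi_results = model.fit(framework='GLM', Quasi=True)  # Quasi-Poisson GLM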
Example 2
    def __init__(self,
                 coords,
                 y,
                 X,
                 bw,
                 family=Gaussian(),
                 offset=None,
                 sigma2_v1=False,
                 kernel='bisquare',
                 fixed=False,
                 constant=True):
        """
        Initialize class
        """
        GLM.__init__(self, y, X, family, constant=constant)
        self.constant = constant
        self.sigma2_v1 = sigma2_v1
        self.coords = coords
        self.bw = bw
        self.kernel = kernel
        self.fixed = fixed
        if offset is None:
            # default offset of one for every observation (i.e. no offset)
            self.offset = np.ones((self.n, 1))
        else:
            self.offset = offset * 1.0
        self.fit_params = {}
        # kernel-based spatial weights built from the coordinates and bandwidth
        self.W = self._build_W(fixed, kernel, coords, bw)
        self.points = None
        self.exog_scale = None
        self.exog_resid = None
        self.P = None
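
A hypothetical instantiation of the initializer above. The enclosing class name
(GWR) and the bandwidth value are assumptions chosen for illustration; the
family argument is left at its Gaussian() default so no extra imports are
needed.

    import numpy as np

    np.random.seed(0)
    n = 50
    coords = list(zip(np.random.uniform(0, 10, n), np.random.uniform(0, 10, n)))
    y = np.random.normal(size=(n, 1))
    X = np.random.normal(size=(n, 2))

    # GWR is the assumed name of the enclosing class
    model = GWR(coords, y, X, bw=2.5, kernel='bisquare', fixed=False)
    # model.W now holds the kernel weights built by _build_W from coords and bw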
Example 3
def summaryGLM(self):
    """
    Build the 'Global Regression Results' block of the summary by fitting a
    global (non-spatial) GLM to the same data.
    """
    XNames = ["X" + str(i) for i in range(self.k)]
    glm_rslt = GLM(self.model.y, self.model.X, constant=False,
                   family=self.family).fit()

    summary = "%s\n" % ('Global Regression Results')
    summary += '-' * 75 + '\n'

    if isinstance(self.family, Gaussian):
        summary += "%-62s %12.3f\n" % ('Residual sum of squares:', glm_rslt.deviance)
        summary += "%-62s %12.3f\n" % ('Log-likelihood:', glm_rslt.llf)
        summary += "%-62s %12.3f\n" % ('AIC:', glm_rslt.aic)
        summary += "%-62s %12.3f\n" % ('AICc:', get_AICc(glm_rslt))
        summary += "%-62s %12.3f\n" % ('BIC:', glm_rslt.bic)
        summary += "%-62s %12.3f\n" % ('R2:', glm_rslt.D2)
        summary += "%-62s %12.3f\n\n" % ('Adj. R2:', glm_rslt.adj_D2)
    else:
        summary += "%-62s %12.3f\n" % ('Deviance:', glm_rslt.deviance)
        summary += "%-62s %12.3f\n" % ('Log-likelihood:', glm_rslt.llf)
        summary += "%-62s %12.3f\n" % ('AIC:', glm_rslt.aic)
        summary += "%-62s %12.3f\n" % ('AICc:', get_AICc(glm_rslt))
        summary += "%-62s %12.3f\n" % ('BIC:', glm_rslt.bic)
        summary += "%-62s %12.3f\n" % ('Percent deviance explained:', glm_rslt.D2)
        summary += "%-62s %12.3f\n\n" % ('Adj. percent deviance explained:', glm_rslt.adj_D2)

    summary += "%-31s %10s %10s %10s %10s\n" % ('Variable', 'Est.', 'SE', 't(Est/SE)', 'p-value')
    summary += "%-31s %10s %10s %10s %10s\n" % ('-' * 31, '-' * 10, '-' * 10, '-' * 10, '-' * 10)
    for i in range(self.k):
        summary += "%-31s %10.3f %10.3f %10.3f %10.3f\n" % (
            XNames[i], glm_rslt.params[i], glm_rslt.bse[i],
            glm_rslt.tvalues[i], glm_rslt.pvalues[i])
    summary += "\n"
    return summary
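
A standalone sketch of the fixed-width formatting the summary relies on:
"%-62s" left-justifies the label in a 62-character field and "%12.3f"
right-aligns the value with three decimals, which is what keeps the rows of
the printed table aligned. The values below are made up for illustration.

    row = "%-62s %12.3f\n" % ('Log-likelihood:', -123.456)
    header = "%-31s %10s %10s %10s %10s\n" % ('Variable', 'Est.', 'SE',
                                              't(Est/SE)', 'p-value')
    line = "%-31s %10.3f %10.3f %10.3f %10.3f\n" % ('X0', 1.234, 0.210,
                                                    5.876, 0.000)
    print(row + header + line)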
Example 4
import numpy as np
from types import FunctionType
from scipy import stats
# The GLM import path below is an assumption (spglm-style layout); adjust to
# wherever GLM lives in the surrounding package.
from spglm.glm import GLM


def alpha_disp(model, alt_var=lambda x: x):
    """
    Test the hypothesis that var[y] = mu (equidispersion) against the
    alternative hypothesis that var[y] = mu + alpha * alt_var(mu) where mu
    is the expected value of y, alpha is an estimated coefficient, and
    alt_var() specifies an alternative variance as a function of mu.
    alt_var=lambda x:x corresponds to an alternative hypothesis of a negative
    binomimal model with a linear variance function and alt_var=lambda
    x:x**2 correspinds to an alternative hypothesis of a negative binomial
    model with a quadratic varaince function. 

    alpha > 0: overdispersion
    alpha = 1: equidispersion
    alpha < 0: underdispersion

    Parameters
    ----------
    model       : Model results class
                  function can only be called on a sucessfully fitted model
                  which has a valid response variable, y, and a valid
                  predicted response variable, yhat.
    alt_var     : function
                  specifies an alternative varaince as a function of mu.
                  Function must take a single scalar as input and return a
                  single scalar as output
    Returns
    -------
    array       : [alpha coefficient, tvalue of alpha, pvalue of alpha]
        
    """
    try:
        y = model.y.reshape((-1, 1))
        yhat = model.yhat.reshape((-1, 1))
        # dependent variable of the auxiliary regression: ((y - yhat)**2 - y) / yhat
        ytest = (((y - yhat)**2 - y) / yhat).reshape((-1, 1))
    except Exception:
        raise AttributeError(
            "Make sure model passed has been estimated and has a valid 'y' and 'yhat' attribute"
        )

    if isinstance(alt_var, FunctionType):
        # regressor: the alternative variance evaluated at yhat, scaled by yhat
        X = (alt_var(yhat) / yhat).reshape((-1, 1))
        # auxiliary regression without an intercept; its slope estimates alpha
        test_results = GLM(ytest, X, constant=False).fit()
        alpha = test_results.params[0]
        zval = test_results.tvalues[0]
        # one-sided (upper-tail) p-value for the overdispersion alternative
        pval = stats.norm.sf(zval)
    else:
        raise TypeError(
            "The alternative variance function, 'alt_var', must be a valid function"
        )

    return np.array([alpha, zval, pval])
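
A hypothetical call to alpha_disp. Since the function only needs an object
exposing y and yhat (plus a working GLM for the auxiliary regression), the
sketch below fakes a fitted Poisson model with a SimpleNamespace rather than
assuming any particular results class; the simulated data are for illustration
only.

    import numpy as np
    from types import SimpleNamespace

    np.random.seed(1)
    mu = np.exp(1.0 + 0.5 * np.random.uniform(0, 1, 200))
    fake_fit = SimpleNamespace(y=np.random.poisson(mu), yhat=mu)

    alpha, zval, pval = alpha_disp(fake_fit)                     # linear alternative
    alpha2, zval2, pval2 = alpha_disp(fake_fit, lambda x: x**2)  # quadratic alternative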