Example No. 1
class TimeSeriesModelResults(base.LikelihoodModelResults):
    def __init__(self, model, params, normalized_cov_params, scale=1.):
        self.data = model.data
        super(TimeSeriesModelResults,
                self).__init__(model, params, normalized_cov_params, scale)


class TimeSeriesResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(base.LikelihoodResultsWrapper._wrap_attrs,
                                    _attrs)
    _methods = {'predict' : 'dates'}
    _wrap_methods = wrap.union_dicts(base.LikelihoodResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(TimeSeriesResultsWrapper,  # noqa:E305
                      TimeSeriesModelResults)


if __name__ == "__main__":
    import statsmodels.api as sm
    import pandas

    data = sm.datasets.macrodata.load(as_pandas=False)

    #make a DataFrame
    #TODO: attach a DataFrame to some of the datasets, for quicker use
    dates = [str(int(x[0])) +':'+ str(int(x[1])) \
             for x in data.data[['year','quarter']]]

    df = pandas.DataFrame(data.data[['realgdp','realinv','realcons']], index=dates)
    ex_mod = TimeSeriesModel(df)
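
All of these snippets end by registering a results wrapper with wrap.populate_wrapper. As a rough mental model only (a simplified, self-contained sketch, not the statsmodels implementation; MiniResults and MiniResultsWrapper are made-up names), the wrapper holds a plain results object and re-attaches labels to the attributes listed in _wrap_attrs:

import pandas as pd

class MiniResults:
    def __init__(self, params):
        self.params = params

class MiniResultsWrapper:
    # maps attribute name -> how its output should be labelled
    _wrap_attrs = {'params': 'columns'}

    def __init__(self, results, index):
        self._results = results
        self._index = index

    def __getattr__(self, name):
        value = getattr(self._results, name)
        if name in self._wrap_attrs:
            # re-attach the exog names, as the real wrappers do for pandas input
            return pd.Series(value, index=self._index)
        return value

wrapped = MiniResultsWrapper(MiniResults([1.5, -0.3]), index=['const', 'x1'])
print(wrapped.params)   # a labelled Series instead of a bare list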
Example No. 2
        Notes
        -----
        Support for multi-link and multi-exog models is still experimental
        in MLEInfluence. Interface and some definitions might still change.

        Note: Difference from R's betareg: betareg has the same generalized
        leverage as this model. However, it uses a linear-approximation hat
        matrix to scale and studentize influence and residual statistics.
        MLEInfluence uses the generalized leverage as hat_matrix_diag.
        Additionally, MLEInfluence uses Pearson residuals for residual
        analysis.

        References
        ----------
        todo

        """
        from statsmodels.stats.outliers_influence import MLEInfluence
        return MLEInfluence(self)

    def bootstrap(self, *args, **kwargs):
        raise NotImplementedError


class BetaResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(BetaResultsWrapper, BetaResults)
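
A hedged usage sketch for the get_influence method above. The dataset and model here (a binomial GLM on the spector data) are illustrative assumptions, not part of the snippet; the point is only that get_influence() hands back an MLEInfluence-based object whose hat_matrix_diag is the generalized leverage described in the note.

import statsmodels.api as sm

spector = sm.datasets.spector.load_pandas()
exog = sm.add_constant(spector.exog)
res = sm.GLM(spector.endog, exog, family=sm.families.Binomial()).fit()

infl = res.get_influence()          # MLEInfluence-based influence object
print(infl.hat_matrix_diag[:5])     # generalized leverage
print(infl.summary_frame().head())  # per-observation influence measures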
Example No. 3
    if f1 <= f + L1_wt * np.abs(x) + 1e-10:
        return x + h

    # Fallback for models where the loss is not quadratic
    from scipy.optimize import brent
    x_opt = brent(func, args=(model, ), brack=(x - 1, x + 1), tol=tol)
    return x_opt


class RegularizedResults(Results):
    def __init__(self, model, params):
        super(RegularizedResults, self).__init__(model, params)

    @cache_readonly
    def fittedvalues(self):
        return self.model.predict(self.params)


class RegularizedResultsWrapper(wrap.ResultsWrapper):
    _attrs = {
        'params': 'columns',
        'resid': 'rows',
        'fittedvalues': 'rows',
    }
    _wrap_attrs = _attrs


wrap.populate_wrapper(
    RegularizedResultsWrapper,  # noqa:E305
    RegularizedResults)
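
The opening lines above fall back to scipy.optimize.brent when the quadratic step does not reduce the penalized objective. A minimal, self-contained illustration of that call pattern; func here is a made-up quadratic stand-in for the model's one-coefficient objective.

import numpy as np
from scipy.optimize import brent

def func(x, model):
    # stand-in smooth objective for a single coefficient
    curvature, center = model
    return 0.5 * curvature * (x - center) ** 2

model = (2.0, 1.0)
x = 0.0                                   # current value of the coefficient
x_opt = brent(func, args=(model,), brack=(x - 1, x + 1), tol=1e-8)
print(x_opt)                              # approximately 1.0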
Example No. 4
    params : array
        Fitted parameters
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    cov_type : string
        The type of covariance matrix estimator to use. Can be one of 'approx',
        'opg', 'robust', or 'none'.

    Attributes
    ----------
    model : Model instance
        A reference to the model that was fit.
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    nobs : float
        The number of observations used to fit the model.
    params : array
        The parameters of the model.
    scale : float
        This is currently set to 1.0 and not used by the model or its results.

    """
    pass


class MarkovRegressionResultsWrapper(
        markov_switching.MarkovSwitchingResultsWrapper):
    pass
wrap.populate_wrapper(MarkovRegressionResultsWrapper,  # noqa:E305
                      MarkovRegressionResults)
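
A hedged usage sketch for a results object of this kind. The synthetic two-regime data, k_regimes=2 and switching_variance=True are illustrative assumptions; fit() returns the results class documented above, wrapped by MarkovRegressionResultsWrapper.

import numpy as np
import statsmodels.api as sm

np.random.seed(0)
endog = np.concatenate([np.random.normal(0.0, 1.0, size=100),
                        np.random.normal(3.0, 2.0, size=100)])

mod = sm.tsa.MarkovRegression(endog, k_regimes=2, switching_variance=True)
res = mod.fit()
print(res.params)     # regime-specific means and variances, transition probabilities
print(res.summary())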
Example No. 5
            dates = self.data.dates._mpl_repr()
        else:
            dates = np.arange(self.nobs)
        d = max(self.nobs_diffuse, self.loglikelihood_burn)

        # Plot cusum series and reference line
        ax.plot(dates[d:], self.cusum_squares, label='CUSUM of squares')
        ref_line = (np.arange(d, self.nobs) - d) / (self.nobs - d)
        ax.plot(dates[d:], ref_line, 'k', alpha=0.3)

        # Plot significance bounds
        lower_line, upper_line = self._cusum_squares_significance_bounds(alpha)
        ax.plot([dates[d], dates[-1]], upper_line, 'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[d], dates[-1]], lower_line, 'k--')

        ax.legend(loc=legend_loc)

        return fig


class RecursiveLSResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(RecursiveLSResultsWrapper,  # noqa:E305
                      RecursiveLSResults)
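
For reference, the quantity being plotted: the CUSUM-of-squares statistic is the running share of the total squared (recursive) residuals, so it rises from 0 to 1, and the straight reference line is its expected path under a stable model. A small stand-alone sketch, where resid is a made-up stand-in for the recursive residuals:

import numpy as np

rng = np.random.default_rng(0)
resid = rng.normal(size=200)                     # stand-in recursive residuals
cusum_squares = np.cumsum(resid**2) / np.sum(resid**2)
ref_line = np.arange(1, len(resid) + 1) / len(resid)
print(np.max(np.abs(cusum_squares - ref_line)))  # deviation from the reference path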
Example No. 6
            summary.tables.append(table)

            # Add a table for all other parameters
            masks = []
            for m in (endog_masks, [state_cov_mask]):
                m = np.array(m).flatten()
                if len(m) > 0:
                    masks.append(m)
            masks = np.concatenate(masks)
            inverse_mask = np.array(list(set(indices).difference(set(masks))))
            if len(inverse_mask) > 0:
                table = make_table(self,
                                   inverse_mask,
                                   "Other parameters",
                                   strip_end=False)
                summary.tables.append(table)

        return summary

    summary.__doc__ = MLEResults.summary.__doc__


class VARMAXResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs, _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods, _methods)


wrap.populate_wrapper(VARMAXResultsWrapper, VARMAXResults)
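
A hedged usage sketch for VARMAX; the bivariate white-noise data and the VAR(1) order are illustrative assumptions. summary() assembles the per-equation, error-covariance and "Other parameters" tables built in the snippet above.

import numpy as np
import statsmodels.api as sm

np.random.seed(0)
endog = np.random.normal(size=(200, 2))        # two stationary series

mod = sm.tsa.VARMAX(endog, order=(1, 0))       # VAR(1), no MA terms
res = mod.fit(disp=False)
print(res.summary())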
Example No. 7
        # add warnings/notes, added to text format only
        etext = []
        wstr = \
'''If the model instance has been used for another fit with different fit
parameters, then the fit options might not be the correct ones anymore.'''
        etext.append(wstr)

        if etext:
            smry.add_extra_txt(etext)

        return smry

class RLMResultsWrapper(lm.RegressionResultsWrapper):
    pass
wrap.populate_wrapper(RLMResultsWrapper, RLMResults)

if __name__ == "__main__":
#NOTE: This is to be removed
#Delivery Time Data is taken from Montgomery and Peck
    import statsmodels.api as sm

#delivery time(minutes)
    endog = np.array([16.68, 11.50, 12.03, 14.88, 13.75, 18.11, 8.00, 17.83,
    79.24, 21.50, 40.33, 21.00, 13.50, 19.75, 24.00, 29.00, 15.35, 19.00,
    9.50, 35.10, 17.90, 52.32, 18.75, 19.83, 10.75])

#number of cases, distance (Feet)
    exog = np.array([[7, 3, 3, 4, 6, 7, 2, 7, 30, 5, 16, 10, 4, 6, 9, 10, 6,
    7, 3, 17, 10, 26, 9, 8, 4], [560, 220, 340, 80, 150, 330, 110, 210, 1460,
    605, 688, 215, 255, 462, 448, 776, 200, 132, 36, 770, 140, 810, 450, 635,
Example No. 8
            av2 = k1 * av - k2 * vn

            vm = np.eye(p) - 2 * sum(cv) / len(cv) + av2

        a, b = np.linalg.eigh(vm)
        jj = np.argsort(-a)
        a = a[jj]
        b = b[:, jj]
        params = np.linalg.solve(self._covxr.T, b)

        results = DimReductionResults(self, params, eigs=a)
        return DimReductionResultsWrapper(results)


class DimReductionResults(model.Results):

    def __init__(self, model, params, eigs):
        super(DimReductionResults, self).__init__(
              model, params)
        self.eigs = eigs


class DimReductionResultsWrapper(wrap.ResultsWrapper):
    _attrs = {
        'params': 'columns',
    }
    _wrap_attrs = _attrs

wrap.populate_wrapper(DimReductionResultsWrapper,
                      DimReductionResults)
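
A self-contained illustration of the eigen-sorting step above: np.linalg.eigh returns eigenvalues in ascending order, so argsort(-a) reorders both the eigenvalues and the corresponding eigenvectors to descending order before the directions are back-transformed.

import numpy as np

vm = np.array([[2.0, 0.5],
               [0.5, 1.0]])
a, b = np.linalg.eigh(vm)     # ascending eigenvalues
jj = np.argsort(-a)
a = a[jj]                     # now descending
b = b[:, jj]                  # columns reordered to match
print(a)
print(b[:, 0])                # eigenvector of the largest eigenvalue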
Example No. 9
        a coefficient is constrained to be zero (in which case it is zero).
    polynomial_trend : array
        Array containing trend polynomial coefficients, ordered from lowest
        degree to highest. Initialized with ones, unless a coefficient is
        constrained to be zero (in which case it is zero).
    model_orders : list of int
        The orders of each of the polynomials in the model.
    param_terms : list of str
        List of parameters actually included in the model, in sorted order.

    See Also
    --------
    dismalpy.ssm.mlemodel.MLEResults
    dismalpy.ssm.kalman_smoother.SmootherResults
    dismalpy.ssm.kalman_filter.FilterResults
    dismalpy.ssm.representation.FrozenRepresentation
    """
    pass


class SARIMAXResultsWrapper(mlemodel.MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_methods,
                                     _methods)


wrap.populate_wrapper(SARIMAXResultsWrapper, SARIMAXResults)
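
A hedged usage sketch for the SARIMAX results documented above; the random-walk data and the (1, 1, 1) order are illustrative assumptions.

import numpy as np
import statsmodels.api as sm

np.random.seed(0)
endog = np.cumsum(np.random.normal(size=200))      # random-walk-like series

mod = sm.tsa.statespace.SARIMAX(endog, order=(1, 1, 1))
res = mod.fit(disp=False)                          # SARIMAXResults, wrapped as above
print(res.params)
print(res.forecast(steps=5))                       # out-of-sample forecast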
Example No. 10
    ----------
    model : UnobservedComponents instance
        The fitted model instance

    Attributes
    ----------
    specification : dictionary
        Dictionary including all attributes from the unobserved components
        model instance.

    See Also
    --------
    dismalpy.ssm.mlemodel.MLEResults
    dismalpy.ssm.kalman_smoother.SmootherResults
    dismalpy.ssm.kalman_filter.FilterResults
    dismalpy.ssm.representation.FrozenRepresentation
    """
    pass


class UnobservedComponentsResultsWrapper(
        mlemodel.MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(
        mlemodel.MLEResultsWrapper._wrap_attrs, _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(
        mlemodel.MLEResultsWrapper._wrap_methods, _methods)
wrap.populate_wrapper(UnobservedComponentsResultsWrapper,
                      UnobservedComponentsResults)
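
A hedged usage sketch for the unobserved-components results above; the synthetic local-level data and the 'local level' specification are illustrative assumptions.

import numpy as np
import statsmodels.api as sm

np.random.seed(0)
level = 10 + np.cumsum(np.random.normal(scale=0.1, size=200))
endog = level + np.random.normal(size=200)

mod = sm.tsa.UnobservedComponents(endog, level='local level')
res = mod.fit(disp=False)
print(res.params)                 # observation and level variances
print(res.level.smoothed[:5])     # smoothed level component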
Example No. 11
    model : DynamicFactor instance
        The fitted model instance

    Attributes
    ----------
    specification : dictionary
        Dictionary including all attributes from the DynamicFactor model
        instance.
    coefficient_matrices_var : array
        Array containing autoregressive lag polynomial coefficient matrices,
        ordered from lowest degree to highest.

    See Also
    --------
    dismalpy.ssm.mlemodel.MLEResults
    dismalpy.ssm.kalman_smoother.SmootherResults
    dismalpy.ssm.kalman_filter.FilterResults
    dismalpy.ssm.representation.FrozenRepresentation
    """
    pass


class DynamicFactorResultsWrapper(mlemodel.MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(
        mlemodel.MLEResultsWrapper._wrap_attrs, _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(
        mlemodel.MLEResultsWrapper._wrap_methods, _methods)
wrap.populate_wrapper(DynamicFactorResultsWrapper, DynamicFactorResults)
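
A hedged usage sketch for the dynamic-factor results above; the synthetic one-factor data and the k_factors/factor_order choices are illustrative assumptions. coefficient_matrices_var is the attribute documented in the docstring.

import numpy as np
import statsmodels.api as sm

np.random.seed(0)
factor = np.random.normal(size=200)
endog = np.column_stack([factor + np.random.normal(size=200) for _ in range(3)])

mod = sm.tsa.DynamicFactor(endog, k_factors=1, factor_order=1)
res = mod.fit(disp=False)
print(res.summary())
print(res.coefficient_matrices_var)   # factor VAR coefficient matrices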
Example No. 12
		if title is not None:
			title = "Nonlinear Quantile Regression Results"
	
		from statsmodels.iolib.summary import Summary
		smry = Summary()
		smry.add_table_2cols(self, gleft=top_left, gright=top_right, yname=yname, xname=xname, title=title)
		smry.add_table_params(self, yname=yname, xname=xname, alpha=alpha, use_t=False)
		
		return smry


class NLRQResultsWrapper(lm.RegressionResultsWrapper):
	pass

wrap.populate_wrapper(NLRQResultsWrapper, NLRQResults)	

def Polynomial1(x, x0, par):
	K = x.shape[1]-1
	mu0 = par[0]
	mu1 = par[1:K+1].reshape(K, 1)
	return (mu0 + np.dot(x-x0, mu1)).reshape(x.shape[0])

def DPolynomial1(x, x0, par):
	return np.concatenate([np.ones((x.shape[0], 1)), x-x0], axis=1), True	

def Polynomial2(x, x0, par):
	K = int(1/2+np.sqrt(x.shape[1]-3/4))
	mu0 = par[0]
	mu1 = par[1:K+1].reshape(K, 1)
	mu2 = par[K+1:].reshape(K, K)
Example No. 13
    Attributes
    ----------
    specification : dictionary
        Dictionary including all attributes from the DynamicFactor model
        instance.
    coefficient_matrices_var : array
        Array containing autoregressive lag polynomial coefficient matrices,
        ordered from lowest degree to highest.

    See Also
    --------
    dismalpy.ssm.mlemodel.MLEResults
    dismalpy.ssm.kalman_smoother.SmootherResults
    dismalpy.ssm.kalman_filter.FilterResults
    dismalpy.ssm.representation.FrozenRepresentation
    """
    pass


class DynamicFactorResultsWrapper(mlemodel.MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_methods,
                                     _methods)


wrap.populate_wrapper(DynamicFactorResultsWrapper, DynamicFactorResults)
Example No. 14
        return x + h
    f1 = func(x + h, model) + L1_wt*np.abs(x + h)
    if f1 <= f + L1_wt*np.abs(x) + 1e-10:
        return x + h

    # Fallback for models where the loss is not quadratic
    from scipy.optimize import brent
    x_opt = brent(func, args=(model,), brack=(x-1, x+1), tol=tol)
    return x_opt


class RegularizedResults(Results):

    def __init__(self, model, params):
        super(RegularizedResults, self).__init__(model, params)

    @cache_readonly
    def fittedvalues(self):
        return self.model.predict(self.params)


class RegularizedResultsWrapper(wrap.ResultsWrapper):
    _attrs = {
        'params': 'columns',
        'resid': 'rows',
        'fittedvalues': 'rows',
    }
    _wrap_attrs = _attrs
wrap.populate_wrapper(RegularizedResultsWrapper,  # noqa:E305
                      RegularizedResults)
Example No. 15

class HoltWintersResultsWrapper(ResultsWrapper):
    _attrs = {
        'fittedvalues': 'rows',
        'level': 'rows',
        'resid': 'rows',
        'season': 'rows',
        'slope': 'rows'
    }
    _wrap_attrs = union_dicts(ResultsWrapper._wrap_attrs, _attrs)
    _methods = {'predict': 'dates', 'forecast': 'dates'}
    _wrap_methods = union_dicts(ResultsWrapper._wrap_methods, _methods)


populate_wrapper(HoltWintersResultsWrapper, HoltWintersResults)


class ExponentialSmoothing(TimeSeriesModel):
    """
    Holt-Winters' Exponential Smoothing

    Parameters
    ----------
    endog : array_like
        Time series
    trend : {"add", "mul", "additive", "multiplicative", None}, optional
        Type of trend component.
    damped : bool, optional
        Should the trend component be damped.
    seasonal : {"add", "mul", "additive", "multiplicative", None}, optional
            if True display the fitted values
        coefficients : bool
            if True display the estimated coefficients
        """
        if which == "filtered":
            state = self.filtered_state
            fitted_values = self.filter_results.forecasts[0]
        else:
            state = self.smoothed_state
            fitted_values = self.smoother_results.smoothed_forecasts[0]
        endog = self.model.endog
        if fitted:
            pd.DataFrame({"endog": endog[:, 0], "fitted_values": fitted_values}
                         ).plot(figsize=figsize)
        if coefficients:
            design = block_diag(*tuple(self.model.exog_design(mod)
                                for mod in self.model.exog_models)
                                )@state
            pd.DataFrame(design.transpose()).plot(figsize=figsize)


class DynamicRegressionResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(DynamicRegressionResultsWrapper,  # noqa:E305
                      DynamicRegressionResults)
Example No. 17
        smry = summary2.Summary()
        smry.add_base(results=self,
                      alpha=alpha,
                      float_format=float_format,
                      xname=xname,
                      yname=yname,
                      title=title)

        return smry


class RLMResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(RLMResultsWrapper, RLMResults)

if __name__ == "__main__":
    #NOTE: This is to be removed
    #Delivery Time Data is taken from Montgomery and Peck
    import statsmodels.api as sm

    #delivery time(minutes)
    endog = np.array([
        16.68, 11.50, 12.03, 14.88, 13.75, 18.11, 8.00, 17.83, 79.24, 21.50,
        40.33, 21.00, 13.50, 19.75, 24.00, 29.00, 15.35, 19.00, 9.50, 35.10,
        17.90, 52.32, 18.75, 19.83, 10.75
    ])

    #number of cases, distance (Feet)
    exog = np.array([[
Example No. 18
        if hasattr(self.data, 'dates') and self.data.dates is not None:
            dates = self.data.dates._mpl_repr()
        else:
            dates = np.arange(self.nobs)
        llb = self.loglikelihood_burn

        # Plot cusum series and reference line
        ax.plot(dates[llb:], self.cusum_squares, label='CUSUM of squares')
        ref_line = (np.arange(llb, self.nobs) - llb) / (self.nobs - llb)
        ax.plot(dates[llb:], ref_line, 'k', alpha=0.3)

        # Plot significance bounds
        lower_line, upper_line = self._cusum_squares_significance_bounds(alpha)
        ax.plot([dates[llb], dates[-1]], upper_line, 'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[llb], dates[-1]], lower_line, 'k--')

        ax.legend(loc=legend_loc)

        return fig


class RecursiveLSResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(RecursiveLSResultsWrapper, RecursiveLSResults)
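
A hedged usage sketch for the plot above; sm.RecursiveLS is the model whose results provide plot_cusum_squares, and the synthetic regression data are an assumption.

import numpy as np
import statsmodels.api as sm

np.random.seed(0)
exog = sm.add_constant(np.random.normal(size=(200, 1)))
endog = exog @ [1.0, 2.0] + np.random.normal(size=200)

res = sm.RecursiveLS(endog, exog).fit()
fig = res.plot_cusum_squares()    # the series, reference line and bounds drawn above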
Example No. 19
    def get_margeff(self, at='overall', method='dydx', atexog=None,
            dummy=False, count=False):
        """Get marginal effects of the fitted model.

        Not yet implemented for Zero Inflated Models
        """
        raise NotImplementedError("not yet implemented for zero inflation")


class L1ZeroInflatedPoissonResults(L1CountResults, ZeroInflatedPoissonResults):
    pass


class ZeroInflatedPoissonResultsWrapper(lm.RegressionResultsWrapper):
    pass
wrap.populate_wrapper(ZeroInflatedPoissonResultsWrapper,
                      ZeroInflatedPoissonResults)


class L1ZeroInflatedPoissonResultsWrapper(lm.RegressionResultsWrapper):
    pass
wrap.populate_wrapper(L1ZeroInflatedPoissonResultsWrapper,
                      L1ZeroInflatedPoissonResults)
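
A hedged usage sketch for the zero-inflated Poisson results wrapped above; the synthetic data are an assumption, and exog_infl is left at its default (a constant-only inflation model).

import numpy as np
import statsmodels.api as sm

np.random.seed(0)
n = 500
exog = sm.add_constant(np.random.normal(size=(n, 1)))
mu = np.exp(exog @ [0.5, 1.0])
endog = np.random.poisson(mu)
endog[np.random.uniform(size=n) < 0.3] = 0        # add excess zeros

res = sm.ZeroInflatedPoisson(endog, exog).fit(disp=False)
print(res.summary())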


class ZeroInflatedGeneralizedPoissonResults(CountResults):
    __doc__ = _discrete_results_docs % {
        "one_line_description": "A results class for Zero Inflated Generalized Poisson",
        "extra_attr": ""}

    @cache_readonly
    def _dispersion_factor(self):
Example No. 20
                             xname=xname,
                             title=title)
        smry.add_table_params(self,
                              yname=yname,
                              xname=xname,
                              alpha=alpha,
                              use_t=False)

        return smry


class RQResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(RQResultsWrapper, RQResults)


def main_2():
    data = sm.datasets.anes96.load()
    data.exog = sm.add_constant(data.exog, prepend=False)
    print(
        sm.OLS(data.endog, data.exog).fit().summary(xname=data.exog_name,
                                                    yname=data.endog_name))
    qrresults = quantreg(data.endog, data.exog,
                         tau=0.5).fit(excludeconstant=True)
    print(qrresults.summary(xname=data.exog_name, yname=data.endog_name))


def main():
    import pylab as plot
Example No. 21
    in each basis direction.
    """
    def __init__(self, model, params, eigs):
        super(DimReductionResults, self).__init__(model, params)
        self.eigs = eigs


class DimReductionResultsWrapper(wrap.ResultsWrapper):
    _attrs = {
        'params': 'columns',
    }
    _wrap_attrs = _attrs


wrap.populate_wrapper(
    DimReductionResultsWrapper,  # noqa:E305
    DimReductionResults)


def _grass_opt(params, fun, grad, maxiter, gtol):
    """
    Minimize a function on a Grassmann manifold.

    Parameters
    ----------
    params : array_like
        Starting value for the optimization.
    fun : function
        The function to be minimized.
    grad : function
        The gradient of fun.
Example No. 22
        A reference to the model that was fit.
    filter_results : KalmanFilter instance
        The underlying state space model and Kalman filter output
    nobs : float
        The number of observations used to fit the model.
    params : array
        The parameters of the model.
    scale : float
        This is currently set to 1.0 and not used by the model or its results.

    See Also
    --------
    MLEModel
    dismalpy.ssm.kalman_smoother.SmootherResults
    dismalpy.ssm.kalman_filter.FilterResults
    dismalpy.ssm.representation.FrozenRepresentation
    """
    pass


class MLEResultsWrapper(mlemodel.MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_methods,
                                     _methods)


wrap.populate_wrapper(MLEResultsWrapper, MLEResults)
Example No. 23
    #ret_doc = """
    #    fcasterr : array-like
    #    confint : array-like
    #"""
    predict.__doc__ = '\n'.join(preddoc[:5] + preddoc[7:20] + extra_doc +
                                preddoc[20:])


class ARResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(ARResultsWrapper, ARResults)


if __name__ == "__main__":
    import statsmodels.api as sm
    sunspots = sm.datasets.sunspots.load()
# Why does R demean the data by default?
    ar_ols = AR(sunspots.endog)
    res_ols = ar_ols.fit(maxlag=9)
    ar_mle = AR(sunspots.endog)
    res_mle_bfgs = ar_mle.fit(maxlag=9, method="mle", solver="bfgs",
                              maxiter=500, gtol=1e-10)
#    res_mle2 = ar_mle.fit(maxlag=1, method="mle", maxiter=500, penalty=True,
#            tol=1e-13)

#    ar_yw = AR(sunspots.endog)
Example No. 24
        return smry


class HoltWintersResultsWrapper(ResultsWrapper):
    _attrs = {'fittedvalues': 'rows',
              'level': 'rows',
              'resid': 'rows',
              'season': 'rows',
              'slope': 'rows'}
    _wrap_attrs = union_dicts(ResultsWrapper._wrap_attrs, _attrs)
    _methods = {'predict': 'dates',
                'forecast': 'dates'}
    _wrap_methods = union_dicts(ResultsWrapper._wrap_methods, _methods)


populate_wrapper(HoltWintersResultsWrapper, HoltWintersResults)


class ExponentialSmoothing(TimeSeriesModel):
    """
    Holt-Winters' Exponential Smoothing

    Parameters
    ----------
    endog : array-like
        Time series
    trend : {"add", "mul", "additive", "multiplicative", None}, optional
        Type of trend component.
    damped : bool, optional
        Should the trend component be damped.
    seasonal : {"add", "mul", "additive", "multiplicative", None}, optional
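
The parameter list above is cut off in this listing; a hedged usage sketch for the model it documents (the quarterly synthetic data and the additive trend/seasonal choices are illustrative assumptions):

import numpy as np
import pandas as pd
import statsmodels.api as sm

idx = pd.date_range("2000-01-01", periods=40, freq="QS")
seasonal = np.tile([0.0, 1.0, 2.0, -1.0], 10)
endog = pd.Series(10 + 0.1 * np.arange(40) + seasonal, index=idx)

mod = sm.tsa.ExponentialSmoothing(endog, trend="add", seasonal="add",
                                  seasonal_periods=4)
res = mod.fit()
print(res.params)       # smoothing_level, smoothing_trend/slope, smoothing_seasonal, ...
print(res.forecast(4))  # handled through HoltWintersResultsWrapper above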
Example No. 25
        self.data.predict_dates = dates

class TimeSeriesModelResults(base.LikelihoodModelResults):
    def __init__(self, model, params, normalized_cov_params, scale=1.):
        self.data = model.data
        super(TimeSeriesModelResults,
                self).__init__(model, params, normalized_cov_params, scale)

class TimeSeriesResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(base.LikelihoodResultsWrapper._wrap_attrs,
                                    _attrs)
    _methods = {'predict' : 'dates'}
    _wrap_methods = wrap.union_dicts(base.LikelihoodResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(TimeSeriesResultsWrapper,
                      TimeSeriesModelResults)

if __name__ == "__main__":
    import statsmodels.api as sm
    import datetime
    import pandas

    data = sm.datasets.macrodata.load()

    #make a DataFrame
    #TODO: attach a DataFrame to some of the datasets, for quicker use
    dates = [str(int(x[0])) +':'+ str(int(x[1])) \
             for x in data.data[['year','quarter']]]

    df = pandas.DataFrame(data.data[['realgdp','realinv','realcons']], index=dates)
    ex_mod = TimeSeriesModel(df)
Example No. 26
        if self.model.initialization_method != 'estimated':
            params = np.array(self.initial_state)
            if params.ndim > 1:
                params = params[0]
            names = self.model.state_names[1:]
            param_header = ['initialization method: %s'
                            % self.model.initialization_method]
            params_stubs = names
            params_data = [[forg(params[i], prec=4)]
                           for i in range(len(params))]

            initial_state_table = SimpleTable(params_data,
                                              param_header,
                                              params_stubs,
                                              txt_fmt=fmt_params)
            summary.tables.insert(-1, initial_state_table)

        return summary


class ExponentialSmoothingResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(ExponentialSmoothingResultsWrapper,  # noqa:E305
                      ExponentialSmoothingResults)
Example No. 27
                np.arange(len(self.params))[self.model._params_state_cov])
            table = make_table(self, state_cov_mask, "Error covariance matrix",
                               strip_end=False)
            summary.tables.append(table)

            # Add a table for all other parameters
            masks = []
            for m in (endog_masks, [state_cov_mask]):
                m = np.array(m).flatten()
                if len(m) > 0:
                    masks.append(m)
            masks = np.concatenate(masks)
            inverse_mask = np.array(list(set(indices).difference(set(masks))))
            if len(inverse_mask) > 0:
                table = make_table(self, inverse_mask, "Other parameters",
                                   strip_end=False)
                summary.tables.append(table)

        return summary
    summary.__doc__ = MLEResults.summary.__doc__


class VARMAXResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(VARMAXResultsWrapper, VARMAXResults)  # noqa:E305
Example No. 28
            dates = self.data.dates._mpl_repr()
        else:
            dates = np.arange(self.nobs)
        d = max(self.nobs_diffuse, self.loglikelihood_burn)

        # Plot cusum series and reference line
        ax.plot(dates[d:], self.cusum_squares, label='CUSUM of squares')
        ref_line = (np.arange(d, self.nobs) - d) / (self.nobs - d)
        ax.plot(dates[d:], ref_line, 'k', alpha=0.3)

        # Plot significance bounds
        lower_line, upper_line = self._cusum_squares_significance_bounds(alpha)
        ax.plot([dates[d], dates[-1]], upper_line, 'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[d], dates[-1]], lower_line, 'k--')

        ax.legend(loc=legend_loc)

        return fig


class RecursiveLSResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(RecursiveLSResultsWrapper,  # noqa:E305
                      RecursiveLSResults)
Example No. 29
        smry = summary2.Summary()
        smry.add_base(results=self,
                      alpha=alpha,
                      float_format=float_format,
                      xname=xname,
                      yname=yname,
                      title=title)

        return smry


class RLMResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(RLMResultsWrapper, RLMResults)  # noqa:E305

if __name__ == "__main__":
    #NOTE: This is to be removed
    #Delivery Time Data is taken from Montgomery and Peck
    import statsmodels.api as sm

    #delivery time(minutes)
    endog = np.array([
        16.68, 11.50, 12.03, 14.88, 13.75, 18.11, 8.00, 17.83, 79.24, 21.50,
        40.33, 21.00, 13.50, 19.75, 24.00, 29.00, 15.35, 19.00, 9.50, 35.10,
        17.90, 52.32, 18.75, 19.83, 10.75
    ])

    #number of cases, distance (Feet)
    exog = np.array([[
Example No. 30
                                     15))[:21].rjust(8)
            t[6][2].data = 'Dev. Multi | Ordinal'[:21].rjust(21)
            t[6][3].data = (
                str(round(self.evidence_against_multinomial_for_ordinal, 6)) +
                ' | ' + str(round(self.evidence_against, 6)))[:21].rjust(8)
        except:
            t[4][2].data = 'Log-Likelihood Multinomial'[:21].rjust(21)
            t[4][3].data = ''[:21].rjust(8)
            t[5][2].data = 'Dev. Null vs Multi'[:21].rjust(21)
            t[5][3].data = ''[:21].rjust(8)
            t[6][2].data = 'Dev. Multi | Ordinal'[:21].rjust(21)
            t[6][3].data = ''[:21].rjust(8)

            warn = 'Warning: Run check_parallel_lines_assumption() first for deviance against multinomial'

        t[7][2].data = 'Dev. Null vs Ordinal'[:21].rjust(21)
        t[7][3].data = str(round(self.llr_pvalue, 15))[:21].rjust(8)
        s.extra_txt = 'Dev. Null vs Ordinal: Evidence against intercept only model in favour of proportional odds model'
        s.extra_txt = s.extra_txt + '\n' + 'Dev. Multinomial | Ordinal: Evidence against multinomial in favour of proportional odds model | Evidence against proportional odds model in favour of multinomial'
        s.extra_txt = s.extra_txt + '\n' + 'Dev. Null vs Multinomial: Evidence against intercept only model in favor of Multinomial'
        s.extra_txt = s.extra_txt + '\n' + warn

        return s


class OrderedResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(OrderedResultsWrapper, OrderedResults)
Example No. 31
    f1 = func(x + h, model) + L1_wt * np.abs(x + h)
    if f1 <= f + L1_wt * np.abs(x) + 1e-10:
        return x + h

    # Fallback for models where the loss is not quadratic
    from scipy.optimize import brent
    x_opt = brent(func, args=(model, ), brack=(x - 1, x + 1), tol=tol)
    return x_opt


class RegularizedResults(Results):
    def __init__(self, model, params):
        super(RegularizedResults, self).__init__(model, params)

    @cache_readonly
    def fittedvalues(self):
        return self.model.predict(self.params)


class RegularizedResultsWrapper(wrap.ResultsWrapper):
    _attrs = {
        'params': 'columns',
        'resid': 'rows',
        'fittedvalues': 'rows',
    }

    _wrap_attrs = _attrs


wrap.populate_wrapper(RegularizedResultsWrapper, RegularizedResults)
Example No. 32
        else:
            dates = np.arange(self.nobs)
        llb = self.loglikelihood_burn

        # Plot cusum series and reference line
        ax.plot(dates[llb:], self.cusum_squares, label='CUSUM of squares')
        ref_line = (np.arange(llb, self.nobs) - llb) / (self.nobs - llb)
        ax.plot(dates[llb:], ref_line, 'k', alpha=0.3)

        # Plot significance bounds
        lower_line, upper_line = self._cusum_squares_significance_bounds(alpha)
        ax.plot([dates[llb], dates[-1]],
                upper_line,
                'k--',
                label='%d%% significance' % (alpha * 100))
        ax.plot([dates[llb], dates[-1]], lower_line, 'k--')

        ax.legend(loc=legend_loc)

        return fig


class RecursiveLSResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs, _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods, _methods)


wrap.populate_wrapper(RecursiveLSResultsWrapper, RecursiveLSResults)
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    cov_type : string
        The type of covariance matrix estimator to use. Can be one of 'approx',
        'opg', 'robust', or 'none'.

    Attributes
    ----------
    model : Model instance
        A reference to the model that was fit.
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    nobs : float
        The number of observations used to fit the model.
    params : array
        The parameters of the model.
    scale : float
        This is currently set to 1.0 and not used by the model or its results.

    """
    pass


class MarkovAutoregressionResultsWrapper(
        markov_regression.MarkovRegressionResultsWrapper):
    pass


wrap.populate_wrapper(MarkovAutoregressionResultsWrapper,
                      MarkovAutoregressionResults)
Example No. 34
        # argument.
        if xname is None:
            xname = self.model.exog_names

        if yname is None:
            yname = self.model.endog_names

        # Create summary table instance
        from statsmodels.iolib.summary import Summary
        smry = Summary()
        smry.add_table_2cols(self,
                             gleft=top_left,
                             gright=top_right,
                             yname=yname,
                             xname=xname,
                             title=title)
        smry.add_table_params(self,
                              yname=yname,
                              xname=xname,
                              alpha=alpha,
                              use_t=False)

        return smry


class QIFResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(QIFResultsWrapper, QIFResults)
Example No. 35
                np.arange(len(self.params))[self.model._params_state_cov])
            table = make_table(self, state_cov_mask, "Error covariance matrix",
                               strip_end=False)
            summary.tables.append(table)

            # Add a table for all other parameters
            masks = []
            for m in (endog_masks, [state_cov_mask]):
                m = np.array(m).flatten()
                if len(m) > 0:
                    masks.append(m)
            masks = np.concatenate(masks)
            inverse_mask = np.array(list(set(indices).difference(set(masks))))
            if len(inverse_mask) > 0:
                table = make_table(self, inverse_mask, "Other parameters",
                                   strip_end=False)
                summary.tables.append(table)

        return summary
    summary.__doc__ = MLEResults.summary.__doc__


class VARMAXResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(VARMAXResultsWrapper, VARMAXResults)
Example No. 36
        grad = np.zeros((q, c))
        for ii in self._grp_ix:
            x = lpr[ii, :]
            jj = np.arange(x.shape[0], dtype=int)
            y = self.endog[ii]
            denom = 0.0
            denomg = np.zeros((q, c))
            for p in itertools.permutations(y):
                v = np.exp(x[(jj, p)].sum())
                denom += v
                for i, r in enumerate(p):
                    if r != 0:
                        denomg[:, r - 1] += v * self.exog[ii[i], :]

            for i, r in enumerate(y):
                if r != 0:
                    grad[:, r - 1] += self.exog[ii[i], :]

            grad -= denomg / denom

        return grad.flatten()



class ConditionalResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(ConditionalResultsWrapper, ConditionalResults)
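
The loop above forms the conditional-likelihood denominator by enumerating every within-group ordering of the outcomes. A tiny self-contained illustration of that enumeration (the linear predictors and outcomes here are made up):

import itertools
import numpy as np

lpr = np.array([[0.0, 0.2],      # linear predictor per observation and category
                [0.0, -0.1],
                [0.0, 0.4]])
y = np.array([0, 1, 1])          # observed categories within one group
jj = np.arange(lpr.shape[0])

denom = sum(np.exp(lpr[(jj, p)].sum()) for p in itertools.permutations(y))
print(denom)                     # sums over all 3! orderings of y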
Example No. 37
            summary.tables.append(table)

            # Add a table for all other parameters
            masks = []
            for m in (endog_masks, [state_cov_mask]):
                m = np.array(m).flatten()
                if len(m) > 0:
                    masks.append(m)
            masks = np.concatenate(masks)
            inverse_mask = np.array(list(set(indices).difference(set(masks))))
            if len(inverse_mask) > 0:
                table = make_table(self,
                                   inverse_mask,
                                   "Other parameters",
                                   strip_end=False)
                summary.tables.append(table)

        return summary

    summary.__doc__ = MLEResults.summary.__doc__


class VARMAXResultsWrapper(MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs, _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods, _methods)


wrap.populate_wrapper(VARMAXResultsWrapper, VARMAXResults)  # noqa:E305
Example No. 38
    #ret_doc = """
    #    fcasterr : array-like
    #    confint : array-like
    #"""
    predict.__doc__ = '\n'.join(preddoc[:5] + preddoc[7:20] + extra_doc +
                                preddoc[20:])


class ARResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(ARResultsWrapper, ARResults)  # noqa:E305


if __name__ == "__main__":
    import statsmodels.api as sm
    sunspots = sm.datasets.sunspots.load(as_pandas=False)
# Why does R demean the data by default?
    ar_ols = AR(sunspots.endog)
    res_ols = ar_ols.fit(maxlag=9)
    ar_mle = AR(sunspots.endog)
    res_mle_bfgs = ar_mle.fit(maxlag=9, method="mle", solver="bfgs",
                              maxiter=500, gtol=1e-10)
#    res_mle2 = ar_mle.fit(maxlag=1, method="mle", maxiter=500, penalty=True,
#            tol=1e-13)

#    ar_yw = AR(sunspots.endog)
Example No. 39
        # Add warnings/notes, added to text format only
        etext = []
        if hasattr(self, 'cov_type'):
            etext.append(self.cov_kwds['description'])

        if etext:
            etext = ["[{0}] {1}".format(i + 1, text)
                     for i, text in enumerate(etext)]
            etext.insert(0, "Warnings:")
            summary.add_extra_txt(etext)

        return summary


class MLEResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_attrs,
                                   _attrs)
    # TODO right now, predict with full_results=True can return something other
    # than a time series, so the `attach_dates` call will fail if we have
    # 'predict': 'dates' here. In the future, remove `full_results` and replace
    # it with new methods, e.g. get_prediction, get_forecast, and likely will
    # want those to be a subclass of FilterResults with e.g. confidence
    # intervals calculated and dates attached.
    # Also, need to modify `attach_dates` to account for DataFrames.
    _methods = {'predict': None}
    _wrap_methods = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(MLEResultsWrapper, MLEResults)
Example No. 40
        ordered from lowest degree to highest. Initialized with ones, unless
        a coefficient is constrained to be zero (in which case it is zero).
    polynomial_trend : array
        Array containing trend polynomial coefficients, ordered from lowest
        degree to highest. Initialized with ones, unless a coefficient is
        constrained to be zero (in which case it is zero).
    model_orders : list of int
        The orders of each of the polynomials in the model.
    param_terms : list of str
        List of parameters actually included in the model, in sorted order.

    See Also
    --------
    dismalpy.ssm.mlemodel.MLEResults
    dismalpy.ssm.kalman_smoother.SmootherResults
    dismalpy.ssm.kalman_filter.FilterResults
    dismalpy.ssm.representation.FrozenRepresentation
    """

    pass


class SARIMAXResultsWrapper(mlemodel.MLEResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_attrs, _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(mlemodel.MLEResultsWrapper._wrap_methods, _methods)


wrap.populate_wrapper(SARIMAXResultsWrapper, SARIMAXResults)
Example No. 41
        if exog is not None:
            orig_exog = self.model.data.orig_exog
            exog_names = self.model.exog_names
            self.model.data.orig_exog = self.model._input_exog
            self.model.exog_names = self.model._input_exog_names

        # Perform the appending procedure
        out = super().append(endog,
                             exog=exog,
                             refit=refit,
                             fit_kwargs=fit_kwargs,
                             **kwargs)

        # Now we reverse the temporary change made above
        if exog is not None:
            self.model.data.orig_exog = orig_exog
            self.model.exog_names = exog_names
        return out


class ARIMAResultsWrapper(sarimax.SARIMAXResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(sarimax.SARIMAXResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(
        sarimax.SARIMAXResultsWrapper._wrap_methods, _methods)


wrap.populate_wrapper(ARIMAResultsWrapper, ARIMAResults)  # noqa:E305
Example No. 42
    if f1 <= f + L1_wt*np.abs(x) + 1e-10:
        return x + h

    # Fallback for models where the loss is not quadratic
    from scipy.optimize import brent
    x_opt = brent(func, args=(model,), brack=(x-1, x+1), tol=tol)
    return x_opt


class RegularizedResults(Results):

    def __init__(self, model, params):
        super(RegularizedResults, self).__init__(model, params)

    @cache_readonly
    def fittedvalues(self):
        return self.model.predict(self.params)


class RegularizedResultsWrapper(wrap.ResultsWrapper):
    _attrs = {
        'params': 'columns',
        'resid': 'rows',
        'fittedvalues': 'rows',
    }

    _wrap_attrs = _attrs

wrap.populate_wrapper(RegularizedResultsWrapper,
                      RegularizedResults)
Example No. 43
        self.data = model.data
        super(TimeSeriesModelResults,
              self).__init__(model, params, normalized_cov_params, scale)


class TimeSeriesResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(base.LikelihoodResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {'predict': 'dates'}
    _wrap_methods = wrap.union_dicts(
        base.LikelihoodResultsWrapper._wrap_methods, _methods)


wrap.populate_wrapper(
    TimeSeriesResultsWrapper,  # noqa:E305
    TimeSeriesModelResults)

if __name__ == "__main__":
    import statsmodels.api as sm
    import pandas

    mdata = sm.datasets.macrodata.load(as_pandas=False)

    #make a DataFrame
    #TODO: attach a DataFrame to some of the datasets, for quicker use
    dates = [str(int(x[0])) +':'+ str(int(x[1])) \
             for x in mdata.data[['year','quarter']]]

    df = pandas.DataFrame(mdata.data[['realgdp', 'realinv', 'realcons']],
                          index=dates)
Example No. 44
        See Also
        --------
        statsmodels.iolib.summary2.Summary : class to hold summary results
        """
        from statsmodels.iolib import summary2
        smry = summary2.Summary()
        smry.add_base(results=self, alpha=alpha, float_format=float_format,
                      xname=xname, yname=yname, title=title)

        return smry


class RLMResultsWrapper(lm.RegressionResultsWrapper):
    pass
wrap.populate_wrapper(RLMResultsWrapper, RLMResults)  # noqa:E305


if __name__ == "__main__":
#NOTE: This is to be removed
#Delivery Time Data is taken from Montgomery and Peck
    import statsmodels.api as sm

#delivery time(minutes)
    endog = np.array([16.68, 11.50, 12.03, 14.88, 13.75, 18.11, 8.00, 17.83,
    79.24, 21.50, 40.33, 21.00, 13.50, 19.75, 24.00, 29.00, 15.35, 19.00,
    9.50, 35.10, 17.90, 52.32, 18.75, 19.83, 10.75])

#number of cases, distance (Feet)
    exog = np.array([[7, 3, 3, 4, 6, 7, 2, 7, 30, 5, 16, 10, 4, 6, 9, 10, 6,
    7, 3, 17, 10, 26, 9, 8, 4], [560, 220, 340, 80, 150, 330, 110, 210, 1460,
Example No. 45
        self.data.predict_dates = dates

class TimeSeriesModelResults(base.LikelihoodModelResults):
    def __init__(self, model, params, normalized_cov_params, scale=1.):
        self.data = model.data
        super(TimeSeriesModelResults,
                self).__init__(model, params, normalized_cov_params, scale)

class TimeSeriesResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(base.LikelihoodResultsWrapper._wrap_attrs,
                                    _attrs)
    _methods = {'predict' : 'dates'}
    _wrap_methods = wrap.union_dicts(base.LikelihoodResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(TimeSeriesResultsWrapper,
                      TimeSeriesModelResults)

if __name__ == "__main__":
    import statsmodels.api as sm
    import datetime
    import pandas

    data = sm.datasets.macrodata.load()

    #make a DataFrame
    #TODO: attach a DataFrame to some of the datasets, for quicker use
    dates = [str(int(x[0])) +':'+ str(int(x[1])) \
             for x in data.data[['year','quarter']]]

    df = pandas.DataFrame(data.data[['realgdp','realinv','realcons']], index=dates)
    ex_mod = TimeSeriesModel(df)
                xname=xname, yname=yname, title=title)

        return smry


class GLMResultsWrapper(lm.RegressionResultsWrapper):
    _attrs = {
        'resid_anscombe' : 'rows',
        'resid_deviance' : 'rows',
        'resid_pearson' : 'rows',
        'resid_response' : 'rows',
        'resid_working' : 'rows'
    }
    _wrap_attrs = wrap.union_dicts(lm.RegressionResultsWrapper._wrap_attrs,
                                   _attrs)
wrap.populate_wrapper(GLMResultsWrapper, GLMResults)

if __name__ == "__main__":
    import statsmodels.api as sm
    data = sm.datasets.longley.load()
    #data.exog = add_constant(data.exog)
    GLMmod = GLM(data.endog, data.exog).fit()
    GLMT = GLMmod.summary(returns='tables')
##    GLMT[0].extend_right(GLMT[1])
##    print(GLMT[0])
##    print(GLMT[2])
    GLMTp = GLMmod.summary(title='Test GLM')


    """
From Stata
Example No. 47
    params : array
        Fitted parameters
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    cov_type : string
        The type of covariance matrix estimator to use. Can be one of 'approx',
        'opg', 'robust', or 'none'.

    Attributes
    ----------
    model : Model instance
        A reference to the model that was fit.
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    nobs : float
        The number of observations used to fit the model.
    params : array
        The parameters of the model.
    scale : float
        This is currently set to 1.0 and not used by the model or its results.

    """
    pass


class MarkovAutoregressionResultsWrapper(
        markov_regression.MarkovRegressionResultsWrapper):
    pass
wrap.populate_wrapper(MarkovAutoregressionResultsWrapper,  # noqa:E305
                      MarkovAutoregressionResults)
        grad = np.zeros((q, c))
        for ii in self._grp_ix:
            x = lpr[ii, :]
            jj = np.arange(x.shape[0], dtype=int)
            y = self.endog[ii]
            denom = 0.0
            denomg = np.zeros((q, c))
            for p in itertools.permutations(y):
                v = np.exp(x[(jj, p)].sum())
                denom += v
                for i, r in enumerate(p):
                    if r != 0:
                        denomg[:, r - 1] += v * self.exog[ii[i], :]

            for i, r in enumerate(y):
                if r != 0:
                    grad[:, r - 1] += self.exog[ii[i], :]

            grad -= denomg / denom

        return grad.flatten()



class ConditionalResultsWrapper(lm.RegressionResultsWrapper):
    pass


wrap.populate_wrapper(ConditionalResultsWrapper, ConditionalResults)
Example No. 49
    @cache_readonly
    def gcv(self):
        return self.scale / (1. - self.hat_matrix_trace / self.nobs)**2

    @cache_readonly
    def cv(self):
        cv_ = ((self.resid_pearson / (1. - self.hat_matrix_diag))**2).sum()
        cv_ /= self.nobs
        return cv_


class GLMGamResultsWrapper(GLMResultsWrapper):
    pass


wrap.populate_wrapper(GLMGamResultsWrapper, GLMGamResults)


class GLMGam(PenalizedMixin, GLM):
    """Model class for generalized additive models, GAM.

    This inherits from `GLM`.

    Warning: Not all inherited methods might take correctly account of the
    penalization. Not all options including offset and exposure have been
    verified yet.

    Parameters
    ----------
    endog : array_like
    exog : array_like or None
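
The GLMGam docstring is cut off in this listing; a hedged usage sketch following the documented from_formula pattern (the synthetic data frame and the spline df/degree and alpha values are illustrative assumptions):

import numpy as np
import pandas as pd
from statsmodels.gam.api import GLMGam, BSplines

rng = np.random.default_rng(0)
df = pd.DataFrame({"x0": rng.uniform(0, 1, 300), "x2": rng.normal(size=300)})
df["y"] = np.sin(2 * np.pi * df["x0"]) + 0.5 * df["x2"] + rng.normal(scale=0.2, size=300)

bs = BSplines(df[["x0"]], df=[6], degree=[3])
res = GLMGam.from_formula("y ~ x2", data=df, smoother=bs, alpha=[0.1]).fit()
print(res.summary())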
Example No. 50
    params : array
        Fitted parameters
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    cov_type : string
        The type of covariance matrix estimator to use. Can be one of 'approx',
        'opg', 'robust', or 'none'.

    Attributes
    ----------
    model : Model instance
        A reference to the model that was fit.
    filter_results : HamiltonFilterResults or KimSmootherResults instance
        The underlying filter and, optionally, smoother output
    nobs : float
        The number of observations used to fit the model.
    params : array
        The parameters of the model.
    scale : float
        This is currently set to 1.0 and not used by the model or its results.

    """
    pass


class MarkovAutoregressionResultsWrapper(
        markov_regression.MarkovRegressionResultsWrapper):
    pass
wrap.populate_wrapper(MarkovAutoregressionResultsWrapper,
                      MarkovAutoregressionResults)
Example No. 51
    #ret_doc = """
    #    fcasterr : array-like
    #    confint : array-like
    #"""
    predict.__doc__ = '\n'.join(preddoc[:5] + preddoc[7:20] + extra_doc +
                                preddoc[20:])


class ARResultsWrapper(wrap.ResultsWrapper):
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(tsbase.TimeSeriesResultsWrapper._wrap_methods,
                                     _methods)
wrap.populate_wrapper(ARResultsWrapper, ARResults)


if __name__ == "__main__":
    import statsmodels.api as sm
    sunspots = sm.datasets.sunspots.load()
# Why does R demean the data by default?
    ar_ols = AR(sunspots.endog)
    res_ols = ar_ols.fit(maxlag=9)
    ar_mle = AR(sunspots.endog)
    res_mle_bfgs = ar_mle.fit(maxlag=9, method="mle", solver="bfgs",
                              maxiter=500, gtol=1e-10)
#    res_mle2 = ar_mle.fit(maxlag=1, method="mle", maxiter=500, penalty=True,
#            tol=1e-13)

#    ar_yw = AR(sunspots.endog)
Example No. 52
			('Df Model:', None),
			('Outer Iterations:', ["%d" % self.fit_history['outer_iterations']]), 
			('Inner Iterations:', ["%d" % self.fit_history['avg_inner_iterations']]) ]

		if title is not None:
			title = "Quantile Regression Results"
	
		from statsmodels.iolib.summary import Summary
		smry = Summary()
		smry.add_table_2cols(self, gleft=top_left, gright=top_right, yname=yname, xname=xname, title=title)
		smry.add_table_params(self, yname=yname, xname=xname, alpha=alpha, use_t=False)
		
		return smry


class RQResultsWrapper(lm.RegressionResultsWrapper):
	pass

wrap.populate_wrapper(RQResultsWrapper, RQResults)	

def main():
	data = sm.datasets.longley.load()
	data.exog = sm.add_constant(data.exog, prepend=False)
	xname = ["x1", "x2", "x3", "x4", "x5", "x6", "x7"]
	yname = ["y"]
	qrresults = RQ(data.endog, data.exog, tau=0.9).fit() 
	print(qrresults.summary(xname=xname, yname=yname))

if __name__ == '__main__':
	main()