Example #1
    def _fit_start_params_hr(self, order):
        """
        Get starting parameters for fit.

        Parameters
        ----------
        order : iterable
            (p,q,k) - AR lags, MA lags, and number of exogenous variables
            including the constant.

        Returns
        -------
        start_params : array
            A first guess at the starting parameters.

        Notes
        -----
        If necessary, fits an AR process with the lag length selected according
        to best BIC.  Obtain the residuals.  Then fit an ARMA(p,q) model via
        OLS using these residuals for a first approximation.  Uses a separate
        OLS regression to find the coefficients of exogenous variables.

        References
        ----------
        Hannan, E.J. and Rissanen, J.  1982.  "Recursive estimation of mixed
            autoregressive-moving average order."  `Biometrika`.  69.1.
        """
        p,q,k = order
        start_params = zeros((p+q+k))
        endog = self.endog.copy() # copy because overwritten
        exog = self.exog
        if k != 0:
            ols_params = GLS(endog, exog).fit().params
            start_params[:k] = ols_params
            endog -= np.dot(exog, ols_params).squeeze()
        if q != 0:
            if p != 0:
                armod = AR(endog).fit(ic='bic', trend='nc')
                arcoefs_tmp = armod.params
                p_tmp = armod.k_ar
                resid = endog[p_tmp:] - np.dot(lagmat(endog, p_tmp,
                                trim='both'), arcoefs_tmp)
                if p < p_tmp + q:
                    endog_start = p_tmp + q - p
                    resid_start = 0
                else:
                    endog_start = 0
                    resid_start = p - p_tmp - q
                lag_endog = lagmat(endog, p, 'both')[endog_start:]
                lag_resid = lagmat(resid, q, 'both')[resid_start:]
                # stack ar lags and resids
                X = np.column_stack((lag_endog, lag_resid))
                coefs = GLS(endog[max(p_tmp+q,p):], X).fit().params
                start_params[k:k+p+q] = coefs
            else:
                start_params[k+p:k+p+q] = yule_walker(endog, order=q)[0]
        if q==0 and p != 0:
            arcoefs = yule_walker(endog, order=p)[0]
            start_params[k:k+p] = arcoefs
        return start_params
Example #2
    def _fit_start_params_hr(self, order):
        """
        Get starting parameters for fit.

        Parameters
        ----------
        order : iterable
            (p,q,k) - AR lags, MA lags, and number of exogenous variables
            including the constant.

        Returns
        -------
        start_params : array
            A first guess at the starting parameters.

        Notes
        -----
        If necessary, fits an AR process with the lag length selected according
        to best BIC.  Obtain the residuals.  Then fit an ARMA(p,q) model via
        OLS using these residuals for a first approximation.  Uses a separate
        OLS regression to find the coefficients of exogenous variables.

        References
        ----------
        Hannan, E.J. and Rissanen, J.  1982.  "Recursive estimation of mixed
            autoregressive-moving average order."  `Biometrika`.  69.1.
        """
        p, q, k = order
        start_params = zeros((p + q + k))
        endog = self.endog.copy()  # copy because overwritten
        exog = self.exog
        if k != 0:
            ols_params = GLS(endog, exog).fit().params
            start_params[:k] = ols_params
            endog -= np.dot(exog, ols_params).squeeze()
        if q != 0:
            if p != 0:
                armod = AR(endog).fit(ic='bic', trend='nc')
                arcoefs_tmp = armod.params
                p_tmp = armod.k_ar
                resid = endog[p_tmp:] - np.dot(
                    lagmat(endog, p_tmp, trim='both'), arcoefs_tmp)
                X = np.column_stack(
                    (lagmat(endog, p, 'both')[p_tmp + (q - p):],
                     lagmat(resid, q, 'both')))  # stack ar lags and resids
                coefs = GLS(endog[p_tmp + q:], X).fit().params
                start_params[k:k + p + q] = coefs
            else:
                start_params[k + p:k + p + q] = yule_walker(endog, order=q)[0]
        if q == 0 and p != 0:
            arcoefs = yule_walker(endog, order=p)[0]
            start_params[k:k + p] = arcoefs
        return start_params
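
The two listings above are versions of the same statsmodels helper, so a standalone sketch of the Hannan-Rissanen idea may help: fit a long AR model to proxy the innovations, then regress the series on its own lags and the lagged residual proxies. The sketch below is illustrative only; it assumes a current statsmodels install, and the simulated ARMA(1,1) coefficients and the long-AR order p_long are made-up values, not anything taken from the listings.

import numpy as np
from statsmodels.tsa.arima_process import arma_generate_sample
from statsmodels.tsa.tsatools import lagmat
from statsmodels.regression.linear_model import OLS, yule_walker

np.random.seed(0)
# illustrative ARMA(1,1) series: (1 - 0.6L) y_t = (1 + 0.3L) e_t
y = arma_generate_sample(ar=[1, -0.6], ma=[1, 0.3], nsample=500)

# step 1: long AR fit (order picked by hand here) to proxy the innovations
p_long = 8
rho_long, _ = yule_walker(y, order=p_long, method="mle")
resid = y[p_long:] - lagmat(y, p_long, trim="both").dot(rho_long)

# step 2: regress y on its own lags and the lagged residual proxies,
# mirroring the slicing used in the method above
p, q = 1, 1
X = np.column_stack((lagmat(y, p, trim="both")[p_long + q - p:],
                     lagmat(resid, q, trim="both")))
start_params = OLS(y[p_long + q:], X).fit().params
print(start_params)  # rough (AR, MA) starting values for an ARMA(1,1) fit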
Example #3
    def setupClass(cls):
        from scikits.statsmodels.datasets.sunspots import load
        data = load()
        cls.rho, cls.sigma = yule_walker(data.endog, order=4,
                                         method="mle")
        cls.R_params = [1.2831003105694765, -0.45240924374091945,
                        -0.20770298557575195, 0.047943648089542337]
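
This setupClass comes from a regression test that checks yule_walker against coefficients from R's AR fit of the sunspots data. A rough present-day equivalent is sketched below; it assumes the modern statsmodels namespace (statsmodels.datasets.sunspots rather than scikits.statsmodels), and the 4-decimal comparison is a tolerance chosen here for illustration.

import numpy as np
from statsmodels.datasets.sunspots import load_pandas
from statsmodels.regression.linear_model import yule_walker

endog = load_pandas().data["SUNACTIVITY"].to_numpy()
rho, sigma = yule_walker(endog, order=4, method="mle")
R_params = [1.2831003105694765, -0.45240924374091945,
            -0.20770298557575195, 0.047943648089542337]
np.testing.assert_almost_equal(rho, R_params, decimal=4)  # agrees with the R values above
print(rho, sigma)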
Example #4
def pacf_yw(x, nlags=40, method='unbiased'):
    '''Partial autocorrelation estimated with non-recursive yule_walker

    Parameters
    ----------
    x : 1d array
        observations of time series for which pacf is calculated
    nlags : int
        largest lag for which pacf is returned
    method : 'unbiased' (default) or 'mle'
        method for the autocovariance calculations in yule walker

    Returns
    -------
    pacf : 1d array
        partial autocorrelations, nlags+1 elements (lag 0 included)

    Notes
    -----
    This solves the Yule-Walker equations separately for each desired lag,
    so it currently repeats overlapping calculations.
    '''
    xm = x - x.mean()  # demeaned series used in the Yule-Walker fits
    pacf = [1.]
    for k in range(1, nlags + 1):
        # the last AR coefficient of an order-k Yule-Walker fit is the
        # partial autocorrelation at lag k
        pacf.append(yule_walker(xm, k, method=method)[0][-1])
    return np.array(pacf)
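
A quick way to exercise the helper above, with the pacf_yw defined above in scope: a sketch, not from the original source, using simulated AR(2) data with made-up coefficients and method="mle" so the call also works on newer statsmodels releases where the 'unbiased' label was renamed 'adjusted'.

import numpy as np
from statsmodels.regression.linear_model import yule_walker
from statsmodels.tsa.arima_process import arma_generate_sample

np.random.seed(0)
# AR(2) series with coefficients 0.5 and 0.2 (illustrative values)
x = arma_generate_sample(ar=[1, -0.5, -0.2], ma=[1], nsample=1000)
print(pacf_yw(x, nlags=5, method="mle"))  # lag 0 is 1.; lags beyond 2 should be near zero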
Example #8
import numpy as np
import statsmodels.api as sm
from statsmodels.regression.linear_model import GLSAR, yule_walker

examples_all = list(range(10)) + ['test_copy']

examples = examples_all  # [5]

if 0 in examples:
    print('\n Example 0')
    X = np.arange(1, 8)
    X = sm.add_constant(X)
    Y = np.array((1, 3, 4, 5, 8, 10, 9))
    rho = 2
    model = GLSAR(Y, X, 2)
    for i in range(6):
        results = model.fit()
        print("AR coefficients:", model.rho)
        rho, sigma = yule_walker(results.resid, order=model.order)
        model = GLSAR(Y, X, rho)

    par0 = results.params
    print(par0)
    model0if = GLSAR(Y, X, 2)
    res = model0if.iterative_fit(6)
    print('iterativefit beta', res.params)
    results.tvalues  # is this correct? it does equal params/bse
    # but isn't the same as the AR example (which was wrong in the first place..)
    print(results.t_test([0, 1]))  # are sd and t correct? vs
    print(results.f_test(np.eye(2)))

rhotrue = [0.5, 0.2]
rhotrue = np.asarray(rhotrue)
nlags = np.size(rhotrue)
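
The rhotrue array sets up a simulation that continues beyond this excerpt. Below is a self-contained sketch of where it is headed, assuming the usual GLSAR workflow; the sample size, seed, regression coefficients, and trend regressor are arbitrary choices made here for illustration.

import numpy as np
import statsmodels.api as sm
from statsmodels.regression.linear_model import yule_walker

np.random.seed(12345)
rhotrue = np.asarray([0.5, 0.2])
nobs = 500

# AR(2) errors: u_t = 0.5*u_{t-1} + 0.2*u_{t-2} + e_t (burn-in discarded)
e = np.random.standard_normal(nobs + 100)
u = np.zeros(nobs + 100)
for t in range(2, nobs + 100):
    u[t] = rhotrue[0] * u[t - 1] + rhotrue[1] * u[t - 2] + e[t]

x = sm.add_constant(np.arange(nobs, dtype=float))
beta = np.array([1.0, 0.5])
y = x.dot(beta) + u[100:]

mod = sm.GLSAR(y, x, rho=2)         # rho=2 requests an AR(2) error model
res = mod.iterative_fit(maxiter=6)
print("estimated rho:", mod.rho)                         # should be roughly [0.5, 0.2]
print("yule_walker check:", yule_walker(res.resid, order=2)[0])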