Example No. 1
def test_misspecifications():
    # Tests for model specification and misspecification exceptions
    endog = np.arange(20).reshape(10, 2)

    # Bad trend specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(1, 0), trend='')

    # Bad error_cov_type specification
    assert_raises(ValueError,
                  varmax.VARMAX,
                  endog,
                  order=(1, 0),
                  error_cov_type='')

    # Bad order specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(0, 0))

    # Warning with VARMA specification
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')

        varmax.VARMAX(endog, order=(1, 1))

        message = ('Estimation of VARMA(p,q) models is not generically robust,'
                   ' due especially to identification issues.')
        assert_equal(str(w[0].message), message)
    warnings.resetwarnings()
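The test above relies on the fact that a pure VARMA specification is allowed but emits an identification warning at construction time. A minimal sketch of triggering and inspecting that warning outside the test suite (placeholder random data; only numpy, warnings, and statsmodels' varmax module are assumed):

import warnings

import numpy as np
from statsmodels.tsa.statespace import varmax

endog = np.random.normal(size=(100, 2))

# VARMA(1, 1) is accepted, but construction warns about identification issues.
with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter('always')
    varmax.VARMAX(endog, order=(1, 1))

print([str(warning.message) for warning in w])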
Example No. 2
def test_apply_results():
    endog = np.arange(200).reshape(100, 2)
    exog = np.ones(100)
    params = [0.1, 0.2, 0.5, -0.1, 0.0, 0.2, 1., 2., 1., 0., 1.]

    mod1 = varmax.VARMAX(endog[:50], order=(1, 0), trend='t', exog=exog[:50])
    res1 = mod1.smooth(params)

    mod2 = varmax.VARMAX(endog[50:], order=(1, 0), trend='t', exog=exog[50:])
    res2 = mod2.smooth(params)

    res3 = res2.apply(endog[:50], exog=exog[:50])

    assert_equal(res1.specification, res3.specification)

    assert_allclose(res3.cov_params_default, res2.cov_params_default)
    for attr in ['nobs', 'llf', 'llf_obs', 'loglikelihood_burn']:
        assert_equal(getattr(res3, attr), getattr(res1, attr))

    for attr in [
            'filtered_state', 'filtered_state_cov', 'predicted_state',
            'predicted_state_cov', 'forecasts', 'forecasts_error',
            'forecasts_error_cov', 'standardized_forecasts_error',
            'forecasts_error_diffuse_cov', 'predicted_diffuse_state_cov',
            'scaled_smoothed_estimator', 'scaled_smoothed_estimator_cov',
            'smoothing_error', 'smoothed_state', 'smoothed_state_cov',
            'smoothed_state_autocov', 'smoothed_measurement_disturbance',
            'smoothed_state_disturbance',
            'smoothed_measurement_disturbance_cov',
            'smoothed_state_disturbance_cov'
    ]:
        assert_equal(getattr(res3, attr), getattr(res1, attr))

    assert_allclose(res3.forecast(10, exog=np.ones(10)),
                    res1.forecast(10, exog=np.ones(10)))
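`apply` re-evaluates fitted results on a different dataset with the same parameters and specification, which is what the assertions above exploit. A small sketch with placeholder random data:

import numpy as np
from statsmodels.tsa.statespace import varmax

np.random.seed(0)
endog = np.random.normal(size=(100, 2))

mod = varmax.VARMAX(endog[:50], order=(1, 0))
res = mod.fit(disp=False)

# Re-use the estimated parameters on the second half of the sample; nothing
# is re-estimated, the new data are simply filtered/smoothed at res.params.
res_new = res.apply(endog[50:])
print(res.llf, res_new.llf)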
Example No. 3
def test_recreate_model():
    nobs = 100
    endog = np.ones((nobs, 3)) * 2.0
    exog = np.ones(nobs)

    orders = [(1, 0), (1, 1)]
    trends = ['t', 'n']
    error_cov_types = ['diagonal', 'unstructured']
    measurement_errors = [False, True]
    enforce_stationarities = [False, True]
    enforce_invertibilities = [False, True]

    import itertools
    names = [
        'order', 'trend', 'error_cov_type', 'measurement_error',
        'enforce_stationarity', 'enforce_invertibility'
    ]
    for element in itertools.product(orders, trends, error_cov_types,
                                     measurement_errors,
                                     enforce_stationarities,
                                     enforce_invertibilities):
        kwargs = dict(zip(names, element))

        with warnings.catch_warnings(record=False):
            warnings.simplefilter('ignore')
            mod = varmax.VARMAX(endog, exog=exog, **kwargs)
            mod2 = varmax.VARMAX(endog, exog=exog, **mod._get_init_kwds())
        check_equivalent_models(mod, mod2)
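The round-trip above goes through the private `_get_init_kwds`; the public way to rebuild the same specification on new data, used in later examples, is `clone`. A brief sketch with hypothetical data:

import numpy as np
from statsmodels.tsa.statespace import varmax

np.random.seed(0)
endog = np.random.normal(size=(50, 2))
mod = varmax.VARMAX(endog, order=(1, 0), trend='n')

# Same specification, different (here: shorter) dataset.
mod2 = mod.clone(endog[:25])
print(mod2.param_names == mod.param_names)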
Example No. 4
def test_concatenated_predict_varmax(use_exog, trend):
    endog = np.arange(200).reshape(100, 2) * 1.0
    exog = np.ones(100) if use_exog else None

    trend_params = [0.1, 0.2]
    var_params = [0.5, -0.1, 0.0, 0.2]
    exog_params = [1., 2.]
    cov_params = [1., 0., 1.]

    params = []
    if trend in ['c', 't']:
        params += trend_params
    params += var_params
    if use_exog:
        params += exog_params
    params += cov_params

    y1 = endog.copy()
    y1[-50:] = np.nan
    mod1 = varmax.VARMAX(y1, order=(1, 0), trend=trend, exog=exog)
    res1 = mod1.smooth(params)
    p1 = res1.get_prediction()
    pr1 = p1.prediction_results

    x2 = exog[:50] if use_exog else None
    mod2 = varmax.VARMAX(endog[:50], order=(1, 0), trend=trend, exog=x2)
    res2 = mod2.smooth(params)
    x2f = exog[50:] if use_exog else None
    p2 = res2.get_prediction(start=0, end=99, exog=x2f)
    pr2 = p2.prediction_results

    attrs = (pr1.representation_attributes + pr1.filter_attributes +
             pr1.smoother_attributes)
    for key in attrs:
        assert_allclose(getattr(pr2, key), getattr(pr1, key))
Example No. 5
def test_extend_results():
    endog = np.arange(200).reshape(100, 2)
    exog = np.ones(100)
    params = [0.1, 0.2, 0.5, -0.1, 0.0, 0.2, 1., 2., 1., 0., 1.]

    mod1 = varmax.VARMAX(endog, order=(1, 0), trend='t', exog=exog)
    res1 = mod1.smooth(params)

    mod2 = varmax.VARMAX(endog[:50], order=(1, 0), trend='t', exog=exog[:50])
    res2 = mod2.smooth(params)
    res3 = res2.extend(endog[50:], exog=exog[50:])

    assert_allclose(res3.llf_obs, res1.llf_obs[50:])

    for attr in [
            'filtered_state', 'filtered_state_cov', 'predicted_state',
            'predicted_state_cov', 'forecasts', 'forecasts_error',
            'forecasts_error_cov', 'standardized_forecasts_error',
            'forecasts_error_diffuse_cov', 'predicted_diffuse_state_cov',
            'scaled_smoothed_estimator', 'scaled_smoothed_estimator_cov',
            'smoothing_error', 'smoothed_state', 'smoothed_state_cov',
            'smoothed_state_autocov', 'smoothed_measurement_disturbance',
            'smoothed_state_disturbance',
            'smoothed_measurement_disturbance_cov',
            'smoothed_state_disturbance_cov'
    ]:
        desired = getattr(res1, attr)
        if desired is not None:
            desired = desired[..., 50:]
        assert_equal(getattr(res3, attr), desired)

    assert_allclose(res3.forecast(10, exog=np.ones(10)),
                    res1.forecast(10, exog=np.ones(10)))
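In contrast to `apply`, `extend` treats the new observations as following directly after the original sample, so the filtered output for the extension matches the tail of a model built on the full dataset, as the loop above checks. A compact sketch with placeholder random data:

import numpy as np
from statsmodels.tsa.statespace import varmax

np.random.seed(0)
endog = np.random.normal(size=(100, 2))

mod = varmax.VARMAX(endog[:50], order=(1, 0))
res = mod.fit(disp=False)

# Filtering of endog[50:] starts from the state at the end of the first
# 50 observations, rather than from the model's initialization.
res_ext = res.extend(endog[50:])
print(res_ext.nobs, res_ext.llf)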
Example No. 6
def test_param_names_trend():
    endog = np.zeros((3, 2))
    base_names = [
        'L1.y1.y1', 'L1.y2.y1', 'L1.y1.y2', 'L1.y2.y2', 'sqrt.var.y1',
        'sqrt.cov.y1.y2', 'sqrt.var.y2'
    ]
    base_params = [0.5, 0, 0, 0.4, 1.0, 0.0, 1.0]

    # No trend
    mod = varmax.VARMAX(endog, order=(1, 0), trend='n')
    desired = base_names
    assert_equal(mod.param_names, desired)

    # Intercept
    mod = varmax.VARMAX(endog, order=(1, 0), trend=[1])
    desired = ['intercept.y1', 'intercept.y2'] + base_names
    assert_equal(mod.param_names, desired)
    mod.update([1.2, -0.5] + base_params)
    assert_allclose(mod['state_intercept'], [1.2, -0.5])

    # Intercept + drift
    mod = varmax.VARMAX(endog, order=(1, 0), trend=[1, 1])
    desired = (['intercept.y1', 'drift.y1', 'intercept.y2', 'drift.y2'] +
               base_names)
    assert_equal(mod.param_names, desired)
    mod.update([1.2, 0, -0.5, 0] + base_params)
    assert_allclose(mod['state_intercept', 0], 1.2)
    assert_allclose(mod['state_intercept', 1], -0.5)
    mod.update([0, 1, 0, 1.1] + base_params)
    assert_allclose(mod['state_intercept', 0], np.arange(2, 5))
    assert_allclose(mod['state_intercept', 1], 1.1 * np.arange(2, 5))
    mod.update([1.2, 1, -0.5, 1.1] + base_params)
    assert_allclose(mod['state_intercept', 0], 1.2 + np.arange(2, 5))
    assert_allclose(mod['state_intercept', 1], -0.5 + 1.1 * np.arange(2, 5))

    # Drift only
    mod = varmax.VARMAX(endog, order=(1, 0), trend=[0, 1])
    desired = ['drift.y1', 'drift.y2'] + base_names
    assert_equal(mod.param_names, desired)
    mod.update([1, 1.1] + base_params)
    assert_allclose(mod['state_intercept', 0], np.arange(2, 5))
    assert_allclose(mod['state_intercept', 1], 1.1 * np.arange(2, 5))

    # Intercept + quadratic trend (t**2 term; linear drift omitted)
    mod = varmax.VARMAX(endog, order=(1, 0), trend=[1, 0, 1])
    desired = (['intercept.y1', 'trend.2.y1', 'intercept.y2', 'trend.2.y2'] +
               base_names)
    assert_equal(mod.param_names, desired)
    mod.update([1.2, 0, -0.5, 0] + base_params)
    assert_allclose(mod['state_intercept', 0], 1.2)
    assert_allclose(mod['state_intercept', 1], -0.5)
    mod.update([0, 1, 0, 1.1] + base_params)
    assert_allclose(mod['state_intercept', 0], np.arange(2, 5)**2)
    assert_allclose(mod['state_intercept', 1], 1.1 * np.arange(2, 5)**2)
    mod.update([1.2, 1, -0.5, 1.1] + base_params)
    assert_allclose(mod['state_intercept', 0], 1.2 + np.arange(2, 5)**2)
    assert_allclose(mod['state_intercept', 1], -0.5 + 1.1 * np.arange(2, 5)**2)
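Passing `trend` as a list of 0/1 flags selects which powers of time enter the deterministic trend, so `[1, 0, 1]` means an intercept plus a t**2 term with the linear drift omitted, which is why the parameter names above contain `trend.2`. A quick sketch of inspecting this (expected names taken from the test itself):

import numpy as np
from statsmodels.tsa.statespace import varmax

endog = np.zeros((10, 2))

# Intercept and quadratic term only; the linear drift is skipped.
mod = varmax.VARMAX(endog, order=(1, 0), trend=[1, 0, 1])
print(mod.param_names[:4])
# ['intercept.y1', 'trend.2.y1', 'intercept.y2', 'trend.2.y2']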
Example No. 7
def test_specifications():
    # Tests for model specification and state space creation
    endog = np.arange(20).reshape(10, 2)
    exog = np.arange(10)
    exog2 = pd.Series(exog, index=pd.date_range('2000-01-01', '2009-01-01',
                                                freq='AS'))

    # Test successful model creation
    mod = varmax.VARMAX(endog, exog=exog, order=(1, 0))

    # Test successful model creation with pandas exog
    mod = varmax.VARMAX(endog, exog=exog2, order=(1, 0))
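After creating the model, the usual next step is to evaluate it, either by estimating parameters with `fit` or by running the smoother at given values. A minimal sketch with placeholder random data (smoothing at the default starting values keeps the example fast; `mod.fit(disp=False)` would estimate instead):

import numpy as np
from statsmodels.tsa.statespace import varmax

np.random.seed(0)
endog = np.random.normal(size=(10, 2))
exog = np.random.normal(size=10)

# Evaluate the VARX(1) at the default starting parameters.
mod = varmax.VARMAX(endog, exog=exog, order=(1, 0))
res = mod.smooth(mod.start_params)
print(res.llf)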
Example No. 8
def test_misc_exog():
    # Tests for missing data
    nobs = 20
    k_endog = 2
    np.random.seed(1208)
    endog = np.random.normal(size=(nobs, k_endog))
    endog[:4, 0] = np.nan
    endog[2:6, 1] = np.nan
    exog1 = np.random.normal(size=(nobs, 1))
    exog2 = np.random.normal(size=(nobs, 2))

    index = pd.date_range('1970-01-01', freq='QS', periods=nobs)
    endog_pd = pd.DataFrame(endog, index=index)
    exog1_pd = pd.Series(exog1.squeeze(), index=index)
    exog2_pd = pd.DataFrame(exog2, index=index)

    models = [
        varmax.VARMAX(endog, exog=exog1, order=(1, 0)),
        varmax.VARMAX(endog, exog=exog2, order=(1, 0)),
        varmax.VARMAX(endog_pd, exog=exog1_pd, order=(1, 0)),
        varmax.VARMAX(endog_pd, exog=exog2_pd, order=(1, 0)),
    ]

    for mod in models:
        # Smoke tests
        mod.start_params
        res = mod.fit(disp=False)
        res.summary()
        res.predict()
        res.predict(dynamic=True)
        res.get_prediction()

        oos_exog = np.random.normal(size=(1, mod.k_exog))
        res.forecast(steps=1, exog=oos_exog)
        res.get_forecast(steps=1, exog=oos_exog)

        # Smoke tests for invalid exog
        oos_exog = np.random.normal(size=(1))
        assert_raises(ValueError, res.forecast, steps=1, exog=oos_exog)

        oos_exog = np.random.normal(size=(2, mod.k_exog))
        assert_raises(ValueError, res.forecast, steps=1, exog=oos_exog)

        oos_exog = np.random.normal(size=(1, mod.k_exog + 1))
        assert_raises(ValueError, res.forecast, steps=1, exog=oos_exog)

    # Test invalid model specifications
    assert_raises(ValueError,
                  varmax.VARMAX,
                  endog,
                  exog=np.zeros((10, 4)),
                  order=(1, 0))
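The invalid-`exog` checks above come down to a simple rule: out-of-sample `exog` passed to `forecast` or `get_forecast` must have one row per forecast step and `k_exog` columns. A short sketch of the valid case, with placeholder random data:

import numpy as np
from statsmodels.tsa.statespace import varmax

np.random.seed(0)
endog = np.random.normal(size=(50, 2))
exog = np.random.normal(size=(50, 2))

mod = varmax.VARMAX(endog, exog=exog, order=(1, 0))
res = mod.smooth(mod.start_params)

# Out-of-sample exog must be (steps, k_exog); here 5 steps, 2 regressors.
fcast = res.forecast(steps=5, exog=np.zeros((5, mod.k_exog)))
print(fcast.shape)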
Example No. 9
    def __init__(self,
                 true,
                 order,
                 trend,
                 error_cov_type,
                 cov_type='oim',
                 **kwargs):
        self.true = true
        # 1960:Q1 - 1982:Q4
        dta = webuse('manufac', 'http://www.stata-press.com/data/r12/')

        dta.index = dta.month
        dta['dlncaputil'] = dta['lncaputil'].diff()
        dta['dlnhours'] = dta['lnhours'].diff()

        endog = dta.loc['1972-02-01':, ['dlncaputil', 'dlnhours']]

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            self.model = varmax.VARMAX(endog,
                                       order=order,
                                       trend=trend,
                                       error_cov_type=error_cov_type,
                                       **kwargs)

        self.results = self.model.filter(true['params'], cov_type=cov_type)
Example No. 10
    def __init__(self,
                 true,
                 order,
                 trend,
                 error_cov_type,
                 cov_type='oim',
                 included_vars=['dln_inv', 'dln_inc', 'dln_consump'],
                 **kwargs):
        self.true = true
        # 1960:Q1 - 1982:Q4
        dta = pd.DataFrame(results_varmax.lutkepohl_data,
                           columns=['inv', 'inc', 'consump'],
                           index=pd.date_range('1960-01-01',
                                               '1982-10-01',
                                               freq='QS'))

        dta['dln_inv'] = np.log(dta['inv']).diff()
        dta['dln_inc'] = np.log(dta['inc']).diff()
        dta['dln_consump'] = np.log(dta['consump']).diff()

        endog = dta.loc['1960-04-01':'1978-10-01', included_vars]

        self.model = varmax.VARMAX(endog,
                                   order=order,
                                   trend=trend,
                                   error_cov_type=error_cov_type,
                                   **kwargs)

        self.results = self.model.filter(true['params'], cov_type=cov_type)
Example No. 11
    def setup_class(cls,
                    true,
                    order,
                    trend,
                    error_cov_type,
                    cov_type='approx',
                    **kwargs):
        cls.true = true
        # 1960:Q1 - 1982:Q4
        dta = webuse('manufac', 'http://www.stata-press.com/data/r12/')

        dta.index = dta.month
        dta['dlncaputil'] = dta['lncaputil'].diff()
        dta['dlnhours'] = dta['lnhours'].diff()

        endog = dta.loc['1972-02-01':, ['dlncaputil', 'dlnhours']]

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            cls.model = varmax.VARMAX(endog,
                                      order=order,
                                      trend=trend,
                                      error_cov_type=error_cov_type,
                                      **kwargs)

        cls.results = cls.model.smooth(true['params'], cov_type=cov_type)
Example No. 12
    def setup_class(cls,
                    true,
                    order,
                    trend,
                    error_cov_type,
                    cov_type='approx',
                    included_vars=['dln_inv', 'dln_inc', 'dln_consump'],
                    **kwargs):
        cls.true = true
        # 1960:Q1 - 1982:Q4
        dta = pd.DataFrame(results_varmax.lutkepohl_data,
                           columns=['inv', 'inc', 'consump'],
                           index=pd.date_range('1960-01-01',
                                               '1982-10-01',
                                               freq='QS'))

        dta['dln_inv'] = np.log(dta['inv']).diff()
        dta['dln_inc'] = np.log(dta['inc']).diff()
        dta['dln_consump'] = np.log(dta['consump']).diff()

        endog = dta.loc['1960-04-01':'1978-10-01', included_vars]

        cls.model = varmax.VARMAX(endog,
                                  order=order,
                                  trend=trend,
                                  error_cov_type=error_cov_type,
                                  **kwargs)

        cls.results = cls.model.smooth(true['params'], cov_type=cov_type)
Example No. 13
def test_forecast_exog():
    # Test forecasting with various shapes of `exog`
    nobs = 100
    endog = np.ones((nobs, 2)) * 2.0
    exog = np.ones(nobs)

    mod = varmax.VARMAX(endog, order=(1, 0), exog=exog, trend='n')
    res = mod.smooth(np.r_[[0] * 4, 2.0, 2.0, 1, 0, 1])

    # 1-step-ahead, valid
    exog_fcast_scalar = 1.
    exog_fcast_1dim = np.ones(1)
    exog_fcast_2dim = np.ones((1, 1))

    assert_allclose(res.forecast(1, exog=exog_fcast_scalar), 2.)
    assert_allclose(res.forecast(1, exog=exog_fcast_1dim), 2.)
    assert_allclose(res.forecast(1, exog=exog_fcast_2dim), 2.)

    # h-steps-ahead, valid
    h = 10
    exog_fcast_1dim = np.ones(h)
    exog_fcast_2dim = np.ones((h, 1))

    assert_allclose(res.forecast(h, exog=exog_fcast_1dim), 2.)
    assert_allclose(res.forecast(h, exog=exog_fcast_2dim), 2.)

    # h-steps-ahead, invalid
    assert_raises(ValueError, res.forecast, h, exog=1.)
    assert_raises(ValueError, res.forecast, h, exog=[1, 2])
    assert_raises(ValueError, res.forecast, h, exog=np.ones((h, 2)))
Example No. 14
def test_predict_custom_index():
    np.random.seed(328423)
    endog = pd.DataFrame(np.random.normal(size=(50, 2)))
    mod = varmax.VARMAX(endog, order=(1, 0))
    res = mod.smooth(mod.start_params)
    out = res.predict(start=1, end=1, index=['a'])
    assert out.index.equals(pd.Index(['a']))
Example No. 15
def test_var_ct_as_exog1():
    test = 'ct'

    # VAR(2), no built-in trend, constant and time trend as exog
    # Here we start the time-trend at 1 and so we can compare to the built-in
    # trend results "res_ct"
    results = results_var_R.res_ct
    mod = varmax.VARMAX(endog,
                        order=(2, 0),
                        exog=exog1[:, :2],
                        trend='n',
                        loglikelihood_burn=2)
    # Since the params were given for the built-in trend case, we need to
    # re-order them
    params = results['params']
    params = np.r_[params[6:-6], params[:6], params[-6:]]
    res = mod.smooth(params)

    assert_allclose(res.llf, results['llf'])

    # Forecast
    columns = ['%s.fcast.%s.fcst' % (test, name) for name in endog.columns]
    assert_allclose(res.forecast(10, exog=exog1_fcast[:, :2]),
                    results_var_R_output[columns].iloc[:10])

    # IRF
    check_irf(test, mod, results, params)
Example No. 16
    def setup_class(cls,
                    true,
                    order,
                    trend,
                    error_cov_type,
                    cov_type='approx',
                    **kwargs):
        cls.true = true
        # 1960:Q1 - 1982:Q4
        with open(current_path + os.sep + 'results' + os.sep + 'manufac.dta',
                  'rb') as test_data:
            dta = pd.read_stata(test_data)
        dta.index = dta.month
        dta['dlncaputil'] = dta['lncaputil'].diff()
        dta['dlnhours'] = dta['lnhours'].diff()

        endog = dta.loc['1972-02-01':, ['dlncaputil', 'dlnhours']]

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            cls.model = varmax.VARMAX(endog,
                                      order=order,
                                      trend=trend,
                                      error_cov_type=error_cov_type,
                                      **kwargs)

        cls.results = cls.model.smooth(true['params'], cov_type=cov_type)
Example No. 17
def test_known_initialization():
    # Need to test that "known" initialization is taken into account in
    # time series simulation
    np.random.seed(38947)
    nobs = 100
    eps = np.random.normal(size=nobs)

    eps1 = np.zeros(nobs)
    eps2 = np.zeros(nobs)
    eps2[49] = 1
    eps3 = np.zeros(nobs)
    eps3[50:] = 1

    # SARIMAX
    # (test that when state shocks are shut down, the initial state
    # geometrically declines according to the AR parameter)
    mod = sarimax.SARIMAX([0], order=(1, 0, 0))
    mod.ssm.initialize_known([100], [[0]])
    actual = mod.simulate([0.5, 1.], nobs, state_shocks=eps1)
    assert_allclose(actual, 100 * 0.5**np.arange(nobs))

    # Unobserved components
    # (test that the initial level shifts the entire path)
    mod = structural.UnobservedComponents([0], 'local level')
    mod.ssm.initialize_known([100], [[0]])
    actual = mod.simulate([1., 1.],
                          nobs,
                          measurement_shocks=eps,
                          state_shocks=eps2)
    assert_allclose(actual, 100 + eps + eps3)

    # VARMAX
    # (here just test that with an independent VAR we have each initial state
    # geometrically declining at the appropriate rate)
    transition = np.diag([0.5, 0.2])
    mod = varmax.VARMAX([[0, 0]], order=(1, 0), trend='n')
    mod.initialize_known([100, 50], np.diag([0, 0]))
    actual = mod.simulate(np.r_[transition.ravel(), 1., 0, 1.],
                          nobs,
                          measurement_shocks=np.c_[eps1, eps1],
                          state_shocks=np.c_[eps1, eps1])

    assert_allclose(
        actual, np.c_[100 * 0.5**np.arange(nobs), 50 * 0.2**np.arange(nobs)])

    # Dynamic factor
    # (test that the initial state declines geometrically and then loads
    # correctly onto the series)
    mod = dynamic_factor.DynamicFactor([[0, 0]], k_factors=1, factor_order=1)
    mod.initialize_known([100], [[0]])
    actual = mod.simulate([0.8, 0.2, 1.0, 1.0, 0.5],
                          nobs,
                          measurement_shocks=np.c_[eps1, eps1],
                          state_shocks=eps1)
    tmp = 100 * 0.5**np.arange(nobs)
    assert_allclose(actual, np.c_[0.8 * tmp, 0.2 * tmp])
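For the VARMAX case above, the key point is that `initialize_known` fixes the initial state and its covariance, so with all shocks switched off the simulation reduces to the deterministic VAR recursion from that starting point. A stripped-down sketch of just that piece (parameter layout copied from the test):

import numpy as np
from statsmodels.tsa.statespace import varmax

nobs = 20
mod = varmax.VARMAX([[0, 0]], order=(1, 0), trend='n')
mod.initialize_known([100, 50], np.diag([0.0, 0.0]))

# Independent VAR(1): transition diag(0.5, 0.2), identity error covariance.
params = np.r_[np.diag([0.5, 0.2]).ravel(), 1.0, 0.0, 1.0]
sim = mod.simulate(params, nobs,
                   measurement_shocks=np.zeros((nobs, 2)),
                   state_shocks=np.zeros((nobs, 2)))
print(sim[:3])  # columns decay geometrically from 100 and 50 at rates 0.5, 0.2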
Example No. 18
def gen_k_factor2(nobs=10000,
                  k=2,
                  idiosyncratic_ar1=False,
                  idiosyncratic_var=0.4,
                  k_ar=6):
    # Simulate bivariate VAR(6) for the factor
    ix = pd.period_range(start='1950-01', periods=1, freq='M')
    faux = pd.DataFrame([[0, 0]], index=ix, columns=['f1', 'f2'])
    mod = varmax.VARMAX(faux, order=(k_ar, 0), trend='n')
    A = np.zeros((2, 2 * k_ar))
    A[:, -2:] = np.array([[0.5, -0.2], [0.1, 0.3]])
    Q = np.array([[1.5, 0.2], [0.2, 0.5]])
    L = np.linalg.cholesky(Q)
    params = np.r_[A.ravel(), L[np.tril_indices_from(L)]]

    # Simulate the factors
    factors = mod.simulate(params, nobs)

    # Add in the idiosyncratic part
    faux = pd.Series([0], index=ix)
    mod_idio = sarimax.SARIMAX(faux, order=(1, 0, 0))
    phi = [0.7, -0.2] if idiosyncratic_ar1 else [0, 0.]
    tmp = factors.iloc[:, 0] + factors.iloc[:, 1]

    # Monthly variables
    endog_M = pd.concat([tmp.copy() for i in range(k)], axis=1)
    columns = []
    for i in range(k):
        endog_M.iloc[:, i] = (
            endog_M.iloc[:, i] +
            mod_idio.simulate([phi[0], idiosyncratic_var], nobs))
        columns += [f'yM{i + 1}_f2']
    endog_M.columns = columns

    # Monthly versions of quarterly variables
    endog_Q_M = pd.concat([tmp.copy() for i in range(k)], axis=1)
    columns = []
    for i in range(k):
        endog_Q_M.iloc[:, i] = (
            endog_Q_M.iloc[:, i] +
            mod_idio.simulate([phi[0], idiosyncratic_var], nobs))
        columns += [f'yQ{i + 1}_f2']
    endog_Q_M.columns = columns

    # Create quarterly versions of quarterly variables
    levels_M = 1 + endog_Q_M / 100
    levels_M.iloc[0] = 100
    levels_M = levels_M.cumprod()
    # log_levels_M = np.log(levels_M) * 100
    log_levels_Q = (
        np.log(levels_M).resample('Q', convention='e').sum().iloc[:-1] * 100)

    # Compute the quarterly growth rate series
    endog_Q = log_levels_Q.diff()

    return endog_M, endog_Q, factors
Example No. 19
def test_pandas_multivariate_rangeindex():
    # Impulse responses have RangeIndex
    endog = pd.DataFrame(np.zeros((1, 2)))
    mod = varmax.VARMAX(endog, trend='n')
    res = mod.filter([0.5, 0., 0., 0.2, 1., 0., 1.])

    actual = res.impulse_responses(2)
    desired = pd.DataFrame([[1., 0.5, 0.25], [0., 0., 0.]]).T
    assert_allclose(actual, desired)
    assert_(actual.index.equals(desired.index))
Example No. 20
def test_pandas_multivariate_dateindex():
    # Impulse responses still have RangeIndex (i.e. aren't wrapped with dates)
    ix = pd.date_range(start='2000', periods=1, freq='M')
    endog = pd.DataFrame(np.zeros((1, 2)), index=ix)
    mod = varmax.VARMAX(endog, trend='n')
    res = mod.filter([0.5, 0., 0., 0.2, 1., 0., 1.])

    actual = res.impulse_responses(2)
    desired = pd.DataFrame([[1., 0.5, 0.25], [0., 0., 0.]]).T
    assert_allclose(actual, desired)
    assert_(actual.index.equals(desired.index))
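`impulse_responses` defaults to a unit shock in the first variable; other shocks, orthogonalized shocks, and cumulative responses are selected via keyword arguments. A brief sketch reusing the same tiny model and parameter vector as above:

import numpy as np
import pandas as pd
from statsmodels.tsa.statespace import varmax

endog = pd.DataFrame(np.zeros((1, 2)))
mod = varmax.VARMAX(endog, trend='n')
res = mod.filter([0.5, 0., 0., 0.2, 1., 0., 1.])

# Response to a shock in the second variable, orthogonalized via the Cholesky
# factor of the error covariance (here the identity, so unchanged).
print(res.impulse_responses(2, impulse=1, orthogonalized=True))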
Example No. 21
def test_varmax(temp_filename):
    mod = varmax.VARMAX(macrodata[['realgdp',
                                   'realcons']].diff().iloc[1:].values,
                        order=(1, 0))
    res = mod.smooth(mod.start_params)
    res.summary()
    res.save(temp_filename)
    res2 = varmax.VARMAXResults.load(temp_filename)
    assert_allclose(res.params, res2.params)
    assert_allclose(res.bse, res2.bse)
    assert_allclose(res.llf, res2.llf)
Example No. 22
def check_multivariate_chandrasekhar(filter_univariate=False,
                                     gen_obs_cov=False,
                                     memory_conserve=False,
                                     **kwargs):
    # Test that Chandrasekhar recursions don't change the output
    index = pd.date_range('1960-01-01', '1982-10-01', freq='QS')
    dta = pd.DataFrame(results_varmax.lutkepohl_data,
                       columns=['inv', 'inc', 'consump'],
                       index=index)
    dta['dln_inv'] = np.log(dta['inv']).diff()
    dta['dln_inc'] = np.log(dta['inc']).diff()
    dta['dln_consump'] = np.log(dta['consump']).diff()

    endog = dta.loc['1960-04-01':'1978-10-01', ['dln_inv', 'dln_inc']]

    mod_orig = varmax.VARMAX(endog, **kwargs)
    mod_chand = varmax.VARMAX(endog, **kwargs)
    mod_chand.ssm.filter_chandrasekhar = True

    params = mod_orig.start_params

    mod_orig.ssm.filter_univariate = filter_univariate
    mod_chand.ssm.filter_univariate = filter_univariate

    if gen_obs_cov:
        mod_orig['obs_cov'] = np.array([[1., 0.5], [0.5, 1.]])
        mod_chand['obs_cov'] = np.array([[1., 0.5], [0.5, 1.]])

    if memory_conserve:
        mod_orig.ssm.set_conserve_memory(MEMORY_CONSERVE
                                         & ~MEMORY_NO_LIKELIHOOD)
        mod_chand.ssm.set_conserve_memory(MEMORY_CONSERVE
                                          & ~MEMORY_NO_LIKELIHOOD)

        res_chand = mod_chand.filter(params)
        res_orig = mod_orig.filter(params)
    else:
        res_chand = mod_chand.smooth(params)
        res_orig = mod_orig.smooth(params)

    check_output(res_chand, res_orig, memory_conserve=memory_conserve)
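Outside the test harness, opting in to the Chandrasekhar recursions is just a flag on the underlying state space object; the output should match the default Kalman filter to numerical precision, which is what `check_output` verifies above. A minimal sketch with placeholder random data:

import numpy as np
from statsmodels.tsa.statespace import varmax

np.random.seed(0)
endog = np.random.normal(size=(100, 2))

mod = varmax.VARMAX(endog, order=(1, 0))
mod.ssm.filter_chandrasekhar = True  # use the Chandrasekhar recursions

res = mod.filter(mod.start_params)
print(res.llf)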
Example No. 23
def test_varmax_pickle():
    mod = varmax.VARMAX(macrodata[['realgdp',
                                   'realcons']].diff().iloc[1:].values,
                        order=(1, 0))
    res = mod.smooth(mod.start_params)

    res.summary()
    res.save('test_save_varmax.p')
    res2 = varmax.VARMAXResults.load('test_save_varmax.p')
    assert_allclose(res.params, res2.params)
    assert_allclose(res.bse, res2.bse)
    assert_allclose(res.llf, res2.llf)
    os.unlink('test_save_varmax.p')
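Both persistence tests rely on the standard statsmodels save/load round-trip. One practical variation, sketched here under the assumption that the generic results API applies unchanged to VARMAXResults, is `remove_data=True`, which drops the data arrays to shrink the file while keeping parameters and covariances:

import os

import numpy as np
from statsmodels.tsa.statespace import varmax

np.random.seed(0)
endog = np.random.normal(size=(100, 2))
mod = varmax.VARMAX(endog, order=(1, 0))
res = mod.smooth(mod.start_params)

# remove_data=True drops the data arrays before pickling; params survive.
res.save('varmax_small.p', remove_data=True)
res2 = varmax.VARMAXResults.load('varmax_small.p')
print(np.allclose(res.params, res2.params))
os.unlink('varmax_small.p')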
Example No. 24
def test_vma1_exog():
    # Test the VMAX(1) case against univariate MAX(1) models
    dta = pd.DataFrame(results_varmax.lutkepohl_data,
                       columns=['inv', 'inc', 'consump'],
                       index=pd.date_range('1960-01-01',
                                           '1982-10-01',
                                           freq='QS'))
    dta = np.log(dta).diff().iloc[1:]

    endog = dta.iloc[:, :2]
    exog = dta.iloc[:, 2]

    ma_params1 = [-0.01, 1.4, -0.3, 0.002]
    ma_params2 = [0.004, 0.8, -0.5, 0.0001]

    vma_params = [
        ma_params1[0], ma_params2[0], ma_params1[2], 0, 0, ma_params2[2],
        ma_params1[1], ma_params2[1], ma_params1[3], ma_params2[3]
    ]

    # Joint VMA model
    mod_vma = varmax.VARMAX(endog,
                            exog=exog,
                            order=(0, 1),
                            error_cov_type='diagonal')
    mod_vma.ssm.initialize_diffuse()
    res_vma = mod_vma.smooth(vma_params)

    # Smoke test that start_params does not raise an error
    sp = mod_vma.start_params
    assert_equal(len(sp), len(mod_vma.param_names))

    # Univariate MA models
    mod_ma1 = sarimax.SARIMAX(endog.iloc[:, 0],
                              exog=exog,
                              order=(0, 0, 1),
                              trend='c')
    mod_ma1.ssm.initialize_diffuse()
    mod_ma2 = sarimax.SARIMAX(endog.iloc[:, 1],
                              exog=exog,
                              order=(0, 0, 1),
                              trend='c')
    mod_ma2.ssm.initialize_diffuse()
    res_ma1 = mod_ma1.smooth(ma_params1)
    res_ma2 = mod_ma2.smooth(ma_params2)

    # Have to ignore first 2 observations due to differences in initialization
    assert_allclose(res_vma.llf_obs[2:],
                    (res_ma1.llf_obs + res_ma2.llf_obs)[2:])
Example No. 25
def test_varmax(missing, periods):
    endog = np.array([[0.5, 1.2, -0.2, 0.3, -0.1, 0.4, 1.4, 0.9],
                      [-0.2, -0.3, -0.1, 0.1, 0.01, 0.05, -0.13, -0.2]]).T
    exog = np.ones_like(endog[:, 0])
    if missing == 'init':
        endog[0:2, :] = np.nan
    elif missing == 'mixed':
        endog[2:4, 0] = np.nan
        endog[3:6, 1] = np.nan
    elif missing == 'all':
        endog[:] = np.nan

    mod = varmax.VARMAX(endog, order=(1, 0), trend='t', exog=exog)
    mod.update([0.1, -0.1, 0.5, 0.1, -0.05, 0.2, 0.4, 0.25, 1.2, 0.4, 2.3])
    check_filter_output(mod, periods, atol=1e-12)
    check_smoother_output(mod, periods)
Example No. 26
def test_var_basic():
    test = 'basic'

    # VAR(2), no trend or exog
    results = results_var_R.res_basic
    mod = varmax.VARMAX(endog, order=(2, 0), trend='n', loglikelihood_burn=2)
    res = mod.smooth(results['params'])

    assert_allclose(res.llf, results['llf'])

    # Forecast
    columns = ['%s.fcast.%s.fcst' % (test, name) for name in endog.columns]
    assert_allclose(res.forecast(10), results_var_R_output[columns].iloc[:10])

    # IRF
    check_irf(test, mod, results)
Example No. 27
def test_extend_results(trend, forecast):
    endog = np.arange(200).reshape(100, 2)
    trend_params = []
    if trend == 'c':
        trend_params = [0.1, 0.2]
    if trend == 'ct':
        trend_params = [0.1, 0.2, 1., 2.]
    params = np.r_[trend_params, 0.5, -0.1, 0.0, 0.2, 1., 0., 1.]

    mod1 = varmax.VARMAX(endog, order=(1, 0), trend=trend)
    res1 = mod1.smooth(params)
    if forecast:
        # Call `forecast` to trigger the _set_final_exog and
        # _set_final_predicted_state context managers
        res1.forecast()

    mod2 = mod1.clone(endog[:50])
    res2 = mod2.smooth(params)
    if forecast:
        # Call `forecast` to trigger the _set_final_exog and
        # _set_final_predicted_state context managers
        res2.forecast()
    res3 = res2.extend(endog[50:])

    assert_allclose(res3.llf_obs, res1.llf_obs[50:])

    for attr in [
            'filtered_state', 'filtered_state_cov', 'predicted_state',
            'predicted_state_cov', 'forecasts', 'forecasts_error',
            'forecasts_error_cov', 'standardized_forecasts_error',
            'scaled_smoothed_estimator', 'scaled_smoothed_estimator_cov',
            'smoothing_error', 'smoothed_state', 'smoothed_state_cov',
            'smoothed_state_autocov', 'smoothed_measurement_disturbance',
            'smoothed_state_disturbance',
            'smoothed_measurement_disturbance_cov',
            'smoothed_state_disturbance_cov'
    ]:
        desired = getattr(res1, attr)
        if desired is not None:
            desired = desired[..., 50:]
        assert_allclose(getattr(res3, attr), desired, atol=1e-12)

    assert_allclose(res3.forecast(10), res1.forecast(10))
Example No. 28
def test_dynamic_factor_pickle(temp_filename):
    mod = varmax.VARMAX(macrodata[['realgdp',
                                   'realcons']].diff().iloc[1:].values,
                        order=(1, 0))
    pkl_mod = pickle.loads(pickle.dumps(mod))

    res = mod.smooth(mod.start_params)
    pkl_res = pkl_mod.smooth(mod.start_params)

    assert_allclose(res.params, pkl_res.params)
    assert_allclose(res.bse, pkl_res.bse)
    assert_allclose(res.llf, pkl_res.llf)

    res.summary()
    res.save(temp_filename)
    res2 = varmax.VARMAXResults.load(temp_filename)
    assert_allclose(res.params, res2.params)
    assert_allclose(res.bse, res2.bse)
    assert_allclose(res.llf, res2.llf)
Example No. 29
def test_var_ctt():
    test = 'ctt_as_exog1'

    # VAR(2), constant, trend, and trend**2, no exog
    # Note that this is comparing against trend as exog in the R package,
    # since it doesn't have a built-in option for trend**2
    results = results_var_R.res_ctt_as_exog1
    mod = varmax.VARMAX(endog, order=(2, 0), trend='ctt', loglikelihood_burn=2)
    params = results['params']
    params = np.r_[params[-(6 + 9):-6], params[:-(6 + 9)], params[-6:]]
    res = mod.smooth(params)

    assert_allclose(res.llf, results['llf'])

    # Forecast
    columns = ['%s.fcast.%s.fcst' % (test, name) for name in endog.columns]
    assert_allclose(res.forecast(10), results_var_R_output[columns].iloc[:10])

    # IRF
    check_irf(test, mod, results, params)
Example No. 30
def test_var_ct_as_exog0():
    test = 'ct_as_exog0'

    # VAR(2), no built-in trend, constant and time trend as exog
    # Here we start the time-trend at 0
    results = results_var_R.res_ct_as_exog0
    mod = varmax.VARMAX(endog,
                        order=(2, 0),
                        exog=exog0[:, :2],
                        trend='n',
                        loglikelihood_burn=2)
    res = mod.smooth(results['params'])

    assert_allclose(res.llf, results['llf'])

    # Forecast
    columns = ['%s.fcast.%s.fcst' % (test, name) for name in endog.columns]
    assert_allclose(res.forecast(10, exog=exog0_fcast[:, :2]),
                    results_var_R_output[columns].iloc[:10])

    # IRF
    check_irf(test, mod, results)