Example #1
def test_against_wls_inference(data, use_t, cov_type):
    y, x, w = data
    mod = RollingWLS(y, x, window=100, weights=w)
    res = mod.fit(use_t=use_t, cov_type=cov_type)
    ci = res.conf_int()

    # This is a smoke test of cov_params to make sure it works
    res.cov_params()

    # Compare each rolling window against a WLS fit on the same observations
    for i in range(100, y.shape[0]):
        _y = get_sub(y, i, 100)
        _x = get_sub(x, i, 100)
        wls = WLS(_y, _x, missing="drop").fit(use_t=use_t, cov_type=cov_type)
        assert_allclose(get_single(res.tvalues, i - 1), wls.tvalues)
        assert_allclose(get_single(res.bse, i - 1), wls.bse)
        assert_allclose(get_single(res.pvalues, i - 1), wls.pvalues, atol=1e-8)
        assert_allclose(get_single(res.fvalue, i - 1), wls.fvalue)
        with np.errstate(invalid="ignore"):
            assert_allclose(get_single(res.f_pvalue, i - 1),
                            wls.f_pvalue,
                            atol=1e-8)
        assert res.cov_type == wls.cov_type
        assert res.use_t == wls.use_t
        wls_ci = wls.conf_int()
        if isinstance(ci, pd.DataFrame):
            ci_val = ci.iloc[i - 1]
            ci_val = np.asarray(ci_val).reshape((-1, 2))
        else:
            ci_val = ci[i - 1].T
        assert_allclose(ci_val, wls_ci)
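
The helpers get_sub and get_single used throughout these examples are defined in the surrounding test module and are not shown on this page. A hypothetical reconstruction, assuming they simply index pandas or NumPy containers uniformly:

import numpy as np
import pandas as pd

def get_single(x, idx):
    # Return the result row/value at position idx, whether x is pandas or NumPy.
    if isinstance(x, (pd.Series, pd.DataFrame)):
        return x.iloc[idx]
    return x[idx]

def get_sub(x, idx, window):
    # Return the trailing block of `window` observations ending just before idx.
    k = idx - window
    if isinstance(x, (pd.Series, pd.DataFrame)):
        return np.asarray(x.iloc[k:idx])
    return x[k:idx]
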
Example #2
def rolling_wls_model():
    # Rolling Weighted Least Squares (Rolling WLS)
    from statsmodels.regression.rolling import RollingWLS

    data = get_dataset("longley")
    exog = sm.add_constant(data.exog, prepend=False)
    rolling_wls = RollingWLS(data.endog, exog)
    model = rolling_wls.fit(reset=50)

    return ModelWithResults(model=model, alg=rolling_wls, inference_dataframe=exog)
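
The snippet above relies on get_dataset and ModelWithResults from its own test module. A self-contained sketch of the same idea using only statsmodels (the window size is an arbitrary choice, not taken from the example):

import statsmodels.api as sm
from statsmodels.regression.rolling import RollingWLS

longley = sm.datasets.longley.load_pandas()
exog = sm.add_constant(longley.exog, prepend=False)
# window=12 is an arbitrary choice that comfortably exceeds the number of
# regressors (7 including the constant).
res = RollingWLS(longley.endog, exog, window=12).fit()
print(res.params.tail())  # estimates are NaN until the first full window is available
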
Example #3
def test_error():
    y, x, _ = gen_data(250, 2, True)
    with pytest.raises(ValueError, match="reset must be a positive integer"):
        RollingWLS(y, x).fit(reset=-1)
    with pytest.raises(ValueError):
        RollingWLS(y, x).fit(method="unknown")
    with pytest.raises(ValueError, match="min_nobs must be larger"):
        RollingWLS(y, x, min_nobs=1)
    with pytest.raises(ValueError, match="min_nobs must be larger"):
        RollingWLS(y, x, window=60, min_nobs=100)
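
The errors above correspond to the documented constraints: reset must be a positive integer, fit's method must be one of "inv", "lstsq" or "pinv" (per the statsmodels documentation), and min_nobs must exceed the number of regressors without exceeding the window. A sketch of a configuration that satisfies all of them (data and parameter values are illustrative):

import numpy as np
from statsmodels.regression.rolling import RollingWLS

rng = np.random.default_rng(0)
x = np.column_stack([np.ones(250), rng.standard_normal((250, 2))])
y = x @ np.array([1.0, 0.5, -0.5]) + rng.standard_normal(250)
# 3 regressors here, so min_nobs must be greater than 3 and at most window=60.
res = RollingWLS(y, x, window=60, min_nobs=50).fit(method="lstsq", reset=25)
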
Example #4
def test_formula():
    y, x, w = gen_data(250, 3, True, pandas=True)
    fmla = "y ~ 1 + x0 + x1 + x2"
    data = pd.concat([y, x], axis=1)
    mod = RollingWLS.from_formula(fmla, window=100, data=data, weights=w)
    res = mod.fit()
    alt = RollingWLS(y, x, window=100)
    alt_res = alt.fit()
    assert_allclose(res.params, alt_res.params)
    ols_mod = RollingOLS.from_formula(fmla, window=100, data=data)
    ols_mod.fit()
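
The from_formula constructor mirrors other statsmodels models. A self-contained sketch with synthetic data (column names and coefficients are illustrative):

import numpy as np
import pandas as pd
from statsmodels.regression.rolling import RollingOLS

rng = np.random.default_rng(0)
df = pd.DataFrame(rng.standard_normal((250, 3)), columns=["x0", "x1", "x2"])
df["y"] = 1.0 + df["x0"] - 0.5 * df["x2"] + rng.standard_normal(250)

res = RollingOLS.from_formula("y ~ 1 + x0 + x1 + x2", data=df, window=100).fit()
print(res.params.dropna().head())  # one row of coefficient estimates per window end
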
Example #5
def test_plot():
    import matplotlib.pyplot as plt

    y, x, w = gen_data(250, 3, True, pandas=True)
    fmla = "y ~ 1 + x0 + x1 + x2"
    data = pd.concat([y, x], axis=1)
    mod = RollingWLS.from_formula(fmla, window=100, data=data, weights=w)
    res = mod.fit()
    fig = res.plot_recursive_coefficient()
    assert isinstance(fig, plt.Figure)
    res.plot_recursive_coefficient(variables=2, alpha=None, figsize=(30, 7))
    res.plot_recursive_coefficient(variables="x0", alpha=None, figsize=(30, 7))
    res.plot_recursive_coefficient(variables=[0, 2],
                                   alpha=None,
                                   figsize=(30, 7))
    res.plot_recursive_coefficient(variables=["x0"],
                                   alpha=None,
                                   figsize=(30, 7))
    res.plot_recursive_coefficient(variables=["x0", "x1", "x2"],
                                   alpha=None,
                                   figsize=(30, 7))
    with pytest.raises(ValueError, match="variable x4 is not an integer"):
        res.plot_recursive_coefficient(variables="x4")

    fig = plt.Figure()
    # Silence the tight_layout UserWarning raised when an existing figure is passed in
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        out = res.plot_recursive_coefficient(fig=fig)
    assert out is fig
    res.plot_recursive_coefficient(alpha=None, figsize=(30, 7))
Example #6
def test_plot():
    import matplotlib.pyplot as plt

    y, x, w = gen_data(250, 3, True, pandas=True)
    fmla = 'y ~ 1 + x0 + x1 + x2'
    data = pd.concat([y, x], axis=1)
    mod = RollingWLS.from_formula(fmla, window=100, data=data, weights=w)
    res = mod.fit()
    fig = res.plot_recursive_coefficient()
    assert isinstance(fig, plt.Figure)
    res.plot_recursive_coefficient(variables=2, alpha=None, figsize=(30, 7))
    res.plot_recursive_coefficient(variables='x0', alpha=None, figsize=(30, 7))
    res.plot_recursive_coefficient(variables=[0, 2],
                                   alpha=None,
                                   figsize=(30, 7))
    res.plot_recursive_coefficient(variables=['x0'],
                                   alpha=None,
                                   figsize=(30, 7))
    res.plot_recursive_coefficient(variables=['x0', 'x1', 'x2'],
                                   alpha=None,
                                   figsize=(30, 7))
    with pytest.raises(ValueError, match='variable x4 is not an integer'):
        res.plot_recursive_coefficient(variables='x4')

    fig = plt.Figure()
    with pytest.warns(UserWarning, match="tight_layout"):
        out = res.plot_recursive_coefficient(fig=fig)
    assert out is fig
    res.plot_recursive_coefficient(alpha=None, figsize=(30, 7))
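
plot_recursive_coefficient returns a matplotlib Figure, so the output can be customized or saved like any other figure. Continuing from the res fitted in the examples above (the file name is illustrative):

fig = res.plot_recursive_coefficient(variables=["x0"], figsize=(10, 4))
fig.savefig("rolling_x0_coefficient.png", dpi=150)
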
Example #7
def test_has_nan(data):
    y, x, w = data
    mod = RollingWLS(y, x, window=100, weights=w)
    has_nan = np.zeros(y.shape[0], dtype=bool)
    for i in range(100, y.shape[0] + 1):
        _y = get_sub(y, i, 100)
        _x = get_sub(x, i, 100)
        has_nan[i - 1] = np.squeeze((np.any(np.isnan(_y))
                                     or np.any(np.isnan(_x))))
    assert_array_equal(mod._has_nan, has_nan)
Example #8
def test_weighted_against_wls(weighted_data):
    y, x, w = weighted_data
    mod = RollingWLS(y, x, weights=w, window=100)
    res = mod.fit(use_t=True)
    for i in range(100, y.shape[0]):
        _y = get_sub(y, i, 100)
        _x = get_sub(x, i, 100)
        if w is not None:
            _w = get_sub(w, i, 100)
        else:
            _w = np.ones_like(_y)
        wls = WLS(_y, _x, weights=_w, missing="drop").fit()
        rolling_params = get_single(res.params, i - 1)
        rolling_nobs = get_single(res.nobs, i - 1)
        assert_allclose(rolling_params, wls.params)
        assert_allclose(rolling_nobs, wls.nobs)
        assert_allclose(get_single(res.ssr, i - 1), wls.ssr)
        assert_allclose(get_single(res.llf, i - 1), wls.llf)
        assert_allclose(get_single(res.aic, i - 1), wls.aic)
        assert_allclose(get_single(res.bic, i - 1), wls.bic)
        assert_allclose(get_single(res.centered_tss, i - 1), wls.centered_tss)
        assert_allclose(res.df_model, wls.df_model)
        assert_allclose(get_single(res.df_resid, i - 1), wls.df_resid)
        assert_allclose(get_single(res.ess, i - 1), wls.ess, atol=1e-8)
        assert_allclose(res.k_constant, wls.k_constant)
        assert_allclose(get_single(res.mse_model, i - 1), wls.mse_model)
        assert_allclose(get_single(res.mse_resid, i - 1), wls.mse_resid)
        assert_allclose(get_single(res.mse_total, i - 1), wls.mse_total)
        assert_allclose(
            get_single(res.rsquared, i - 1), wls.rsquared, atol=1e-8
        )
        assert_allclose(
            get_single(res.rsquared_adj, i - 1), wls.rsquared_adj, atol=1e-8
        )
        assert_allclose(
            get_single(res.uncentered_tss, i - 1), wls.uncentered_tss
        )
Example #9
def test_raise(data):
    y, x, w = data

    mod = RollingWLS(y, x, window=100, missing="drop", weights=w)
    res = mod.fit()
    params = np.asarray(res.params)
    assert np.all(np.isfinite(params[99:]))

    if not np.any(np.isnan(y)):
        return
    mod = RollingWLS(y, x, window=100, missing="skip")
    res = mod.fit()
    params = np.asarray(res.params)
    assert np.any(np.isnan(params[100:]))
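
The last test exercises the two missing-data policies: with missing="drop" the rolling fits remain finite even when the data contain NaNs, while with missing="skip" any window that includes a NaN produces NaN estimates. A plausible illustration of that difference, assuming the semantics the test above asserts (data are synthetic):

import numpy as np
from statsmodels.regression.rolling import RollingWLS

rng = np.random.default_rng(0)
x = np.column_stack([np.ones(250), rng.standard_normal((250, 2))])
y = x @ np.array([1.0, 0.5, -0.5]) + rng.standard_normal(250)
y[10] = np.nan  # inject a single missing observation

dropped = RollingWLS(y, x, window=100, missing="drop").fit()
skipped = RollingWLS(y, x, window=100, missing="skip").fit()

# Windows that cover observation 10 stay finite under "drop" but are NaN under "skip".
print(np.isfinite(dropped.params[109]).all(), np.isnan(skipped.params[109]).any())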