Example #1
0
def nproph_check_consistent_usage(y: Y_TYPE, s, k, a):
    """ Assert that y, k and a agree with the immutable settings stored in s.

        Each argument is only checked when it is supplied (i.e. not None).
    """
    immutable = s['immutable']
    if y is not None:
        assert dimension(y) - 1 == immutable['dim_exog']
    if k is not None:
        assert k == immutable['k']
    if a is not None:
        assert dimension(a) == immutable['dim_a']
Example #2
0
def pmd_set_immutable(y: Y_TYPE, k: int, a: A_TYPE = None, n_warm: int = 20):
    """ Set on the first invocation, when s={} is passed """
    immutable = dict()
    immutable['k'] = k
    immutable['alpha'] = 0.25  # Determines confidence interval
    immutable['n_fit'] = 250
    immutable['n_warm'] = n_warm
    immutable['dim_exog'] = dimension(y) - 1
    immutable['dim_a'] = dimension(a)
    return immutable
Example #3
0
def dlm_exogenous_r3(y, s, k, a, t, e, r):
    """ One way to use dlm with exogenous variables

        :param y:  observation; first entry is the target, the rest are exogenous
        :param s:  state dict (pass {} / falsy on the first call to initialize)
        :param k:  number of steps ahead for the prediction helper
        :param a:  passed through on the recursive fitting call (not otherwise used here)
        :param t:  passed through on the recursive fitting call (not otherwise used here)
        :param e:  time allowance; the internal re-fit call uses e=10
        :param r:  hyper-parameter, interpreted by dlm_set_exog_hyperparams
        :returns: x, s', w
    """
    if not s:
        # First invocation: build the pydlm model from hyper-parameters in r
        s = dict()
        s['dim'] = dimension(y)
        s = dlm_set_exog_hyperparams(s=s, r=r)
        y0, exog = split_exogenous(y=y)
        s['n_obs'] = 0
        s['model'] = quietDlm([], printInfo=False) + trend(
            s['trend_degree'], s['discount']) + seasonality(
                s['period'], s['discount'])
        s['model'] = s['model'] + fixedAutoReg(
            degree=s['auto_degree'], name='ar', w=1.0)
        if exog:
            exog_wrapped = [[None if np.isnan(ex0) else ex0 for ex0 in exog]]
            s['model'] = s['model'] + dynamic(features=exog_wrapped,
                                              discount=0.99,
                                              name='exog')  # Set's first exog

    if y is not None:
        y = wrap(y)
        assert dimension(y) == s['dim'], 'Cannot change dimension of data sent'
        s['n_obs'] += 1
        y0, exog = split_exogenous(y=y)
        y0_passed_in = None if np.isnan(
            y0) else y0  # pydlm uses None for missing values
        s['model'].append([y0_passed_in])
        if exog:
            exog_wrapped = [[None if np.isnan(ex0) else ex0 for ex0 in exog]]
            if s['n_obs'] > 1:
                s['model'].append(
                    data=exog_wrapped,
                    component='exog')  # Don't get first exog twice
        num_obs = len(s['model'].data) if s.get('model') else 0
        if num_obs % s['n_fit'] == s['n_fit'] - 1:
            # Periodically re-fit/tune by recursing with y=None
            _, _, s = dlm_exogenous_r3(y=None, s=s, k=k, a=a, t=t, e=10, r=r)
        s['model'].fitForwardFilter()
        return _dlm_exog_prediction_helper(s=s, k=k, y=y)

    if y is None:
        # BUG FIX: the original tested dimension(y) here, but y is None in this
        # branch so that check never reflected the data. Use the stored data
        # dimension instead, matching the assert in the y-is-not-None branch.
        if s['dim'] == 1:
            s['model'].tune(maxit=20)
            # Don't tune if exogenous ... haven't got this to work
        s['model'].fit()
        return None, None, s
Example #4
0
def dlm_univariate_r3(y, s: dict, k: int, a=None, t=None, e=None, r=None):
    """ Univariate filter

            - Uses the discounting method of H/W so, doesn't need to be fit as often
            - Discount factors are periodically tuned
            - The hyper-parameter controls 'auto_degree', 'trend_degree',  'period'

        :param y:  observation (float, or sequence whose first entry is the target)
        :param s:  state dict (pass {} / falsy on the first call to initialize)
        :param k:  number of steps ahead for the prediction helper
        :param a:  passed through on the recursive tuning call (not otherwise used here)
        :param t:  passed through on the recursive tuning call (not otherwise used here)
        :param e:  time allowance; tuning/fitting only happens when e > 60
        :param r:  hyper-parameter (required), interpreted in dimension 3

        :returns: x, x_std, s
    """
    assert r is not None, 'Requires hyper-parameter (interpreted in dimension 3) '
    if not s:
        # First invocation: build the pydlm model from hyper-parameters in r
        s = dict()
        s = dlm_set_univariate_params(s=s, r=r)
        s['dim'] = dimension(y)
        s['n_obs'] = 0
        s['model'] = dlm([], printInfo=False) + trend(
            s['trend_degree'], s['discount']) + seasonality(
                s['period'], s['discount'])
        s['model'] = s['model'] + fixedAutoReg(
            degree=s['auto_degree'], name='ar', w=1.0)

    if y is not None:
        s['n_obs'] += 1
        assert isinstance(y, float) or len(
            y) == s['dim'], ' Cannot change dimension of input in flight '
        y0, exog = split_exogenous(y=y)
        y0_passed_in = None if np.isnan(
            y0) else y0  # pydlm uses None for missing values
        s['model'].append([y0_passed_in])
        num_obs = len(s['model'].data) if s.get('model') else 0
        if num_obs % s['n_fit'] == s['n_fit'] - 1:
            # Perform periodic tuning of discount factors
            _, _, s = dlm_univariate_r3(y=None,
                                        s=s,
                                        k=k,
                                        a=a,
                                        t=t,
                                        e=1000,
                                        r=r)
        s['model'].fitForwardFilter()
        return _dlm_prediction_helper(s=s, k=k, y=y)

    if y is None:
        # BUG FIX: the original compared e > 60 without guarding against the
        # declared default e=None (TypeError in Python 3), and returned a bare
        # None when the allowance was too small, which broke the
        # `_, _, s = ...` unpacking callers rely on. Always return the triple.
        if e is not None and e > 60:
            s['model'].tune()  # Tunes discount factors
            s['model'].fit()
        return None, None, s