Esempio n. 1
0
def dlm_exogenous_r3(y, s, k, a, t, e, r):
    """ Dynamic linear model skater with exogenous variables (pydlm).

        y: scalar or list; y[1:] are contemporaneously observed exogenous variables
        s: state dict (pass falsy on first call to initialize)
        k: number of steps ahead to predict
        e: computation-time hint passed through on fit calls
        r: hyper-parameter (interpreted in dimension 3)

        :returns: x, x_std, s
    """
    if not s:
        # First invocation: build state and assemble the pydlm model
        s = dict()
        s['dim'] = dimension(y)
        s = dlm_set_exog_hyperparams(s=s, r=r)
        y0, exog = split_exogenous(y=y)
        s['n_obs'] = 0
        s['model'] = quietDlm([], printInfo=False) + trend(
            s['trend_degree'], s['discount']) + seasonality(
                s['period'], s['discount'])
        s['model'] = s['model'] + fixedAutoReg(
            degree=s['auto_degree'], name='ar', w=1.0)
        if exog:
            # pydlm represents missing feature values as None, not NaN
            exog_wrapped = [[None if np.isnan(ex0) else ex0 for ex0 in exog]]
            s['model'] = s['model'] + dynamic(features=exog_wrapped,
                                              discount=0.99,
                                              name='exog')  # Sets first exog

    if y is not None:
        y = wrap(y)
        assert dimension(y) == s['dim'], 'Cannot change dimension of data sent'
        s['n_obs'] += 1
        y0, exog = split_exogenous(y=y)
        y0_passed_in = None if np.isnan(
            y0) else y0  # pydlm uses None for missing values
        s['model'].append([y0_passed_in])
        if exog:
            exog_wrapped = [[None if np.isnan(ex0) else ex0 for ex0 in exog]]
            if s['n_obs'] > 1:
                s['model'].append(
                    data=exog_wrapped,
                    component='exog')  # Don't get first exog twice
        num_obs = len(s['model'].data) if s.get('model') else 0
        if num_obs % s['n_fit'] == s['n_fit'] - 1:
            # Periodic re-fit: the recursive call with y=None requests a fit
            _, _, s = dlm_exogenous_r3(y=None, s=s, k=k, a=a, t=t, e=10, r=r)
        s['model'].fitForwardFilter()
        return _dlm_exog_prediction_helper(s=s, k=k, y=y)

    if y is None:
        # Fit request. BUG FIX: previously tested dimension(y) with y=None;
        # use the dimension recorded at initialization instead.
        if s['dim'] == 1:
            s['model'].tune(maxit=20)
            # Don't tune if exogenous ... haven't got this to work
        s['model'].fit()
        return None, None, s
Esempio n. 2
0
def _dlm_exog_prediction_helper(s, k: int, y: Y_TYPE):
    """ Predict k steps ahead, or echo the last value during the burn-in period """
    model = s.get('model')
    n_seen = len(model.data) if model else 0

    if n_seen < s['n_burn']:
        # Warm-up fallback: last-value prediction with a crude scale estimate
        last, _ = split_exogenous(y)
        return [last] * k, [abs(last)] * k, s

    assert k == 1, 'only k==1 for now'  # TODO: Fix to allow for k>1
    last, exog = split_exogenous(y)
    if exog:
        # pydlm expects None (not NaN) for missing feature values
        features = [None if np.isnan(ex0) else ex0 for ex0 in exog]
        x_mean, x_var = s['model'].predict(
            featureDict={'exog': features[0]})
    else:
        x_mean, x_var = s['model'].predict()
    return [x_mean[0, 0]], [math.sqrt(x_var[0, 0])], s
Esempio n. 3
0
def _dlm_prediction_helper(s, k: int, y: Y_TYPE):
    """ Calls down to predictN once past the warm-up stage """
    model = s.get('model')
    n_seen = len(model.data) if model else 0

    if n_seen < s['n_warm']:
        # Not warm yet: fall back to last-value prediction with a crude scale
        last, _ = split_exogenous(y)
        return [last] * k, [abs(last)] * k, s

    x_mean, x_var = s['model'].predictN(N=k)
    x_std = [math.sqrt(v) for v in x_var]
    return list(x_mean), x_std, s
Esempio n. 4
0
def dlm_univariate_r3(y, s: dict, k: int, a=None, t=None, e=None, r=None):
    """ Univariate filter

            - Uses the discounting method of H/W so, doesn't need to be fit as often
            - Discount factors are periodically tuned
            - The hyper-parameter controls 'auto_degree', 'trend_degree',  'period'

        :returns: x, x_std, s
    """
    assert r is not None, 'Requires hyper-parameter (interpreted in dimension 3) '
    if not s:
        # First call: initialize state and assemble the pydlm model
        s = dict()
        s = dlm_set_univariate_params(s=s, r=r)
        s['dim'] = dimension(y)
        s['n_obs'] = 0
        s['model'] = dlm([], printInfo=False) + trend(
            s['trend_degree'], s['discount']) + seasonality(
                s['period'], s['discount'])
        s['model'] = s['model'] + fixedAutoReg(
            degree=s['auto_degree'], name='ar', w=1.0)

    if y is not None:
        s['n_obs'] += 1
        assert isinstance(y, float) or len(
            y) == s['dim'], ' Cannot change dimension of input in flight '
        y0, exog = split_exogenous(y=y)
        y0_passed_in = None if np.isnan(
            y0) else y0  # pydlm uses None for missing values
        s['model'].append([y0_passed_in])
        num_obs = len(s['model'].data) if s.get('model') else 0
        if num_obs % s['n_fit'] == s['n_fit'] - 1:
            # Perform periodic tuning of discount factors
            # (the recursive call with y=None and e=1000 requests a tune + fit below)
            _, _, s = dlm_univariate_r3(y=None,
                                        s=s,
                                        k=k,
                                        a=a,
                                        t=t,
                                        e=1000,
                                        r=r)
        s['model'].fitForwardFilter()
        return _dlm_prediction_helper(s=s, k=k, y=y)

    if y is None and e > 60:
        # Fit request: only honored when given generous compute time (e > 60)
        s['model'].tune()  # Tunes discount factors
        s['model'].fit()
        return None, None, s
Esempio n. 5
0
def flux_auto(y, s, k, a, t, e, r):
    """ One way to use the pyflux package

            - Fits a pyflux ARIMA to the buffered univariate history
            - Contemporaneous y[1:] variables are buffered and passed to
              flux_or_last_value (NOTE(review): they are not fed to pf.ARIMA here)
            - This only works for k=1

        :returns: x, s', w
    """
    if s is None:
        s = dict()
        s = flux_hyperparams(s=s, r=r)
        s = initialize_buffers(s=s, y=y)

    if y is not None:
        # Process observation and return prediction
        assert isinstance(y, float) or len(
            y) == s['dim'], ' Cannot change dimension of input in flight '
        y0, exog = split_exogenous(y=y, dim=s['dim'])
        s = update_buffers(s=s, a=a, exog=exog, y0=y0)
        # Always (re)fit prior to prediction: the y=None recursive call fits the model
        # (removed a dead `if True:` wrapper around this step)
        none_, s, _ = flux_auto(y=None, s=s, k=k, a=a, t=t, e=e, r=r)
        assert none_ is None
        return flux_or_last_value(s=s, k=k, exog=exog, y0=y0)

    if y is None:
        if len(s.get('buffer')) < s['n_burn']:
            # Too little data to fit yet
            s['model'] = None
        else:
            data = pd.DataFrame(columns=['y'], data=s.get('buffer'))
            s['model'] = pf.ARIMA(data=data,
                                  ar=s['ar'],
                                  ma=s['ma'],
                                  target='y',
                                  family=s['family'])
            _ = s['model'].fit("MLE")
        return None, s, None  # Acknowledge that a fit was requested by returning x=None, w=None
def pmd_skater_factory(y:Y_TYPE, s:dict, k:int=1, a:A_TYPE=None, t:T_TYPE=None, e:E_TYPE=None,
                       method: str= 'default', n_warm=50,
                       model_params:dict=None)->(Union[List[float],None],
                                                 Union[List[float],None], Any):
    """ Predict using both simultaneously observed and known in advance variables
        y: Y_TYPE    scalar or list where y[1:] are interpreted as contemporaneously observed exogenous variables
        s:           state
        k:           Number of steps ahead to predict
        a:           (optional) scalar or list of variables known k-steps in advance.
                     When calling, provide the known variable k steps ahead, not the contemporaneous one.
        t:           (optional) Time of observation.
        e:           (optional) Maximum computation time (supply e>60 to give hint to do fitting)

        :returns: x [float] , s', scale [float]

        Remarks:
           - Model params cannot be changed after the first invocation.
           - Allows y=None to be used
    """
    y = wrap(y)
    a = wrap(a)

    if not s.get('n_obs'):
        # Initialize
        s['n_obs'] = 0
        s['model'] = None
        s['immutable'] = pmd_set_immutable(k=k, y=y, a=a, n_warm=n_warm)
        s['params'] = pmd_params(method=method)
        if model_params:
            s['params'].update(model_params)
        s['o'] = dict()                         # Observance
    else:
        pmd_check_consistent_usage(y=y,s=s,a=a,k=k)

    tick(s)
    if t is not None:
        pass # Other models might perform an evolution step here. Not applicable to PMDARIMA

    if y is not None:
        # Receive observation y[0], possibly exogenous y[1:] and possibly k-in-advance a[:]
        # Collect from queues the contemporaneous variables
        s['n_obs']+=1
        y_t, z = split_exogenous(y)
        x_t, s['o'] = observance(y=y,o=s['o'],k=k,a=a)

        # Update the pmdarima model itself
        if x_t is not None:
            if s['model'] is not None:
                if x_t:
                    s['model'].update([y_t], [x_t])
                else:
                    s['model'].update([y_t])

        # Predict
        if s['model'] is None:
            # Fall back to last value if there is no model calibrated as yet
            x = [y_t]*k
            if len(s['o']['x']) > 5 + 2*k:
                # Estimate scale from empirical j-step-ahead residuals
                Y = s['o']['y'][k+1:]
                X = s['o']['x'][k+1:]
                x_std = [ np.nanstd( [ xi[0]-yk[0] for xi, yk in zip( X, Y[j:] ) ] ) for j in range(1,k+1) ]
            else:
                x_std = [1.0]*k   # Fallback to dreadful estimate
        else:
            # Predict forward, supplying known data if it exists
            if not a and not z:
                z_forward = None
            else:
                if not a:
                    z_forward = [z]*k
                else:
                    z_forward = [ list(z) + list(ai) for ai in s['o']['a'] ]  # Add known k-steps ahead
                                    # This estimate could be improved by predicting z's and attenuating
                                    # It is only really a good idea for k=1
            x, ntvls = s['model'].predict(n_periods=k, X=z_forward, return_conf_int=True, alpha=s['immutable']['alpha'])
            x_std = list([ ntvl[1] - ntvl[0] for ntvl in ntvls ])

    # Fit
    tock(s)
    if pmd_it_is_time_to_fit(s=s, e=e):
        tick(s)
        X = s['o'].get('x') or None
        Y = s['o']['y']
        s['model'] = pm.auto_arima(y=Y, X=X, **s['params'])
        # BUG FIX: removed leftover debug output (print of the model, its
        # fitted params, and duplicated pprint(tocks(s)) calls)
        tock(s,'fit')

    if y is not None:
        return list(x), list(x_std), s
    else:
        return None, None, s
Esempio n. 7
0
def observance(y: [float], o: dict, k: int, a: [float] = None):
    """
    This marshals the k-step ahead vector a and the contemporaneous y[1:] and
    returns a combined vector of all exogenous variables.

    It tracks a list of x and corresponding y, by putting a's in a FIFO queue and
    by caching the previous value of y[1:]

    :param o:  state (pass falsy to initialize)
    :param k:  Number of steps ahead that a is provided
    :param y:  scalar or list; y[1:] are contemporaneous exogenous values
    :param a:  variables known k steps in advance, or None
    :returns:  x_t:[float] vector combining y[1:] with previously supplied a's,
               or None until both sources have arrived; plus the updated state o
    """
    yw = wrap(y)
    aw = wrap(a)

    if not o:
        o = {
            'a': [None for _ in range(k)],  # FIFO queue of advance variables
            'z': None,  # Stores the previous value of y[1:]
            'x': list(),
            'y': list()
        }

    y_t, z = split_exogenous(yw)

    # Get the contemporaneous variables from last observation
    # (removed a dead `z = None` assignment: z is already falsy in the else case)
    if z:
        z_t = o.get('z')  # The previously revealed exogenous variables
        o['z'] = z  # Store for next time
    else:
        z_t = None

    # Determine the known in advance variable pertaining to the present
    # (removed a dead `a = None` assignment: only aw is read from here on)
    if aw:
        a_t = o['a'].pop(
            0)  # The known in advance variable pertaining to this time step
        o['a'].append(aw)  # Put the k-ahead received a value(s) on the queue
    else:
        a_t = None

    # Combine into exogenous variables ... but only if both arrived
    if aw and z:
        x_t = z_t + a_t if (z_t and a_t) else None
    elif aw:
        x_t = a_t if a_t else None
    elif z:
        x_t = z_t if z_t else None
    else:
        x_t = None

    if (not z) and (not aw):
        o['y'].append([y_t])  # Special case, no need to wait
    else:
        if x_t:
            o['x'].append(x_t)
            o['y'].append([y_t])
        assert len(o['x']) == len(o['y']), "post-condition"
    return x_t, o