Code example #1
def empirical_ema_r1(y: Y_TYPE,
                     s: dict,
                     k: int,
                     a: A_TYPE = None,
                     t: T_TYPE = None,
                     e: E_TYPE = None,
                     r: R_TYPE = None):
    """ Exponential moving average, with empirical std

          r      weight to place on existing anchor point

    """
    assert r is not None
    y0 = wrap(y)[0]
    if not s.get('p'):
        s = {'p': {}, 'x': y0, 'rho': r}
        assert 0 <= s['rho'] <= 1, 'Expecting rho=r to be between 0 and 1'
    else:
        assert abs(r - s['rho']) < 1e-6, 'rho=r is immutable'

    if y0 is None:
        return None, None, s  # keep the (x, x_std, s) return ordering even when y is missing
    else:
        s['x'] = s['rho'] * s['x'] + (1 - s['rho']) * y0  # Make me better !
        x = [s['x']] * k
        _we_ignore_bias, x_std, s['p'] = parade(p=s['p'], x=x, y=y0)
        x_std_fallback = nonecast(x_std, fill_value=1.0)
        return [s['x']] * k, x_std_fallback, s
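
The skater calling convention is identical across every example on this page: pass the latest observation and the previous state, and receive a k-vector of forecasts, a k-vector of standard deviations, and the updated state. Below is a minimal usage sketch (not part of the source above; it assumes empirical_ema_r1 and its helpers are already importable, and uses synthetic data):

import numpy as np

ys = np.cumsum(np.random.randn(200))   # synthetic random-walk series
s = {}                                 # skaters are initialized with an empty state dict
for y in ys:
    x, x_std, s = empirical_ema_r1(y=y, s=s, k=3, r=0.9)
# x is a list of 3 identical EMA forecasts; x_std holds the empirical residual stds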
Code example #2
File: linear.py Project: nguyensu/timemachines
def regress_level_on_first_known(y: Y_TYPE, s: dict, k: int, a: A_TYPE = None,
                                 t: T_TYPE = None, e: E_TYPE = None) -> ([float], Any, Any):
    """ Very basic online regression skater, mostly for testing
           - Only one known in advance variable is utilized
           - Last value is ignored, unless a is None in which case we return 0.0
           - Empirical std is returned
    """
    y0 = wrap(y)[0]  # Ignore contemporaneous, exogenous variables
    if a is not None:
        a0 = wrap(a)[0]  # Ignore all but the first known-in-advance variable

    if not s.get('k'):
        # First invocation
        s = {'p': {}} # Prediction parade
        s['r'] = {}   # Regression state, not to be confused with hyper-param r
        s['k'] = k
        s['o'] = {}   # The "observance" will quarantine 'a' until it can be matched
    else:
        assert s['k']==k  # Immutability

    if a is None:
        return [0]*k, [1.0]*k, s
    else:
        a_t, s['o'] = observance(y=[y0], o=s['o'], k=k, a=[a0])  # Update the observance
        if a_t is not None: # This is the contemporaneous 'a', which was supplied k calls ago.
            if not s['r']:
                # When first calling the online regression algorithm we avoid the degenerate case
                # by sending it two observations.
                y_noise = 0.1 * (1e-6 + abs(y0)) * np.random.randn()
                x_noise = 0.1 * (1e-6 + abs(a0)) * np.random.randn()
                x = [a_t[0] - x_noise, a_t[0] + x_noise]
                y = [y0 - y_noise, y0 + y_noise]
                s['r'] = regress_one_helper(x=x, y=y, r=s['r'])
            else:
                s['r'] = regress_one_helper(x=a_t, y=[y0], r=s['r'])

            # Predict using contemporaneous alpha's
            x = [s['r']['alpha'] + s['r']['beta'] * ak[0] for ak in s['o']['a']]

            # Push prediction into the parade and get the current bias/stderr
            bias, x_std, s['p'] = parade(p=s['p'], x=x, y=y0)
            return x, x_std, s    # TODO: Use the std implied by regression instead
        else:
            x = [y0]*k
            bias, x_std, s['p'] = parade(p=s['p'], x=x, y=y0)
            return x, x_std, s
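
A usage sketch for the regression skater (illustrative only; the series names are made up). The a passed on each call is the known-in-advance value that will be contemporaneous with y k steps later, which is exactly why the observance quarantines it:

import numpy as np

k = 2
a_series = np.linspace(0.0, 1.0, 100)                    # known-in-advance regressor
y_series = 3.0 * a_series + 0.1 * np.random.randn(100)   # y depends linearly on a
s = {}
for i in range(len(y_series) - k):
    # Supply the regressor value that becomes contemporaneous with y in k steps
    x, x_std, s = regress_level_on_first_known(y=y_series[i], s=s, k=k, a=[a_series[i + k]])
# After warm-up, x holds k forecasts of the form s['r']['alpha'] + s['r']['beta'] * a;
# before the first regression update the skater simply repeats the latest y.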
Code example #3
File: tsaconstant.py Project: nguyensu/timemachines
def tsa_factory(y: Y_TYPE,
                s: dict,
                k: int,
                a: A_TYPE = None,
                t: T_TYPE = None,
                e: E_TYPE = None,
                p: int = TSA_P_DEFAULT,
                d: int = TSA_D_DEFAULT,
                q: int = TSA_D_DEFAULT) -> ([float], Any, Any):
    """ Extremely simple univariate, fixed p,d,q ARIMA model that is re-fit each time """

    # TODO: FIX THIS TO USE EMPIRICAL STD, OTHERWISE ENSEMBLES ARE DREADFUL

    y = wrap(y)
    a = wrap(a)

    if not s.get('y'):
        s = {'y': list(), 'a': list(), 'k': k, 'p': {}}
    else:
        # Assert immutability of k, dimensions
        if s['y']:
            assert len(y) == len(s['y'][0])
            assert k == s['k']
        if s['a']:
            assert len(a) == len(s['a'][0])

    if y is None:
        return None, None, s  # keep the (x, x_std, s) return ordering even when y is missing
    else:
        s['y'].append(y)
        if a is not None:
            s['a'].append(a)
        if len(s['y']) > max(2 * k + 5, TSA_META['n_warm']):
            y0s = [y_[0] for y_ in s['y']]
            model = ARIMA(y0s, order=(p, d, q))
            try:
                x = list(model.fit().forecast(steps=k))
            except Exception:
                # Fall back to repeating the last observed value if the ARIMA fit fails
                x = [wrap(y)[0]] * k
        else:
            x = [y[0]] * k

        y0 = wrap(y)[0]
        _we_ignore_bias, x_std, s['p'] = parade(p=s['p'], x=x, y=y0)
        x_std_fallback = nonecast(x_std, fill_value=1.0)
        return x, x_std_fallback, s
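
Usage sketch for the ARIMA skater (illustrative only; tsa_factory, TSA_META and the statsmodels ARIMA class are assumed importable as in the project):

import numpy as np

ys = np.cumsum(np.random.randn(150))   # synthetic random walk
s = {}
for y in ys:
    x, x_std, s = tsa_factory(y=y, s=s, k=3, p=1, d=0, q=0)
# Before max(2*k+5, TSA_META['n_warm']) observations the forecast simply repeats the
# last value; after that an ARIMA(1,0,0) model is re-fit and forecast on every call.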
Code example #4
def empirical_last_value(y: Y_TYPE,
                         s: dict,
                         k: int = 1,
                         a: A_TYPE = None,
                         t: T_TYPE = None,
                         e: E_TYPE = None) -> ([float], Any, Any):
    """ Last value cache, with empirical std """

    if not s.get('p'):
        s = {'p': {}}  # Initialize prediction parade

    if y is None:
        return None, None, s
    else:
        y0 = wrap(y)[0]  # Ignore the rest
        x = [y0] * k  # What a great prediction !
        bias, x_std, s['p'] = parade(p=s['p'], x=x,
                                     y=y0)  # update residual queue
        return x, x_std, s
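
All of these skaters obtain their empirical standard deviation the same way: each call pushes the fresh k-vector of predictions into the "prediction parade" and reads back running moments of the residuals once the matching targets arrive. The toy class below illustrates that bookkeeping for the 1-step horizon only; it is a conceptual sketch, not the timemachines parade implementation:

from collections import deque
import math

class ToyParade:
    """ Illustrative stand-in for parade(): queue 1-step-ahead predictions,
        then report the running mean and std of their residuals. """

    def __init__(self):
        self.pending = deque()   # predictions whose target has not arrived yet
        self.residuals = []

    def update(self, x, y):
        # x: list of k forecasts made now (only x[0] is tracked); y: value observed now
        if self.pending:
            self.residuals.append(y - self.pending.popleft())
        self.pending.append(x[0])
        if len(self.residuals) >= 2:
            mean = sum(self.residuals) / len(self.residuals)
            var = sum((r - mean) ** 2 for r in self.residuals) / (len(self.residuals) - 1)
            return mean, math.sqrt(var)
        return None, None        # mirrors how the real parade yields None until it has data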
Code example #5
def fbprophet_skater_factory(y: Y_TYPE, s: dict, k: int, a: A_TYPE = None,
                             t: T_TYPE = None, e: E_TYPE = None,
                             emp_mass: float = 0.0, emp_std_mass: float = 0.0,
                             freq=None, recursive: bool = False,
                             model_params: dict = None,
                             n_max: int = None) -> ([float], Any, Any):
    """ Prophet skater with running prediction error moments
        Hyper-parameters are explicit here, whereas they are determined from r in actual skaters.
        Params of note:

             a: value of known-in-advance vars k steps in advance (not contemporaneous with y)

    """

    assert 0 <= emp_mass <= 1
    assert 0 <= emp_std_mass <= 1

    if freq is None:
        freq = PROPHET_META['freq']
    if n_max is None:
        n_max = PROPHET_META['n_max']

    y = wrap(y)
    a = wrap(a)

    if not s.get('y'):
        s = {'p': {},     # parade
             'y': list(), # historical y
             'a': list(), # list of a known k steps in advance
             't': list(),
             'k': k}
    else:
        # Assert immutability of k, dimensions of y,a
        if s['y']:
            assert len(y) == len(s['y'][0])
            assert k == s['k']
        if s['a']:
            assert len(a) == len(s['a'][0])

    if y is None:
        return None, None, s  # keep the (x, x_std, s) return ordering even when y is missing
    else:
        s['y'].append(y)
        if a is not None:
            s['a'].append(a)
        if t is not None:
            assert isinstance(t, float), 'epoch time please'
            s['t'].append(t)

        if len(s['y']) > max(2 * k + 5, PROPHET_META['n_warm']):
            # Offset y, t, a are supplied to prophet interface
            t_arg = s['t'][k:] if t is not None else None
            a_arg = s['a']
            y_arg = s['y'][k:]
            x, x_std, forecast, model = prophet_iskater_factory(y=y_arg, k=k, a=a_arg, t=t_arg,
                                                                freq=freq, n_max=n_max,
                                                                recursive=recursive, model_params=model_params)
            s['m'] = True # Flag indicating a model has been fit (there is no point keeping the model itself, however)
        else:
            x = [y[0]] * k
            x_std = None

        # Get running mean prediction errors from the prediction parade
        x_resid, x_resid_std, s['p'] = parade(p=s['p'], x=x, y=y[0])
        x_resid = nonecast(x_resid,y[0])
        x_resid_std = nonecast(x_resid_std,1.0)

        # Compute center of mass between bias-corrected and uncorrected predictions
        x_corrected = np.array(x_resid) + np.array(x)
        x_center = nonecenter(m=[emp_mass, 1 - emp_mass], x=[x_corrected, x])
        x_std_center = nonecenter(m=[emp_std_mass, 1 - emp_std_mass], x=[x_resid_std, x_std])

        return x_center, x_std_center, s
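
The final blending step is just a convex combination controlled by emp_mass (and, analogously, emp_std_mass for the standard deviations). A worked example with made-up numbers, ignoring the None-handling that nonecenter provides:

import numpy as np

x = np.array([10.0, 10.5, 11.0])      # hypothetical Prophet forecast, k = 3
x_resid = np.array([0.4, 0.4, 0.4])   # hypothetical running mean residuals (bias)
emp_mass = 0.25

x_corrected = x + x_resid             # bias-corrected forecast
x_center = emp_mass * x_corrected + (1 - emp_mass) * x
# -> array([10.1, 10.6, 11.1]); emp_mass=0.0 keeps the raw forecast unchanged,
#    emp_mass=1.0 applies the full empirical bias correction.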