Example #1
        x = list(x_mean)                        # point forecasts
        x_std = [math.sqrt(v) for v in x_var]   # convert variances to standard deviations
        return x, x_std, s                      # skater convention: forecasts, std devs, updated state


def dlm_set_univariate_params(s, r: R_TYPE):
    period_choices = [3, 5, 7, 10, 12, 16, 24, 32]
    s['auto_degree'], s['trend_degree'], period_choice = to_int_log_space(
        r,
        bounds=[
            (0.5, 3.2),  # auto-regressive degree
            (0.1, 3.4),  # trend degree: 1 means linear trend, 2 quadratic
            (0, len(period_choices))  # index into period_choices for the seasonality period
        ])
    s['discount'] = 1 - (r % 0.05)  # Shouldn't matter too much as these get tuned eventually
    s['period'] = period_choices[period_choice]
    s['n_warm'] = 100
    s['n_fit'] = 500  # How often to tune discounts subsequently
    return s


if __name__ == '__main__':
    err = prior_plot_exogenous(f=dlm_univariate_r3,
                               k=2,
                               n=1000,
                               r=np.random.rand())
    plt.figure()
    print('done')
    pass
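
A minimal usage sketch (illustrative only, not library code; it assumes the module above is importable and that to_int_log_space maps r into the given bounds):

# Hypothetical usage of dlm_set_univariate_params: one float r picks all hyperparameters.
s = {}                                     # skater state is carried in a plain dict
s = dlm_set_univariate_params(s, r=0.37)   # r in [0, 1) is mapped to integer choices in log space
print(s['auto_degree'], s['trend_degree'], s['period'], s['discount'])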
Example #2
    from timemachines.skaters.evaluation import evaluate_mean_absolute_error
    from timemachines.data.real import hospital_with_exog
    y, a = hospital_with_exog(k=k)
    y0 = [yi[0] for yi in y]

    r = 0.1  # Doesn't matter?
    err1 = evaluate_mean_absolute_error(f=f, k=k, y=y0, r=r, n_burn=250)
    err2 = evaluate_mean_absolute_error(f=f, k=k, y=y, r=r, n_burn=250)
    err3 = evaluate_mean_absolute_error(f=f, k=k, y=y, r=r, a=a, n_burn=250)
    errlv = evaluate_mean_absolute_error(f=last_value,
                                         k=k,
                                         y=y,
                                         r=r,
                                         a=a,
                                         n_burn=250)

    print('----------------')
    print("Error w/o exogenous   = " + str(err1))
    print("Error w   exogenous   = " + str(err2))
    print("Error w   exo + known = " + str(err3))
    print("Error last val cache  = " + str(errlv))


if __name__ == '__main__':
    f = pmd_exogenous
    if True:
        prior_plot_exogenous(f=f, k=1, n=200, r=0.95)
    if True:
        prior_plot(f=f, k=1, n=200, r=0.5)
    if True:
        pmd_exog_compare(f=f)
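
For context, skaters here are called one observation at a time and return k-step-ahead forecasts together with updated state. The loop below is a hand-written walk-forward MAE sketch under that assumed convention; it is not the library's evaluate_mean_absolute_error.

def walk_forward_mae(f, y, k, r, n_burn):
    # Hand-rolled illustration of walk-forward mean absolute error (assumed skater signature f(y, s, k, r)).
    s = {}                                       # skater state starts as an empty dict
    forecasts = []                               # k-step-ahead point forecasts, one per time step
    abs_errors = []
    for t, yt in enumerate(y):
        x, x_std, s = f(y=yt, s=s, k=k, r=r)     # feed one observation, receive forecasts and new state
        forecasts.append(x[-1])                  # x[-1] is the forecast k steps ahead
        target = yt if isinstance(yt, (int, float)) else yt[0]   # first coordinate is the target
        if t >= n_burn and t >= k:
            abs_errors.append(abs(target - forecasts[t - k]))    # compare with the forecast made k steps ago
    return sum(abs_errors) / len(abs_errors)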
Example #3
    if y is not None:
        # Process observation and return prediction
        assert isinstance(y, float) or len(y) == s['dim'], 'Cannot change dimension of input in flight'
        y0, exog = split_exogenous(y=y, dim=s['dim'])
        s = update_buffers(s=s, a=a, exog=exog, y0=y0)
        if True:  # Always fit prior to prediction
            none_, s, _ = flux_auto(y=None, s=s, k=k, a=a, t=t, e=e,
                                    r=r)  # Fit the model
            assert none_ is None
        return flux_or_last_value(s=s, k=k, exog=exog, y0=y0)

    if y is None:
        if len(s.get('buffer')) < s['n_burn']:
            s['model'] = None
        else:
            data = pd.DataFrame(columns=['y'], data=s.get('buffer'))
            s['model'] = pf.ARIMA(data=data,
                                  ar=s['ar'],
                                  ma=s['ma'],
                                  target='y',
                                  family=s['family'])
            _ = s['model'].fit("MLE")
        return None, s, None  # Acknowledge that a fit was requested by returning x=None, w=None


if __name__ == '__main__':
    err = prior_plot_exogenous(f=flux_auto, k=1, n=200, r=0.05)
    pass
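
The split_exogenous helper used above comes from the surrounding package; the toy function below is only a guess at its behaviour, included to make the y0/exog naming concrete (first coordinate as the target, remaining coordinates as exogenous variables).

def toy_split_exogenous(y, dim):
    # Illustrative only: not the library's split_exogenous.
    if isinstance(y, (int, float)):
        return float(y), []          # scalar observation, no exogenous variables
    assert len(y) == dim, 'unexpected dimension'
    return y[0], list(y[1:])         # target first, exogenous variables after

y0, exog = toy_split_exogenous([4.2, 1.0, 0.0], dim=3)
print(y0, exog)                      # 4.2 [1.0, 0.0]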
Example #4
        return x, x_std, s  # forecasts, standard deviations, updated state


def dlm_set_exog_hyperparams(s, r: R_TYPE):
    # Univariate model with autoregressive components
    # This uses the discounting method of H/W, so it doesn't need to be refit as often
    period_choices = [3, 5, 7, 10, 12, 16, 24, 32]
    s['auto_degree'], s['trend_degree'], period_choice = to_int_log_space(
        r,
        bounds=[
            (0.5, 3.2),  # auto-regressive degree
            (0.1, 3.4),  # trend degree: 1 means linear trend, 2 quadratic
            (0, len(period_choices))  # index into period_choices for the seasonality period
        ])
    s['discount'] = 1 - (r % 0.05)  # Shouldn't matter too much as these get tuned eventually
    s['period'] = period_choices[period_choice]
    s['n_burn'] = 100
    s['n_fit'] = 500  # How often to tune discounts subsequently
    return s


if __name__ == '__main__':
    err = prior_plot_exogenous(f=dlm_exogenous_r3,
                               k=1,
                               n=1000,
                               r=np.random.rand())
    plt.figure()
    print('done')
    pass
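
As a quick sanity check on the discount formula above: r % 0.05 always lies in [0, 0.05), so the discount is pinned to (0.95, 1.0] regardless of r, which is why the choice "shouldn't matter too much" before tuning. An illustrative snippet:

# Check the discount mapping used above: it always lands just below 1.0 (values rounded to 4 decimals).
for r in (0.0, 0.12, 0.37, 0.9999):
    print(r, round(1 - (r % 0.05), 4))   # 1.0, 0.98, 0.98, 0.9501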