def predict_loss(prev, paths, split_df):
    # prev: true value
    # paths: paths to the model weights
    t_preds = []
    for tm in range(3):
        tf.reset_default_graph()
        t_preds.append(
            predict(paths[-1:],
                    build_hparams(hparams.params_s32),
                    back_offset=0,
                    predict_window=288,
                    n_models=3,
                    target_model=tm,
                    seed=2,
                    batch_size=50,
                    asgd=True,
                    split_df=split_df))
    preds = sum(t_preds) / 3
    preds.index = [idx.decode('ascii') for idx in preds.index]
    # mean MAE across all series over the 288 predicted points
    res = 0
    for idx in preds.index:
        res += np.abs(preds.loc[idx, :] - prev.loc[idx, -288:]).sum()
    res /= len(preds.index) * 288
    return preds, res
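A hypothetical way to call this function, mirroring how Example #5 below loads checkpoint paths (the checkpoint directory and the split_df value are assumptions, not shown in this snippet):

paths = [p for p in tf.train.get_checkpoint_state('data/cpt/s32').all_model_checkpoint_paths]
preds, mae = predict_loss(prev, paths, split_df=None)  # split_df placeholder; its expected type is not shown
print('mean MAE per point:', mae)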
Example #2
def predict_loss(paths):
    # paths: paths to the model weights
    t_preds = []
    for tm in range(3):
        tf.reset_default_graph()
        t_preds.append(predict(paths[-1:], build_hparams(hparams.params_s32),
                        n_models=3, target_model=tm, seed=5, batch_size=50, asgd=True))
    preds = sum(t_preds) / 3
    return preds
Example #3
def predict_loss(prev, paths):
    # prev: true value
    # paths: paths to the model weights
    t_preds = []
    for tm in range(3):
        tf.reset_default_graph()
        t_preds.append(
            predict(paths[-1:],
                    build_hparams(hparams.params_s32),
                    n_models=3,
                    target_model=tm,
                    seed=5,
                    batch_size=50,
                    asgd=True))
    preds = sum(t_preds) / 3
    # mean absolute percentage error (division by prev[idx] makes this MAPE, not MAE)
    res = 0
    for idx in preds.index:
        res += np.abs(preds[idx] - prev[idx]) / prev[idx]
    res /= 72
    return preds, res
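Note that the division by prev[idx] makes this a relative (percentage) error, unlike the absolute MAE of Example #1. A tiny self-contained illustration of the difference (values made up):

import numpy as np
pred, true = np.array([110.0, 90.0]), np.array([100.0, 100.0])
mae = np.abs(pred - true).mean()            # 10.0, in absolute units
mape = (np.abs(pred - true) / true).mean()  # 0.1, relative to the true values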
Example #4
        type=int,
        help=
        "Save model on each evaluation (10 evals per epoch), starting from this step"
    )
    parser.add_argument('--predict_window',
                        default=63,
                        type=int,
                        help="Number of days to predict")
    args = parser.parse_args()

    #param_dict = dict(vars(args))
    #param_dict['hparams'] = build_from_set(args.hparam_set)
    #del param_dict['hparam_set']
    #train(**param_dict)

    hparams = build_hparams()
    result = train("definc_attn",
                   hparams,
                   n_models=1,
                   train_sampling=1.0,
                   eval_sampling=1.0,
                   patience=5,
                   multi_gpu=False,
                   save_best_model=False,
                   gpu=0,
                   eval_memsize=15,
                   seed=5,
                   verbose=True,
                   forward_split=False,
                   write_summaries=True,
                   side_split=True,
Example #5
def mean_smape(true, pred):  # header assumed; this snippet starts mid-function
    raw_smape = smape(true, pred)
    masked_smape = np.ma.array(raw_smape, mask=np.isnan(raw_smape))
    return masked_smape.mean()
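The smape() function called above is not included in the snippet; a common definition consistent with the NaN masking here (an assumption, not the repository's confirmed code):

import numpy as np

def smape(true, pred):
    # Elementwise symmetric mean absolute percentage error.
    # 0/0 yields NaN, which mean_smape() above masks out.
    denom = np.abs(true) + np.abs(pred)
    return 2 * np.abs(pred - true) / denom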

from make_features import read_all
df_all = read_all()
df_all.columns

prev = df_all

paths = [p for p in tf.train.get_checkpoint_state('data/cpt/s32').all_model_checkpoint_paths]

t_preds = []
for tm in range(3):
    tf.reset_default_graph()
    t_preds.append(predict(paths, build_hparams(hparams.params_s32), back_offset=0, predict_window=63,
                    n_models=3, target_model=tm, seed=2, batch_size=2048, asgd=True))

preds = sum(t_preds) / 3

missing_pages = prev.index.difference(preds.index)
# Use zeros for missing pages
rmdf = pd.DataFrame(index=missing_pages,
                    data=np.tile(0, (len(preds.columns), len(missing_pages))).T, columns=preds.columns)
f_preds = preds.append(rmdf).sort_index()
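Note that DataFrame.append was removed in pandas 2.0; on newer pandas an equivalent, behavior-preserving construction is:

f_preds = pd.concat([preds, rmdf]).sort_index()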

# Use zero for negative and near-zero predictions
f_preds[f_preds < 0.5] = 0
# Round predictions to nearest int
f_preds = np.round(f_preds).astype(np.int64)
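As a quick check of the invariants this cleanup establishes (illustrative assertions, not from the original):

assert (f_preds >= 0).all().all()             # thresholding removed all negatives
assert f_preds.index.is_monotonic_increasing  # pages sorted by sort_index()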