Example #1
def compute_errors(df, pred):
    """Return overall RMSE and SMAPE for one model's saved predictions."""
    results = {}
    _pred = pred['predictions']
    results['Rmse'] = util.rmse(df, _pred)
    results['Smape'] = util.smape(df, _pred)
    results['Name'] = pred['name']
    return results
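
util.rmse and util.smape are project helpers that are not shown in these examples. For reference, a minimal stand-alone sketch of the two metrics using their standard definitions (details may differ from the project's util module):

import numpy as np

def rmse(y_true, y_pred):
    """Root mean squared error over all aligned values."""
    diff = np.asarray(y_true, dtype=float) - np.asarray(y_pred, dtype=float)
    return float(np.sqrt(np.mean(diff ** 2)))

def smape(y_true, y_pred):
    """Symmetric mean absolute percentage error, in percent."""
    y_true = np.asarray(y_true, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    denom = (np.abs(y_true) + np.abs(y_pred)) / 2.0
    return float(np.mean(np.abs(y_true - y_pred) / denom) * 100)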
Example #2
def compute_yearly_errors(df, pred):
    """Return per-year RMSE and SMAPE (2012-2018) for one model's saved predictions."""
    _pred = pred['predictions']
    rmse = []
    smape = []
    year = []
    results = {}
    for i in range(2012, 2019):
        # Score only the predictions that fall in calendar year i.
        pred_year = _pred.loc[_pred.index.year == i]
        rmse.append(util.rmse(df, pred_year))
        smape.append(util.smape(df, pred_year))
        year.append(i)

    results['Name'] = pred['name']
    results['Rmse'] = rmse
    results['Smape'] = smape
    results['Year'] = year
    return results
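
The dict returned above flattens naturally into a per-year table. A small sketch with made-up error values (the keys mirror compute_yearly_errors' output):

import pandas as pd

# Shape of a compute_yearly_errors() result; the numbers here are made up.
yearly = {'Name': 'baseline',
          'Rmse': [12.3, 11.8, 11.1, 10.9, 10.4, 10.2, 9.9],
          'Smape': [8.1, 7.9, 7.5, 7.4, 7.2, 7.0, 6.8],
          'Year': list(range(2012, 2019))}

errors_by_year = pd.DataFrame({'Rmse': yearly['Rmse'], 'Smape': yearly['Smape']},
                              index=pd.Index(yearly['Year'], name='Year'))
print(yearly['Name'])
print(errors_by_year)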
Example #3
# Add the unstacked error_s values as a column of the label frame.
label_unstacked['error_s'] = error_s.unstack().values

data_meta = pd.merge(data_test,
                     label_unstacked,
                     left_on=['date', 'region'],
                     right_on=['date', 'region'])
y_test_meta = data_meta['UseComplex']
X_test_meta = data_meta.drop(
    ['region', 'region_i', 'y', 'date', 'UseComplex', 'error_c', 'error_s'],
    axis=1)

X_test_meta = scaler_meta.transform(X_test_meta)

# Per (date, region) sample, predict whether the complex model should be used.
predictions_classifier = classifier.predict(X_test_meta)
print(confusion_matrix(y_test_meta, predictions_classifier))

df_predictions = pd.Series(predictions_classifier,
                           index=[data_meta['date'], data_meta['region']])
df_predictions = df_predictions.unstack(level=-1)

# Start from the complex model's predictions; where the classifier's boolean
# mask is False, fall back to the simple model's predictions.
pred_meta = pred_c.copy()
pred_meta[~df_predictions] = pred_s

# Save model and predictions
#dump(predictions_to_save, PATH + 'results/predictions_{}.joblib'.format(name))

print('RMSE_Meta:', util.rmse(df, pred_meta))
print('SMAPE_Meta:', util.smape(df, pred_meta))

print('RMSE complex:', util.rmse(df, pred_c))
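
To make the masking step above concrete, here is a self-contained sketch (with toy numbers) of how a boolean date x region frame selects between two prediction frames, mirroring pred_c, pred_s, and df_predictions:

import pandas as pd

# Two toy prediction frames (date x region), standing in for pred_c and pred_s.
idx = pd.date_range('2018-01-01', periods=3, freq='D')
pred_c_toy = pd.DataFrame({'A': [1.0, 2.0, 3.0], 'B': [4.0, 5.0, 6.0]}, index=idx)
pred_s_toy = pd.DataFrame({'A': [10.0, 20.0, 30.0], 'B': [40.0, 50.0, 60.0]}, index=idx)

# Boolean mask: True means "use the complex model" for that date/region.
use_complex = pd.DataFrame({'A': [True, False, True], 'B': [False, True, True]}, index=idx)

pred_meta_toy = pred_c_toy.copy()
pred_meta_toy[~use_complex] = pred_s_toy   # fall back to the simple model elsewhere
print(pred_meta_toy)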
Example #4
regressor = MLPRegressor(hidden_layer_sizes=(128, 32),  # leading args assumed to match Example #5
                         tol=4,
                         max_iter=300,
                         verbose=True,
                         random_state=rs)
# Note: fit() re-derives out_activation_, so this pre-fit assignment is overwritten.
regressor.out_activation_ = 'relu'

scaler = StandardScaler()
X_scaled = scaler.fit_transform(X_train)
regressor.fit(X_scaled, y_train)

X_test, y_test, index, data = util.gen_data(regions,
                                            end_train,
                                            end_test,
                                            weather=weather,
                                            holidays=holidays)
X_test = scaler.transform(X_test)
predictions = regressor.predict(X_test)
index['pred'] = predictions
# Reshape the flat prediction vector into a wide date x region frame.
predictions = index.set_index(['date', 'region']).unstack()
predictions.columns = predictions.columns.droplevel()

predictions_to_save = {}
predictions_to_save['name'] = name
predictions_to_save['predictions'] = predictions

# Save model and predictions
dump(regressor, PATH + 'models/model_{}.joblib'.format(name))
dump(predictions_to_save, PATH + 'results/predictions_{}.joblib'.format(name))

print('RMSE_all:', util.rmse(df, predictions))
print('SMAPE_all:', util.smape(df, predictions))
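
The three lines that turn the flat prediction vector into a date x region frame are the least obvious part of this example. A self-contained sketch of that reshaping with toy data (the layout of the index frame returned by util.gen_data is assumed here):

import numpy as np
import pandas as pd

# Assumed layout of `index`: one row per (date, region) test sample.
index = pd.DataFrame({'date': pd.to_datetime(['2018-01-01', '2018-01-01',
                                              '2018-01-02', '2018-01-02']),
                      'region': ['A', 'B', 'A', 'B']})
predictions = np.array([1.0, 2.0, 3.0, 4.0])   # flat output of regressor.predict

index['pred'] = predictions
wide = index.set_index(['date', 'region']).unstack()
wide.columns = wide.columns.droplevel()        # drop the 'pred' level, keep region names
print(wide)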
Example #5
    print('Shape of training data: ', X_train.shape)

    regressor = MLPRegressor(hidden_layer_sizes=(128, 32), tol=4, max_iter=300, verbose=True, random_state=rs)
    regressor.out_activation_ = 'relu'

    scaler = StandardScaler()
    X_scale = scaler.fit_transform(X_train)
    regressor.fit(X_scale, y_train)

    X_test, y_test, index, data = util.gen_data(regions, end_train, end_test, weather=weather, holidays=holidays)
    X_test = scaler.transform(X_test)
    predictions = regressor.predict(X_test)
    index['pred'] = predictions
    predictions = index.set_index(['date', 'region']).unstack()
    predictions.columns = predictions.columns.droplevel()
    # Collect this training window's predictions; all windows are concatenated below.
    pred_t.append(predictions)

    dump(regressor, PATH + 'models/{}_{}-{}.joblib'.format(name, start_train.year, end_train.year))


pred_yt = pd.concat(pred_t, axis=0)
predictions_to_save = {}
predictions_to_save['name'] = name
predictions_to_save['predictions'] = pred_yt
dump(predictions_to_save, PATH + 'results/predictions_{}.joblib'.format(name))

print('RMSE_all:', util.rmse(df, pred_yt))
print('SMAPE_all:', util.smape(df, pred_yt))
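
The final pd.concat stacks the per-window prediction frames collected in pred_t along the date axis. A toy illustration of that step:

import pandas as pd

# Two per-window prediction frames (date x region), as appended to pred_t above.
win1 = pd.DataFrame({'A': [1.0, 2.0], 'B': [3.0, 4.0]},
                    index=pd.date_range('2017-01-01', periods=2, freq='D'))
win2 = pd.DataFrame({'A': [5.0, 6.0], 'B': [7.0, 8.0]},
                    index=pd.date_range('2018-01-01', periods=2, freq='D'))

pred_yt = pd.concat([win1, win2], axis=0)   # rows from both windows in one frame
print(pred_yt)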