def main():

    locations = list(range(12, 25))
    stacked_errors = []

    for location in locations:

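        # detrended series for three destinations (27, 26, 28)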
        df1 = remove_trend(location, 27)
        df2 = remove_trend(location, 26)
        df3 = remove_trend(location, 28)

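        # lag-feature matrices and targets for each detrended series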
        X1, y1, removed_seasonality1, removed_std1 = generate_lags(
            df1, 20, 2 * 24)
        X2, y2, removed_seasonality2, removed_std2 = generate_lags(
            df2, 20, 2 * 24)
        X3, y3, removed_seasonality3, removed_std3 = generate_lags(
            df3, 20, 2 * 24)

        tf.reset_default_graph()

        merged_model = Merged_Model()
        errors = merged_model.run_epochs(X1, X2, X3, y1, y2, y3, removed_seasonality1,
                                         removed_seasonality2, removed_seasonality3,
                                         removed_std1, removed_std2, removed_std3,
                                         location)

        stacked_errors.append(errors)

    pd.DataFrame(stacked_errors, columns=["LGA", "JFK", "EWR"]).to_csv(
        'data/multi_errors.csv', index=False, sep="\t")
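Note: tf.reset_default_graph(), tf.Session() and tf.train.RMSPropOptimizer used on this page are TensorFlow 1.x APIs. Under TensorFlow 2.x the same calls live in the tf.compat.v1 namespace; a minimal, illustrative shim (the tf1 alias is not part of the original code) would be:

import tensorflow as tf

# Illustrative TF 2.x compatibility shim only; the examples here assume TF 1.x as written.
tf1 = tf.compat.v1
tf1.disable_eager_execution()   # restore graph/session semantics
tf1.reset_default_graph()       # TF 2.x spelling of tf.reset_default_graph()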
Example #2
def main():

    hyper_config = HyperParam_Ranges()
    fn = Auxiliary_funcs(hyper_config)

    df = remove_trend(16, 27)

    X_train, X_validation = fn.split_data_into_training_validation(
        df['time series'].values)

    for i in range(50):

        internal_hidden_size, batch_size, learning_rate = fn.generate_random_hyperparams()

        # construct hyperparam string for each combination -> Tensorboard
        hparam = fn.make_hparam_string(internal_hidden_size, batch_size,
                                       learning_rate)

        # clear the default graph
        tf.reset_default_graph()

        config = Config(internal_hidden_size, batch_size, learning_rate)
        model = RNN_NeuralModel(config)
        model.run_epochs(X_train, X_validation, hparam)
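The helpers generate_random_hyperparams and make_hparam_string belong to the project's Auxiliary_funcs class and are not shown on this page. A rough, hypothetical sketch of what such helpers typically do (the attribute names on the config object are assumptions, not the project's actual names) could be:

import random

class Auxiliary_funcs_sketch:
    """Hypothetical stand-in for the two helpers used above, not the project's code."""

    def __init__(self, hyper_config):
        self.config = hyper_config

    def generate_random_hyperparams(self):
        # sample one combination per search iteration
        hidden = random.choice(self.config.hidden_sizes)   # e.g. [32, 64, 128]
        batch = random.choice(self.config.batch_sizes)     # e.g. [16, 32, 64]
        lr = 10 ** random.uniform(-4, -2)                  # log-uniform learning rate
        return hidden, batch, lr

    def make_hparam_string(self, hidden, batch, lr):
        # one TensorBoard run name per sampled combination
        return "hs=%d,bs=%d,lr=%.4f" % (hidden, batch, lr)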
Example #3
def main():

    loc_fixed = 23

    df1 = remove_trend(loc_fixed, 24)
    df2 = remove_trend(loc_fixed, 21)
    df3 = remove_trend(loc_fixed, 22)

    X1, y1, removed_seasonality1, removed_std1 = generate_lags(df1, 20, 2 * 24)
    X2, y2, removed_seasonality2, removed_std2 = generate_lags(df2, 20, 2 * 24)
    X3, y3, removed_seasonality3, removed_std3 = generate_lags(df3, 20, 2 * 24)

    merged_model = Merged_Model()
    merged_model.run_epochs(X1, X2, X3, y1, y2, y3, removed_seasonality1,
                            removed_seasonality2, removed_seasonality3,
                            removed_std1, removed_std2, removed_std3)
Example #4
def main():

    df = remove_trend(16, 28)

    config = Config()
    model = RNN_NeuralModel(config)
    model.run_epochs(df['time series'].values,
                     df['removed seasonality'].values,
                     df['removed std'].values)
Example #5
def main():

    locations = list(range(12, 25))
    stacked_errors = []

    for location in locations:

        df = remove_trend(location, 27)

        tf.reset_default_graph()

        config = Config()
        model = RNN_NeuralModel(config)
        error = model.run_epochs(df['time series'].values,
                                 df['removed seasonality'].values,
                                 df['removed std'].values,
                                 location)

        stacked_errors.append(error)

    pd.DataFrame(stacked_errors).to_csv(
        'data/LGA_single_errors.csv', index=False, sep="\t")
Example #6
with tf.name_scope("Cost") as scope:
    # mean squared error between predictions and targets
    mse = tf.reduce_mean(tf.pow(tf.subtract(pred, target_placeholder), 2.0))

with tf.name_scope("Optimize") as scope:
    optimizer = tf.train.RMSPropOptimizer(learning_rate)
    opt = optimizer.minimize(mse)

""" Run graph """

with tf.Session() as sess:

    rmses = []

    for dest in [26, 27, 28]:

        df = remove_trend(16, dest)

        X = df['time series'].values
        removed_seasonality = df['removed seasonality'].values
        removed_std = df['removed std'].values

        stacked_preds = []
        stacked_ground_truth = []

        # expanding-window evaluation: the test window advances by n_test each step
        for i in range(bootstrap_size, len(X), n_test):
            print("Current window", i, i + n_test)

            # re-initialise the graph's variables for every window
            sess.run(tf.global_variables_initializer())

            X_train = X[:i]
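The snippet above is cut off after X_train is built. To make the windowing logic explicit, here is a self-contained toy version of the same expanding-window (walk-forward) evaluation, with a naive last-value forecaster standing in for the TensorFlow model (all names and data below are illustrative, not the original code):

import numpy as np

def walk_forward_rmse(series, bootstrap_size, n_test):
    """Expanding-window evaluation: train on series[:i], score series[i:i + n_test],
    then slide the window forward by n_test (the same loop shape as above)."""
    sq_errors = []
    for i in range(bootstrap_size, len(series), n_test):
        train, test = series[:i], series[i:i + n_test]
        preds = np.repeat(train[-1], len(test))   # naive stand-in for the RNN forecast
        sq_errors.extend((preds - test) ** 2)
    return float(np.sqrt(np.mean(sq_errors)))

# illustrative data: a noisy daily cycle sampled hourly
rng = np.random.default_rng(0)
series = np.sin(np.arange(500) * 2 * np.pi / 24) + 0.1 * rng.standard_normal(500)
print(walk_forward_rmse(series, bootstrap_size=200, n_test=48))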
Example #7
def main():

    df = remove_trend(16, 27)
    X, y, removed_seasonality, removed_std = generate_lags(df, 20, 2 * 24)
    run_epochs(X, y, removed_seasonality, removed_std)
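generate_lags is another project helper that is not shown on this page. Purely for orientation, here is a generic sketch of how a 1-D series can be turned into a lag-feature matrix X and targets y; it is not the project's implementation, and the role of the 2 * 24 argument above is not reproduced here:

import numpy as np

def make_lag_matrix(series, n_lags):
    """Generic sketch: X[t] = (series[t - n_lags], ..., series[t - 1]), y[t] = series[t].
    The project's generate_lags also returns the removed seasonality and std of the series."""
    X, y = [], []
    for t in range(n_lags, len(series)):
        X.append(series[t - n_lags:t])
        y.append(series[t])
    return np.asarray(X), np.asarray(y)

# 20 lagged values per row; the 20 in generate_lags(df, 20, ...) above presumably plays a similar role
X, y = make_lag_matrix(np.arange(100, dtype=float), n_lags=20)
print(X.shape, y.shape)   # (80, 20) (80,)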