Example #1
history_size = 30
target_distance = 1

features_considered = ['Close', 'Volume', 'MA_short', 'MA_long']
e = EquityData('data/SPY.csv', 'SPY')
# short- and long-window simple moving averages
e.data['MA_short'] = moving_average(e, window=5)
e.data['MA_long'] = moving_average(e, window=21)
# drop the warm-up rows where the long moving average is not yet defined
e.data = e.data[21:]
EVALUATION_INTERVAL = int(e.data.shape[0]/BATCH_SIZE) * 1
features = e.data[features_considered]
assert(list(features)[0] == 'Close')
features.index = e.date()


dataset = features.values
x_train_multi, y_train_multi, x_val_multi, y_val_multi = split_multivariate(
    dataset, history_size, target_distance, step, single_step=False)

print('Single window of past history : {}'.format(x_train_multi[0].shape))
print('Target to predict : {}'.format(y_train_multi[0].shape))

train_data_multi = tf.data.Dataset.from_tensor_slices((x_train_multi, y_train_multi))
train_data_multi = train_data_multi.cache().shuffle(BUFFER_SIZE).batch(BATCH_SIZE).repeat()

val_data_multi = tf.data.Dataset.from_tensor_slices((x_val_multi, y_val_multi))
val_data_multi = val_data_multi.batch(BATCH_SIZE).repeat()



multi_step_model = tf.keras.models.load_model('checkpoints/multivariate_multi_model')

for x, y in val_data_multi.take(2):
    # inspect a couple of validation batches with the restored model
    # (the loop body is truncated in the original snippet; printing the
    # prediction shape is one minimal option)
    print(multi_step_model.predict(x).shape)
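The `split_multivariate` helper that these snippets rely on is not shown. Below is a minimal sketch of such a sliding-window splitter, assuming the target is the first column (which the assert on 'Close' suggests), an 80/20 chronological train/validation split, and an up/down label rule for the classification case; all of these are assumptions, not the repository's actual implementation.

import numpy as np

def split_multivariate(dataset, history_size, target_distance, step,
                       single_step=False, classification=False,
                       train_frac=0.8):
    # Slide a window of `history_size` rows over the feature matrix and pair
    # each window with the 'Close' column (index 0) `target_distance` rows
    # ahead; `step` subsamples rows inside each window.
    data, labels = [], []
    target = dataset[:, 0]
    for i in range(history_size, len(dataset) - target_distance):
        indices = range(i - history_size, i, step)
        data.append(dataset[indices])
        if single_step:
            if classification:
                # 1 if the close rises over the horizon, else 0
                labels.append(int(target[i + target_distance] > target[i]))
            else:
                labels.append(target[i + target_distance])
        else:
            labels.append(target[i:i + target_distance])
    data, labels = np.array(data), np.array(labels)
    split = int(train_frac * len(data))
    return data[:split], labels[:split], data[split:], labels[split:]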
Example #2
e.data['bb_bbli'] = indicator_bb.bollinger_lband_indicator()
e.data = e.data[21:]

EVALUATION_INTERVAL = int(e.data.shape[0] / BATCH_SIZE) * 2
features = e.data[features_considered]
assert (list(features)[0] == 'Close')
features.index = e.date()

# features.plot(subplots=True)
# plt.show()

dataset = features.values
x_train_single, y_train_single, x_val_single, y_val_single = split_multivariate(
    dataset,
    history_size,
    target_distance,
    step,
    single_step=True,
    classification=CLASSIFICATION)

print('Single window of past history : {}'.format(x_train_single[0].shape))

train_data_single = tf.data.Dataset.from_tensor_slices(
    (x_train_single, y_train_single))
train_data_single = train_data_single.cache().shuffle(BUFFER_SIZE).batch(
    BATCH_SIZE).repeat()

val_data_single = tf.data.Dataset.from_tensor_slices(
    (x_val_single, y_val_single))
val_data_single = val_data_single.batch(BATCH_SIZE).repeat()
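Example #2 uses `indicator_bb` without showing how it was built. With the `ta` library it could be constructed roughly as below; the 20-day window, the 2-standard-deviation band and the `window`/`window_dev` parameter names (older `ta` releases call them `n`/`ndev`) are assumptions.

import ta

# Bollinger Bands over the closing price
indicator_bb = ta.volatility.BollingerBands(
    close=e.data['Close'], window=20, window_dev=2)

# 1.0 where the close crosses below the lower band, 0.0 elsewhere
e.data['bb_bbli'] = indicator_bb.bollinger_lband_indicator()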
Example #3
    # pick selected features
    features = e.data[FEATURES]

    # cleanse
    features.index = e.data.index
    features = features.dropna()
    features = features[26:]

    # to numpy
    dataset = features.values

    # get validation and training data
    xt, yt, xv, yv = split_multivariate(dataset,
                                        HISTORY_SIZE,
                                        TARGET_DIS,
                                        STEP,
                                        single_step=True,
                                        classification=CLASSIFICATION)

    # construct datasets
    t_ds = tf.data.Dataset.from_tensor_slices((xt, yt))
    t_ds = t_ds.cache().shuffle(BUFFER_SIZE).batch(BATCH_SIZE).repeat()
    v_ds = tf.data.Dataset.from_tensor_slices((xv, yv))
    v_ds = v_ds.batch(BATCH_SIZE).repeat()

    # validation callback
    v_cb = tf.keras.callbacks.ModelCheckpoint(
        'checkpoints/multivariate_single_model',
        monitor='val_accuracy',
        verbose=1,
        save_best_only=True)
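    # A minimal sketch of how the pieces above would typically be wired into
    # training. `model`, EPOCHS and the validation_steps value are not defined
    # in this snippet and are assumptions, not the repository's actual code.
    history = model.fit(t_ds,
                        epochs=EPOCHS,
                        steps_per_epoch=int(len(xt) / BATCH_SIZE),
                        validation_data=v_ds,
                        validation_steps=50,
                        callbacks=[v_cb])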