Example #1
 def __init__(self,
              seq_length,
              symbol,
              date,
              n_sample,
              timeframe,
              initial_money=10000):
     self.balance = initial_money
     gen = VaeGen(0.8,
                  seq_length,
                  1,
                  symbol, ["close", "high", "low"],
                  date,
                  test_output_count=1,
                  window_step=1,
                  num_samples=n_sample,
                  n_ema=1,
                  ema_list=None,
                  diff=False,
                  logtransform=False,
                  timeframe=timeframe,
                  preprocess=False)
     self.X, self.Y = gen.get_test_data()
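     # per-side position bookkeeping: entry price and lot size of the currently open buy / sell trade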
     self.buy_info = {"entryprice": None, "lot": 0}
     self.sell_info = {"entryprice": None, "lot": 0}
     print(self.X.shape, self.Y.shape)
     self.t = 0  # this value indicates the "present" in this simulation
     self.win_count = 0
     self.total_trade = 0
Example #2
##################################################################

sim = FXTMTradeSimulator(seq_length,
                         symbol=symbol,
                         date=date,
                         n_sample=92160,
                         timeframe=timeframe)

gen_test = VaeGen(0.8,
                  seq_length,
                  1,
                  symbol, ["close", "high", "low"],
                  date,
                  n_ema=None,
                  ema_list=None,
                  test_output_count=1,
                  window_step=1,
                  num_samples=20000,
                  diff=True,
                  logtransform=False,
                  preprocess=True,
                  timeframe=timeframe)

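# standardizer fitted by the data generator (preprocess=True above), kept so predictions can be unscaled later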
stdizer = gen_test.stdizer

# initialize simulator

wavenet = WaveNet(n_filter,
                  n_residual,
                  n_skip,
                  n_layer,
Example #3
    pred_length = 1
    symbols = ["EURUSD"]

    layers = [64]
    n_pp = 32

    batch_size = 512
    epochs = 100

    mfile = './SavedModel/RNN/LSTMAttn.h5'
    model_saver = ModelCheckpoint(mfile, save_best_only=True, save_weights_only=True)

    gen = VaeGen(train_ratio, seq_length, output_count, symbols,
                 ohlc_list=["close", "high", "low"],
                 n_ema=None,
                 diff=True,
                 logtransform=False,
                 test_output_count=output_count_for_test,
                 last_date=datetime(2019, 7, 28), num_samples=92160,
                 timeframe=MetaTrader5.MT5_TIMEFRAME_M1)

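    # training and validation splits produced by the generator (ratio set by train_ratio)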
    trainX, trainY = gen.get_train_data()
    validX, validY = gen.get_val_data()

    test = AttnLSTM(layers, n_pp)
    test.compile(gen.input_dim, gen.output_dim, optimizer=Adam(decay=0.15))
    test.model_train.fit(trainX, trainY, batch_size=batch_size, epochs=epochs, callbacks=[model_saver],
                         validation_data=(validX, validY))
    test.model.load_weights(mfile)

    visualize(gen, test, pred_length)
Example #4
# validX, validY = validX[:, :, 0:1, :], validY[:, :, 0:1, :]

# trainX, trainY = gen_list.get_train_data()
# trainX, trainY = np.transpose(trainX, (0, 2, 1, 3)), np.transpose(trainY, (0, 2, 1, 3))
# trainX_exp = np.reshape(trainX, (trainX.shape[0] * trainX.shape[1], trainX.shape[2], 1, trainX.shape[3],))
# trainY_exp = np.reshape(trainY, (trainY.shape[0] * trainY.shape[1], trainY.shape[2], 1, trainY.shape[3],))
# print(trainX_exp.shape, trainY_exp.shape)

gen_test = VaeGen(train_ratio,
                  seq_length_W,
                  output_count_W,
                  symbol,
                  ohlc,
                  date,
                  n_ema=ema,
                  osc_list=oscillator,
                  ema_list=ema_list,
                  test_output_count=predict_count + 16,
                  window_step=output_count_W,
                  num_samples=n_sample,
                  diff=diff,
                  logtransform=False,
                  timeframe=timeframe)
print(gen_test.input_dim)

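# validation split, plus an untouched copy of the training split kept as a control set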
validX, validY = gen_test.get_val_data()
trainX_control, trainY_control = gen_test.get_train_data()

##########################init & training#########################

# wavenet1 = WaveNet(n_filter, n_residual, n_skip, n_layer, n_repeat, filter_width=kernel_size, conditional=False,
Example #5
    pred_length = 4
    symbols = ["EURUSD", "GBPUSD", "EURGBP"]

    layers = [50, 50]
    n_pp = 100

    batch_size = 128
    epochs = 100

    mfile = './SavedModel/RNN/Seq2SeqLSTM.h5'
    model_saver = ModelCheckpoint(mfile,
                                  save_best_only=True,
                                  save_weights_only=True)

    gen = VaeGen(train_ratio,
                 seq_length,
                 output_count,
                 symbols,
                 test_output_count=pred_length,
                 last_date=datetime(2019, 5, 28),
                 num_samples=92160,
                 timeframe=MetaTrader5.MT5_TIMEFRAME_M1)

    test = Seq2Seq(layers, n_pp)
    test.compile(gen.input_dim, gen.output_dim, optimizer='adam', loss='mae')
    # test.train_model.fit([gen.trainX, gen.trainY], gen.trainY, batch_size=batch_size, epochs=epochs, callbacks=[model_saver],
    #                validation_data=([gen.validX, gen.validY], gen.validY))
    test.train_model.load_weights(mfile)

    visualize(gen, test, pred_length)
Example #6
if __name__ == "__main__":
    recon_length = 64
    pred_length = 4
    input_dim = [recon_length, 3, 3]
    output_dim_for_train = [recon_length, 3, 3]
    output_dim_for_test = [recon_length + pred_length, 3, 3]
    encoder_layers = [100, 100]
    decoder_layers = [100, 100]
    batch_size = 128
    symbol_list = ["EURUSD", "EURGBP", "GBPUSD"]
    epoch = 300

    latent_dim = 100

    gen = VaeGen(0.8, recon_length, pred_length, symbol_list=symbol_list,
                 last_date=datetime(2019, 5, 28),
                 num_samples=5000, timeframe=MetaTrader5.MT5_TIMEFRAME_D1)

    trainX = gen.trainX
    validX = gen.validX
    validY = gen.validY

    mfile = './SavedModel/VAE/VAE.h5'
    summarydir = "./Summary/VAE"

    model_saver = ModelCheckpoint(mfile, save_best_only=True, save_weights_only=True)
    tensorboard = TensorBoard(log_dir=summarydir, write_graph=True)

    vae = SeqVAE(encoder_layers, decoder_layers, latent_dim)
    vae.compile(input_dim, output_dim=output_dim_for_train, optimizer=Adam(lr=0.001, epsilon=1e-9))
Example #7
    batch_size = 128
    epochs = 64
    latent_dim = 200

    mfile = './SavedModel/VAE/WaveVAE.h5'
    model_saver = ModelCheckpoint(mfile,
                                  save_best_only=True,
                                  save_weights_only=True)
    # tensorboard --host 127.0.0.1 --logdir=D:\Projects\AI\Summary\Wavenet\

    from DataCookers.VAEdataset import VaeGen

    gen = VaeGen(0.8,
                 2048,
                 1,
                 symbol,
                 datetime(2019, 5, 26),
                 num_samples=92160,
                 timeframe=MetaTrader5.MT5_TIMEFRAME_M15)

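    # slice gen.dataset into consecutive, non-overlapping windows of 2048 steps and stack them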
    dataset = np.stack([
        gen.dataset[2048 * i:2048 * (i + 1)]
        for i in range(int(len(gen.dataset) / 2048))
    ])
    print(dataset.shape)

    VAE = WaveVAE(latent_dim, 0.0001)
    VAE.compile(gen.input_dim, Adam(clipvalue=1))
    # VAE.VAE.fit(gen.trainX, gen.trainX, batch_size=batch_size, epochs=epochs, validation_data=(gen.validX, gen.validX),
    #             callbacks=[model_saver], verbose=1)
    VAE.VAE.load_weights(mfile)
Example #8
output_length = 8
feature_dim = 3
# input_vocab_size = tokenizer_pt.vocab_size + 2
# target_vocab_size = tokenizer_en.vocab_size + 2
dropout_rate = 0.1
learning_rate = CustomSchedule(d_model)
optimizer = tf.keras.optimizers.Adam(learning_rate, beta_1=0.9, beta_2=0.98,
                                     epsilon=1e-9)
loss_object = tf.keras.losses.Huber()
#################################################

FXTM_gen = VaeGen(train_ratio, seq_length, output_length, symbol, ohlc, date, n_ema=ema, osc_list=oscillator,
                  ema_list=ema_list,
                  test_output_count=predict_count + 16,
                  window_step=4,
                  num_samples=n_sample,
                  diff=diff,
                  logtransform=False,
                  preprocess=preprocess,
                  timeframe=timeframe)

train = FXTM_gen.get_train_data()
valid = FXTM_gen.get_val_data()

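# collapse singleton axes, then cumulative-sum along the time axis (inverts the differencing when diff=True)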
train = np.squeeze(train)
valid = np.squeeze(valid)

train = np.cumsum(train, axis=1)
valid = np.cumsum(valid, axis=1)

# for i in range(len(train)):
Example #9
                          write_graph=True,
                          histogram_freq=1,
                          write_images=True)
model_saver = ModelCheckpoint(mfile,
                              save_best_only=True,
                              save_weights_only=True)

# tensorboard --host 127.0.0.1 --logdir=D:\Projects\AI\Summary\CondWavenet\

gen = VaeGen(train_ratio,
             seq_length,
             output_count,
             symbol,
             ohlc,
             date,
             n_ema=ema,
             ema_list=ema_list,
             osc_list=oscillator,
             test_output_count=predict_count,
             window_step=output_count,
             num_samples=n_sample,
             diff=diff,
             timeframe=timeframe)

trainX, trainY = gen.get_train_data()
validX, validY = gen.get_val_data()

print(validY[..., 0:1, 0:1].shape)

# test = WaveNetMK0(n_filter, n_fc, n_layer)
test = CondWaveNet(n_filter,
                   n_pp,
Example #10
    n_samples = 92160
    diff = False
    timeframe = MetaTrader5.MT5_TIMEFRAME_D1
    layers = [50]
    n_bins = 64
    batch_size = 512
    epochs = 100

    mfile = './SavedModel/RNN/RWA.h5'
    tfile = './Summary/RNN/RWA/'
    model_saver = ModelCheckpoint(mfile, save_best_only=True, save_weights_only=True)
    tensorboard = TensorBoard(log_dir=tfile, write_graph=True, histogram_freq=1)
    gen = VaeGen(train_ratio, seq_length, output_count, symbols, ohlc_list, datetime(2019, 6, 18), n_ema=ema,
                 osc_list=oscillator,
                 test_output_count=pred_length,
                 window_step=1,
                 num_samples=n_samples,
                 diff=diff,
                 timeframe=timeframe)

    trainX, trainY = gen.get_train_data()
    validX, validY = gen.get_val_data()
    input_dim = gen.input_dim
    output_dim = gen.output_dim

    # test = keras.models.Sequential()
    # test.add(Input(shape=(None, input_dim[1])))
    # test.add(RecurrentWeightedAverage(layers[0]))
    # test.add(Dense(output_dim[1]))
    # test.add(Reshape(target_shape=(1, output_dim[1])))
    # test.compile(optimizer='adam', loss='mae')