Code example #1
import tensorflow as tf
import numpy as np
import datetime
import os.path
np.random.seed(4)
tf.random.set_seed(4)
from util import csv_to_dataset, history_points

# dataset

#File that will be used
#csv_path = "ETHUSDT-1d-data.csv"
csv_path = "ETHBTC-1h-data.csv"

#ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset('ETHUSDT-1h-data.csv')
ohlcv_histories_train, ohlcv_histories_test, unscaled_y_test, y_train, y_test, y_normaliser, tech_ind_train, tech_ind_test = csv_to_dataset(
    csv_path)

# model architecture
bs = 2048
e = 5000

# define two sets of inputs
lstm_input = tf.keras.layers.Input(shape=(history_points, 5),
                                   name='lstm_input')
dense_input = tf.keras.layers.Input(shape=(tech_ind_train.shape[1], ),
                                    name='tech_input')

# the first branch operates on the first input
x = tf.keras.layers.LSTM(history_points, name='lstm_0')(lstm_input)
x = tf.keras.layers.Dropout(0.2, name='lstm_dropout_0')(x)
lstm_branch = tf.keras.models.Model(inputs=lstm_input, outputs=x)
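This example breaks off after the LSTM branch. Based on the full two-branch architecture shown later in this collection (Code example #9 and Code example #19), a minimal sketch of how the dense branch, the merge and the training call could be completed in the same tf.keras style follows; the layer sizes, the validation split and the use of bs/e as batch size and epochs are assumptions, not part of the original snippet.

# the second branch operates on the technical-indicator input (assumed sizes)
y = tf.keras.layers.Dense(20, name='tech_dense_0')(dense_input)
y = tf.keras.layers.Activation('relu', name='tech_relu_0')(y)
y = tf.keras.layers.Dropout(0.2, name='tech_dropout_0')(y)
technical_indicators_branch = tf.keras.models.Model(inputs=dense_input, outputs=y)

# combine the two branches and regress a single next-day open value
combined = tf.keras.layers.concatenate(
    [lstm_branch.output, technical_indicators_branch.output], name='concatenate')
z = tf.keras.layers.Dense(64, activation='sigmoid', name='dense_pooling')(combined)
z = tf.keras.layers.Dense(1, activation='linear', name='dense_out')(z)

model = tf.keras.models.Model(
    inputs=[lstm_branch.input, technical_indicators_branch.input], outputs=z)
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.0005), loss='mse')
model.fit(x=[ohlcv_histories_train, tech_ind_train], y=y_train,
          batch_size=bs, epochs=e, shuffle=True, validation_split=0.1)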
Code example #2
def main():
    """TODO:

    """
    params = Parameters()
    model = tf.keras.models.load_model(get_best_model_path(params), compile=True)

    ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = \
        csv_to_dataset(params.CSV_DATA_PATH, params.num_history_points)

    n = int(ohlcv_histories.shape[0] * params.train_split)
    ohlcv_test = ohlcv_histories[n:]
    tech_ind_test = technical_indicators[n:]
    y_test = next_day_open_values[n:]

    unscaled_y_test = unscaled_y[n:]

    y_test_predicted = model.predict([ohlcv_test, tech_ind_test])
    y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)

    buys = []
    sells = []
    # thresh = 0.7
    thresh = 0.1

    start = 0
    end = -1
    x = -1

    holding_stocks = False
    ratio_threshold = 0.005

    def compute_earnings(buys_, sells_):

        # purchase amount
        purchase_amt = 10
        stock = 0
        balance = 0
        while len(buys_) > 0 and len(sells_) > 0:
            if buys_[0][0] < sells_[0][0]:
                # time to buy $10 worth of stock
                balance -= purchase_amt
                stock += purchase_amt / buys_[0][1]
                buys_.pop(0)
            else:
                # time to sell all of our stock
                balance += stock * sells_[0][1]
                stock = 0
                sells_.pop(0)
        print(f"earnings: ${balance}")

    for ohlcv, ind in zip(ohlcv_test[start: end], tech_ind_test[start: end]):
        normalised_price_today = ohlcv[-1][0]
        normalised_price_today = np.array([[normalised_price_today]])
        price_today = y_normaliser.inverse_transform(normalised_price_today)
        predicted_price_tomorrow = np.squeeze(y_normaliser.inverse_transform(model.predict([[ohlcv], [ind]])))

    ###### HIS CODE ########

        # delta = predicted_price_tomorrow - price_today
        # price_today = price_today[0][0]
        #
        # ratio = predicted_price_tomorrow / price_today
        # ratio_threshold = 0.02
        #
        # if ratio > 1 + ratio_threshold:
        #     buys.append((x, price_today))
        # elif ratio < 1 - ratio_threshold:
        #     sells.append((x, price_today))
        #
        # if delta > thresh and ratio > 1 + ratio_threshold:
        #     buys.append((x, price_today))
        # elif delta < -thresh and ratio < 1 - ratio_threshold:
        #     sells.append((x, price_today))

    ##### START STEFAN ######
        abs_price_today = price_today[0][0]

        if holding_stocks:
            price = buys[-1][1]
        else:
            price = abs_price_today

        # ratio of tomorrow's predicted price to the reference price
        ratio = predicted_price_tomorrow / price

        if abs_price_today / price >= 1.3 and holding_stocks:
            sells.append((x, abs_price_today))
            holding_stocks = False

        # buy when the predicted rise exceeds the threshold (mirrors the sell rule below)
        elif ratio >= 1 + ratio_threshold and not holding_stocks:
            buys.append((x, abs_price_today))
            holding_stocks = True
            print("BUY: price today", abs_price_today, " predicted price tomorrow",
                  predicted_price_tomorrow)

        elif ratio < 1 - ratio_threshold and holding_stocks:
            sells.append((x, abs_price_today))
            holding_stocks = False

    ###################################################
        x += 1
    print(f"buys: {len(buys)}")
    print(f"sells: {len(sells)}")

    # we create new lists so we don't modify the original
    compute_earnings([b for b in buys], [s for s in sells])

    import matplotlib.pyplot as plt

    plt.gcf().set_size_inches(22, 15, forward=True)

    real = plt.plot(unscaled_y_test[start:end], label='real')
    pred = plt.plot(y_test_predicted[start:end], label='predicted')

    if len(buys) > 0:
        plt.scatter(list(list(zip(*buys))[0]), list(list(zip(*buys))[1]), c='#00ff00', s=50)
    if len(sells) > 0:
        plt.scatter(list(list(zip(*sells))[0]), list(list(zip(*sells))[1]), c='#ff0000', s=50)

    # TRAIN TEST
    # ohlcv_train = ohlcv_histories[:n]
    # tech_ind_train = technical_indicators[:n]
    # y_train = next_day_open_values[:n]

    # real = plt.plot(unscaled_y[start:end], label='real')
    # pred = plt.plot(y_predicted[start:end], label='predicted')

    plt.legend(['Real', 'Predicted', 'Buy', 'Sell'])
    plt.show()
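As a self-contained illustration of what compute_earnings does, the same helper is repeated below outside of main() and called on made-up (index, price) tuples; the prices and indices are purely hypothetical and are not taken from any dataset used in these examples.

def compute_earnings(buys_, sells_):
    purchase_amt = 10  # spend $10 at every buy signal
    stock = 0
    balance = 0
    while len(buys_) > 0 and len(sells_) > 0:
        if buys_[0][0] < sells_[0][0]:
            # buy $10 worth at the earliest remaining buy signal
            balance -= purchase_amt
            stock += purchase_amt / buys_[0][1]
            buys_.pop(0)
        else:
            # sell everything at the earliest remaining sell signal
            balance += stock * sells_[0][1]
            stock = 0
            sells_.pop(0)
    print(f"earnings: ${balance}")

# hypothetical signals: buy at index 0 for $100, sell at index 2 for $110;
# the buy at index 3 is never executed because the sell list is empty by then
compute_earnings([(0, 100.0), (3, 105.0)], [(2, 110.0)])
# prints roughly "earnings: $1.0" (0.1 units bought at 100 and sold at 110,
# minus the $10 spent), up to floating-point rounding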
Code example #3
import keras
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation
from keras import optimizers
import numpy as np

np.random.seed(4)
import tensorflow as tf

tf.random.set_seed(4)
from util import csv_to_dataset, history_points
import matplotlib.pyplot as plt

if __name__ == '__main__':
    data_histories, _, next_day, unscaled_y, y_normaliser = csv_to_dataset(
        'MSFT_daily.csv')

    test_split = 0.9
    n = int(data_histories.shape[0] * test_split)
    data_train = data_histories[:n]
    y_train = next_day[:n]
    data_test = data_histories[n:]
    y_test = next_day[n:]
    unscaled_y_test = unscaled_y[n:]

    print(data_train.shape)
    print(data_test.shape)

    # model code
    lstm_input = Input(shape=(history_points, 5), name='input')
    x = LSTM(50, name='lstm_layer')(lstm_input)
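The snippet ends at the LSTM layer. A minimal sketch of the remaining head and training call follows, assuming the same sigmoid/linear head and Adam settings used elsewhere in this collection (Code examples #9, #13 and #19); the dropout rate, dense width, batch size and epoch count are assumptions.

    x = Dropout(0.2, name='lstm_dropout_0')(x)
    x = Dense(64, activation='sigmoid', name='dense_pooling')(x)
    output = Dense(1, activation='linear', name='dense_out')(x)
    model = Model(inputs=lstm_input, outputs=output)

    adam = optimizers.Adam(lr=0.0005)
    model.compile(optimizer=adam, loss='mse')
    model.fit(x=data_train, y=y_train, batch_size=32, epochs=50,
              shuffle=True, validation_split=0.1)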
Code example #4
def main():
    """

    :return:
    """

    params = Parameters()
    if not params.SAVE_MODEL_PATH.exists():
        params.SAVE_MODEL_PATH.mkdir()

    print("Write params in JSON")
    json_string = Parameters().to_json()
    params_path = params.SAVE_MODEL_PATH / "params.json"
    with open(params_path.as_posix(), "w") as f:
        f.write(json_string)

    ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
        params.CSV_DATA_PATH, params.num_history_points)

    print("----", ohlcv_histories.shape, technical_indicators.shape,
          next_day_open_values.shape)

    n = int(ohlcv_histories.shape[0] * params.train_split)

    ohlcv_train = ohlcv_histories[:n]
    tech_ind_train = technical_indicators[:n]
    y_true_train = next_day_open_values[:n]

    ohlcv_test = ohlcv_histories[n:]
    tech_ind_test = technical_indicators[n:]
    # y_true_test = next_day_open_values[n:]
    unscaled_y_test = unscaled_y[n:]

    print("train_val split", ohlcv_train.shape)
    print("test split", ohlcv_test.shape)

    callbacks: List[tf.keras.callbacks.Callback] = [
        tf.keras.callbacks.TensorBoard(params.SAVE_MODEL_PATH.as_posix()),
        tf.keras.callbacks.ModelCheckpoint(
            get_best_model_path(params),
            monitor='val_loss',
            verbose=1,
            save_best_only=True,
        )
    ]

    # model = tech_net(technical_indicators.shape[1:], params)
    #
    # adam = optimizers.Adam(lr=params.LR)
    # model.compile(optimizer=adam, loss='mse')
    #
    # model.fit(x=[ohlcv_train, tech_ind_train], y=y_true_train, batch_size=params.BATCH_SIZE, epochs=params.EPOCHS,
    #           shuffle=True, validation_split=params.val_split_out_of_train,
    #           callbacks=callbacks)

    evaluate(params, ohlcv_test, tech_ind_test, unscaled_y_test, y_normaliser)
Code example #5
File: basic_model.py  Project: imthaghost/spyyder
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation
from keras import optimizers
from util import csv_to_dataset, history_points
import numpy as np
import tensorflow
# np.random.seed(4)
# from tensorflow import set_random_seed
# set_random_seed(4)
# np.random.seed(4)
# set_random_seed(4)
tensorflow.random.set_seed(4)

# dataset: training and testing

ohlcv_histories, _, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
    'MSFT_intraday.csv')

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)
print(ohlcv_test.shape)
Code example #6
import keras
import tensorflow as tf
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation, concatenate
from keras import optimizers
import numpy as np
np.random.seed(16)
import tensorflow as tf
tf.random.set_seed(16)
from util import csv_to_dataset, history_points
import matplotlib.pyplot as plt

# dataset
if __name__ == '__main__':

    data_histories, moving_av, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
        'MSFT_daily.csv')
    test_split = 0.9
    n = int(data_histories.shape[0] * test_split)
    data_train = data_histories[:n]
    moving_av_train = moving_av[:n]
    y_train = next_day_open_values[:n]
    data_test = data_histories[n:]
    moving_av_test = moving_av[n:]
    y_test = next_day_open_values[n:]
    unscaled_y_test = unscaled_y[n:]

    #print(data_train.shape)
    #print(data_test.shape)

    # model code
    lstm_input = Input(shape=(history_points, 5), name='lstm_input')
Code example #7
File: predict.py  Project: Bourn23/AIDATrader
def get_predict(model_path='technical_model.h5',
                symbol='GOOGL',
                update='daily'):
    stock = './data/' + symbol.upper() + "_" + update.lower() + ".csv"
    print(stock)
    print(model_path)
    model = load_model(model_path)

    ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
        stock)

    test_split = 0.9
    n = int(ohlcv_histories.shape[0] * test_split)

    ohlcv_train = ohlcv_histories[:n]
    tech_ind_train = technical_indicators[:n]
    y_train = next_day_open_values[:n]

    ohlcv_test = ohlcv_histories[n:]
    tech_ind_test = technical_indicators[n:]
    y_test = next_day_open_values[n:]

    unscaled_y_test = unscaled_y[n:]

    y_test_predicted = model.predict([ohlcv_test, tech_ind_test])
    y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)

    buys = []
    sells = []
    thresh = 0.1

    start = 0
    end = -1

    x = -1
    for ohlcv, ind in zip(ohlcv_test[start:end], tech_ind_test[start:end]):
        normalised_price_today = ohlcv[-1][0]
        normalised_price_today = np.array([[normalised_price_today]])
        price_today = y_normaliser.inverse_transform(normalised_price_today)
        predicted_price_tomorrow = np.squeeze(
            y_normaliser.inverse_transform(model.predict([[ohlcv], [ind]])))
        delta = predicted_price_tomorrow - price_today
        if delta > thresh:
            buys.append((x, price_today[0][0]))
        elif delta < -thresh:
            sells.append((x, price_today[0][0]))
        x += 1
    print(f"buys: {len(buys)}")
    print(f"sells: {len(sells)}")

    def compute_earnings(buys_, sells_):
        purchase_amt = 10
        stock = 0
        balance = 0
        while len(buys_) > 0 and len(sells_) > 0:
            if buys_[0][0] < sells_[0][0]:
                # time to buy $10 worth of stock
                balance -= purchase_amt
                stock += purchase_amt / buys_[0][1]
                buys_.pop(0)
            else:
                # time to sell all of our stock
                balance += stock * sells_[0][1]
                stock = 0
                sells_.pop(0)
        print(f"earnings: ${balance}")

    # we create new lists so we don't modify the originals
    compute_earnings([b for b in buys], [s for s in sells])

    import matplotlib.pyplot as plt

    plt.gcf().set_size_inches(22, 15, forward=True)

    real = plt.plot(unscaled_y_test[start:end], label='real')
    pred = plt.plot(y_test_predicted[start:end], label='predicted')

    if len(buys) > 0:
        plt.scatter(list(list(zip(*buys))[0]),
                    list(list(zip(*buys))[1]),
                    c='#00ff00',
                    s=50)
    if len(sells) > 0:
        plt.scatter(list(list(zip(*sells))[0]),
                    list(list(zip(*sells))[1]),
                    c='#ff0000',
                    s=50)

    # real = plt.plot(unscaled_y[start:end], label='real')
    # pred = plt.plot(y_predicted[start:end], label='predicted')

    plt.legend(['Real', 'Predicted', 'Buy', 'Sell'])

    plt.show()
Code example #8
import tensorflow as tf
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation, concatenate
from keras import optimizers
import numpy as np

np.random.seed(4)
from tensorflow import set_random_seed

set_random_seed(4)
from util import csv_to_dataset

history_points = 50
# dataset

ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
    'data_daily/AAPL_daily.csv', history_points, 0, False)

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
tech_ind_train = technical_indicators[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
tech_ind_test = technical_indicators[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)
Code example #9
def train_model(options):
    info = get_info_from_options(options)
    file_name = info['file_name']
    moving_average = info['moving_average']
    epochs = info['epochs']
    save_name = info['model_name']
    model_base_name = info['model_base_name']

    print(f'{model_base_name}')

    if os.path.isfile(save_name):
        print(f'{model_base_name} trained')
        return
    try:
        ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
            file_name)
    except Exception:
        print('data parse failed')
        return
    test_split = 0.9
    n = int(ohlcv_histories.shape[0] * test_split)
    ohlcv_train = ohlcv_histories[:n]
    tech_ind_train = technical_indicators[:n]
    y_train = next_day_open_values[:n]
    ohlcv_test = ohlcv_histories[n:]
    tech_ind_test = technical_indicators[n:]
    y_test = next_day_open_values[n:]
    unscaled_y_test = unscaled_y[n:]
    # model architecture
    # define two sets of inputs
    lstm_input = Input(shape=(history_points, 5), name='lstm_input')

    dense_input = Input(shape=(technical_indicators.shape[1], ),
                        name='tech_input')
    # the first branch operates on the first input
    x = LSTM(moving_average, name='lstm_0')(lstm_input)
    x = Dropout(0.2, name='lstm_dropout_0')(x)
    lstm_branch = Model(inputs=lstm_input, outputs=x)

    # the second branch operates on the second input
    y = Dense(20, name='tech_dense_0')(dense_input)
    y = Activation("relu", name='tech_relu_0')(y)
    y = Dropout(0.2, name='tech_dropout_0')(y)
    technical_indicators_branch = Model(inputs=dense_input, outputs=y)

    # combine the output of the two branches
    combined = concatenate(
        [lstm_branch.output, technical_indicators_branch.output],
        name='concatenate')

    z = Dense(64, activation="sigmoid", name='dense_pooling')(combined)
    z = Dense(1, activation="linear", name='dense_out')(z)

    # our model will accept the inputs of the two branches and
    # then output a single value
    model = Model(
        inputs=[lstm_branch.input, technical_indicators_branch.input],
        outputs=z)
    adam = optimizers.Adam(lr=0.0005)
    model.compile(optimizer=adam, loss='mse')
    model.fit(x=[ohlcv_train, tech_ind_train],
              y=y_train,
              batch_size=64,
              epochs=epochs,
              shuffle=True,
              validation_split=0.1)
    # evaluation
    y_test_predicted = model.predict([ohlcv_test, tech_ind_test])
    # print('y_test_predicted.shape', y_test_predicted.shape)
    y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)
    y_predicted = model.predict([ohlcv_histories, technical_indicators])
    y_predicted = y_normaliser.inverse_transform(y_predicted)
    assert unscaled_y_test.shape == y_test_predicted.shape
    real_mse = np.mean(np.square(unscaled_y_test - y_test_predicted))
    scaled_mse = real_mse / (np.max(unscaled_y_test) -
                             np.min(unscaled_y_test)) * 100
    # print(scaled_mse)
    plt.gcf().set_size_inches(22, 15, forward=True)
    start = 0
    end = -1

    real = plt.plot(unscaled_y_test[start:end], label='real')
    pred = plt.plot(y_test_predicted[start:end], label='predicted')

    plt.legend(['Real', 'Predicted'])
    # plt.show()
    model.save(save_name)
Code example #10
import numpy as np
from tensorflow import keras
from util import csv_to_dataset, history_points

from binance.client import Client
from binance.enums import *

model = keras.models.load_model('./save_model')

ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset('BNBUSDT_1d_2021-01-01 00-00-00_2021-04-15 00-00-00.csv')

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
tech_ind_train = technical_indicators[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
tech_ind_test = technical_indicators[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

y_test_predicted = model.predict([ohlcv_test, tech_ind_test])
y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)

buys = []
sells = []
thresh = 0.1
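The example stops before any signals are generated. A sketch of the usual continuation follows, based on the prediction loop from Code examples #7 and #11 (including the ndmin reshape from #11 that gives model.predict the batch dimension it expects); the start/end bounds are assumptions.

start = 0
end = -1

x = 0
for ohlcv, ind in zip(ohlcv_test[start:end], tech_ind_test[start:end]):
    normalised_price_today = np.array([[ohlcv[-1][0]]])
    price_today = y_normaliser.inverse_transform(normalised_price_today)
    # add the batch dimension before predicting, as in Code example #11
    ohlcv_batch = np.array(ohlcv, ndmin=3)
    ind_batch = np.array(ind, ndmin=2)
    predicted_price_tomorrow = np.squeeze(
        y_normaliser.inverse_transform(model.predict([ohlcv_batch, ind_batch])))
    delta = predicted_price_tomorrow - price_today
    if delta > thresh:
        buys.append((x, price_today[0][0]))
    elif delta < -thresh:
        sells.append((x, price_today[0][0]))
    x += 1

print(f"buys: {len(buys)}")
print(f"sells: {len(sells)}")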
Code example #11
def predict_model(options):
    info = get_info_from_options(options)
    file_name = info['file_name']
    model_name = info['model_name']
    start = info['start']
    end = info['end']
    model_base_name = info['model_base_name']
    predict_delay_session_list = info['predict_delay_session_list']
    check_result_already_exist_value = check_result_already_exist(options)
    if check_result_already_exist_value:
        print(f'{model_base_name} predicted')
        return False
    model = load_model(model_name)
    ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
        file_name)
    test_split = 0.9
    n = int(ohlcv_histories.shape[0] * test_split)

    ohlcv_train = ohlcv_histories[:n]
    tech_ind_train = technical_indicators[:n]
    y_train = next_day_open_values[:n]

    ohlcv_test = ohlcv_histories[n:]
    tech_ind_test = technical_indicators[n:]
    y_test = next_day_open_values[n:]

    unscaled_y_test = unscaled_y[n:]
    y_test_predicted = model.predict([ohlcv_test, tech_ind_test])
    y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)
    buys = []
    sells = []
    thresh = 0.1

    # start = 0
    # end = -1
    x = 0

    todayPriceList = []
    predictedPriceList = []
    length = len(ohlcv_test[start:end])
    for ohlcv, ind in zip(ohlcv_test[start:end], tech_ind_test[start:end]):
        normalised_price_today = ohlcv[-1][0]
        normalised_price_today = np.array([[normalised_price_today]])
        price_today = y_normaliser.inverse_transform(normalised_price_today)
        ohlcv_predict = np.array(ohlcv, ndmin=3)  # fix dimension
        ind_predict = np.array(ind, ndmin=2)  # fix dimension
        predicted_price_tomorrow = np.squeeze(
            y_normaliser.inverse_transform(
                model.predict([ohlcv_predict, ind_predict])))
        todayPriceList.append(price_today[0][0])
        predictedPriceList.append(float(predicted_price_tomorrow))
        delta = predicted_price_tomorrow - price_today
        if delta > thresh:
            buys.append((x, price_today[0][0]))
        elif delta < -thresh:
            sells.append((x, price_today[0][0]))
        x += 1
        # print(x, '/', length)

    # print(f"buys: {len(buys)}")
    # print(f"sells: {len(sells)}")

    # def compute_earnings(buys_, sells_):
    #     purchase_amt = 10
    #     stock = 0
    #     balance = 0
    #     while len(buys_) > 0 and len(sells_) > 0:
    #         if buys_[0][0] < sells_[0][0]:
    #             # time to buy $10 worth of stock
    #             balance -= purchase_amt
    #             stock += purchase_amt / buys_[0][1]
    #             buys_.pop(0)
    #         else:
    #             # time to sell all of our stock
    #             balance += stock * sells_[0][1]
    #             stock = 0
    #             sells_.pop(0)
    #     print(f"earnings: ${balance}")

    # we create new lists so we dont modify the original
    # compute_earnings([b for b in buys], [s for s in sells])

    # plt.gcf().set_size_inches(15, 10, forward=True)
    # real = plt.plot(todayPriceList, label='real')
    # pred = plt.plot(predictedPriceList, label='predicted')

    # if len(buys) > 0:
    #     plt.scatter(list(list(zip(*buys))[0]),
    #                 list(list(zip(*buys))[1]), c='#00ff00', s=30)
    # if len(sells) > 0:
    #     plt.scatter(list(list(zip(*sells))[0]),
    #                 list(list(zip(*sells))[1]), c='#ff0000', s=30)

    # calculate buy/sell signals from differences between consecutive predicted prices
    buyWithDifference = []
    sellWithDifference = []
    buy_sell_difference = []

    rootPredictedPriceList = predictedPriceList[0]
    for index, predictedValue in enumerate(predictedPriceList):
        if index > 0:
            if predictedValue > predictedPriceList[index - 1]:
                buyWithDifference.append(
                    (index - 1, predictedPriceList[index - 1]))
                buy_sell_difference.append('buy')
            else:
                sellWithDifference.append(
                    (index - 1, predictedPriceList[index - 1]))
                buy_sell_difference.append('sell')

    # plt.scatter(list(list(zip(*buyWithDifference))[0]),
    #             list(list(zip(*buyWithDifference))[1]), c='#00ff00', s=10)
    # plt.scatter(list(list(zip(*sellWithDifference))[0]),
    #             list(list(zip(*sellWithDifference))[1]), c='#ff0000', s=10)

    # plt.legend(['Real', 'Predicted', 'Buy', 'Sell',
    #             'buyWithDifference', 'sellWithDifference'])

    # calculate accuracy
    # print('buyWithDifference', len(buyWithDifference))
    # print('sellWithDifference', len(sellWithDifference))
    # print('todayPriceList', len(todayPriceList))

    def caculate_accuracy_buy_sell_with_delay_session(deal_type,
                                                      delay_session):
        total_caculate_deal = 0
        true_deal = 0
        false_deal = 0
        if (deal_type == 'buy'):
            list_data = buyWithDifference
        else:
            list_data = sellWithDifference
        # highest valid index in the real-price list (predictions run from 0 to length - 1)
        length_real_data = len(todayPriceList) - 1
        for index, deal_predicted_price in list_data:
            # print(index, deal_predicted_price)
            index_has_delay_session = index + delay_session
            if index_has_delay_session <= length_real_data:
                total_caculate_deal += 1
                deal_real_price = todayPriceList[index]
                delay_session_price = todayPriceList[index_has_delay_session]
                # print('dealRealPrice', deal_real_price,
                #       'delay_sessionPrice', delay_session_price)
                if (deal_type == 'buy'):
                    if (delay_session_price > deal_real_price):
                        true_deal += 1
                    else:
                        false_deal += 1
                else:
                    if (delay_session_price < deal_real_price):
                        true_deal += 1
                    else:
                        false_deal += 1
        if total_caculate_deal > 0:
            print(deal_type, ': total deal: ', len(list_data),
                  ', total caculate deal: ', total_caculate_deal, ', true: ',
                  true_deal, ', false: ', false_deal, ' accuracy: ',
                  round(true_deal / total_caculate_deal * 100, 1), '%')
            return [
                deal_type,
                len(list_data), total_caculate_deal, true_deal, false_deal,
                round(true_deal / total_caculate_deal * 100, 1)
            ]
            # return [deal_type, len(list_data), total_caculate_deal, true_deal, false_deal,'no_signed_deal']
        else:
            print(deal_type, ': total deal: ', len(list_data),
                  ', total caculate deal: ', total_caculate_deal, ', true: ',
                  true_deal, ', false: ', false_deal)
            return [
                deal_type,
                len(list_data), total_caculate_deal, true_deal, false_deal,
                'no_signed_deal'
            ]

    def caculate_accuracy_predicted_with_delay_session(delay_session):
        print('delay_session: ', delay_session)
        buy_result = caculate_accuracy_buy_sell_with_delay_session(
            'buy', delay_session)
        # sell_result = caculate_accuracy_buy_sell_with_delay_session('sell', delay_session)
        # print('total: total deal: ', buy_result[1]+sell_result[1], ', total caculate deal: ',
        #     buy_result[2] + sell_result[2], ', true: ', buy_result[3] +
        #     sell_result[3], ', false: ', buy_result[4] +
        #     sell_result[4], ' accuracy: ',
        #     ((buy_result[3] +
        #         sell_result[3])/(buy_result[2] + sell_result[2])*100), '%')

        return buy_result

    buy_result_list = {}
    for delay_session in predict_delay_session_list:
        buy_result = caculate_accuracy_predicted_with_delay_session(
            delay_session)
        buy_result_list[delay_session] = buy_result

    return {
        'options': options,
        'buy_result_list': buy_result_list,
        'predicted_price_list': predictedPriceList,
        'buy_sell_difference': buy_sell_difference
    }
Code example #12
import numpy as np
np.random.seed(4)
import tensorflow
tensorflow.random.set_seed(4)
from util import csv_to_dataset, history_points

import argparse

parser = argparse.ArgumentParser(description='File to process')
parser.add_argument('dataFile', help='Data source file path')
parser.add_argument('saveToFile', help='Where to save the built model')

args = parser.parse_args()

# dataset

ohlcv_histories, _, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(args.dataFile)

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)
print(ohlcv_test.shape)
Code example #13
def model_training(filepath, model, history_points=50,  offset=0):
    ohlcv_histories, _, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(filepath, history_points,
                                                                                        offset=offset)

    test_split = 0.9
    n = int(ohlcv_histories.shape[0] * test_split)

    ohlcv_train = ohlcv_histories[:n]
    y_train = next_day_open_values[:n]

    ohlcv_test = ohlcv_histories[n:]
    y_test = next_day_open_values[n:]

    unscaled_y_test = unscaled_y[n:]

    # model architecture

    model = model(history_points)

    adam = optimizers.Adam(lr=0.0005)
    model.compile(optimizer=adam, loss='mse')
    trained_model = model.fit(x=ohlcv_train, y=y_train, batch_size=32, epochs=100, shuffle=True, validation_split=0.1)

    # evaluation

    y_test_predicted = model.predict(ohlcv_test)
    y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)
    y_predicted = model.predict(ohlcv_histories)
    y_predicted = y_normaliser.inverse_transform(y_predicted)

    assert unscaled_y_test.shape == y_test_predicted.shape
    real_mse = np.mean(np.square(unscaled_y_test - y_test_predicted))
    scaled_mse = real_mse / (np.max(unscaled_y_test) - np.min(unscaled_y_test)) * 100
    print(scaled_mse)

    import matplotlib.pyplot as plt

    plt.gcf().set_size_inches(22, 15, forward=True)

    start = 0
    end = -1

    real = plt.plot(unscaled_y_test[start:end], label='real')
    pred = plt.plot(y_test_predicted[start:end], label='predicted')

    # real = plt.plot(unscaled_y[start:end], label='real')
    # pred = plt.plot(y_predicted[start:end], label='predicted')

    plt.title(filepath)

    plt.legend(['Real', 'Predicted'])

    plt.show()

    from datetime import datetime
    model.save('basic_model.h5')

    history = trained_model.history

    plt.title(filepath)

    plt.plot(history['loss'], label="Loss")
    plt.plot(history['val_loss'], label="Val Loss")

    plt.plot([0, len(history['val_loss'])], [history['val_loss'][-1], history['val_loss'][-1]], ls='-')

    plt.show()

    return mse(unscaled_y_test, y_test_predicted)
Code example #14
from keras.layers import Dense, Dropout, LSTM, Input, Activation
from keras import optimizers
import numpy as np

np.random.seed(4)
from tensorflow import set_random_seed

set_random_seed(4)
from util import csv_to_dataset
from params import num_history_points

# dataset

CSV_DATA_PATH: str = "/media/dorel/DATA/work/stock-trading-ml/data/msft_daily.csv"

ohlcv_histories, _, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
    CSV_DATA_PATH, )

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)
print(ohlcv_test.shape)
Code example #15
import keras
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation
from keras import optimizers
import numpy as np
np.random.seed(4)
import tensorflow as tf
tf.random.set_seed(4)
from util import csv_to_dataset, history_points

from binance.client import Client
from binance.enums import *

# dataset

ohlcv_histories, _, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
    csv_path='BNBUSDT_1d_2021-01-01 00-00-00_2021-04-15 00-00-00.csv')

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)
print(ohlcv_test.shape)
Code example #16
import numpy as np
from tensorflow import keras
from util import csv_to_dataset, history_points

from binance.client import Client
from binance.enums import *

model = keras.models.load_model('./save_model')

ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(symbol="BNBUSDT", interval=Client.KLINE_INTERVAL_1MINUTE, start_time=1618419600000)

test_split = 0.4
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
tech_ind_train = technical_indicators[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
tech_ind_test = technical_indicators[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

y_test_predicted = model.predict([ohlcv_test, tech_ind_test])
y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)

start = 0
end = -1

import matplotlib.pyplot as plt
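The example ends right after importing matplotlib. A minimal plotting sketch in the style of the plotting code in Code examples #2 and #7 (figure size and legend labels carried over from there):

plt.gcf().set_size_inches(22, 15, forward=True)
plt.plot(unscaled_y_test[start:end], label='real')
plt.plot(y_test_predicted[start:end], label='predicted')
plt.legend(['Real', 'Predicted'])
plt.show()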
Code example #17
import keras
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation
from keras import optimizers
import numpy as np
np.random.seed(4)
import tensorflow
tensorflow.random.set_seed(4)
from util import csv_to_dataset, history_points


# dataset

ohlcv_histories, _, next_day_open_values, unscaled_y, next_day_open_higher, y_normaliser = csv_to_dataset('final/TX_daily.csv')

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)
print(ohlcv_test.shape)


# model architecture
Code example #18
import numpy as np
from keras.models import load_model
from util import csv_to_dataset, history_points

model = load_model('technical_model.h5')

ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
    'MSFT_daily.csv')

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
tech_ind_train = technical_indicators[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
tech_ind_test = technical_indicators[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

y_test_predicted = model.predict([ohlcv_test, tech_ind_test])
y_test_predicted = y_normaliser.inverse_transform(y_test_predicted)

buys = []
sells = []
thresh = 0.1

start = 0
end = -1
Code example #19
File: train.py  Project: akildemir/AlgoTrade
    def train(filePath, symbol):
        # dataset
        ohlcv_histories, technical_indicators, next_day_open_values, technical_normalizer, y_normaliser, data_normaliser = csv_to_dataset(
            filePath, 3)

        # model architecture
        # define two sets of inputs
        lstm_input = Input(shape=(history_points, 5), name='lstm_input')
        dense_input = Input(shape=(technical_indicators.shape[1], ),
                            name='tech_input')

        # the first branch operates on the first input
        x = LSTM(50, name='lstm_0')(lstm_input)
        x = Dropout(0.2, name='lstm_dropout_0')(x)
        lstm_branch = Model(inputs=lstm_input, outputs=x)

        # the second branch operates on the second input
        y = Dense(20, name='tech_dense_0')(dense_input)
        y = Activation("relu", name='tech_relu_0')(y)
        y = Dropout(0.2, name='tech_dropout_0')(y)
        technical_indicators_branch = Model(inputs=dense_input, outputs=y)

        # combine the output of the two branches
        combined = concatenate(
            [lstm_branch.output, technical_indicators_branch.output],
            name='concatenate')

        z = Dense(64, activation="sigmoid", name='dense_pooling')(combined)
        z = Dense(1, activation="linear", name='dense_out')(z)

        # our model will accept the inputs of the two branches and
        # then output a single value
        model = Model(
            inputs=[lstm_branch.input, technical_indicators_branch.input],
            outputs=z)
        adam = optimizers.Adam(lr=0.0005)
        model.compile(optimizer=adam, loss='mse')
        model.fit(x=[ohlcv_histories, technical_indicators],
                  y=next_day_open_values,
                  batch_size=32,
                  epochs=50,
                  shuffle=True,
                  validation_split=0.1)
        return model, technical_normalizer, y_normaliser, data_normaliser
Code example #20
import keras
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation
from keras import optimizers
import numpy as np
np.random.seed(4)
import tensorflow as tf
tf.random.set_seed(4)
from util import csv_to_dataset, history_points

# dataset

ohlcv_histories, _, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
    'stock_data.csv')

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)
print(ohlcv_test.shape)

# model architecture
Code example #21
import keras
import tensorflow as tf
from keras.models import Model
from keras.layers import Dense, Dropout, LSTM, Input, Activation, concatenate
from keras import optimizers
import numpy as np
np.random.seed(4)
tf.random.set_seed(4)
import sys
from util import csv_to_dataset, history_points

# dataset

ohlcv_histories, technical_indicators, next_day_open_values, unscaled_y, y_normaliser = csv_to_dataset(
    sys.argv[1] + "_" + sys.argv[2] + ".csv")

test_split = 0.9
n = int(ohlcv_histories.shape[0] * test_split)

ohlcv_train = ohlcv_histories[:n]
tech_ind_train = technical_indicators[:n]
y_train = next_day_open_values[:n]

ohlcv_test = ohlcv_histories[n:]
tech_ind_test = technical_indicators[n:]
y_test = next_day_open_values[n:]

unscaled_y_test = unscaled_y[n:]

print(ohlcv_train.shape)