Code example #1
File: TomorrowPredict.py  Project: ccliuyang/My_Quant
def predict_tomorrow(stk_code,
                     label,
                     N_STEPS=N_STEPS,
                     feature_cols=feature_cols,
                     HIDDEN_SIZE=HIDDEN_SIZE,
                     NUM_LAYERS=NUM_LAYERS):
    """

    :param stk_code:    e.g. '300183'
    :param label:       e.g. 'high'
    :param N_STEPS:
    :param feature_cols:
    :param HIDDEN_SIZE:
    :param NUM_LAYERS:
    :return:
    """
    """ ---------------------- 读取json中存储的极值 ---------------------- """
    with open(rootPath + '\LSTM\AboutLSTM\stk_max_min.json', 'r') as f:
        max_min_info = json.load(f)
    """ ---------------------- 获取实时数据 ---------------------- """
    data_now = ts.get_k_data(stk_code)[-(N_STEPS + 30):]

    # Add the M9 rank feature
    data_now['m9'] = data_now['close'].rolling(window=9).mean()
    data_now['diff_m9'] = data_now.apply(lambda x:
                                         (x['close'] - x['m9']) / x['close'],
                                         axis=1)
    data_now['rank'] = data_now.apply(lambda x: relativeRank(
        max_min_info[stk_code]['m9_history'], x['diff_m9']),
                                      axis=1)

    for c in ['close', 'high', 'low', 'open']:
        data_now[c] = (data_now[c].values - max_min_info[stk_code]['p_min']
                       ) / (max_min_info[stk_code]['p_max'] -
                            max_min_info[stk_code]['p_min'])

    data_now['volume'] = (
        data_now['volume'].values - max_min_info[stk_code]['v_min']) / (
            max_min_info[stk_code]['v_max'] - max_min_info[stk_code]['v_min'])

    # Take the most recent N_STEPS rows of the (already normalized) feature columns as the model input
    input_normal = data_now.loc[:, feature_cols].tail(N_STEPS).values

    tf.reset_default_graph()
    """ ---------------------- 创建模型 ---------------------- """
    predictions, loss, train_op, X, y = lstm_model(n_steps=N_STEPS,
                                                   n_inputs=len(feature_cols),
                                                   HIDDEN_SIZE=HIDDEN_SIZE,
                                                   NUM_LAYERS=NUM_LAYERS)

    # Create a saver for the model
    saver = tf.train.Saver()

    # Initialize
    sess = tf.Session()
    model_name = stk_code + '_' + label
    model_dir = rootPath + '/LSTM/AboutLSTM/modelDir/'

    if os.path.exists(model_dir + model_name + '/' + model_name +
                      '.ckpt.meta'):

        saver = tf.train.import_meta_graph(model_dir + model_name + '/' +
                                           model_name + '.ckpt.meta')
        saver.restore(sess,
                      tf.train.latest_checkpoint(model_dir + model_name + '/'))

        # graph = tf.get_default_graph()
        # Reset the default graph to avoid errors on repeated calls
        tf.reset_default_graph()

        r_rela = sess.run([predictions], feed_dict={X:
                                                    [input_normal]})[0][0][0]

        return max_min_info[stk_code]['p_min'] + (
            max_min_info[stk_code]['p_max'] -
            max_min_info[stk_code]['p_min']) * r_rela

    else:
        print('Failed to load model ' + model_name + '!')
        return -1
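
Note: the lstm_model helper used throughout these examples is defined elsewhere in the project and is not shown on this page. As a rough orientation only, a minimal TensorFlow 1.x sketch with the same signature could look like the following; the cell type, output layer, loss, and optimizer here are assumptions, not the project's actual definition.

import tensorflow as tf

def lstm_model(n_steps, n_inputs, HIDDEN_SIZE, NUM_LAYERS):
    # Placeholders for a batch of input windows and their scalar targets
    X = tf.placeholder(tf.float32, [None, n_steps, n_inputs])
    y = tf.placeholder(tf.float32, [None, 1])

    # Stacked LSTM cells
    cell = tf.nn.rnn_cell.MultiRNNCell(
        [tf.nn.rnn_cell.BasicLSTMCell(HIDDEN_SIZE) for _ in range(NUM_LAYERS)])
    outputs, _ = tf.nn.dynamic_rnn(cell, X, dtype=tf.float32)

    # Regress the target from the hidden state of the last time step
    predictions = tf.layers.dense(outputs[:, -1, :], 1)

    # Mean squared error loss and an Adam training step
    loss = tf.losses.mean_squared_error(labels=y, predictions=predictions)
    train_op = tf.train.AdamOptimizer(0.001).minimize(loss)

    return predictions, loss, train_op, X, y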
Code example #2
File: LSTM_Book.py  Project: hua0129/MoDeng
def train(data_train, times, model_dir, model_name, N_STEPS, feature_cols,
          HIDDEN_SIZE, NUM_LAYERS):
    """

    :param times:               number of training iterations
    :param model_dir:           model directory, e.g. '../modelDir/'
    :param model_name:          model name
    :return:
    """
    tf.reset_default_graph()

    # Build the model
    predictions, loss, train_op, X, y = lstm_model(n_steps=N_STEPS,
                                                   n_inputs=len(feature_cols),
                                                   HIDDEN_SIZE=HIDDEN_SIZE,
                                                   NUM_LAYERS=NUM_LAYERS)

    # Create a saver for the model
    saver = tf.train.Saver()

    # Initialize
    sess = tf.Session()
    if os.path.exists(model_dir + model_name + '/' + model_name +
                      '.ckpt.meta'):

        saver = tf.train.import_meta_graph(model_dir + model_name + '/' +
                                           model_name + '.ckpt.meta')

        saver.restore(sess,
                      tf.train.latest_checkpoint(model_dir + model_name + '/'))

        graph = tf.get_default_graph()
    else:
        sess.run(tf.global_variables_initializer())

    loss_list = []
    t_s = time.time()  # start time
    loss_list_accuracy = []

    for i in range(times):

        # Randomly draw a mini-batch from the training set, batch_size = 7
        list_sample = random.sample(data_train, 7)

        inputs = [x[0] for x in list_sample]
        output = np.reshape([x[1][-1] for x in list_sample], newshape=[-1, 1])

        _, _, l = sess.run([predictions, train_op, loss],
                           feed_dict={
                               X: inputs,
                               y: output
                           })

        # Keep the last 1000 loss values; their mean is reported as the loss of this training run
        if i > times - 1000:
            loss_list_accuracy.append(l)

        loss_list.append(l)

        if len(loss_list) > 100:

            print('Current loss: ' + str(np.mean(loss_list)))
            loss_list = []

    print('Total time: ' + '%0.2f' % ((time.time() - t_s) / 60) + ' minutes')

    # Save the model
    saver.save(sess=sess,
               save_path=model_dir + model_name + '/' + model_name + '.ckpt')

    # Return the loss of this training run
    return np.mean(loss_list_accuracy)
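
As context, train() expects data_train to be a list of (window, labels) pairs: each window has shape [N_STEPS, n_inputs] and the last element of labels is the regression target. A hypothetical driver, with a pickle path and argument values that are assumptions modelled on the other examples on this page, might look like this:

import pickle

# Hypothetical usage sketch; the path and the argument values are assumptions
stk_code = '300508'
label = 'high'

with open('../DataPrepare/' + stk_code + '/' + stk_code + 'train' + label +
          '.pkl', 'rb') as f:
    data_train = pickle.load(f)

mean_loss = train(data_train,
                  times=5000,                         # number of iterations (assumed)
                  model_dir='../modelDir/',
                  model_name=stk_code + '_' + label,
                  N_STEPS=N_STEPS,                    # from the project's config
                  feature_cols=feature_cols,
                  HIDDEN_SIZE=HIDDEN_SIZE,
                  NUM_LAYERS=NUM_LAYERS)
print('Mean loss of the last 1000 steps: ' + str(mean_loss))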
Code example #3
import os
import pickle

import numpy as np
import tensorflow as tf
from pylab import *
""" This script makes predictions with the trained lstm model """
""" -------------------- 测试 ---------------------- """
stk_code = 'cyb'

# Prepare the data
with open('../DataPrepare/' + stk_code + 'test' + '.pkl', 'rb') as f:
    data_test = pickle.load(f)
""" -------------------------- 加载lstm模型进行预测 --------------------------- """

# Build the model
predictions, loss, train_op, X, y = lstm_model(n_steps=N_STEPS,
                                               n_inputs=N_INPUTS,
                                               HIDDEN_SIZE=HIDDEN_SIZE,
                                               NUM_LAYERS=NUM_LAYERS)

# Create a saver for the model
saver = tf.train.Saver(tf.global_variables())

# Initialize
sess = tf.Session()
if os.path.exists('../modelDir/300183/300183_high.ckpt.meta'):

    saver = tf.train.import_meta_graph(
        '../modelDir/300183/300183_high.ckpt.meta')
    saver.restore(sess, tf.train.latest_checkpoint('../modelDir/300183/'))

    # graph = tf.get_default_graph()
    """ ---------------------- 使用模型进行预测 ------------------------- """
Code example #4
if __name__ == '__main__':

    stk_code = '300508'
    label = 'high'

    # Prepare the data
    with open(
            '../DataPrepare/' + stk_code + '/' + stk_code + 'test' + label +
            '.pkl', 'rb') as f:
        data_train = pickle.load(f)

    tf.reset_default_graph()

    # Build the model
    predictions, loss, train_op, X, y = lstm_model(n_steps=N_STEPS,
                                                   n_inputs=len(feature_cols),
                                                   HIDDEN_SIZE=HIDDEN_SIZE,
                                                   NUM_LAYERS=NUM_LAYERS)

    # Create a saver for the model
    saver = tf.train.Saver()

    # Initialize
    sess = tf.Session()
    if os.path.exists('../modelDir/' + stk_code + '_' + label + '/' +
                      stk_code + '_' + label + '.ckpt.meta'):

        saver = tf.train.import_meta_graph('../modelDir/' + stk_code + '_' +
                                           label + '/' + stk_code + '_' +
                                           label + '.ckpt.meta')

        saver.restore(