Example #1
def __init__(self, base_price, portion, step_percent, cur_position,
             start_time, end_time, init_balance):
    # Base price X
    self.base_price = base_price
    # Number of portions N
    self.portion = portion
    # Step size Y (in percent)
    self.step_percent = step_percent
    # Currently held portions S
    self.cur_position = cur_position
    # self.init_balance = init_balance
    # Cash balance M
    self.balance = init_balance
    self.start_time = start_time
    self.end_time = end_time
    # 5-minute bar data for the backtest window
    self.data = ReadData.read_datas_5min_test(start_time, end_time)
    self.idt = self.data['id']
    self.idt = self.idt[self.idt >= start_time]
    self.dict = dict()
    # Units traded per transaction
    self.portion_per_time = cur_position // portion
    # Closing prices at the start and end of the window
    temp = self.data[self.data['id'] == start_time]
    first_close_t = temp.iat[0, 2]
    temp = self.data[self.data['id'] == end_time]
    end_close_t = temp.iat[0, 2]
    # Total assets (price * position + cash) at the start and at the end
    self.total_init = first_close_t * cur_position + init_balance
    self.end_total = end_close_t * cur_position + init_balance
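
A usage sketch (not from the original source): assuming this __init__ belongs to a grid-trading strategy class, here called GridStrategy as a hypothetical name, and that ReadData.read_datas_5min_test returns a DataFrame whose 'id' column holds timestamps in the same format as start_time/end_time, with the close price in column 2, construction could look like this:

# Hypothetical usage; GridStrategy and the timestamp format are assumptions.
strategy = GridStrategy(
    base_price=10.0,           # reference price X
    portion=10,                # split the position into N portions
    step_percent=0.05,         # step size Y: act on every 5% move
    cur_position=1000,         # units currently held (S)
    start_time='2020-01-01 09:35:00',  # must match the 'id' column format
    end_time='2020-12-31 15:00:00',
    init_balance=100000.0,     # starting cash M
)
print(strategy.total_init, strategy.end_total)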
Example #2
x = tf.placeholder(tf.float32, shape=[None, WND_HEIGHT, WND_WIDTH])

SeqLens = tf.placeholder(shape=[cfg.BatchSize], dtype=tf.int32)

x_expanded = tf.expand_dims(x, 3)

Inputs = CNN(x_expanded, phase_train, 'CNN_1')

logits = RNN(Inputs, SeqLens, 'RNN_1')

# CTC beam-search decoder: decode the predicted string from the probability map
decoded, log_prob = tf.nn.ctc_beam_search_decoder(logits, SeqLens)

# Reading test data...
InputListTest, SeqLensTest, _ = ReadData(cfg.TEST_LOCATION, cfg.TEST_LIST,
                                         cfg.TEST_NB, WND_HEIGHT, WND_WIDTH,
                                         WND_SHIFT, VEC_PER_WND, '')

print('Initializing...')

session = tf.Session()

session.run(tf.global_variables_initializer())

LoadModel(session, cfg.SaveDir + '/')

try:
    session.run(tf.assign(phase_train, False))

    randIxs = range(0, len(InputListTest))
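
The fragment ends inside the try block. As a rough sketch of what typically follows (not the original loop), the first SparseTensor returned by tf.nn.ctc_beam_search_decoder can be densified and evaluated batch by batch; batch_images and batch_lens below are placeholders shaped to match the x and SeqLens placeholders:

# Sketch only: batch_images / batch_lens are assumed batches, not part of
# the original script.
dense_decoded = tf.sparse_tensor_to_dense(decoded[0], default_value=-1)
pred, lp = session.run([dense_decoded, log_prob],
                       feed_dict={x: batch_images, SeqLens: batch_lens})
# Each row of pred is a sequence of label indices padded with -1.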
Example #3
OverallValidationLoss = tf.Variable(0,
                                    name='OverallValidationLoss',
                                    dtype=tf.float32)
OverallValidationError = tf.Variable(0,
                                     name='OverallValidationError',
                                     dtype=tf.float32)
OverallTrainingLoss_s = tf.summary.scalar('OverallTrainingLoss',
                                          OverallTrainingLoss)
OverallTrainingError_s = tf.summary.scalar('OverallTrainingError',
                                           OverallTrainingError)
OverallValidationLoss_s = tf.summary.scalar('OverallValidationLoss',
                                            OverallValidationLoss)
OverallValidationError_s = tf.summary.scalar('OverallValidationError',
                                             OverallValidationError)

# Reading training data...
inputList, seqLens, targetList = ReadData(cfg.TRAIN_LOCATION, cfg.TRAIN_LIST,
                                          cfg.TRAIN_NB, WND_HEIGHT, WND_WIDTH,
                                          WND_SHIFT, VEC_PER_WND,
                                          cfg.TRAIN_TRANS)

# Reading validation data...
if cfg.VAL_NB > 0:
    inputListVal, seqLensVal, targetListVal = ReadData(
        cfg.VAL_LOCATION, cfg.VAL_LIST, cfg.VAL_NB, WND_HEIGHT, WND_WIDTH,
        WND_SHIFT, VEC_PER_WND, cfg.VAL_TRANS)

# Starting everything...
LogFile.write("Initializing...\n\n")
LogFile.flush()

session = tf.Session()

session.run(tf.global_variables_initializer())
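
The script stops right after initialization. One plausible continuation (a sketch, not the original training loop) is to push aggregated metrics into the summary variables and write them with a tf.summary.FileWriter; the log directory, epoch counter, and epoch_val_* values below are assumptions:

# Sketch only: '/logs', epoch, epoch_val_loss and epoch_val_error are
# hypothetical names, not from the original script.
writer = tf.summary.FileWriter(cfg.SaveDir + '/logs', session.graph)

# After an epoch, store the aggregated metrics and emit the scalar summaries.
session.run([tf.assign(OverallValidationLoss, epoch_val_loss),
             tf.assign(OverallValidationError, epoch_val_error)])
summaries = session.run([OverallValidationLoss_s, OverallValidationError_s])
for s in summaries:
    writer.add_summary(s, epoch)
writer.flush()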