# Example 1
value_moving_average = 50  # moving-average window length (consumed later; not used in this chunk)
split = (0.5, 0.3, 0.2)    # train / test / cross-validation fractions
plotting = False
saving = False

# load data
# NOTE(review): Windows-style path separator; assumes the script runs from the project root.
oanda_data = np.load('data\\EUR_USD_H1.npy')[-50000:]  # keep only the most recent 50k hourly candles
# binary up/down target from future price movement beyond +/-delta (project helper)
output_data_raw = price_to_binary_target(oanda_data, delta=0.0001)
price_data_raw = extract_timeseries_from_oanda_data(oanda_data, ['closeMid'])
input_data_raw, input_data_dummy_raw = get_features(oanda_data)
# convert close prices to one-step relative returns; first row padded with 0,
# and 1e-10 in the denominator guards against division by zero
price_data_raw = np.concatenate([[[0]],
                                 (price_data_raw[1:] - price_data_raw[:-1]) / (price_data_raw[1:] + 1e-10)], axis=0)

# prepare data
input_data, output_data, input_data_dummy, price_data = \
    remove_nan_rows([input_data_raw, output_data_raw,
                     input_data_dummy_raw, price_data_raw])
# min-max scale the real-valued features; min_max_scaling is defined elsewhere —
# presumably row 0 holds per-feature max and row 1 per-feature min — TODO confirm
input_data_scaled_no_dummies = (
    input_data - min_max_scaling[1, :]) / (min_max_scaling[0, :] - min_max_scaling[1, :])
# dummy (categorical/indicator) features are appended unscaled
input_data_scaled = np.concatenate(
    [input_data_scaled_no_dummies, input_data_dummy], axis=1)

# split to train, test and cross validation
input_train, input_test, input_cv, output_train, output_test, output_cv, price_train, price_test, price_cv = \
    train_test_validation_split(
        [input_data_scaled, output_data, price_data], split=split)

# get dims
_, input_dim = np.shape(input_train)   # number of input features after scaling + dummies
_, output_dim = np.shape(output_train)  # number of target classes

# forward-propagation
# Example 2
def do_stuff_every_period():
    """Execute one live-trading iteration.

    Waits for a freshly completed H1 candle, rebuilds the feature vector
    with the same scaling used at training time, runs the trained network
    to obtain a buy/sell/hold signal, reconciles the currently open
    position with that signal, and appends one row to the CSV log.

    Relies on module-level state defined elsewhere: ``log`` (DataFrame),
    ``start_time``, ``last_complete_candle_stamp``, ``margin_rate``,
    ``trading_sess``, ``tz``, ``min_max_scaling`` and the TensorFlow
    handles ``sess``, ``y_``, ``x``, ``drop_out``.
    """
    global log
    global start_time
    global last_complete_candle_stamp
    global margin_rate
    current_time = str(datetime.datetime.now(tz))[:-13]  # trim sub-second/offset tail

    # estimate position size: whole balance scaled by available leverage
    account_balance = np.around(
        float(trading_sess.check_account_summary()['account']['balance']), 0)
    funds_to_commit = account_balance * (1 / margin_rate)

    # download latest data
    # always check if new candle is present, because even after 5 seconds, it might be not formed if market is very calm
    # make sure this loop does not loop endlessly on weekends (this is configured in scheduler)
    while True:
        oanda_data = get_latest_oanda_data(
            'EUR_USD', 'H1',
            300)  # many data-points to increase EMA and such accuracy
        current_complete_candle_stamp = oanda_data[-1]['time']
        if current_complete_candle_stamp != last_complete_candle_stamp:  # if new candle is complete
            break
        time.sleep(5)
    last_complete_candle_stamp = current_complete_candle_stamp

    # get features, scaled exactly as during training
    input_data_raw, input_data_dummy = get_features(oanda_data)
    input_data, input_data_dummy = remove_nan_rows(
        [input_data_raw, input_data_dummy])
    input_data_scaled_no_dummy = (input_data - min_max_scaling[1, :]) / (
        min_max_scaling[0, :] - min_max_scaling[1, :])
    input_data_scaled = np.concatenate(
        [input_data_scaled_no_dummy, input_data_dummy], axis=1)

    # estimate signal from the most recent row only; drop_out=1 keeps all units
    y_pred = sess.run(y_,
                      feed_dict={
                          x: input_data_scaled[-1:, :],
                          drop_out: 1
                      })
    order_signal_id = y_pred.argmax()
    # class ids: 0 = buy, 1 = sell, 2 = hold -> position direction 1 / -1 / 0
    order_signal = [1, -1, 0][order_signal_id]

    # manage trading positions
    # NOTE(review): when the signal is "hold" (0) but a position is open, this
    # closes it and then calls open_order with 0 units — confirm the trading
    # API treats a zero-size order as a no-op.
    current_position = trading_sess.order_book['EUR_USD']['order_type']
    if current_position != order_signal:
        if current_position is not None:
            trading_sess.close_order('EUR_USD')
        trading_sess.open_order('EUR_USD', funds_to_commit * order_signal)
    else:
        print('{}: EUR_USD (holding)'.format(['Long', 'Short',
                                              'Nothing'][order_signal_id]))

    # log
    new_log = pd.DataFrame(
        [[current_time, oanda_data[-1]['closeMid'], y_pred]],
        columns=['Datetime', 'Last input Price', 'y_pred'])
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
    # pd.concat (without ignore_index) reproduces the old append semantics
    log = pd.concat([log, new_log])
    log.to_csv('logs/log {}.csv'.format(start_time))

    print(
        '{} | price: {:.5f} | signal: buy: {:.2f}, sell: {:.2f}, nothing: {:.2f}'
        .format(current_time, oanda_data[-1]['closeMid'], y_pred[0][0],
                y_pred[0][1], y_pred[0][2]))
# Example 3
split = (0.5, 0.3, 0.2)  # train / test / cross-validation fractions
plotting = False
saving = False
time_steps = 4  # consecutive candles stacked into each CNN sample

# load data
oanda_data = np.load('data\\EUR_USD_H1.npy')  # [-50000:]
price_data_raw = extract_timeseries_from_oanda_data(oanda_data, ['closeMid'])
input_data_raw, input_data_dummy = get_features(oanda_data)
# one-step relative returns; first row padded with 0, 1e-10 avoids division by zero
price_data_raw = np.concatenate([[[0]],
                                 (price_data_raw[1:] - price_data_raw[:-1]) /
                                 (price_data_raw[1:] + 1e-10)],
                                axis=0)

# prepare data
input_data, price_data, input_data_dummy = remove_nan_rows(
    [input_data_raw, price_data_raw, input_data_dummy])
# min-max scale; min_max_scaling presumably holds per-feature max (row 0) / min (row 1) — TODO confirm
input_data_scaled_no_dummies = (input_data - min_max_scaling[1, :]) / (
    min_max_scaling[0, :] - min_max_scaling[1, :])
input_data_scaled = np.concatenate(
    [input_data_scaled_no_dummies, input_data_dummy], axis=1)
# NOTE(review): the CNN windows below are built from the UNSCALED input_data,
# so input_data_scaled is computed but never used — confirm this is intended.
input_data, _ = get_cnn_input_output(input_data,
                                     np.zeros_like(input_data),
                                     time_steps=time_steps)
price_data = price_data[-len(input_data):]  # align prices with rows lost to windowing

# split to train,test and cross validation
input_train, input_test, input_cv, price_train, price_test, price_cv = \
    train_test_validation_split([input_data, price_data], split=split)

# get dims
_, input_dim, _, _ = np.shape(input_train)  # 4-D CNN input; axis order assumed (samples, features, steps, channels) — TODO confirm
plotting = False
saving = False
transaction_c = 0.000  # per-trade transaction cost (disabled here)

# load data
oanda_data = np.load('data\\EUR_USD_H1.npy')[-50000:]  # most recent 50k hourly candles
# binary up/down target beyond +/-delta (project helper)
y_data = price_to_binary_target(oanda_data, delta=0.000275)
x_data = get_features_v2(oanda_data,
                         time_periods=[10, 25, 50, 120, 256],
                         return_numpy=False)  # DataFrame, so columns are addressable by name

# separate, rearrange and remove nans
# DataFrame/Series.as_matrix() was removed in pandas 1.0; to_numpy() is the
# supported equivalent and returns the same ndarray
price = x_data['price'].to_numpy().reshape(-1, 1)
price_change = x_data['price_delta'].to_numpy().reshape(-1, 1)
x_data = x_data.drop(['price', 'price_delta'], axis=1).to_numpy()
price, price_change, x_data, y_data = remove_nan_rows(
    [price, price_change, x_data, y_data])

# split to train, test and cross validation
input_train, input_test, input_cv, output_train, output_test, output_cv, price_train, price_test, price_cv = \
    train_test_validation_split([x_data, y_data, price_change], split=split)

# pre-process data: scale, pca, polynomial
input_train, input_test, input_cv = min_max_scale(input_train,
                                                  input_test,
                                                  input_cv,
                                                  std_dev_threshold=2.5)
# input_train, input_test, input_cv = get_pca(input_train, input_test, input_cv, threshold=0.01)
input_train, input_test, input_cv = get_poloynomials(input_train,
                                                     input_test,
                                                     input_cv,
                                                     degree=2)