rates = pd.DataFrame.from_records(download_rates("EUR_USD", 5000), columns=['time', 'rate']) rates.set_index('time', inplace=True) calendar_samples = 31536000 calendar_update = True if os.path.isfile("calendar_update_" + str(calendar_samples)): prev_time = pickle.load(open("calendar_update_" + str(calendar_samples), 'rb')) if (curr_time - prev_time) < 60 * 60 * 2: calendar_update = False if calendar_update == True: print "Downloading Calendar" calendar = pd.DataFrame.from_records(download_calendar(calendar_samples), columns=['currency', 'impact', 'actual', 'forecast', 'time', 'region']) pickle.dump(calendar, open("calendar_" + str(calendar_samples), 'wb')) pickle.dump(int(time.time()), open("calendar_update_" + str(calendar_samples), 'wb')) else: calendar = pickle.load(open("calendar_" + str(calendar_samples), 'rb')) calendar.set_index('time', inplace=True) training_set = create_training_set(rates.index, calendar) print len(training_set), "training_set_size" for pair in currency_pairs: currency1 = pair[0:3] currency2 = pair[4:7]
def process_pending_trades():
    """Download one year (31536000 s) of economic-calendar records and log
    the ten most recent rows.

    NOTE(review): another, longer `process_pending_trades(account_numbers,
    avg_spreads)` is defined later in this file; if both live in the same
    module, the later definition wins at import time — confirm intent.
    """
    one_year_seconds = 31536000
    calendar_columns = ['currency', 'impact', 'actual', 'forecast', 'time', 'region']
    records = download_calendar.download_calendar(one_year_seconds)
    base_calendar = pd.DataFrame.from_records(records, columns=calendar_columns)
    trade_logger.info(str(base_calendar.tail(10)))
def process_pending_trades(account_numbers, avg_spreads): total_balance = 0 total_float_profit = 0 total_margin_available = 0 total_margin_used = 0 for account_number in account_numbers: response_value, _ = sendCurlRequest( "https://api-fxpractice.oanda.com/v3/accounts/" + account_number + "/summary", "GET") j = json.loads(response_value) account_profit = float(j['account'][u'unrealizedPL']) account_balance = float(j['account'][u'balance']) margin_available = float(j['account']['marginAvailable']) margin_used = float(j['account']['marginUsed']) total_balance += account_balance total_float_profit += account_profit total_margin_available += margin_available total_margin_used += margin_used trade_logger.info('Equity: ' + str(total_balance + total_float_profit)) if os.path.isfile(root_dir + "group_metadata_news"): group_metadata = pickle.load( open(root_dir + "group_metadata_news", 'rb')) else: group_metadata = {} if os.path.isfile(root_dir + "order_metadata_news"): order_metadata = pickle.load( open(root_dir + "order_metadata_news", 'rb')) else: order_metadata = {} if os.path.isfile(root_dir + "group_base_wt_news"): prev_model_wt = pickle.load(open(root_dir + "group_base_wt_news", 'rb')) else: prev_model_wt = {} if "max_equity" not in group_metadata: group_metadata["max_equity"] = total_balance group_metadata["max_equity"] = max(group_metadata["max_equity"], total_balance) orders = [] total_margin = 0 for account_number in account_numbers: orders1, total_margin = get_open_trades(account_number, order_metadata, total_margin) orders += orders1 orders_by_prediction = {} orders_by_model = {} time_diff_hours = {} existing_order_amount = set() for order in orders: if order.prediction_key not in orders_by_prediction: orders_by_prediction[order.prediction_key] = [] if order.model_key not in orders_by_model: time_diff_hours[order.model_key] = 9999999999 orders_by_model[order.model_key] = [] if order.pair + "_" + order.account_number + "_" + str( order.amount) in 
existing_order_amount: trade_logger.info("Duplicate FIFO trade") existing_order_amount.add(order.pair + "_" + order.account_number + "_" + str(order.amount)) orders_by_prediction[order.prediction_key].append(order) orders_by_model[order.model_key].append(order) time_diff_hours[order.model_key] = min( time_diff_hours[order.model_key], order.time_diff_hours) total_orders = len(orders) avg_prices = {} pair_bid_ask_map = {} for pair in currency_pairs: response, _ = sendCurlRequest( "https://api-fxpractice.oanda.com/v1/prices?instruments=" + pair, "GET") response = json.loads(response)['prices'] pip_size = 0.0001 if pair[4:] == "JPY": pip_size = 0.01 bid = None ask = None for spread_count in range(1): curr_spread = 0 for price in response: if price['instrument'] == pair: curr_price = (price['bid'] + price['ask']) / 2 curr_spread = abs(price['bid'] - price['ask']) / pip_size bid = price['bid'] ask = price['ask'] break if curr_price == 0: print "price not found" continue pair_bid_ask_map[pair] = {} pair_bid_ask_map[pair]['bid'] = bid pair_bid_ask_map[pair]['ask'] = ask first_currency = pair[0:3] second_currency = pair[4:7] avg_prices[first_currency + "_" + second_currency] = curr_price for pair in currency_pairs: first_currency = pair[0:3] second_currency = pair[4:7] avg_prices[second_currency + "_" + first_currency] = 1.0 / avg_prices[pair] base_calendar = pd.DataFrame.from_records( download_calendar.download_calendar(604800), columns=['currency', 'impact', 'actual', 'forecast', 'time', 'region']) print base_calendar sys.exit(0) for pair in currency_pairs: model_key = pair ideal_spread = avg_spreads[pair] pip_size = 0.0001 if pair[4:] == "JPY": pip_size = 0.01 actual_spread = abs(pair_bid_ask_map[pair]['bid'] - pair_bid_ask_map[pair]['ask']) / pip_size actual_spread /= ideal_spread curr_price = abs(pair_bid_ask_map[pair]['bid'] + pair_bid_ask_map[pair]['ask']) / pip_size curr_spread = actual_spread last_processed_key = model_key + "_last_processed" last_price_key = 
model_key + "_last_price" if last_price_key not in group_metadata: group_metadata[last_price_key] = curr_price price_diff = abs(curr_price - group_metadata[last_price_key]) / 0.0001 price_diff /= 10 curr_spread /= max(1, price_diff) print model_key, "Curr Spread", curr_spread if curr_spread < 1.2: group_metadata[last_processed_key] = time.time() group_metadata[last_price_key] = curr_price group_metadata[model_key + "_curr_spread"] = curr_spread if curr_spread > 1.2: continue if pair + "_last_calendar_update" not in group_metadata: group_metadata[pair + "_last_calendar_update"] = time.time() if (time.time() - group_metadata[pair + "_last_calendar_update"] ) < 60 * 60 and False: continue group_metadata[pair + "_last_calendar_update"] = time.time() for currency in [pair[0:3], pair[4:7]]: model_key = pair + "_" + currency calendar = base_calendar[base_calendar["currency"] == currency] last_time = calendar["time"].values.tolist()[-1] if len(calendar[calendar["time"] >= time.time() - (36 * 60 * 60)]) == 0: close_group_trades(orders_by_model, model_key, None, \ order_metadata, group_metadata, total_margin_available, total_margin_used, \ existing_order_amount, curr_price, avg_prices, avg_spreads, \ len(orders_by_model)) if (time.time() - last_time ) < 12 * 60 * 60 and model_key not in orders_by_model: continue if len(calendar[calendar["time"] >= time.time() - (24 * 60 * 60)]) == 0: continue prices, times = get_time_series(pair, 12) deltas = [] for price in prices: deltas.append(price - prices[0]) z_delta = (deltas[-1] - np.mean(deltas)) / np.std(deltas) curr_model_wt = float(total_balance) * 0.05 base_model_key = enter_group_trades(model_key, pair, z_delta, order_metadata, \ group_metadata, orders_by_model, curr_model_wt, prev_model_wt, \ total_margin_available, total_margin_used, curr_price, \ account_numbers, existing_order_amount, curr_spread, avg_prices) close_group_trades(orders_by_model, model_key, z_delta < 0, \ order_metadata, group_metadata, 
total_margin_available, total_margin_used, \ existing_order_amount, curr_price, avg_prices, avg_spreads, \ len(orders_by_model)) pickle.dump(prev_model_wt, open(root_dir + "group_base_wt_news", 'wb')) pickle.dump(group_metadata, open(root_dir + "group_metadata_news", 'wb')) pickle.dump(order_metadata, open(root_dir + "order_metadata_news", 'wb')) trade_logger.info('Total Orders: ' + str(total_orders))
# --- Tail of a price-history helper whose `def` lies above this chunk; it
# appears to accumulate close prices and return parallel price/time lists.
# Indentation reconstructed — presumably the first two statements sit inside
# a loop over downloaded candles; TODO confirm against the full file. ---
        prices.append(item['closeMid'])
        index += 1

    return prices, times


# All 28 pairs formed from the 8 major currencies (AUD, CAD, CHF, EUR, GBP,
# JPY, NZD, USD) as quoted by the broker.
currency_pairs = [
    "AUD_CAD", "CHF_JPY", "EUR_NZD", "GBP_JPY", "AUD_CHF", "EUR_AUD",
    "GBP_NZD", "USD_CAD", "AUD_JPY", "EUR_CAD", "GBP_USD", "USD_CHF",
    "AUD_NZD", "EUR_CHF", "EUR_USD", "NZD_CAD", "AUD_USD", "EUR_GBP",
    "GBP_AUD", "NZD_CHF", "CAD_CHF", "EUR_JPY", "GBP_CAD", "NZD_JPY",
    "CAD_JPY", "GBP_CHF", "NZD_USD", "USD_JPY"
]

# One year (31536000 s) of economic-calendar events, shared by the
# back-test routine below.
base_calendar = pd.DataFrame.from_records(
    download_calendar.download_calendar(31536000),
    columns=['currency', 'impact', 'actual', 'forecast', 'time', 'region'])


def back_test_currency(currency, price_df, pair):
    # Back-test entry point: replays calendar events for `currency`.
    # The iteration body continues beyond this chunk — incomplete here.
    test_calendar = base_calendar[base_calendar['currency'] == currency]
    orders = []
    min_profit = 99999999
    equity = 0
    mag_factor = 1.0
    equity_curve = []
    for index, row in test_calendar.iterrows():