def check_deal_placements():
    """Smoke-test the speedy deal-placement path with a DEBUG trade pair.

    Guarded by YES_I_KNOW_WHAT_AM_I_DOING because, despite the fake fixture
    values below, this MAY issue a real trade on the exchanges.
    """
    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("check_deal_placements may issue a real trade!")

    create_time = get_now_seconds_utc()

    # Deliberately bogus fixture values - this is a placement smoke test.
    first_book_time = -10
    second_book_time = -20
    volume = 5
    pair_id = CURRENCY_PAIR.BTC_TO_ARDR
    difference = "difference is HUGE"
    file_name = "test.log"

    msg_queue = get_message_queue()
    processor = ConnectionPool(pool_size=2)

    sell_trade = Trade(DEAL_TYPE.SELL, EXCHANGE.POLONIEX, pair_id,
                       0.00000001, volume, first_book_time, create_time)
    buy_trade = Trade(DEAL_TYPE.BUY, EXCHANGE.BITTREX, pair_id,
                      0.00004, volume, second_book_time, create_time)
    trade_pairs = TradePair(sell_trade, buy_trade,
                            first_book_time, second_book_time, DEAL_TYPE.DEBUG)

    init_deals_with_logging_speedy(trade_pairs, difference, file_name,
                                   processor, msg_queue)
def test_expired_deal_placement():
    """Place a real SELL order on Binance, then register it in the expiry watch queue."""
    load_keys(API_KEY_PATH)
    priority_queue = get_priority_queue()

    now = get_now_seconds_utc()
    order = Trade(DEAL_TYPE.SELL, EXCHANGE.BINANCE, CURRENCY_PAIR.BTC_TO_STRAT,
                  price=0.001, volume=5.0, order_book_time=now, create_time=now,
                  execute_time=now, order_id='whatever')

    msg = "Replace existing order with new one - {tt}".format(tt=order)
    err_code, json_document = init_deal(order, msg)
    print(json_document)

    # Replace the placeholder id with the real one the exchange assigned.
    order.order_id = parse_order_id(order.exchange_id, json_document)
    priority_queue.add_order_to_watch_queue(ORDERS_EXPIRE_MSG, order)
def is_order_present_in_order_history(pg_conn, trade, table_name="arbitrage_orders"):
    """Return True when ``trade`` already exists in the order-history table.

    We can execute history retrieval several times: some exchanges do not have
    a precise mechanism to exclude a particular time range, and an order_id
    may map to multiple trades whose other fields coincide. So an order counts
    as "present" only when both executed_volume (float tolerance) and
    create_time match an existing row with the same order_id.

    :param pg_conn: postgres connection wrapper
    :param trade: Trade to look for
    :param table_name: history table to search
    :return: bool
    """
    # table_name cannot be a bind parameter; order_id IS parameterized to
    # avoid SQL injection / quoting issues with exchange-supplied string ids
    # (the original interpolated it directly into the query text).
    select_query = """select arbitrage_id, exchange_id, trade_type, pair_id, price, volume,
        executed_volume, order_id, trade_id, order_book_time, create_time, execute_time
        from {table_name} where order_id = %s""".format(table_name=table_name)

    # NOTE(review): pg_conn.cursor is accessed without a call throughout this
    # file - presumably a property on the wrapper; confirm against its class.
    cursor = pg_conn.cursor
    cursor.execute(select_query, (trade.order_id,))

    for row in cursor:
        cur_trade = Trade.from_row(row)
        if abs(cur_trade.executed_volume - trade.executed_volume) < 0.0000001 and \
                cur_trade.create_time == trade.create_time:
            return True

    return False
def load_trades_from_csv_to_db(file_name="all_orders.csv", start_time=-1, end_time=-2):
    """Load bittrex trades from a CSV export into postgres, grouped per pair.

    Generalized: file name and time window are now parameters (defaults keep
    the original hard-coded values, so existing callers are unaffected).

    :param file_name: path to the bittrex CSV export
    :param start_time: inclusive lower bound on Trade.create_time
    :param end_time: inclusive upper bound on Trade.create_time

    NOTE(review): the default range is EMPTY (start_time=-1 > end_time=-2),
    so with defaults no row ever passes the filter - exactly as the original
    placeholder constants behaved. Callers must supply a real range.
    """
    pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT, _db_name=DB_NAME)

    # Bucket qualifying trades by currency pair for per-pair progress bars.
    bittrex_order_by_pair = defaultdict(list)
    with open(file_name, 'r') as f:
        for row in csv.reader(f):
            new_trade = Trade.from_bittrex_scv(row)
            if start_time <= new_trade.create_time <= end_time:
                bittrex_order_by_pair[new_trade.pair_id].append(new_trade)

    unique_only = True
    for pair_id in bittrex_order_by_pair:
        headline = "Loading bittrex trades - {p}".format(
            p=get_currency_pair_to_bittrex(pair_id))
        # Skip trades already present in history when unique_only is set.
        wrap_with_progress_bar(headline, bittrex_order_by_pair[pair_id],
                               save_to_pg_adapter, pg_conn, unique_only,
                               is_trade_present_in_trade_history,
                               init_arbitrage_id=-20, table_name="arbitrage_trades")
def parse_orders_currency(json_document, pair_name):
    """Convert a poloniex history response into Trade objects.

    Entries that fail to parse (``from_poloniex_history`` returns None)
    are silently skipped.
    """
    parsed = (Trade.from_poloniex_history(entry, pair_name) for entry in json_document)
    return [trade for trade in parsed if trade is not None]
def test_insert_order():
    """Insert one hard-coded poloniex SELL order into postgres."""
    from enums.exchange import EXCHANGE
    from enums.deal_type import DEAL_TYPE
    from enums.currency_pair import CURRENCY_PAIR

    timest = 1516039961
    wtf = Trade(DEAL_TYPE.SELL, EXCHANGE.POLONIEX, CURRENCY_PAIR.BTC_TO_ARDR,
                0.00001, 10.4, timest, timest)

    pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT, _db_name=DB_NAME)
    save_order_into_pg(wtf, pg_conn)
def test_failed_order_placement_bittrex():
    """Place a real BITTREX SELL order, then push it onto the failed-orders queue."""
    load_keys(API_KEY_PATH)

    now = get_now_seconds_utc()
    order = Trade(DEAL_TYPE.SELL, EXCHANGE.BITTREX, CURRENCY_PAIR.BTC_TO_ETH,
                  price=0.075, volume=0.1, order_book_time=now, create_time=now)

    msg = "Testing huobi - {tt}".format(tt=order)
    err_code, json_document = init_deal(order, msg)
    print(json_document)

    msg_queue = get_message_queue()
    msg_queue.add_order(FAILED_ORDERS_MSG, order)
def get_last_binance_trade(pg_conn, start_date, end_time, pair_id, table_name="arbitrage_trades"):
    """Return the most recent binance Trade for ``pair_id`` within the window, or None.

    :param pg_conn: postgres connection wrapper
    :param start_date: inclusive lower bound on create_time
    :param end_time: inclusive upper bound on create_time
    :param pair_id: currency pair to filter on
    :param table_name: trade-history table to query
    :return: Trade or None when nothing matches
    """
    # Values are bound as parameters instead of being interpolated into the
    # query text (the original used str.format, which is injection-prone and
    # bypasses driver-side quoting). table_name cannot be a bind parameter.
    select_query = """select arbitrage_id, exchange_id, trade_type, pair_id, price, volume,
        executed_volume, order_id, trade_id, order_book_time, create_time, execute_time
        from {table_name} where exchange_id = %s and pair_id = %s
        and create_time >= %s and create_time <= %s
        ORDER BY create_time DESC limit 1""".format(table_name=table_name)

    cursor = pg_conn.cursor
    cursor.execute(select_query, (EXCHANGE.BINANCE, pair_id, start_date, end_time))

    # "limit 1" means at most one iteration; returning inside the loop keeps
    # the None fall-through for the empty result set.
    for row in cursor:
        return Trade.from_row(row)

    return None
def receive_binance_trade_batch(key, pair_name, limit, last_order_id):
    """Fetch one batch of binance trade history, retrying until the call succeeds.

    Blocks indefinitely (2s sleep between attempts) while the API keeps failing.
    """
    error_code, json_document = get_trades_history_binance(
        key, pair_name, limit, last_order_id)

    while error_code == STATUS.FAILURE:
        print("receive_trade_batch: got error responce - Reprocessing")
        sleep_for(2)
        error_code, json_document = get_trades_history_binance(
            key, pair_name, limit, last_order_id)

    return [Trade.from_binance_history(entry, pair_name) for entry in json_document]
def get_open_orders_poloniex_result_processor(json_document, pair_name):
    """Convert a poloniex open-orders response into ``(STATUS, [Trade])``.

    Unparsable entries are skipped; an error response yields
    ``(STATUS.FAILURE, [])`` and is logged.
    """
    if is_error(json_document):
        msg = "get_open_orders_poloniex_result_processor - error response - {er}".format(
            er=json_document)
        log_to_file(msg, ERROR_LOG_FILE_NAME)
        return STATUS.FAILURE, []

    parsed = (Trade.from_poloniex(entry, pair_name) for entry in json_document)
    return STATUS.SUCCESS, [order for order in parsed if order is not None]
def get_orders_binance_result_processor(msg, json_document, pair_name):
    """Convert a binance orders response into ``(STATUS, [Trade])``.

    :param msg: message logged verbatim when the response is an error
    :param json_document: decoded response from the exchange api
    :param pair_name: unused, kept for result-processor interface compatibility
    """
    if is_error(json_document):
        log_to_file(msg, ERROR_LOG_FILE_NAME)
        return STATUS.FAILURE, []

    parsed = (Trade.from_binance(entry) for entry in json_document)
    return STATUS.SUCCESS, [order for order in parsed if order is not None]
def get_open_orders_kraken_result_processor(json_document, pair_name):
    """Convert a kraken open-orders response into a list of Trade objects.

    Returns an empty list (after logging) on an error response or when the
    "open" section is missing. ``pair_name`` is unused; kept for
    result-processor interface compatibility.
    """
    # Fresh list instead of aliasing the shared EMPTY_LIST constant -
    # appending to the module-level constant would corrupt later callers.
    open_orders = []

    # BUGFIX: the error check must inspect the response document, not the
    # always-empty local list (the original called is_error(open_orders),
    # so error responses were never detected). The sibling kraken history
    # processor already checks json_document. .get() also guards against a
    # missing "result" key, which would previously raise KeyError.
    if is_error(json_document) or "open" not in json_document.get("result", {}):
        msg = "get_open_orders_kraken_result_processor - error response - {er}".format(
            er=json_document)
        log_to_file(msg, ERROR_LOG_FILE_NAME)
        return open_orders

    for order_id, entry in json_document["result"]["open"].items():
        new_order = Trade.from_kraken(order_id, entry)
        if new_order is not None:
            open_orders.append(new_order)

    return open_orders
def test_trade_present():
    """Check history-presence detection against a fixed binance BUY trade."""
    pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT)

    # 6479142
    timest = 1516142509
    trade = Trade(DEAL_TYPE.BUY, EXCHANGE.BINANCE, CURRENCY_PAIR.BTC_TO_STRAT,
                  price=0.001184, volume=2.08, order_book_time=timest,
                  create_time=timest, execute_time=timest, order_id='whatever')

    res = is_trade_present_in_trade_history(pg_conn, trade,
                                            table_name="tmp_history_trades")
    print(res)
def test_order_presence():
    """Check order-history presence detection against a freshly-stamped order."""
    pg_conn = init_pg_connection(_db_host=DB_HOST, _db_port=DB_PORT)

    # 6479142
    timest = get_now_seconds_utc()
    some_trade = Trade(DEAL_TYPE.BUY, EXCHANGE.BINANCE, CURRENCY_PAIR.BTC_TO_STRAT,
                       price=0.001184, volume=2.08, order_book_time=timest,
                       create_time=timest, execute_time=timest, order_id='whatever')

    res = is_order_present_in_order_history(pg_conn, some_trade,
                                            table_name="tmp_binance_orders")
    print(res)
def get_all_orders(pg_conn, table_name="arbitrage_orders", time_start=START_OF_TIME,
                   time_end=START_OF_TIME):
    """Fetch orders from ``table_name``, optionally limited to a create_time window.

    When both bounds are the START_OF_TIME sentinel the whole table is dumped.

    :param pg_conn: postgres connection wrapper
    :param table_name: orders table to query
    :param time_start: inclusive lower bound on create_time (or sentinel)
    :param time_end: inclusive upper bound on create_time (or sentinel)
    :return: list of Trade
    """
    # Single copy of the column list (the original duplicated the whole
    # select text in both branches). table_name cannot be a bind parameter;
    # the time bounds are bound instead of interpolated.
    base_query = """select arbitrage_id, exchange_id, trade_type, pair_id, price, volume,
        executed_volume, order_id, trade_id, order_book_time, create_time, execute_time
        from {table_name}""".format(table_name=table_name)

    cursor = pg_conn.cursor
    if time_start == START_OF_TIME and time_end == START_OF_TIME:
        cursor.execute(base_query)
    else:
        cursor.execute(base_query + " where create_time >= %s and create_time <= %s",
                       (time_start, time_end))

    return [Trade.from_row(row) for row in cursor]
def get_order_history_bittrex_result_processor(json_document, pair_name):
    """Convert a bittrex order-history response into ``(STATUS, [Trade])``.

    :param json_document: decoded response from the exchange api
    :param pair_name: unused, kept for result-processor interface compatibility
    """
    if is_error(json_document) or json_document["result"] is None:
        msg = "get_order_history_bittrex_result_processor - error response - {er}".format(
            er=json_document)
        log_to_file(msg, ERROR_LOG_FILE_NAME)
        return STATUS.FAILURE, []

    parsed = (Trade.from_bittrex_history(entry) for entry in json_document["result"])
    return STATUS.SUCCESS, [order for order in parsed if order is not None]
def get_order_history_kraken_result_processor(json_document, pair_name):
    """Convert a kraken closed-orders response into a list of Trade objects.

    Returns an empty list (after logging) on an error response or when the
    "closed" section is missing. ``pair_name`` is unused; kept for
    result-processor interface compatibility.
    """
    # BUGFIX: a fresh list instead of aliasing the shared EMPTY_LIST constant.
    # Appending to EMPTY_LIST mutated the module-level constant, leaking
    # orders into every later caller that starts from it.
    orders = []

    # .get() guards against a response with no "result" key, which would
    # previously raise KeyError instead of being logged.
    if is_error(json_document) or "closed" not in json_document.get("result", {}):
        msg = "get_order_history_kraken_result_processor - error response - {er}".format(
            er=json_document)
        log_to_file(msg, ERROR_LOG_FILE_NAME)
        return orders

    for order_id, entry in json_document["result"]["closed"].items():
        new_order = Trade.from_kraken(order_id, entry)
        if new_order is not None:
            orders.append(new_order)

    return orders
def test_failed_deal_placement():
    """Push a hand-crafted stale BITTREX order straight onto the failed-orders queue.

    The live placement path (init_deal + ORDERS_MSG) was disabled in the
    original; this only exercises queue handling of a FAILED order.
    """
    load_keys(API_KEY_PATH)
    msg_queue = get_message_queue()

    stamp = 1517938516
    order = Trade(DEAL_TYPE.SELL, EXCHANGE.BITTREX, CURRENCY_PAIR.BTC_TO_STRAT,
                  price=0.000844, volume=5.0, order_book_time=stamp,
                  create_time=stamp, execute_time=stamp, order_id=None)

    sleep_for(3)
    msg_queue.add_order(FAILED_ORDERS_MSG, order)
    print(order)
def search_for_arbitrage(sell_order_book, buy_order_book, threshold, balance_threshold,
                         action_to_perform, balance_state, deal_cap, type_of_deal,
                         worker_pool, msg_queue):
    """
    Core arbitrage check: compare the top of two order books and, when the
    relative price difference clears the threshold, size and place a mutual
    SELL/BUY trade pair via ``action_to_perform``.

    :param sell_order_book: order_book from exchange where we are going to SELL
    :param buy_order_book: order_book from exchange where we are going to BUY
    :param threshold: difference in price in percent that MAY trigger MUTUAL deal placement
    :param balance_threshold: for interface compatibility with balance_adjustment method
    :param action_to_perform: method that will be called in case threshold condition are met
    :param balance_state: balance across all active exchange for all supported currencies
    :param deal_cap: dynamically updated minimum volume per currency
    :param type_of_deal: ARBITRAGE or REVERSE. EXPIRED or FAILED will not be processed here
    :param worker_pool: gevent based connection pool for speedy deal placement
    :param msg_queue: redis backed msq queue with notification for Telegram
    :return: tuple (status, trade_pair) - NOTE(review): see comment at bottom,
             as written this always returns (STATUS.FAILURE, None)
    """
    deal_status = STATUS.FAILURE, None

    # Nothing to compare against - one side of the market is empty.
    if not sell_order_book.bid or not buy_order_book.ask:
        return deal_status

    # Signed percent change between best bid (sell side) and best ask (buy side).
    # NOTE(review): bid uses index FIRST while ask uses LAST - presumably the
    # two books are sorted in opposite order; confirm against order-book code.
    difference = get_change(sell_order_book.bid[FIRST].price,
                            buy_order_book.ask[LAST].price, provide_abs=False)

    if should_print_debug():
        log_arbitrage_heart_beat(sell_order_book, buy_order_book, difference)

    if difference >= threshold:
        # Start from the max volume both balances support, then clamp by the
        # per-currency trading caps and exchange lot-size rules - order matters.
        min_volume = determine_minimum_volume(sell_order_book, buy_order_book, balance_state)
        min_volume = adjust_minimum_volume_by_trading_cap(deal_cap, min_volume)
        min_volume = adjust_maximum_volume_by_trading_cap(deal_cap, min_volume)
        min_volume = round_volume_by_exchange_rules(
            sell_order_book.exchange_id, buy_order_book.exchange_id, min_volume,
            sell_order_book.pair_id)

        if min_volume <= 0:
            log_arbitrage_determined_volume_not_enough(sell_order_book, buy_order_book,
                                                       msg_queue)
            return deal_status

        # Re-price against book depth: walking the levels for min_volume can
        # move the effective prices away from the top-of-book quotes.
        sell_price = adjust_price_by_order_book(sell_order_book.bid, min_volume)

        arbitrage_id = get_next_arbitrage_id()
        create_time = get_now_seconds_utc()

        trade_at_first_exchange = Trade(DEAL_TYPE.SELL, sell_order_book.exchange_id,
                                        sell_order_book.pair_id, sell_price, min_volume,
                                        sell_order_book.timest, create_time,
                                        arbitrage_id=arbitrage_id)

        buy_price = adjust_price_by_order_book(buy_order_book.ask, min_volume)
        trade_at_second_exchange = Trade(DEAL_TYPE.BUY, buy_order_book.exchange_id,
                                         buy_order_book.pair_id, buy_price, min_volume,
                                         buy_order_book.timest, create_time,
                                         arbitrage_id=arbitrage_id)

        # Recheck profitability at the depth-adjusted prices; 0.2 percent is
        # the hard-coded floor below which placement is abandoned.
        final_difference = get_change(sell_price, buy_price, provide_abs=False)
        if final_difference <= 0.2:
            log_arbitrage_determined_price_not_enough(
                sell_price, sell_order_book.bid[FIRST].price, buy_price,
                buy_order_book.ask[LAST].price, difference, final_difference,
                sell_order_book.pair_id, msg_queue)
            return deal_status

        trade_pair = TradePair(trade_at_first_exchange, trade_at_second_exchange,
                               sell_order_book.timest, buy_order_book.timest, type_of_deal)

        placement_status = action_to_perform(trade_pair, final_difference,
                                             "history_trades.log", worker_pool, msg_queue)

        # NOTE: if we can't update balance for more than TIMEOUT seconds arbitrage process will exit
        # for exchange_id in [trade_pair.deal_1.exchange_id, trade_pair.deal_2.exchange_id]:
        #     update_balance_by_exchange(exchange_id)
        #
        # deal_status = placement_status, trade_pair
        # NOTE(review): the line above is commented out, so placement_status is
        # computed but never propagated - the function returns (FAILURE, None)
        # even after a successful placement. Confirm whether this is intended.

    return deal_status