def get_ticker_speedup(timest, processor):
    """Collect tickers from every supported exchange for timestamp `timest`.

    Bittrex/Kraken/Huobi are fetched concurrently through `processor`;
    Poloniex and Binance have dedicated bulk endpoints and are appended
    afterwards. Unsupported (pair, exchange) combinations are skipped.
    """
    work_units = []
    for exchange_id in [EXCHANGE.BITTREX, EXCHANGE.KRAKEN, EXCHANGE.HUOBI]:
        for pair_id in CURRENCY_PAIR.values():
            pair_name = get_currency_pair_name_by_exchange_id(pair_id, exchange_id)
            if pair_name is None:
                # This exchange does not trade the pair - skip it.
                continue
            build_url = get_ticker_url_by_exchange_id(exchange_id)
            parse_ticker = get_ticker_constructor_by_exchange_id(exchange_id)
            work_units.append(
                WorkUnit(build_url(pair_name), parse_ticker, pair_name, timest))

    tickers = processor.process_async_get(work_units, timeout=HTTP_TIMEOUT_SECONDS)
    tickers += get_tickers_poloniex(POLONIEX_CURRENCY_PAIRS, timest)
    tickers += get_tickers_binance(BINANCE_CURRENCY_PAIRS, timest)
    return tickers
def log_dublicative_order_book(log_file_name, msg_queue, order_book, prev_order_book):
    """Report that a freshly fetched order book looks identical to the cached one.

    Sends a warning to the message queue, echoes it to console, then dumps
    both order-book versions to `log_file_name` for later inspection.
    """
    exchange_name = get_exchange_name_by_id(order_book.exchange_id)
    pair_name = get_currency_pair_name_by_exchange_id(order_book.pair_id,
                                                      order_book.exchange_id)

    warning = """ <b> !!! WARNING !!! </b>
    Number of similar asks OR bids are the same for the most recent and cached version of order book for
    exchange_name {exch} pair_name {pn}
    cached timest: {ts1} {dt1}
    recent timest: {ts2} {dt2}
    Verbose information can be found in logs error & """.format(
        exch=exchange_name,
        pn=pair_name,
        ts1=prev_order_book.timest,
        dt1=ts_to_string_utc(prev_order_book.timest),
        ts2=order_book.timest,
        dt2=ts_to_string_utc(order_book.timest))

    msg_queue.add_message(DEAL_INFO_MSG, warning)
    print_to_console(warning, LOG_ALL_ERRORS)
    log_to_file(warning, log_file_name)

    # Full dumps go to the log file only - too verbose for the queue.
    details = """Cached version of order book:
    {o}
    Recent version of order book:
    {oo}
    """.format(o=str(prev_order_book), oo=str(order_book))
    log_to_file(details, log_file_name)
def get_order_book(exchange_id, pair_id): timest = get_now_seconds_utc() pair_name = get_currency_pair_name_by_exchange_id(pair_id, exchange_id) if pair_name is None: print "UNSUPPORTED COMBINATION OF PAIR ID AND EXCHANGE", pair_id, exchange_id assert pair_name is None method = get_order_book_method_by_exchange_id(exchange_id) return method(pair_name, timest)
def get_ticker(exchange_id, pair_id):
    """Fetch the current ticker for `pair_id` on `exchange_id`.

    Aborts (assert) when the exchange does not support the pair.
    """
    method = get_ticker_method_by_exchange_id(exchange_id)
    pair_name = get_currency_pair_name_by_exchange_id(pair_id, exchange_id)
    if pair_name is None:
        msg = "get_ticker for arbitrage - wrong pair_id - {pair_id} for exchange_id = {idd}!".format(
            pair_id=pair_id, idd=exchange_id)
        print_to_console(msg, LOG_ALL_ERRORS)
        # BUG FIX: the original `assert pair_name is None` always passed here,
        # so the error was logged and method(None, ...) was called anyway.
        # Assert the intended invariant so we abort instead.
        assert pair_name is not None
    return method(pair_name, get_now_seconds_utc())
def test_poloniex_doge(): if not YES_I_KNOW_WHAT_AM_I_DOING: die_hard("test_poloniex_doge may issue a real trade!") load_keys(API_KEY_PATH) key = get_key_by_exchange(EXCHANGE.POLONIEX) pair_id = CURRENCY_PAIR.BTC_TO_DGB pair_name = get_currency_pair_name_by_exchange_id(pair_id, EXCHANGE.POLONIEX) err, json_repr = add_buy_order_poloniex(key, pair_name, price=0.00000300, amount=100) print json_repr
def test_binance_xlm(): if not YES_I_KNOW_WHAT_AM_I_DOING: die_hard("test_binance_xlm may issue a real trade!") load_keys(API_KEY_PATH) key = get_key_by_exchange(EXCHANGE.BINANCE) pair_id = CURRENCY_PAIR.BTC_TO_XLM pair_name = get_currency_pair_name_by_exchange_id(pair_id, EXCHANGE.BINANCE) err, json_repr = add_buy_order_binance(key, pair_name, price=0.00003000, amount=100) print json_repr
def test_bittrex_strat(): if not YES_I_KNOW_WHAT_AM_I_DOING: die_hard("test_bittrex_strat may issue a real trade!") key = get_key_by_exchange(EXCHANGE.BITTREX) pair_id = CURRENCY_PAIR.BTC_TO_STRAT pair_name = get_currency_pair_name_by_exchange_id(pair_id, EXCHANGE.BITTREX) err, json_repr = add_buy_order_bittrex(key, pair_name, price=0.0007, amount=10) print json_repr err, json_repr = add_sell_order_bittrex(key, pair_name, price=0.0015, amount=10) print json_repr
def get_history_speedup(date_start, date_end, processor):
    """Fetch trade history for all pairs on all exchanges (except Kraken)
    for the [date_start, date_end] window, concurrently via `processor`.
    """
    work_units = []
    for exchange_id in EXCHANGE.values():
        # Kraken is deliberately excluded from bulk history retrieval.
        if exchange_id == EXCHANGE.KRAKEN:
            continue
        for pair_id in CURRENCY_PAIR.values():
            pair_name = get_currency_pair_name_by_exchange_id(pair_id, exchange_id)
            if not pair_name:
                # Pair not listed on this exchange.
                continue
            build_url = get_history_url_by_exchange_id(exchange_id)
            parse_history = get_history_constructor_by_exchange_id(exchange_id)
            work_units.append(WorkUnit(build_url(pair_name, date_start, date_end),
                                       parse_history, pair_name, date_end))
    return processor.process_async_get(work_units, HTTP_TIMEOUT_SECONDS)
def get_order_book_speedup(date_end, processor):
    """Fetch order books for every supported (exchange, pair) combination
    concurrently via `processor`, stamping results with `date_end`.
    """
    work_units = []
    for exchange_id in EXCHANGE.values():
        for pair_id in CURRENCY_PAIR.values():
            pair_name = get_currency_pair_name_by_exchange_id(pair_id, exchange_id)
            if not pair_name:
                # Pair not listed on this exchange.
                continue
            build_url = get_order_book_url_by_exchange_id(exchange_id)
            parse_book = get_order_book_constructor_by_exchange_id(exchange_id)
            work_units.append(WorkUnit(build_url(pair_name), parse_book,
                                       pair_name, date_end))
    return processor.process_async_get(work_units, HTTP_TIMEOUT_SECONDS)
def get_order_books_for_arbitrage_pair(cfg, date_end, processor):
    """Fetch the order books for cfg.pair_id on the sell and buy exchanges.

    Dies hard on an unsupported (pair, exchange) combination - the config
    is expected to have been validated at startup. Uses the shorter
    arbitrage-specific HTTP timeout.
    """
    work_units = []
    for exchange_id in [cfg.sell_exchange_id, cfg.buy_exchange_id]:
        pair_name = get_currency_pair_name_by_exchange_id(cfg.pair_id, exchange_id)
        if pair_name is None:
            die_hard("UNSUPPORTED COMBINATION OF PAIR ID - {} AND EXCHANGE - {}".format(
                cfg.pair_id, exchange_id))

        build_url = get_order_book_url_by_exchange_id(exchange_id)
        parse_book = get_order_book_constructor_by_exchange_id(exchange_id)
        work_units.append(WorkUnit(build_url(pair_name), parse_book,
                                   pair_name, date_end))

    return processor.process_async_get(work_units,
                                       timeout=HTTP_TIMEOUT_ORDER_BOOK_ARBITRAGE)
def get_open_orders_for_arbitrage_pair(cfg, processor):
    """Query open orders for cfg.pair_id on both arbitrage exchanges.

    Builds one authenticated request per exchange (each exchange has its own
    URL generator, response constructor and HTTP verb) and runs them
    concurrently via `processor`.
    """
    work_units = []
    for exchange_id in [cfg.sell_exchange_id, cfg.buy_exchange_id]:
        api_key = get_key_by_exchange(exchange_id)
        pair_name = get_currency_pair_name_by_exchange_id(cfg.pair_id, exchange_id)

        build_post_details = get_open_orders_post_details_generator(exchange_id)
        post_details = build_post_details(api_key, pair_name)

        unit = WorkUnit(post_details.final_url,
                        get_open_orders_constructor_by_exchange_id(exchange_id),
                        pair_name)
        unit.add_post_details(post_details)
        unit.add_http_method(get_http_method_open_order_by_exchange_id(exchange_id))
        work_units.append(unit)

    return processor.process_async_custom(work_units, HTTP_TIMEOUT_SECONDS)
def get_ticker_for_arbitrage(pair_id, timest, exchange_list, processor):
    """Fetch tickers for `pair_id` from every exchange in `exchange_list`
    concurrently via `processor`.

    Aborts (assert) when any exchange does not support the pair.
    NOTE(review): the timeout here is a hard-coded 5 seconds rather than
    HTTP_TIMEOUT_SECONDS - presumably intentional for the arbitrage hot
    path; confirm before unifying.
    """
    async_requests = []
    for exchange_id in exchange_list:
        pair_name = get_currency_pair_name_by_exchange_id(pair_id, exchange_id)
        if pair_name is None:
            msg = "get_ticker for arbitrage - wrong pair_id - {pair_id} for exchange_id = {idd}!".format(
                pair_id=pair_id, idd=exchange_id)
            print_to_console(msg, LOG_ALL_ERRORS)
            # BUG FIX: the original `assert pair_name is None` always passed
            # here, so the request was still queued with pair_name=None.
            # Assert the intended invariant so we abort instead.
            assert pair_name is not None

        method_for_url = get_ticker_url_by_exchange_id(exchange_id)
        request_url = method_for_url(pair_name)
        constructor = get_ticker_constructor_by_exchange_id(exchange_id)
        async_requests.append(
            WorkUnit(request_url, constructor, pair_name, timest))

    res = processor.process_async_get(async_requests, timeout=5)
    return res
def get_order_history_by_exchange(exchange_id, pair_id):
    """Fetch order history for `pair_id` on `exchange_id`.

    Returns the (status, data) tuple produced by the exchange-specific
    helper, or (STATUS.FAILURE, None) for an unknown exchange.
    """
    res = STATUS.FAILURE, None

    key = get_key_by_exchange(exchange_id)
    pair_name = get_currency_pair_name_by_exchange_id(pair_id, exchange_id)

    # Dispatch table: exchange id -> exchange-specific history fetcher.
    method_by_exchange = {
        EXCHANGE.BITTREX: get_order_history_bittrex,
        EXCHANGE.KRAKEN: get_order_history_kraken,
        EXCHANGE.POLONIEX: get_order_history_poloniex,
        EXCHANGE.BINANCE: get_order_history_binance,
        EXCHANGE.HUOBI: get_order_history_huobi
    }

    if exchange_id in method_by_exchange:
        get_order_history = method_by_exchange[exchange_id]
        res = get_order_history(key, pair_name)
    else:
        # FIX: error message previously named the wrong function
        # ("get_open_orders_by_exchange") - a copy-paste slip.
        msg = "get_order_history_by_exchange - Unknown exchange! {idx}".format(
            idx=exchange_id)
        print_to_console(msg, LOG_ALL_ERRORS)

    return res
# Script entry point: parse CLI arguments describing a two-exchange arbitrage
# setup, load settings and API keys, validate the pair/exchange combination,
# then hand control to ArbitrageListener.
parser = argparse.ArgumentParser(description="Constantly poll two exchange for order book for particular pair "
                                             "and initiate sell\\buy deals for arbitrage opportunities")
# Profit threshold (percent) that triggers a direct arbitrage deal.
parser.add_argument('--threshold', action="store", type=float, required=True)
parser.add_argument('--balance_threshold', action="store", type=float, required=True)
# Threshold used for the reverse (balance-adjusting) direction.
parser.add_argument('--reverse_threshold', action="store", type=float, required=True)
parser.add_argument('--sell_exchange_id', action="store", type=int, required=True)
parser.add_argument('--buy_exchange_id', action="store", type=int, required=True)
parser.add_argument('--pair_id', action="store", type=int, required=True)
parser.add_argument('--deal_expire_timeout', action="store", type=int, required=True)
# Path to the common settings/config file.
parser.add_argument('--cfg', action="store", required=True)

arguments = parser.parse_args()

cfg = ArbitrageConfig.from_args(arguments)
app_settings = CommonSettings.from_cfg(cfg)

set_logging_level(app_settings.logging_level_id)
set_log_folder(app_settings.log_folder)
load_keys(app_settings.key_path)

# to avoid time-consuming check in future - validate arguments here
for exchange_id in [cfg.sell_exchange_id, cfg.buy_exchange_id]:
    pair_name = get_currency_pair_name_by_exchange_id(cfg.pair_id, exchange_id)
    if pair_name is None:
        log_dont_supported_currency(cfg, exchange_id, cfg.pair_id)
        exit()

ArbitrageListener(cfg, app_settings).start()
def init_deals_with_logging_speedy(trade_pairs, difference, file_name, processor, msg_queue):
    """Place both legs of an arbitrage trade pair in parallel and log the outcome.

    Updates the module-level running profit total, announces the planned
    deals to the message queue, then POSTs both orders concurrently via
    `processor`. Responses are scanned for errors; failed orders are routed
    to FAILED_ORDERS_MSG, successful ones to ORDERS_MSG.
    Guarded by YES_I_KNOW_WHAT_AM_I_DOING because it issues real trades.
    """
    # FIXME move after deal placement ?
    # NOTE(review): profit is accumulated BEFORE the orders are actually
    # placed, so a failed placement still counts toward the total.
    global overall_profit_so_far
    overall_profit_so_far += trade_pairs.current_profit

    base_currency_id, dst_currency_id = split_currency_pairs(trade_pairs.deal_1.pair_id)

    msg = """We try to send following deals to exchange.
    <b>Expected profit in {base_coin}:</b> <i>{cur}</i>. <b>Overall:</b> <i>{tot}</i>
    <b>Difference in percents:</b> <i>{diff}</i>
    Deal details: {deal}""".format(base_coin=get_currency_name_by_id(base_currency_id),
                                   cur=float_to_str(trade_pairs.current_profit),
                                   tot=float_to_str(overall_profit_so_far),
                                   diff=difference,
                                   deal=str(trade_pairs))
    msg_queue.add_message(DEAL_INFO_MSG, msg)
    log_to_file(msg, file_name)

    # Safety latch: refuse to trade unless explicitly enabled.
    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("init_deals_with_logging_speedy called for {f}".format(
            f=trade_pairs))

    # Build one authenticated POST work unit per leg so both orders can be
    # submitted concurrently.
    parallel_deals = []
    for order in [trade_pairs.deal_1, trade_pairs.deal_2]:
        method_for_url = dao.get_method_for_create_url_trade_by_exchange_id(
            order)
        # key, pair_name, price, amount
        key = get_key_by_exchange(order.exchange_id)
        pair_name = get_currency_pair_name_by_exchange_id(
            order.pair_id, order.exchange_id)

        post_details = method_for_url(key, pair_name, order.price, order.volume)
        constructor = return_with_no_change

        wu = WorkUnit(post_details.final_url, constructor, order)
        wu.add_post_details(post_details)

        parallel_deals.append(wu)

    res = processor.process_async_post(parallel_deals, DEAL_MAX_TIMEOUT)

    if res is None:
        # Placement failed outright - record it in both the deal log and the
        # global error log, then bail out.
        log_to_file(
            "For TradePair - {tp} result is {res}".format(tp=trade_pairs,
                                                          res=res), file_name)
        log_to_file(
            "For TradePair - {tp} result is {res}".format(tp=trade_pairs,
                                                          res=res), ERROR_LOG_FILE_NAME)
        return

    # check for errors only
    for entry in res:
        json_response, order = entry
        if "ERROR" in json_response:
            msg = """ <b>ERROR: </b>NONE
            During deal placement: {u1}
            Details: {err_msg}
            """.format(u1=order, err_msg=json_response)
            msg_queue.add_order(FAILED_ORDERS_MSG, order)
        else:
            msg = """ For trade {trade}
            Response is {resp}
            """.format(trade=order, resp=json_response)
        print_to_console(msg, LOG_ALL_ERRORS)
        msg_queue.add_message(DEBUG_INFO_MSG, msg)
        log_to_file(msg, file_name)

    # Queue both legs for follow-up monitoring.
    for order in [trade_pairs.deal_1, trade_pairs.deal_2]:
        msg_queue.add_order(ORDERS_MSG, order)
def arbitrage_between_pair(args):
    """Main arbitrage loop for a single currency pair between two exchanges.

    Initializes settings, queues and caps, validates the pair/exchange
    combination, then loops forever: refreshes balances and order books and
    runs both the direct-arbitrage and reverse (balance-adjusting) searches,
    handing any opportunity to init_deals_with_logging_speedy.
    Guarded by YES_I_KNOW_WHAT_AM_I_DOING because it issues real trades.
    """
    cfg = ArbitrageConfig.from_args(args)

    app_settings = CommonSettings.from_cfg(args.cfg)
    set_logging_level(app_settings.logging_level_id)
    set_log_folder(app_settings.log_folder)
    load_keys(app_settings.key_path)

    priority_queue, msg_queue, local_cache = init_queues(app_settings)
    processor = ConnectionPool(pool_size=2)

    # to avoid time-consuming check in future - validate arguments here
    for exchange_id in [args.sell_exchange_id, args.buy_exchange_id]:
        pair_name = get_currency_pair_name_by_exchange_id(
            cfg.pair_id, exchange_id)
        if pair_name is None:
            log_dont_supported_currency(cfg, exchange_id, cfg.pair_id)
            exit()

    # Start with no max-volume cap; min caps are fetched from the exchanges.
    deal_cap = MarketCap(cfg.pair_id, get_now_seconds_utc())
    deal_cap.update_max_volume_cap(NO_MAX_CAP_LIMIT)
    update_min_cap(cfg, deal_cap, processor)

    # Placeholder balance; replaced by real data on first loop iteration.
    balance_state = dummy_balance_init(timest=0,
                                       default_volume=Decimal("0"),
                                       default_available_volume=Decimal("0"))

    # Safety latch: refuse to run live trading unless explicitly enabled.
    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("LIVE TRADING!")

    while True:
        # Periodically refresh exchange-imposed minimum-volume caps.
        if get_now_seconds_utc(
        ) - deal_cap.last_updated > MIN_CAP_UPDATE_TIMEOUT:
            update_min_cap(cfg, deal_cap, processor)

        # Try both directions: direct arbitrage and reverse balance adjustment.
        for mode_id in [DEAL_TYPE.ARBITRAGE, DEAL_TYPE.REVERSE]:
            cur_timest_sec = get_now_seconds_utc()

            method = search_for_arbitrage if mode_id == DEAL_TYPE.ARBITRAGE else adjust_currency_balance
            active_threshold = cfg.threshold if mode_id == DEAL_TYPE.ARBITRAGE else cfg.reverse_threshold

            balance_state = get_updated_balance_arbitrage(
                cfg, balance_state, local_cache)

            # Stale balances make trading unsafe - abort the whole process.
            if balance_state.expired(cur_timest_sec, cfg.buy_exchange_id,
                                     cfg.sell_exchange_id,
                                     BALANCE_EXPIRED_THRESHOLD):
                log_balance_expired_errors(cfg, msg_queue, balance_state)
                die_hard("Balance expired")

            order_book_src, order_book_dst = get_order_books_for_arbitrage_pair(
                cfg, cur_timest_sec, processor)

            if order_book_dst is None or order_book_src is None:
                log_failed_to_retrieve_order_book(cfg)
                sleep_for(3)
                continue

            if is_order_books_expired(order_book_src, order_book_dst,
                                      local_cache, msg_queue,
                                      cfg.log_file_name):
                sleep_for(3)
                continue

            local_cache.cache_order_book(order_book_src)
            local_cache.cache_order_book(order_book_dst)

            # init_deals_with_logging_speedy
            status_code, deal_pair = method(order_book_src,
                                            order_book_dst,
                                            active_threshold,
                                            cfg.balance_threshold,
                                            init_deals_with_logging_speedy,
                                            balance_state, deal_cap,
                                            type_of_deal=mode_id,
                                            worker_pool=processor,
                                            msg_queue=msg_queue)

            add_orders_to_watch_list(deal_pair, priority_queue)

            print_to_console("I am still alive! ", LOG_ALL_DEBUG)
            sleep_for(2)

        sleep_for(3)

        # Reset the max-volume cap for the next pass.
        deal_cap.update_max_volume_cap(NO_MAX_CAP_LIMIT)