Example #1
def check_deal_placements():
    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("check_deal_placements may issue a real trade!")

    create_time = get_now_seconds_utc()
    fake_order_book_time1 = -10
    fake_order_book_time2 = -20
    deal_volume = 5
    pair_id = CURRENCY_PAIR.BTC_TO_ARDR

    sell_exchange_id = EXCHANGE.POLONIEX
    buy_exchange_id = EXCHANGE.BITTREX

    difference = "difference is HUGE"
    file_name = "test.log"

    msg_queue = get_message_queue()

    processor = ConnectionPool(pool_size=2)

    trade_at_first_exchange = Trade(DEAL_TYPE.SELL, sell_exchange_id, pair_id,
                                    0.00000001, deal_volume,
                                    fake_order_book_time1, create_time)

    trade_at_second_exchange = Trade(DEAL_TYPE.BUY, buy_exchange_id, pair_id,
                                     0.00004, deal_volume,
                                     fake_order_book_time2, create_time)

    trade_pairs = TradePair(trade_at_first_exchange, trade_at_second_exchange,
                            fake_order_book_time1, fake_order_book_time2,
                            DEAL_TYPE.DEBUG)

    init_deals_with_logging_speedy(trade_pairs, difference, file_name,
                                   processor, msg_queue)
Example #2
    def subscribe(self):

        if self.should_run:
            die_hard("Poloniex - another subcription thread running?")

        if get_logging_level() == LOG_ALL_TRACE:
            msg = "Poloniex - call subscribe!"
            log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)
            print_to_console(msg, LOG_ALL_MARKET_RELATED_CRAP)

        self.should_run = True

        if get_logging_level() == LOG_ALL_TRACE:
            websocket.enableTrace(True)

        # Create connection
        try:
            self.ws = create_connection(POLONIEX_WEBSCOKET_URL,
                                        enable_multithread=True)
            self.ws.settimeout(15)
        except Exception as e:
            msg = 'Poloniex - connect ws error - {}, retry...'.format(str(e))
            print_to_console(msg, LOG_ALL_ERRORS)
            self.disconnect()

            return

        # actual subscription in dedicated thread
        # self.on_open()
        self.ws.send(self.subscribe_string)
        log_conect_to_websocket("Poloniex")

        # event loop for processing responses
        while self.should_run:
            try:
                compressed_data = self.ws.recv()
                self.on_public(compressed_data)
            except Exception as e:

                log_error_on_receive_from_socket("Poloniex", e)

                break

            if self.last_heartbeat_ts:
                # No heartbeats and no other updates within the timeout window - drop the connection
                ts_now = get_now_seconds_utc()
                if ts_now - self.last_heartbeat_ts > POLONIEX_WEBSOCKET_TIMEOUT:
                    log_heartbeat_is_missing("Poloniex",
                                             POLONIEX_WEBSOCKET_TIMEOUT,
                                             self.last_heartbeat_ts, ts_now)

                    break

        log_subscription_cancelled("Poloniex")

        self.disconnect()
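The receive loop above drops the connection when no heartbeat (or any other message) arrives within POLONIEX_WEBSOCKET_TIMEOUT seconds. A minimal, standalone sketch of that staleness check; the timeout value and helper name are assumptions, while the project itself uses get_now_seconds_utc() and its own constant:

import time

WEBSOCKET_TIMEOUT = 5  # assumed timeout in seconds; the real value is POLONIEX_WEBSOCKET_TIMEOUT


def is_heartbeat_stale(last_heartbeat_ts, timeout=WEBSOCKET_TIMEOUT):
    # True when the last heartbeat is older than `timeout` seconds,
    # mirroring the break condition in the event loop above.
    if not last_heartbeat_ts:
        return False  # nothing received yet - the loop above skips the check in this case
    return time.time() - last_heartbeat_ts > timeout


# Usage sketch inside the loop:
# if is_heartbeat_stale(self.last_heartbeat_ts, POLONIEX_WEBSOCKET_TIMEOUT):
#     break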
Example #3
def test_poloniex_doge():
    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("test_poloniex_doge may issue a real trade!")

    load_keys(API_KEY_PATH)
    key = get_key_by_exchange(EXCHANGE.POLONIEX)
    pair_id = CURRENCY_PAIR.BTC_TO_DGB
    pair_name = get_currency_pair_name_by_exchange_id(pair_id,
                                                      EXCHANGE.POLONIEX)
    err, json_repr = add_buy_order_poloniex(key,
                                            pair_name,
                                            price=0.00000300,
                                            amount=100)
    print json_repr
Example #4
def test_binance_xlm():
    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("test_binance_xlm may issue a real trade!")

    load_keys(API_KEY_PATH)
    key = get_key_by_exchange(EXCHANGE.BINANCE)
    pair_id = CURRENCY_PAIR.BTC_TO_XLM
    pair_name = get_currency_pair_name_by_exchange_id(pair_id,
                                                      EXCHANGE.BINANCE)
    err, json_repr = add_buy_order_binance(key,
                                           pair_name,
                                           price=0.00003000,
                                           amount=100)
    print json_repr
Example #5
    def subscribe(self):

        #
        #       FIXME DBG PART - REMOVE AFTER TESTS
        #

        if self.should_run:
            die_hard("Binance another running?")

        msg = "Binance - call subscribe!"
        log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)

        self.should_run = True

        if get_logging_level() == LOG_ALL_TRACE:
            websocket.enableTrace(True)

        # Create connection
        try:
            self.ws = create_connection(self.subscription_url,
                                        enable_multithread=True)
            self.ws.settimeout(15)
        except Exception as e:
            print('Binance - connect ws error - {}, retry...'.format(str(e)))

            self.disconnect()

            return

        # actual subscription - for binance can be embedded within url
        # self.ws.send(self.subscription_url)

        log_conect_to_websocket("Binance")

        # event loop
        while self.should_run:
            try:
                compressed_data = self.ws.recv()
                self.on_public(self.ws, compressed_data)
            except Exception as e:

                log_error_on_receive_from_socket("Binance", e)

                break

        log_subscription_cancelled("Binance")

        self.disconnect()
Example #6
def init_deal(trade_to_perform, debug_msg):
    # FIXME
    die_hard("init_deal called for {f} with message: {msg}".format(
        f=trade_to_perform, msg=debug_msg))

    res = STATUS.FAILURE, None
    try:
        if trade_to_perform.trade_type == DEAL_TYPE.SELL:
            res = dao.sell_by_exchange(trade_to_perform)
        else:
            res = dao.buy_by_exchange(trade_to_perform)
    except Exception as e:
        msg = "init_deal: FAILED ERROR WE ALL DIE with following exception: {excp} {dbg}".format(
            excp=e, dbg=debug_msg)
        print_to_console(msg, LOG_ALL_ERRORS)
        log_to_file(msg, ERROR_LOG_FILE_NAME)

    return res
Example #7
    def subscribe(self):

        #
        #       FIXME DBG PART - REMOVE AFTER TESTS
        #

        if self.should_run:
            die_hard("Bittrex another running?")

        msg = "Bittrex - call subscribe!"
        log_to_file(msg, SOCKET_ERRORS_LOG_FILE_NAME)
        print msg

        self.should_run = True

        try:
            with Session() as session:

                self.connection = Connection(self.url, session)
                self.hub = self.connection.register_hub(self.hub_name)

                self.hub.client.on(BittrexParameters.MARKET_DELTA,
                                   self.on_public)

                self.connection.start()

                log_conect_to_websocket("Bittrex")

                while self.connection.started and self.should_run:
                    try:
                        self.hub.server.invoke(
                            BittrexParameters.SUBSCRIBE_EXCHANGE_DELTA,
                            self.pair_name)
                    except Exception as e:
                        log_send_heart_beat_failed("Bittrex", e)

                        # FIXME NOTE - still not sure - connection.wait(1)
                        self.should_run = False

                        break
                    sleep_for(1)
        except Exception as e:
            log_error_on_receive_from_socket("Bittrex", e)

        log_subscription_cancelled("Bittrex")

        self.disconnect()
Example #8
def test_bittrex_strat():
    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("test_bittrex_strat may issue a real trade!")

    key = get_key_by_exchange(EXCHANGE.BITTREX)
    pair_id = CURRENCY_PAIR.BTC_TO_STRAT
    pair_name = get_currency_pair_name_by_exchange_id(pair_id,
                                                      EXCHANGE.BITTREX)
    err, json_repr = add_buy_order_bittrex(key,
                                           pair_name,
                                           price=0.0007,
                                           amount=10)
    print json_repr
    err, json_repr = add_sell_order_bittrex(key,
                                            pair_name,
                                            price=0.0015,
                                            amount=10)
    print json_repr
Example #9
def load_trade_history(args):
    """
        Retrieve executed trades from ALL exchanges via REST API
        and save them into the DB.

        This data is later used to analyse trading profitability
        and the bot's performance.

    :param args: period, exchanges, connection details
    :return:
    """

    pg_conn, settings = process_args(args)

    log_initial_settings(
        "Starting trade history retrieval for bots using following exchanges: \n",
        settings.exchanges)

    if args.start_time is None or args.end_time is None:
        end_time = get_now_seconds_utc()
        start_time = end_time - 24 * 3600
    else:
        end_time = parse_time(args.end_time, '%Y-%m-%d %H:%M:%S')
        start_time = parse_time(args.start_time, '%Y-%m-%d %H:%M:%S')

    if end_time <= start_time:
        die_hard("Wrong time interval provided! {ts0} - {ts1}".format(
            ts0=start_time, ts1=end_time))

    load_keys(settings.key_path)

    while True:
        for exchange_id in settings.exchanges:
            method = get_trade_retrieval_method_by_exchange(exchange_id)
            method(pg_conn, start_time, end_time)
            sleep_for(1)

        print_to_console("Trade retrieval heartbeat", LOG_ALL_DEBUG)

        sleep_for(TRADE_POLL_TIMEOUT)

        end_time = get_now_seconds_utc()
        start_time = end_time - 24 * 3600
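The window handling above either defaults to the last 24 hours or parses the supplied '%Y-%m-%d %H:%M:%S' timestamps and rejects an inverted or empty interval. A standalone sketch of that logic using only the standard library; get_now_seconds_utc(), parse_time() and die_hard() are project helpers and are replaced here by assumptions:

import calendar
import time
from datetime import datetime


def resolve_time_window(start_str=None, end_str=None, fmt='%Y-%m-%d %H:%M:%S'):
    if start_str is None or end_str is None:
        # default: the last 24 hours, in UTC seconds
        end_time = calendar.timegm(time.gmtime())
        start_time = end_time - 24 * 3600
    else:
        # naive timestamps are treated as UTC, as the original appears to do
        end_time = calendar.timegm(datetime.strptime(end_str, fmt).timetuple())
        start_time = calendar.timegm(datetime.strptime(start_str, fmt).timetuple())

    if end_time <= start_time:
        # the original calls die_hard() here
        raise ValueError("Wrong time interval provided! {} - {}".format(start_time, end_time))

    return start_time, end_time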
Example #10
    def subscribe(self):

        if self.should_run:
            die_hard("Huobi - another subcription thread running?")

        self.should_run = True

        if get_logging_level() == LOG_ALL_TRACE:
            websocket.enableTrace(True)

        # Create connection
        try:
            self.ws = create_connection(HUOBI_WEBSOCKET_URL,
                                        enable_multithread=True,
                                        sslopt={"cert_reqs": ssl.CERT_NONE})
            self.ws.settimeout(15)
        except Exception as e:
            print('Huobi - connect ws error - {}, retry...'.format(str(e)))

            self.disconnect()

            return

        # actual subscription in dedicated thread
        self.on_open()

        log_conect_to_websocket("Huobi")

        # event loop
        while self.should_run:
            try:
                compress_data = self.ws.recv()
                if compress_data:
                    self.on_public(compress_data)
            except Exception as e:

                log_error_on_receive_from_socket("Huobi", e)

                break

        log_subscription_cancelled("Huobi")

        self.disconnect()
Example #11
def get_order_books_for_arbitrage_pair(cfg, date_end, processor):

    order_book_async_requests = []

    for exchange_id in [cfg.sell_exchange_id, cfg.buy_exchange_id]:
        pair_name = get_currency_pair_name_by_exchange_id(
            cfg.pair_id, exchange_id)
        if pair_name is None:
            die_hard(
                "UNSUPPORTED COMBINATION OF PAIR ID - {} AND EXCHANGE - {}".
                format(cfg.pair_id, exchange_id))

        method_for_url = get_order_book_url_by_exchange_id(exchange_id)
        request_url = method_for_url(pair_name)
        constructor = get_order_book_constructor_by_exchange_id(exchange_id)

        order_book_async_requests.append(
            WorkUnit(request_url, constructor, pair_name, date_end))

    return processor.process_async_get(
        order_book_async_requests, timeout=HTTP_TIMEOUT_ORDER_BOOK_ARBITRAGE)
Example #12
    def insert_new_ask_preserve_order(self,
                                      new_ask,
                                      overwrite_volume=True,
                                      err_msg=None):
        """
            The ask array is kept sorted in ascending price order, i.e. lowest price first:

            self.ask = sorted(self.ask, key=lambda x: x.price, reverse=False)

            NOTE: the new volume overwrites the stored one when overwrite_volume is True;
            otherwise it is subtracted from the stored volume.

            The order of the condition checks below is very IMPORTANT!
        """

        item_insert_point = binary_search(self.ask, new_ask, cmp_method_ask)
        is_present = False
        if item_insert_point < len(self.ask):
            is_present = self.ask[item_insert_point] == new_ask

        almost_zero = new_ask.volume <= MIN_VOLUME_ORDER_BOOK
        should_overwrite = is_present and overwrite_volume
        should_update_volume = is_present and not overwrite_volume
        update_volume_error = not is_present and not overwrite_volume
        should_delete = almost_zero and is_present

        if should_delete:
            del self.ask[item_insert_point]
        elif should_overwrite:
            self.ask[item_insert_point].volume = new_ask.volume
        elif should_update_volume:
            self.ask[item_insert_point].volume -= new_ask.volume

            if self.ask[item_insert_point].volume < 0:
                die_hard("Negative value of ask!")

        elif update_volume_error:
            log_to_file(err_msg, SOCKET_ERRORS_LOG_FILE_NAME)
        elif not almost_zero:
            # FIXME NOTE O(n) - slow by python implementation
            self.ask.insert(item_insert_point, new_ask)
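insert_new_ask_preserve_order() relies on binary_search() returning the index at which new_ask either already sits (same price level) or should be inserted to keep the ascending price order. The project's binary_search() and cmp_method_ask are defined elsewhere; the sketch below assumes equality means equal price and shows an equivalent leftmost bisection:

def binary_search_by_price(asks, new_ask):
    # Returns the leftmost index i such that asks[i].price >= new_ask.price,
    # i.e. the position where new_ask belongs in a lowest-price-first list.
    lo, hi = 0, len(asks)
    while lo < hi:
        mid = (lo + hi) // 2
        if asks[mid].price < new_ask.price:
            lo = mid + 1
        else:
            hi = mid
    return lo


# Usage sketch (assumed equality by price):
# idx = binary_search_by_price(self.ask, new_ask)
# is_present = idx < len(self.ask) and self.ask[idx].price == new_ask.price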
Example #13
    def on_order_book_update(self, exchange_id, order_book_updates):
        """
        :param exchange_id:         id of the exchange that produced the update
        :param order_book_updates:  parsed OrderBook or OrderBookUpdates, according to exchange specs
        :return:
        """

        exchange_name = get_exchange_name_by_id(exchange_id)

        print_to_console("Got update for {exch} Current number of threads: {thr_num}"
                         .format(exch=exchange_name, thr_num=threading.active_count()), LOG_ALL_ERRORS)

        current_stage = get_stage()

        if not self.buy_subscription.is_running() or not self.sell_subscription.is_running():

            log_one_of_subscriptions_failed(self.buy_subscription.is_running(), self.sell_subscription.is_running(), current_stage)

            self.shutdown_subscriptions()

            return

        if order_book_updates is None:
            print_to_console("Order book update is NONE! for {}".format(exchange_name), LOG_ALL_ERRORS)
            return

        if current_stage == ORDER_BOOK_SYNC_STAGES.BEFORE_SYNC:
            print_to_console("Syncing in progress ...", LOG_ALL_ERRORS)

            if exchange_id == self.buy_exchange_id:
                if self.buy_order_book_synced:
                    order_book_update_status = self.order_book_buy.update(exchange_id, order_book_updates)
                    if order_book_update_status == STATUS.FAILURE:

                        log_order_book_update_failed_pre_sync("BUY", exchange_id, order_book_updates)

                        self.shutdown_subscriptions()

                else:
                    self.buy_exchange_updates.put(order_book_updates)
            else:
                if self.sell_order_book_synced:
                    order_book_update_status = self.order_book_sell.update(exchange_id, order_book_updates)
                    if order_book_update_status == STATUS.FAILURE:

                        log_order_book_update_failed_pre_sync("SELL", exchange_id, order_book_updates)

                        self.shutdown_subscriptions()

                else:
                    self.sell_exchange_updates.put(order_book_updates)

        elif current_stage == ORDER_BOOK_SYNC_STAGES.AFTER_SYNC:

            print_to_console("Update after syncing... {}".format(exchange_name), LOG_ALL_ERRORS)

            if exchange_id == self.buy_exchange_id:
                order_book_update_status = self.order_book_buy.update(exchange_id, order_book_updates)
                if order_book_update_status == STATUS.FAILURE:

                    log_order_book_update_failed_post_sync(exchange_id, order_book_updates)

                    self.shutdown_subscriptions()

                    return

            else:
                order_book_update_status = self.order_book_sell.update(exchange_id, order_book_updates)
                if order_book_update_status == STATUS.FAILURE:

                    log_order_book_update_failed_post_sync(exchange_id, order_book_updates)

                    self.shutdown_subscriptions()

                    return

            #
            #   Remove this line to activate trading
            #
            print_top10(exchange_id, self.order_book_buy, self.order_book_sell)

            if not YES_I_KNOW_WHAT_AM_I_DOING:
                die_hard("LIVE TRADING!")

                # DK NOTE: only at this stage are we ready to search for arbitrage

                # for mode_id in [DEAL_TYPE.ARBITRAGE, DEAL_TYPE.REVERSE]:
                #   method = search_for_arbitrage if mode_id == DEAL_TYPE.ARBITRAGE else adjust_currency_balance
                #   active_threshold = self.threshold if mode_id == DEAL_TYPE.ARBITRAGE else self.reverse_threshold
                # FIXME NOTE: order book expiration check
                # FIXME NOTE: src dst vs buy sell
                ts1 = get_now_seconds_utc_ms()
                status_code, deal_pair = search_for_arbitrage(self.order_book_sell, self.order_book_buy,
                                                              self.threshold,
                                                              self.balance_threshold,
                                                              init_deals_with_logging_speedy,
                                                              self.balance_state, self.deal_cap,
                                                              type_of_deal=DEAL_TYPE.ARBITRAGE,
                                                              worker_pool=self.processor,
                                                              msg_queue=self.msg_queue)

                ts2 = get_now_seconds_utc_ms()

                msg = "Start: {ts1} ms End: {ts2} ms Runtime: {d} ms".format(ts1=ts1, ts2=ts2, d=ts2-ts1)

                #
                #               FIXME
                #
                #   Yeah, we write to disk after every trade
                #   Yeah, it is not really about speed :(
                #
                log_to_file(msg, "profile.txt")
                add_orders_to_watch_list(deal_pair, self.priority_queue)

            self.deal_cap.update_max_volume_cap(NO_MAX_CAP_LIMIT)
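on_order_book_update() splits behaviour by sync stage: while a book is not yet synced its updates are buffered into a queue for later replay, and once synced they are applied directly, with any failed update shutting the subscriptions down. A minimal sketch of that buffer-or-apply decision, with the project's order book types replaced by placeholders:

try:
    from queue import Queue   # Python 3
except ImportError:
    from Queue import Queue   # Python 2


def route_update(update, book_synced, apply_update, pending_updates):
    # Before sync: buffer the update for later replay.
    # After sync: apply it immediately; apply_update() returning False plays
    # the role of STATUS.FAILURE above.
    if not book_synced:
        pending_updates.put(update)
        return True
    return apply_update(update)


if __name__ == "__main__":
    pending = Queue()
    # before sync the update is only buffered
    assert route_update({"price": 1}, False, lambda u: True, pending)
    assert pending.qsize() == 1
    # after sync the update is applied directly
    assert route_update({"price": 2}, True, lambda u: True, pending)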
Example #14
def init_deals_with_logging_speedy(trade_pairs, difference, file_name,
                                   processor, msg_queue):

    # FIXME move after deal placement ?

    global overall_profit_so_far
    overall_profit_so_far += trade_pairs.current_profit

    base_currency_id, dst_currency_id = split_currency_pairs(
        trade_pairs.deal_1.pair_id)

    msg = """We try to send following deals to exchange.
        <b>Expected profit in {base_coin}:</b> <i>{cur}</i>.
        <b>Overall:</b> <i>{tot}</i>
        <b>Difference in percents:</b> <i>{diff}</i>

                Deal details:
        {deal}
        """.format(base_coin=get_currency_name_by_id(base_currency_id),
                   cur=float_to_str(trade_pairs.current_profit),
                   tot=float_to_str(overall_profit_so_far),
                   diff=difference,
                   deal=str(trade_pairs))

    msg_queue.add_message(DEAL_INFO_MSG, msg)
    log_to_file(msg, file_name)

    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("init_deals_with_logging_speedy called for {f}".format(
            f=trade_pairs))

    parallel_deals = []

    for order in [trade_pairs.deal_1, trade_pairs.deal_2]:
        method_for_url = dao.get_method_for_create_url_trade_by_exchange_id(
            order)
        # key, pair_name, price, amount
        key = get_key_by_exchange(order.exchange_id)
        pair_name = get_currency_pair_name_by_exchange_id(
            order.pair_id, order.exchange_id)
        post_details = method_for_url(key, pair_name, order.price,
                                      order.volume)
        constructor = return_with_no_change

        wu = WorkUnit(post_details.final_url, constructor, order)
        wu.add_post_details(post_details)

        parallel_deals.append(wu)

    res = processor.process_async_post(parallel_deals, DEAL_MAX_TIMEOUT)

    if res is None:
        log_to_file(
            "For TradePair - {tp} result is {res}".format(tp=trade_pairs,
                                                          res=res), file_name)
        log_to_file(
            "For TradePair - {tp} result is {res}".format(tp=trade_pairs,
                                                          res=res),
            ERROR_LOG_FILE_NAME)
        return

    # check for errors only
    for entry in res:
        json_response, order = entry
        if "ERROR" in json_response:

            msg = """   <b>ERROR: </b>NONE
            During deal placement: {u1}
            Details: {err_msg}
            """.format(u1=order, err_msg=json_response)

            msg_queue.add_order(FAILED_ORDERS_MSG, order)

        else:
            msg = """ For trade {trade}
            Response is {resp} """.format(trade=order, resp=json_response)

        print_to_console(msg, LOG_ALL_ERRORS)
        msg_queue.add_message(DEBUG_INFO_MSG, msg)
        log_to_file(msg, file_name)

    for order in [trade_pairs.deal_1, trade_pairs.deal_2]:
        msg_queue.add_order(ORDERS_MSG, order)
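init_deals_with_logging_speedy() hands both legs of the trade to the connection pool so they are POSTed concurrently, then inspects each JSON response for an "ERROR" marker. The project's WorkUnit/ConnectionPool machinery is not reproduced here; the sketch below only illustrates the assumed submit-both-legs-in-parallel-then-inspect pattern using concurrent.futures (standard library on Python 3, the futures backport on Python 2):

from concurrent.futures import ThreadPoolExecutor


def place_both_legs(place_order, orders, timeout):
    # place_order(order) -> response text or parsed JSON; both legs are submitted
    # concurrently, mirroring what processor.process_async_post is assumed to do above.
    with ThreadPoolExecutor(max_workers=2) as pool:
        futures = [pool.submit(place_order, order) for order in orders]
        results = [f.result(timeout=timeout) for f in futures]

    for order, response in zip(orders, results):
        if "ERROR" in response:
            # corresponds to the FAILED_ORDERS_MSG branch above
            print("Order FAILED: {} -> {}".format(order, response))
        else:
            print("Order accepted: {} -> {}".format(order, response))

    return results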
Example #15
def watch_balance_for_exchange(args):
    """
            This routine updates the balance in the redis CACHE
            for ALL coins at ONE exchange for the active key set.

            NOTE:   It still relies on the REST API - i.e. it is not proactive.
                    For some exchanges the balance is not updated immediately.

                    Initially all exchanges were polled sequentially,
                    but that led to delays in the past
                    due to exchange errors or throttling.

    :param args: config file and exchange_id
    :return:
    """
    settings = CommonSettings.from_cfg(args.cfg)

    exchange_id = get_exchange_id_by_name(args.exchange)
    if exchange_id not in EXCHANGE.values():
        log_wrong_exchange_id(exchange_id)
        die_hard("Exchange id {} seems to be unknown? 0_o".format(exchange_id))

    log_initial_settings(
        "Starting balance monitoring for following exchange: \n",
        [exchange_id])

    cache = connect_to_cache(host=settings.cache_host,
                             port=settings.cache_port)
    msg_queue = get_message_queue(host=settings.cache_host,
                                  port=settings.cache_port)

    load_keys(settings.key_path)
    set_log_folder(settings.log_folder)
    set_logging_level(settings.logging_level_id)

    init_balances(settings.exchanges, cache)

    cnt = 0

    while True:
        # Initial balance is already loaded by init_balances above, so sleep first
        sleep_for(BALANCE_POLL_TIMEOUT)

        cnt += BALANCE_POLL_TIMEOUT

        log_balance_update_heartbeat(exchange_id)

        balance_for_exchange = update_balance_by_exchange(exchange_id, cache)
        while balance_for_exchange is None:
            log_cant_update_balance(exchange_id)
            sleep_for(1)
            balance_for_exchange = update_balance_by_exchange(
                exchange_id, cache)

        if cnt >= BALANCE_HEALTH_CHECK_TIMEOUT:
            cnt = 0
            log_last_balances(settings.exchanges, cache, msg_queue)

            for base_currency_id in BASE_CURRENCY:
                threshold = BASE_CURRENCIES_BALANCE_THRESHOLD[base_currency_id]
                if not balance_for_exchange.do_we_have_enough(
                        base_currency_id, threshold):
                    log_not_enough_base_currency(exchange_id, base_currency_id,
                                                 threshold,
                                                 balance_for_exchange,
                                                 msg_queue)
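The loop above layers two periods on top of each other: the balance is refreshed every BALANCE_POLL_TIMEOUT seconds, and once the accumulated counter reaches BALANCE_HEALTH_CHECK_TIMEOUT a fuller health check (last balances plus per-currency threshold check) runs and the counter resets. A minimal sketch of that two-period pattern, with the project helpers passed in as plain callables and the timeout values assumed:

import time

POLL_TIMEOUT = 30            # assumed stand-in for BALANCE_POLL_TIMEOUT, seconds
HEALTH_CHECK_TIMEOUT = 600   # assumed stand-in for BALANCE_HEALTH_CHECK_TIMEOUT, seconds


def poll_with_periodic_health_check(refresh_balance, health_check):
    elapsed = 0
    while True:
        time.sleep(POLL_TIMEOUT)
        elapsed += POLL_TIMEOUT

        refresh_balance()      # runs on every iteration, like update_balance_by_exchange above

        if elapsed >= HEALTH_CHECK_TIMEOUT:
            elapsed = 0
            health_check()     # runs only on the slower period, like log_last_balances above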
Example #16
def arbitrage_between_pair(args):
    cfg = ArbitrageConfig.from_args(args)

    app_settings = CommonSettings.from_cfg(args.cfg)

    set_logging_level(app_settings.logging_level_id)
    set_log_folder(app_settings.log_folder)
    load_keys(app_settings.key_path)

    priority_queue, msg_queue, local_cache = init_queues(app_settings)

    processor = ConnectionPool(pool_size=2)

    # to avoid time-consuming checks later - validate arguments here
    for exchange_id in [args.sell_exchange_id, args.buy_exchange_id]:
        pair_name = get_currency_pair_name_by_exchange_id(
            cfg.pair_id, exchange_id)
        if pair_name is None:
            log_dont_supported_currency(cfg, exchange_id, cfg.pair_id)
            exit()

    deal_cap = MarketCap(cfg.pair_id, get_now_seconds_utc())
    deal_cap.update_max_volume_cap(NO_MAX_CAP_LIMIT)
    update_min_cap(cfg, deal_cap, processor)

    balance_state = dummy_balance_init(timest=0,
                                       default_volume=Decimal("0"),
                                       default_available_volume=Decimal("0"))

    if not YES_I_KNOW_WHAT_AM_I_DOING:
        die_hard("LIVE TRADING!")

    while True:

        if get_now_seconds_utc() - deal_cap.last_updated > MIN_CAP_UPDATE_TIMEOUT:
            update_min_cap(cfg, deal_cap, processor)

        for mode_id in [DEAL_TYPE.ARBITRAGE, DEAL_TYPE.REVERSE]:
            cur_timest_sec = get_now_seconds_utc()

            method = search_for_arbitrage if mode_id == DEAL_TYPE.ARBITRAGE else adjust_currency_balance
            active_threshold = cfg.threshold if mode_id == DEAL_TYPE.ARBITRAGE else cfg.reverse_threshold

            balance_state = get_updated_balance_arbitrage(
                cfg, balance_state, local_cache)

            if balance_state.expired(cur_timest_sec, cfg.buy_exchange_id,
                                     cfg.sell_exchange_id,
                                     BALANCE_EXPIRED_THRESHOLD):
                log_balance_expired_errors(cfg, msg_queue, balance_state)
                die_hard("Balance expired")

            order_book_src, order_book_dst = get_order_books_for_arbitrage_pair(
                cfg, cur_timest_sec, processor)

            if order_book_dst is None or order_book_src is None:
                log_failed_to_retrieve_order_book(cfg)
                sleep_for(3)
                continue

            if is_order_books_expired(order_book_src, order_book_dst,
                                      local_cache, msg_queue,
                                      cfg.log_file_name):
                sleep_for(3)
                continue

            local_cache.cache_order_book(order_book_src)
            local_cache.cache_order_book(order_book_dst)

            # init_deals_with_logging_speedy
            status_code, deal_pair = method(order_book_src,
                                            order_book_dst,
                                            active_threshold,
                                            cfg.balance_threshold,
                                            init_deals_with_logging_speedy,
                                            balance_state,
                                            deal_cap,
                                            type_of_deal=mode_id,
                                            worker_pool=processor,
                                            msg_queue=msg_queue)

            add_orders_to_watch_list(deal_pair, priority_queue)

            print_to_console("I am still alive! ", LOG_ALL_DEBUG)
            sleep_for(2)

        sleep_for(3)

        deal_cap.update_max_volume_cap(NO_MAX_CAP_LIMIT)
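arbitrage_between_pair() refreshes the minimum trade cap lazily: on each pass of the outer loop it re-fetches the cap only when the cached value is older than MIN_CAP_UPDATE_TIMEOUT. A small sketch of that refresh-when-stale pattern; the class name, timeout value and fetch callable are assumptions, not project API:

import time

MIN_CAP_UPDATE_TIMEOUT = 900  # assumed refresh interval, seconds


class StaleValue(object):
    def __init__(self, fetch):
        self._fetch = fetch
        self.value = fetch()
        self.last_updated = time.time()

    def get(self):
        # Re-fetch only when the cached value is older than the timeout,
        # mirroring the update_min_cap() call at the top of the main loop above.
        if time.time() - self.last_updated > MIN_CAP_UPDATE_TIMEOUT:
            self.value = self._fetch()
            self.last_updated = time.time()
        return self.value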