Example #1
    def remove(self, netting_order: Union[MarketOrder, LimitOrder]) -> float:
        """
        Remove position from inventory and return position PnL.

        :param netting_order: order object used to net position
        :return: (float) PnL from netting the position, or 0. if there was no position to remove
        """
        pnl = 0.
        if self.position_count < 1:
            LOGGER.info('Error. No {} positions to remove.'.format(self.side))
            return pnl

        order = self.positions.popleft()

        # Calculate PnL
        if self.side == 'long':
            pnl = (netting_order.price / order.average_execution_price) - 1.
        elif self.side == 'short':
            pnl = (order.average_execution_price / netting_order.price) - 1.

        # Add Profit and Loss to realized gains/losses
        self.realized_pnl += pnl

        # Update positions attributes
        self.total_exposure -= order.average_execution_price
        self.average_price = self.total_exposure / self.position_count if \
            self.position_count > 0 else 0.
        self.full_inventory = self.position_count >= self.max_position_count

        LOGGER.debug(
            'remove-> Netted {} position #{} with {} trade #{} PnL = {:.4f}'.
            format(self.side, order.id, netting_order.side, netting_order.id,
                   pnl))

        return pnl
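
A note on the arithmetic above: the PnL is a fractional return of the netting price against the position's average execution price. The standalone sketch below reproduces that formula with made-up prices (the function name and values are illustrative, not part of the class):

def netting_pnl(side: str, entry_price: float, netting_price: float) -> float:
    """Fractional PnL from netting one position at `netting_price` (sketch)."""
    if side == 'long':
        return (netting_price / entry_price) - 1.
    elif side == 'short':
        return (entry_price / netting_price) - 1.
    raise ValueError("side must be 'long' or 'short'")


# A long entered at 100.0 and netted at 101.0 earns +1%;
# a short entered at 100.0 and netted at 101.0 loses roughly 0.99%.
print(netting_pnl('long', 100.0, 101.0))   # 0.01
print(netting_pnl('short', 100.0, 101.0))  # -0.0099...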
Example #2
    def match(self, msg: dict) -> None:
        """
        Change volume of book.

        :param msg: incoming order message
        """
        msg_order_id = msg.get('maker_order_id', None)
        if msg_order_id in self.order_map:
            old_order = self.order_map[msg_order_id]
            order = {
                'order_id': msg_order_id,
                'price': float(msg['price']),
                'size': float(msg['size']),
                'side': msg['side'],
                'time': msg['time'],
                'type': msg['type'],
                'product_id': msg['product_id']
            }
            price = order['price']
            if price in self.price_dict:
                remove_size = order['size']
                remaining_size = old_order['size'] - remove_size
                order['size'] = remaining_size
                self.order_map[old_order['order_id']] = order
                old_order_price = old_order.get('price', None)
                self.price_dict[price].add_market(quantity=remove_size,
                                                  price=old_order_price)
                self.price_dict[price].remove_quantity(quantity=remove_size,
                                                       price=old_order_price)
            else:
                LOGGER.info('\nmatch: price not already in tree [%s]\n' % msg)
        elif RECORD_DATA:
            LOGGER.warning('\n%s match: order id cannot be found for %s\n' %
                           (self.sym, msg))
Example #3
    def _process_trades(self, msg):
        """
        Internal method to process trade messages
        :param msg: incoming tick
        :return: False if a re-subscribe is required
        """
        if len(msg) == 2:
            #  historical trades
            return True

        msg_type = msg[1]
        side = 'upticks' if msg[2][2] > 0.0 else 'downticks'

        if msg_type == 'hb':
            LOGGER.info('Heartbeat for trades')
            return True

        elif msg_type == 'te':
            trade = {
                'price': float(msg[2][3]),
                'size': float(msg[2][2]),
                'side': side,
                'type': msg_type,
                "product_id": self.sym
            }
            self.db.new_tick(trade)
            return self._process_trades_replay(msg=trade)

        return True
Example #4
    def render_lob_feature_names(include_orderflow: bool = INCLUDE_ORDERFLOW) -> list:
        """
        Get the column names for the LOB render features.

        :param include_orderflow: if TRUE, order flow imbalance stats are included in set
        :return: list containing features names
        """
        feature_names = list()

        feature_names.append('midpoint')
        feature_names.append('spread')
        feature_names.append('buys')
        feature_names.append('sells')

        feature_types = ['distance', 'notional']
        if include_orderflow:
            feature_types += ['cancel_notional', 'limit_notional', 'market_notional']

        for side in ['bid', 'ask']:
            for feature in feature_types:
                for row in range(MAX_BOOK_ROWS):
                    feature_names.append("{}_{}_{}".format(side, feature, row))

        LOGGER.info("render_feature_names() has {} features".format(len(feature_names)))

        return feature_names
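
Because the naming scheme is simply side x feature type x book row, the size of the returned list can be derived directly. A small sketch of that count, assuming MAX_BOOK_ROWS = 15 purely for illustration:

MAX_BOOK_ROWS = 15  # assumed value, for illustration only

def lob_feature_count(include_orderflow: bool) -> int:
    base = 4                                       # midpoint, spread, buys, sells
    n_types = 2 + (3 if include_orderflow else 0)  # distance/notional (+ order-flow stats)
    return base + 2 * n_types * MAX_BOOK_ROWS      # two sides: bid and ask

print(lob_feature_count(include_orderflow=False))  # 64
print(lob_feature_count(include_orderflow=True))   # 154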
Example #5
    def _load_book(self, book):
        """
        Load initial limit order book snapshot
        :param book: order book snapshot
        :return: void
        """
        start_time = time()

        self.db.new_tick({'type': 'load_book', 'product_id': self.sym})

        for row in book[1]:
            order = {
                "order_id": int(row[0]),
                "price": float(row[1]),
                "size": float(abs(row[2])),
                "side": 'sell' if float(row[2]) < float(0) else 'buy',
                "product_id": self.sym,
                "type": 'preload'
            }
            self.db.new_tick(order)

            if order['side'] == 'buy':
                self.bids.insert_order(order)
            else:
                self.asks.insert_order(order)

        self.db.new_tick({'type': 'book_loaded', 'product_id': self.sym})

        self.bids.warming_up = self.asks.warming_up = False

        elapsed = time() - start_time
        LOGGER.info('%s: book loaded..............in %f seconds\n' %
                    (self.sym, elapsed))
Example #6
    def import_csv(filename: str) -> pd.DataFrame:
        """
        Import a historical tick file created by the export_to_csv() function.

        :param filename: full file path, including the filename
        :return: (pd.DataFrame) historical limit order book data
        """
        start_time = dt.now(tz=TIMEZONE)

        if 'xz' in filename:
            data = pd.read_csv(filepath_or_buffer=filename,
                               index_col=0,
                               compression='xz',
                               engine='c')
        elif 'csv' in filename:
            data = pd.read_csv(filepath_or_buffer=filename,
                               index_col=0,
                               engine='c')
        else:
            LOGGER.warning('Error: file must be a csv or xz')
            data = None

        elapsed = (dt.now(tz=TIMEZONE) - start_time).seconds
        LOGGER.info('Imported %s from a csv in %i seconds' %
                    (filename[-25:], elapsed))
        return data
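
import_csv() dispatches on the filename suffix only, so a file written by pandas with xz compression can be read back with the same arguments. A hedged round-trip sketch (the file name and columns below are invented for illustration):

import pandas as pd

# Write an xz-compressed CSV, then read it back the way import_csv() does.
df = pd.DataFrame({'midpoint': [100.0, 100.5], 'spread': [0.5, 0.25]})
df.to_csv('demo_book.csv.xz', compression='xz')

restored = pd.read_csv(filepath_or_buffer='demo_book.csv.xz', index_col=0,
                       compression='xz', engine='c')
assert restored.shape == df.shape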
Example #7
    def remove_order(self, msg: dict) -> None:
        """
        Done messages result in the order being removed from map.

        :param msg: incoming order message
        """
        msg_order_id = msg.get('order_id', None)
        if msg_order_id in self.order_map:

            old_order = self.order_map[msg_order_id]
            price = old_order.get('price', None)

            if price in self.price_dict:
                if msg.get('reason', None) == 'canceled':
                    self.price_dict[price].add_cancel(
                        quantity=float(msg.get('remaining_size')),
                        price=price)

                self.price_dict[price].remove_quantity(
                    quantity=old_order['size'], price=price)
                self.price_dict[price].remove_count()

                if self.price_dict[price].count == 0:
                    self.remove_price(price)

            elif RECORD_DATA:
                LOGGER.info('%s remove_order: price not in price_map [%s]' %
                            (msg['product_id'], str(price)))

            del self.order_map[msg_order_id]
Example #8
    def extract_features(self, query: dict) -> None:
        """
        Create and export limit order book data to csv. This function
        exports multiple days of data, writing one file per calendar day so
        that each day's file starts and ends on a day boundary.

        :param query: (dict) ccy=sym, daterange=(YYYYMMDD,YYYYMMDD)
        :return: void
        """
        start_time = dt.now(tz=TIMEZONE)

        order_book_data = self.get_orderbook_snapshot_history(query=query)
        if order_book_data is not None:
            dates = order_book_data['system_time'].dt.date.unique()
            LOGGER.info('dates: {}'.format(dates))
            for date in dates[:]:
                tmp = order_book_data.loc[
                    order_book_data['system_time'].dt.date == date]
                self.export_to_csv(tmp,
                                   filename='{}_{}'.format(
                                       query['ccy'][0], date),
                                   compress=True)

        elapsed = (dt.now(tz=TIMEZONE) - start_time).seconds
        LOGGER.info(
            '***\nSimulator.extract_features() executed in %i seconds\n***' %
            elapsed)
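
The per-day export relies on pandas' .dt.date accessor to slice a timestamped frame into calendar days. A self-contained sketch of that split on synthetic timestamps (the data below is invented):

import pandas as pd

frame = pd.DataFrame({
    'system_time': pd.to_datetime(['2020-01-01 09:30', '2020-01-01 16:00',
                                   '2020-01-02 09:30']),
    'midpoint': [100.0, 101.0, 102.0],
})

for date in frame['system_time'].dt.date.unique():
    daily = frame.loc[frame['system_time'].dt.date == date]
    print(date, len(daily))  # 2020-01-01 2, then 2020-01-02 1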
Example #9
    def get_tick_history(self, query: dict) -> Union[pd.DataFrame, None]:
        """
        Query the Arctic Tick Store and return the recorded ticks for a given
        set of securities over the specified date range.

        :param query: (dict) query parameters
            - ccy: list of symbols
            - start_date: int YYYYMMDD start date
            - end_date: int YYYYMMDD end date
        :return: (pd.DataFrame) the recorded ticks, or None if nothing was returned
        """
        start_time = dt.now(tz=self.tz)

        assert self.recording is False, "RECORD_DATA must be set to FALSE to replay data"
        cursor = self._query_arctic(**query)
        if cursor is None:
            LOGGER.info(
                '\nNothing returned from Arctic for the query: %s\n...Exiting...'
                % str(query))
            return

        elapsed = (dt.now(tz=self.tz) - start_time).seconds
        LOGGER.info('***Completed get_tick_history() in %i seconds***' %
                    elapsed)

        return cursor
Example #10
    def run(self) -> None:
        """
        Thread to override in Coinbase or Bitfinex or Bitmex implementation class.
        """
        LOGGER.info("run() initiated on : {}".format(self.name))
        self.last_worker_time = dt.now()
Example #11
    def load_book(self) -> None:
        """
        Load initial limit order book snapshot.
        """
        book = self._get_book()

        start_time = time()

        self.sequence = book['sequence']
        now = dt.now(tz=TIMEZONE)
        load_time = str(now)

        self.db.new_tick({
            'type': 'load_book',
            'product_id': self.sym,
            'sequence': self.sequence
        })

        for bid in book['bids']:
            msg = {
                'price': float(bid[0]),
                'size': float(bid[1]),
                'order_id': bid[2],
                'side': 'buy',
                'product_id': self.sym,
                'type': 'preload',
                'sequence': self.sequence,
                'time': load_time,
            }
            self.db.new_tick(msg)
            self.bids.insert_order(msg)

        for ask in book['asks']:
            msg = {
                'price': float(ask[0]),
                'size': float(ask[1]),
                'order_id': ask[2],
                'side': 'sell',
                'product_id': self.sym,
                'type': 'preload',
                'sequence': self.sequence,
                'time': load_time,
            }
            self.db.new_tick(msg)
            self.asks.insert_order(msg)

        self.db.new_tick({
            'type': 'book_loaded',
            'product_id': self.sym,
            'sequence': self.sequence
        })
        del book
        self.bids.warming_up = self.asks.warming_up = False

        elapsed = time() - start_time
        LOGGER.info('%s: book loaded................in %f seconds' %
                    (self.sym, elapsed))
Example #12
    def _process_book(self, msg):
        """
        Internal method to process FULL BOOK market data
        :param msg: incoming tick
        :return: False if re-subscribe is required
        """
        # check for a heartbeat
        if msg[1] == 'hb':
            # render_book('heart beat %s' % msg)
            return True

        # order book message (initial snapshot)
        elif np.shape(msg[1])[0] > 3:
            LOGGER.info('%s loading book...' % self.sym)
            self.clear_book()
            self._load_book(msg)
            return True

        else:
            # otherwise, the incoming message is an order update
            order = {
                "order_id": int(msg[1][0]),
                "price": float(msg[1][1]),
                "size": float(abs(msg[1][2])),
                "side": 'sell' if float(msg[1][2]) < float(0) else 'buy',
                "product_id": self.sym,
                "type": 'update'
            }

            self.db.new_tick(order)

            # order should be removed from the book
            if order['price'] == 0.:
                if order['side'] == 'buy':
                    self.bids.remove_order(order)
                elif order['side'] == 'sell':
                    self.asks.remove_order(order)

            # order is a new order or size update for bids
            elif order['side'] == 'buy':
                if order['order_id'] in self.bids.order_map:
                    self.bids.change(order)
                else:
                    self.bids.insert_order(order)

            # order is a new order or size update for asks
            elif order['side'] == 'sell':
                if order['order_id'] in self.asks.order_map:
                    self.asks.change(order)
                else:
                    self.asks.insert_order(order)

            # unhandled msg
            else:
                LOGGER.warning('\nUnhandled list msg %s' % msg)

            return True
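
In the Bitfinex raw-book feed each update row is [order_id, price, ±amount]: the sign of the amount encodes the side and a price of zero signals removal, which is exactly what the branches above check. A tiny decoding sketch on made-up rows:

def decode_update(row) -> dict:
    """Interpret a Bitfinex-style raw book update (illustrative helper)."""
    order_id, price, amount = row
    return {
        'order_id': int(order_id),
        'price': float(price),
        'size': abs(float(amount)),
        'side': 'sell' if float(amount) < 0. else 'buy',
        'remove': float(price) == 0.,  # price of zero means delete the order
    }

for row in [[1, 9500.0, 0.25], [2, 9501.0, -1.0], [1, 0.0, 0.25]]:
    print(decode_update(row))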
Example #13
    def clear_book(self) -> None:
        """
        Method to reset the limit order book.

        :return: (void)
        """
        self.bids.clear()  # warming_up flag reset in `Position` class
        self.asks.clear()  # warming_up flag reset in `Position` class
        self.last_tick_time = None
        LOGGER.info("{}'s order book cleared.".format(self.sym))
Example #14
    def step_limit_order_pnl(self, bid_price: float, ask_price: float,
                             buy_volume: float, sell_volume: float,
                             step: int) -> Tuple[float, bool, bool]:
        """
        Update PnL & positions every time step in the environment.

        :param bid_price: (float) current time step bid price
        :param ask_price: (float) current time step ask price
        :param buy_volume: (float) current time step buy volume
        :param sell_volume: (float) current time step sell volume
        :param step: (int) current time step number
        :return: (float, bool, bool) PnL for the current time step due to limit
            order fills and netting, plus flags for long/short order fills
        """
        pnl = 0.
        is_long_order_filled = self.long_inventory.step(
            bid_price=bid_price,
            ask_price=ask_price,
            buy_volume=buy_volume,
            sell_volume=sell_volume,
            step=step)
        is_short_order_filled = self.short_inventory.step(
            bid_price=bid_price,
            ask_price=ask_price,
            buy_volume=buy_volume,
            sell_volume=sell_volume,
            step=step)

        if is_long_order_filled and is_short_order_filled:
            # protection in case Long and Short orders get filled in the same time step.
            # Although this shouldn't happen, it prevents an error from occurring if it
            # does happen.
            LOGGER.info(
                "WARNING: Long and Short orders filled in the same step")
            LOGGER.info(
                'bid={} | ask={} | buy_vol={} | sell_vol={} | step={}'.format(
                    bid_price, ask_price, buy_volume, sell_volume, step))
            is_short_order_filled = False

        if is_long_order_filled:
            # check if we can net the inventory
            if self.short_inventory_count > 0:
                # net out the inventory
                new_position = self.long_inventory.pop_position()
                pnl += self.short_inventory.remove(netting_order=new_position)

        if is_short_order_filled:
            # check if we can net the inventory
            if self.long_inventory_count > 0:
                # net out the inventory
                new_position = self.short_inventory.pop_position()
                pnl += self.long_inventory.remove(netting_order=new_position)

        return pnl, is_long_order_filled, is_short_order_filled
Example #15
    def create_model(self, name: str = 'cnn') -> Sequential:
        """
        Helper function to create and return the default MLP or CNN model.

        :param name: Neural network type ['mlp' or 'cnn']
        :return: neural network
        """
        LOGGER.info("creating model for {}".format(name))
        if name == 'cnn':
            return self._create_cnn_model()
        elif name == 'mlp':
            return self._create_mlp_model()
Example #16
    def init_db_connection(self) -> None:
        """
        Initiate database connection to Arctic.

        :return: (void)
        """
        LOGGER.info("init_db_connection for {}...".format(self.sym))
        try:
            self.db = Arctic(MONGO_ENDPOINT)
            self.db.initialize_library(ARCTIC_NAME, lib_type=TICK_STORE)
            self.collection = self.db[ARCTIC_NAME]
        except PyMongoError as e:
            LOGGER.warn("Database.PyMongoError() --> {}".format(e))
Example #17
    def _create_mlp_model(self) -> Sequential:
        """
        Create a dense (MLP) neural network with a softmax output layer.

        :return: keras model
        """
        features_shape = (self.memory_frame_stack,
                          *self.env.observation_space.shape)
        model = Sequential()
        model.add(
            Dense(units=256, input_shape=features_shape, activation='relu'))
        model.add(Dense(units=256, activation='relu'))
        model.add(Flatten())
        model.add(Dense(self.env.action_space.n, activation='softmax'))
        model.summary(print_fn=LOGGER.info)
        return model
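
A usage sketch of the same layer stack for made-up shapes, to show how the frame stack becomes the input dimension and how the softmax head sizes to the action space (imports assume the standalone keras package; the shape values are invented):

import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Flatten

memory_frame_stack, obs_dim, n_actions = 4, 64, 3  # hypothetical dimensions
features_shape = (memory_frame_stack, obs_dim)

model = Sequential()
model.add(Dense(units=256, input_shape=features_shape, activation='relu'))
model.add(Dense(units=256, activation='relu'))
model.add(Flatten())
model.add(Dense(n_actions, activation='softmax'))

# One probability per action for each input sample.
probs = model.predict(np.zeros((1, memory_frame_stack, obs_dim)))
print(probs.shape)  # (1, 3)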
Example #18
    def _get_book(self) -> dict:
        """
        Get order book snapshot.

        :return: order book
        """
        LOGGER.info('%s get_book request made.' % self.sym)
        start_time = time()

        self.clear_book()
        path = (COINBASE_BOOK_ENDPOINT % self.sym)
        book = requests.get(path, params={'level': 3}).json()

        elapsed = time() - start_time
        LOGGER.info('%s get_book request completed in %f seconds.' % (self.sym, elapsed))
        return book
Example #19
def reset_ema(ema):
    """
    Reset EMA manager.

    :param ema: EMA manager to be reset
    :return: EMA manager that has been reset
    """
    if ema is None:
        pass
    elif isinstance(ema, ExponentialMovingAverage):
        ema.reset()
        LOGGER.info("Reset EMA data.")
    elif isinstance(ema, list):
        for e in ema:
            e.reset()
        LOGGER.info("Reset EMA data.")
    return ema
Example #20
    def __init__(self,
                 sym: str,
                 exchange: str,
                 record_data: bool = RECORD_DATA):
        """
        Database constructor.
        """
        self.counter = 0
        self.data = list()
        self.tz = TIMEZONE
        self.sym = sym
        self.exchange = exchange
        self.recording = record_data
        self.db = self.collection = None
        if self.recording:
            LOGGER.info('\nDatabase: [%s is recording %s]\n' %
                        (self.exchange, self.sym))
Example #21
    def run(self) -> None:
        """
        Thread to override in Coinbase or Bitfinex or Bitmex implementation class.
        """
        LOGGER.info("run() initiated on : {}".format(self.name))
        self.last_worker_time = dt.now()
        # Used for debugging exchanges individually
        # Timer(4.0, _timer_worker, args=(self.book, self.last_worker_time,)).start()


# from data_recorder.connector_components.orderbook import OrderBook

# Used for debugging exchanges individually
# def _timer_worker(orderbook: OrderBook, last_worker_time: dt) -> None:
#     """
#     Thread worker to be invoked every N seconds
#     (e.g., configs.SNAPSHOT_RATE)
#
#     :param orderbook: OrderBook
#     :return: void
#     """
#     now = dt.now()
#     delta = now - last_worker_time
#     print('\n{} - {} with delta {}\n{}'.format(orderbook.sym, now, delta.microseconds,
#                                                orderbook))
#     last_worker_time = now
#
#     Timer(SNAPSHOT_RATE, _timer_worker, args=(orderbook, last_worker_time,)).start()
#
#     if orderbook.done_warming_up:
#         """
#         This is the place to insert a trading model.
#         You'll have to create your own.
#
#         Example:
#             orderbook_data = tuple(coinbaseClient.book, bitfinexClient.book)
#             model = agent.dqn.Agent()
#             fix_api = SomeFixAPI()
#             action = model(orderbook_data)
#             if action is buy:
#                 buy_order = create_order(pair, price, etc.)
#                 fix_api.send_order(buy_order)
#
#         """
#         _ = orderbook.render_book()
Example #22
    def run(self):
        """
        Handle incoming level 3 data on a separate thread or process.

        Returns
        -------

        """
        super(CoinbaseClient, self).run()
        while True:
            msg = self.queue.get()

            if self.book.new_tick(msg) is False:
                self.book.load_book()
                self.retry_counter += 1
                LOGGER.info('\n[%s - %s] ...going to try and reload the order '
                            'book\n' % (self.exchange.upper(), self.sym))
                continue
Example #23
File: ema.py Project: yushu-liu/crypto-rl
def reset_ema(ema: Union[List[ExponentialMovingAverage], ExponentialMovingAverage, None]) -> \
        Union[List[ExponentialMovingAverage], ExponentialMovingAverage, None]:
    """
    Reset the EMA smoother.

    :param ema: EMA smoother (or list of smoothers) to reset
    :return: the reset EMA smoother(s), or None if no EMA was provided
    """
    if ema is None:
        pass
    elif isinstance(ema, ExponentialMovingAverage):
        ema.reset()
        LOGGER.info("Reset EMA data.")
    elif isinstance(ema, list):
        for e in ema:
            e.reset()
        LOGGER.info("Reset EMA data.")
    return ema
Example #24
File: ema.py Project: yushu-liu/crypto-rl
def load_ema(alpha: Union[List[float], float, None]) -> \
        Union[List[ExponentialMovingAverage], ExponentialMovingAverage, None]:
    """
    Set exponential moving average smoother.

    :param alpha: decay rate for EMA
    :return: ExponentialMovingAverage instance(s), or None if smoothing is disabled
    """
    if alpha is None:
        # print("EMA smoothing DISABLED")
        return None
    elif isinstance(alpha, float):
        LOGGER.info(f"EMA smoothing ENABLED: {alpha}")
        return ExponentialMovingAverage(alpha=alpha)
    elif isinstance(alpha, list):
        LOGGER.info(f"EMA smoothing ENABLED: {alpha}")
        return [ExponentialMovingAverage(alpha=a) for a in alpha]
    else:
        raise ValueError(f"_load_ema() --> unknown alpha type: {type(alpha)}")
Example #25
def load_ema(alpha=None):
    """
    Set exponential moving average smoother.

    :param alpha: decay rate for EMA
    :return: ExponentialMovingAverage instance(s), or None if smoothing is disabled
    """
    if alpha is None:
        # print("EMA smoothing DISABLED")
        return None
    elif isinstance(alpha, float):
        LOGGER.info("EMA smoothing ENABLED: {}".format(alpha))
        return ExponentialMovingAverage(alpha=alpha)
    elif isinstance(alpha, list):
        LOGGER.info("EMA smoothing ENABLED: {}".format(alpha))
        return [ExponentialMovingAverage(alpha=a) for a in alpha]
    else:
        raise ValueError("_load_ema() --> unknown alpha type: {}".format(
            type(alpha)))
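
load_ema() only wires up the smoothers; the ExponentialMovingAverage itself presumably follows the standard recursion ema = alpha * x + (1 - alpha) * ema. The class below is a minimal illustrative stand-in (its interface is assumed, not the project's actual implementation):

class SimpleEMA:
    """Illustrative EMA smoother: ema = alpha * x + (1 - alpha) * ema."""

    def __init__(self, alpha: float):
        self.alpha = alpha
        self.value = None

    def step(self, x: float) -> float:
        self.value = x if self.value is None else \
            self.alpha * x + (1. - self.alpha) * self.value
        return self.value

    def reset(self) -> None:
        self.value = None


ema = SimpleEMA(alpha=0.5)
print([round(ema.step(x), 3) for x in [1., 2., 3.]])  # [1.0, 1.5, 2.25]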
Example #26
    def add(self, order: Union[MarketOrder, LimitOrder]) -> bool:
        """
        Add / update an order.

        :param order: (Order) New order to be used for updating existing order or
                        placing a new order
        :return: (bool) TRUE if order add action successfully completed, FALSE if already
                        at position_max or unknown order.side
        """
        if order.order_type == 'market':
            return self._add_market_order(order=order)
        elif order.order_type == 'limit':
            return self._add_limit_order(order=order)
        else:
            LOGGER.info("Position() add --> unknown order_type {}".format(
                order.order_type))
            raise ValueError(
                "ERROR: order_type must be limit or market, not {}".format(
                    order.order_type))
Example #27
    def _query_arctic(self, ccy: str, start_date: int,
                      end_date: int) -> Union[pd.DataFrame, None]:
        """
        Query database and return LOB messages starting from LOB reconstruction.

        :param ccy: currency symbol
        :param start_date: YYYYMMDD start date
        :param end_date: YYYYMMDD end date
        :return: (pd.DataFrame) results found in database
        """
        assert self.collection is not None, \
            "Arctic.Collection() must not be null."

        start_time = dt.now(tz=self.tz)

        try:
            LOGGER.info(
                '\nGetting {} data from Arctic Tick Store...'.format(ccy))
            cursor = self.collection.read(symbol=ccy,
                                          date_range=DateRange(
                                              start_date, end_date))

            # filter ticks for the first LOAD_BOOK message
            #   (starting point for order book reconstruction)
            # min_datetime = cursor.loc[cursor.type == 'load_book'].index[0]
            dates = np.unique(
                cursor.loc[cursor.type == 'load_book'].index.date)
            start_index = cursor.loc[((cursor.index.date == dates[0]) &
                                      (cursor.type == 'load_book'))].index[-1]
            # cursor = cursor.loc[cursor.index >= min_datetime]
            cursor = cursor.loc[cursor.index >= start_index]

            elapsed = (dt.now(tz=self.tz) - start_time).seconds
            LOGGER.info('Completed querying %i %s records in %i seconds' %
                        (cursor.shape[0], ccy, elapsed))

        except Exception as ex:
            cursor = None
            LOGGER.warning('Simulator._query_arctic() threw an exception: \n%s' %
                           str(ex))

        return cursor
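
The trimming step keeps only the rows from the last 'load_book' message on the earliest date onward, so replays always begin with a full book snapshot. A toy reproduction of that index filter on a synthetic frame (timestamps invented):

import numpy as np
import pandas as pd

idx = pd.to_datetime(['2020-01-01 00:00', '2020-01-01 00:05',
                      '2020-01-01 00:06', '2020-01-02 00:00'])
cursor = pd.DataFrame({'type': ['load_book', 'load_book', 'update', 'update']},
                      index=idx)

dates = np.unique(cursor.loc[cursor.type == 'load_book'].index.date)
start_index = cursor.loc[(cursor.index.date == dates[0]) &
                         (cursor.type == 'load_book')].index[-1]
trimmed = cursor.loc[cursor.index >= start_index]
print(trimmed)  # keeps the 00:05 snapshot and everything after it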
Example #28
    def run(self) -> None:
        """
        Handle incoming level 3 data on a separate thread or process.

        Returns
        -------

        """
        super(BitfinexClient, self).run()
        while True:
            msg = self.queue.get()

            if self.book.new_tick(msg) is False:
                self.retry_counter += 1
                self.book.clear_book()
                LOGGER.info(
                    '\n[%s - %s] ...going to try and reload the order book\n' %
                    (self.exchange.upper(), self.sym))
                raise websockets.ConnectionClosed(
                    10001, '%s: no explanation' % self.exchange.upper())
Example #29
    async def unsubscribe(self) -> None:
        """
        Unsubscribe limit order book WebSocket from exchange.
        """
        LOGGER.info('Client - %s sending unsubscribe request for %s.' %
                    (self.exchange.upper(), self.sym))

        await self.ws.send(self.request_unsubscribe)
        output = json.loads(await self.ws.recv())

        LOGGER.info('Client - %s: unsubscribe successful.' % (self.exchange.upper()))
        LOGGER.info('unsubscribe() -> Output:')
        LOGGER.info(output)
Example #30
    def new_tick(self, msg: dict):
        """
        Method to process incoming ticks.

        :param msg: incoming tick
        :return: False if there is an exception (or need to reconnect the WebSocket)
        """
        # check for data messages, which only come in lists
        if isinstance(msg, list):
            if msg[0] == self.channel_id['book']:
                return self._process_book(msg)
            elif msg[0] == self.channel_id['trades']:
                return self._process_trades(msg)

        # non-data messages
        elif isinstance(msg, dict):
            if 'event' in msg:
                return self._process_events(msg)
            elif msg['type'] == 'te':
                self.last_tick_time = msg.get('system_time', None)
                return self._process_trades_replay(msg)
            elif msg['type'] in ['update', 'preload']:
                self.last_tick_time = msg.get('system_time', None)
                return self._process_book_replay(msg)
            elif msg['type'] == 'load_book':
                self.clear_book()
                return True
            elif msg['type'] == 'book_loaded':
                self.bids.warming_up = False
                self.asks.warming_up = False
                return True
            else:
                LOGGER.info(
                    'new_tick() does not know how to process message = %s'
                    % str(msg))

        # unhandled message type
        else:
            LOGGER.warning('unhandled message type\n%s\n' % msg)
            return True