Example #1
class Polo(Base):
    """
    Poloniex interface
    """
    arg_parser = configargparse.get_argument_parser()
    arg_parser.add('--polo_api_key', help='Poloniex API key')
    arg_parser.add("--polo_secret", help='Poloniex secret key')
    arg_parser.add("--polo_txn_fee", help='Poloniex txn. fee')
    arg_parser.add("--polo_buy_order", help='Poloniex buy order type')
    arg_parser.add("--polo_sell_order", help='Poloniex sell order type')
    valid_candle_intervals = [300, 900, 1800, 7200, 14400, 86400]

    def __init__(self):
        super(Polo, self).__init__()
        args = self.arg_parser.parse_known_args()[0]
        api_key = args.polo_api_key
        secret = args.polo_secret
        self.transaction_fee = float(args.polo_txn_fee)
        self.polo = Poloniex(api_key, secret)
        self.buy_order_type = args.polo_buy_order
        self.sell_order_type = args.polo_sell_order
        self.pair_delimiter = '_'
        self.tickers_cache_refresh_interval = 50  # If the ticker request is within the interval, get data from cache
        self.last_tickers_fetch_epoch = 0
        self.last_tickers_cache = None  # Cache for storing immediate tickers

    def get_balances(self):
        """
        Return available account balances (function returns ONLY currencies > 0)
        """
        try:
            balances = self.polo.returnBalances()
            only_non_zeros = {k: float(v) for k, v in balances.items() if float(v) > 0.0}
        except PoloniexError as e:
            print(colored('!!! Got exception (polo.get_balances): ' + str(e), 'red'))
            only_non_zeros = dict()

        return only_non_zeros

    def get_symbol_ticker(self, symbol, candle_size=5):
        """
        Returns a real-time ticker DataFrame for the given symbol/pair
        Info: Poloniex currently returns tickers for ALL pairs. To speed up queries and avoid
              unnecessary API calls, this method implements a temporary cache
        """
        epoch_now = int(time.time())
        if epoch_now < (self.last_tickers_fetch_epoch + self.tickers_cache_refresh_interval):
            # If the ticker request falls within the cache refresh interval, serve data from the cache
            pair_ticker = self.last_tickers_cache[symbol].copy()
        else:
            # If cache is too old fetch data from Poloniex API
            try:
                ticker = self.polo.returnTicker()
                pair_ticker = ticker[symbol]
                self.last_tickers_fetch_epoch = int(time.time())
                self.last_tickers_cache = ticker.copy()
            except (PoloniexError, JSONDecodeError) as e:
                print(colored('!!! Got exception in get_symbol_ticker. Details: ' + str(e), 'red'))
                pair_ticker = self.last_tickers_cache[symbol].copy()
                pair_ticker = dict.fromkeys(pair_ticker, None)

        df = pd.DataFrame.from_dict(pair_ticker, orient="index")
        df = df.T
        # We will use 'last' price as closing one
        df = df.rename(columns={'last': 'close', 'baseVolume': 'volume'})
        df['close'] = df['close'].astype(float)
        df['volume'] = df['volume'].astype(float)
        df['pair'] = symbol
        df['date'] = int(datetime.datetime.utcnow().timestamp())
        return df

    def return_ticker(self):
        """
        Returns ticker for all currencies
        """
        return self.polo.returnTicker()

    def cancel_order(self, order_number):
        """
        Cancels order for given order number
        """
        return self.polo.cancelOrder(order_number)

    def get_open_orders(self, currency_pair='all'):
        """
        Returns your open orders
        """
        return self.polo.returnOpenOrders(currency_pair)

    def get_pairs(self):
        """
        Returns ticker pairs for all currencies
        """
        ticker = self.polo.returnTicker()
        return list(ticker)

    def get_candles_df(self, currency_pair, epoch_start, epoch_end, period=False):
        """
        Returns the most recent candle as a pandas DataFrame
        """
        try:
            data = self.get_candles(currency_pair, epoch_start, epoch_end, period)
            df = pd.DataFrame(data)
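            # Keep only the most recent candle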
            df = df.tail(1)
            df['close'] = df['close'].astype(float)
            df['volume'] = df['volume'].astype(float)
            df['pair'] = currency_pair
            return df
        except (PoloniexError, JSONDecodeError) as e:
            print()
            print(colored('!!! Got exception while retrieving polo data:' + str(e) + ', pair: ' + currency_pair, 'red'))
        return pd.DataFrame()

    def get_candles(self, currency_pair, epoch_start, epoch_end, interval_in_sec=300):
        """
        Returns candlestick chart data
        """
        candle_interval = self.get_valid_candle_interval(interval_in_sec)
        data = []
        try:
            data = self.polo.returnChartData(currency_pair, candle_interval, epoch_start, epoch_end)
        except (PoloniexError, JSONDecodeError) as e:
            print()
            print(colored('!!! Got exception while retrieving polo data:' + str(e) + ', pair: ' + currency_pair, 'red'))
        return data

    def get_market_history(self, start, end, currency_pair='all'):
        """
        Returns market trade history
        """
        data = []
        try:
            data = self.polo.marketTradeHist(currencyPair=currency_pair,
                                             start=start,
                                             end=end)
        except (PoloniexError, JSONDecodeError) as e:
            logger = logging.getLogger(__name__)
            logger.error('Got exception while retrieving polo data: %s, pair: %s', e, currency_pair)
        return data

    def get_valid_candle_interval(self, period_in_sec):
        """
        Returns closest value from valid candle intervals
        """
        if not period_in_sec:
            return period_in_sec

        if period_in_sec in self.valid_candle_intervals:
            return period_in_sec
        # Find the closest valid interval
        return min(self.valid_candle_intervals, key=lambda x: abs(x - period_in_sec))

    def trade(self, actions, wallet, trade_mode):
        if trade_mode == TradeMode.backtest:
            return super(Polo, self).trade(actions, wallet, trade_mode)
        else:
            actions = self.life_trade(actions)
            return actions

    def life_trade(self, actions):
        """
        Places orders and returns order number
        !!! For now we are NOT handling postOnly type of orders !!!
        """
        for action in list(actions):  # iterate over a copy so items can be removed safely

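            # Drop actions that carry no buy/sell signal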
            if action.action == TradeState.none:
                actions.remove(action)
                continue

            # Handle buy_sell mode
            wallet = self.get_balances()
            if action.buy_sell_mode == BuySellMode.all:
                action.amount = self.get_buy_sell_all_amount(wallet, action)
            elif action.buy_sell_mode == BuySellMode.fixed:
                action.amount = self.get_fixed_trade_amount(wallet, action)

            print('Processing live-action: ' + str(action.action) +
                  ', amount: ' + str(action.amount) +
                  ', pair: ' + action.pair +
                  ', rate: ' + str(action.rate) +
                  ', buy_sell_mode: ' + str(action.buy_sell_mode))

            # If we don't have enough assets, just skip/remove the action
            if action.amount == 0.0:
                print(colored('No assets to buy/sell, ...skipping: ' + str(action.amount) + ' ' + action.pair, 'green'))
                actions.remove(action)
                continue

            # ** Buy Action **
            if action.action == TradeState.buy:
                try:
                    print(colored('Setting buy order: ' + str(action.amount) + ' ' + action.pair, 'green'))
                    action.order_number = self.polo.buy(action.pair, action.rate, action.amount, self.buy_order_type)
                except PoloniexError as e:
                    print(colored('Got exception: ' + str(e) + ' Txn: buy-' + action.pair, 'red'))
                    continue
                amount_unfilled = action.order_number.get('amountUnfilled')
                if float(amount_unfilled) == 0.0:
                    actions.remove(action)
                    print(colored('Bought: ' + str(action.amount) + ' ' + action.pair, 'green'))
                else:
                    action.amount = amount_unfilled
                    print(colored('Not filled 100% buy txn. Unfilled amount: ' + str(amount_unfilled) + ' ' + action.pair, 'red'))

            # ** Sell Action **
            elif action.action == TradeState.sell:
                try:
                    print(colored('Setting sell order: ' + str(action.amount) + ' ' + action.pair, 'yellow'))
                    action.order_number = self.polo.sell(action.pair, action.rate, action.amount, self.sell_order_type)
                except PoloniexError as e:
                    print(colored('Got exception: ' + str(e) + ' Txn: sell-' + action.pair, 'red'))
                    continue
                amount_unfilled = action.order_number.get('amountUnfilled')
                if float(amount_unfilled) == 0.0:
                    actions.remove(action)
                    print(colored('Sold: ' + str(action.amount) + ' ' + action.pair, 'yellow'))
                else:
                    action.amount = amount_unfilled
                    print(colored('Not filled 100% sell txn. Unfilled amount: ' + str(amount_unfilled) + ' ' + action.pair, 'red'))
        return actions
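
A minimal usage sketch for the Polo wrapper above, assuming the --polo_* options registered at class level have been supplied (for example via a config file or command-line flags) and that a pair such as 'BTC_ETH' exists on the exchange; everything not defined in the class is illustrative only:

# Hypothetical driver code, not part of the original module.
import time

polo = Polo()

# Non-zero account balances as a dict of {currency: amount}
balances = polo.get_balances()

# Latest ticker row for one pair; repeated calls within ~50 s are served from the cache
ticker_df = polo.get_symbol_ticker('BTC_ETH')
print(ticker_df[['pair', 'close', 'volume']])

# One day of 5-minute candles ending now
epoch_end = int(time.time())
candles = polo.get_candles('BTC_ETH', epoch_end - 86400, epoch_end, interval_in_sec=300)
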
Example #2
class Grabber(object):
    """
    Poloniex returns at most 50,000 records per request, so downloading and saving history has to be
    coordinated across many chunks of data. Moreover, there is no fixed number of records per unit of
    time, so chunks have to be synchronized by trade id.

    For example: if we wanted to go one month back in time, Poloniex might return only the most recent
    week. Because Polo returns only the 50,000 latest records (not the oldest ones), chunks can be
    synchronized only by walking backwards in time. If we went forwards instead, we could not know
    which time interval to request so that every record is captured and the new chunk lines up with
    the previous one.
    """
    def __init__(self, mongo_ts):
        # pymongo Wrapper
        self.mongo_ts = mongo_ts
        # Poloniex
        self.polo = Poloniex()

    def progress(self):
        """
        Shows how much history has been grabbed so far relative to the total available on Poloniex
        """
        cname_series_info = {
            cname: self.mongo_ts.series_info(cname)
            for cname in self.mongo_ts.list_cols()
        }
        for pair, series_info in cname_series_info.items():
            # Get latest id
            df = self.get_chunk(pair, ago(minutes=15), now())
            if df.empty:
                logger.info("%s - No information available", pair)
                continue
            max_id = df_series_info(df)['to_id']

            # Progress bar
            steps = 50
            below_rate = series_info['from_id'] / max_id
            taken_rate = (series_info['to_id'] -
                          series_info['from_id']) / max_id
            above_rate = (max_id - series_info['to_id']) / max_id
            progress = '_' * math.floor(below_rate * steps) + \
                       'x' * (steps - math.floor(below_rate * steps) - math.floor(above_rate * steps)) + \
                       '_' * math.floor(above_rate * steps)

            logger.info("%s - 1 [ %s ] %d - %.1f/100.0%% - %s/%s", pair,
                        progress, series_info['to_id'], taken_rate * 100,
                        format_bytes(series_info['memory']),
                        format_bytes(1 / taken_rate * series_info['memory']))

    def remote_info(self, pairs):
        """
        Detailed info on pairs listed on Poloniex
        """
        for pair in pairs:
            chart_data = Poloniex().returnChartData(pair,
                                                    period=86400,
                                                    start=1,
                                                    end=dt_to_ts(now()))
            from_dt = chart_data[0]['date']
            to_dt = chart_data[-1]['date']

            df = self.get_chunk(pair, ago(minutes=5), now())
            if df.empty:
                logger.info("%s - No information available")
                continue
            max_id = df_series_info(df)['to_id']

            logger.info(
                "%s - %s - %s, %s, %d trades, est. %s", pair,
                dt_to_str(from_dt, fmt='%a %d/%m/%Y'),
                dt_to_str(to_dt, fmt='%a %d/%m/%Y'),
                format_td(to_dt - from_dt), max_id,
                format_bytes(round(df_memory(df) * max_id / len(df.index))))

    def db_info(self):
        """
        Wrapper for mongo_ts.db_info
        """
        self.mongo_ts.db_info()

    def ticker_pairs(self):
        """
        Returns all pairs from ticker
        """
        ticker = self.polo.returnTicker()
        pairs = set(map(lambda x: str(x).upper(), ticker.keys()))
        return pairs

    def get_chunk(self, pair, from_dt, to_dt):
        """
        Returns a chunk of trade history (at most the 50,000 most recent records) for a period of time

        :param pair: pair of symbols
        :param from_dt: start date
        :param to_dt: end date
        :return: DataFrame of trades indexed by trade id (empty on error)
        """
        try:
            series = self.polo.marketTradeHist(pair,
                                               start=dt_to_ts(from_dt),
                                               end=dt_to_ts(to_dt))
            series_df = pd.DataFrame(series)
            series_df = series_df.astype({
                'date': str,
                'amount': float,
                'globalTradeID': int,
                'rate': float,
                'total': float,
                'tradeID': int,
                'type': str
            })
            series_df['date'] = series_df['date'].apply(
                lambda date_str: parse_date(date_str))
            series_df.rename(columns={
                'date': 'dt',
                'tradeID': '_id',
                'globalTradeID': 'globalid'
            },
                             inplace=True)
            series_df = series_df.set_index(['_id'], drop=True)
            return series_df
        except Exception as e:
            logger.error(e)
            return pd.DataFrame()

    def grab(self, pair, from_dt=None, from_id=None, to_dt=None, to_id=None):
        """
        Grabs trade history of a period of time for a pair of symbols.

        * Traverses history from the end date to the start date (backwards)
        * History is divided into chunks of max 50,000 records
        * Chunks are synced by id of their oldest records
        * Once received, each chunk is immediately put into MongoDB to free up RAM
        * Result includes passed dates - [from_dt, to_dt]
        * Result excludes passed ids - (from_id, to_id)
        * Ids have higher priority than dates

        The whole process looks like this:
        1) Start recording history chunk by chunk beginning from to_dt

                [ from_dt/from_id <- xxxxxxxxxxxxxxxxxxxxxxxxxx to_dt ]

            or if to_id is provided, find it first and only then start recording

                [ from_dt/from_id ___________ to_id <- <- <- <- to_dt ]

                [ from_dt/from_id <- xxxxxxxx to_id ___________ to_dt ]


        2) Each chunk is verified for consistency and inserted into MongoDB
        3) Proceed until start date or id are reached, or Poloniex returned nothing

                [ from_dt/from_id xxxxxxxxxxxxxxxxxxxxxxxxxxxxx to_dt ]
                                                |
                                                v
                                        collected history

            or if to_id is provided

                [ from_dt/from_id xxxxxxxxxxx to_id ___________ to_dt ]
                                         |
                                         v
                                 collected history

        4) Verify whole collection

        :param pair: pair of symbols
        :param from_dt: date of the start point (approximate; grabbing stops once it is reached)
        :param from_id: id of the start point (takes priority over from_dt; grabbing stops once it is reached)
        :param to_dt: date of end point
        :param to_id: id of end point
        :return: None
        """
        if self.mongo_ts.col_non_empty(pair):
            logger.debug("%s - Collection - %s", pair,
                         series_info_str(self.mongo_ts.series_info(pair)))
        else:
            logger.debug("%s - Collection - Empty", pair)

            # Create new collection only if none exists
            if pair not in self.mongo_ts.list_cols():
                self.mongo_ts.create_col(pair)
        logger.debug("%s - Collection - Achieving { %s%s, %s%s, %s }", pair,
                     dt_to_str(from_dt),
                     ' : %d' % from_id if from_id is not None else '',
                     dt_to_str(to_dt),
                     ' : %d' % to_id if to_id is not None else '',
                     format_td(to_dt - from_dt))

        t = timer()

        # Init window params
        # ..................

        # Dates are required to build rolling windows and pass them to Poloniex
        # If start and/or end dates are empty, set the widest period possible
        if from_dt is None:
            from_dt = begin()
        if to_dt is None:
            to_dt = now()
        if to_dt <= from_dt:
            raise Exception("%s - Start date { %s } above end date { %s }" %
                            (pair, dt_to_str(from_dt), dt_to_str(to_dt)))
        if from_id is not None and to_id is not None:
            if to_id <= from_id:
                raise Exception("%s - Start id { %d } above end id { %d }" %
                                (pair, from_id, to_id))

        max_delta = timedelta(days=30)
        window = {
            # Do not fetch more than needed, pick the size smaller or equal to max_delta
            'from_dt': max(to_dt - max_delta, from_dt),
            'to_dt': to_dt,
            # Gets filled after first chunk is fetched
            'anchor_id': None
        }
        # Record only starting from to_id, or immediately if none is provided
        recording = to_id is None
        # After we recorded data, verify consistency in database
        anything_recorded = False

        # Three possibilities to escape the loop:
        #   1) empty result
        #   2) reached the start date/id
        #   3) exception
        while True:
            t2 = timer()

            # Receive and process chunk of data
            # .................................

            logger.debug("%s - Poloniex - Querying { %s, %s, %s }", pair,
                         dt_to_str(window['from_dt']),
                         dt_to_str(window['to_dt']),
                         format_td(window['to_dt'] - window['from_dt']))

            df = self.get_chunk(pair, window['from_dt'], window['to_dt'])
            if df.empty:
                if anything_recorded or window['from_dt'] == from_dt:
                    # If we finished (either by reaching start or receiving no records) -> terminate
                    logger.debug("%s - Poloniex - Nothing returned - aborting",
                                 pair)
                    break
                else:
                    # If Poloniex temporarily suspended trading for a pair -> look for older records
                    logger.debug(
                        "%s - Poloniex - Nothing returned - continuing", pair)
                    window['to_dt'] = window['from_dt']
                    window['from_dt'] = max(window['from_dt'] - max_delta,
                                            from_dt)
                    continue

            # If chunk contains end id (newest bound) -> start recording
            # .........................................................

            if not recording:
                # End id found
                if to_id in df.index:
                    logger.debug("%s - Poloniex - End id { %d } found", pair,
                                 to_id)
                    # Start recording
                    recording = True

                    df = df[df.index < to_id]
                    if df.empty:
                        logger.debug(
                            "%s - Poloniex - Nothing returned - aborting",
                            pair)
                        break
                else:
                    series_info = df_series_info(df)
                    logger.debug(
                        "%s - Poloniex - End id { %d } not found in { %s : %d, %s : %d }",
                        pair, to_id, dt_to_str(series_info['from_dt']),
                        series_info['from_id'],
                        dt_to_str(series_info['to_dt']), series_info['to_id'])

                    # If start reached -> terminate
                    if from_id is not None:
                        if any(df.index <= from_id):
                            logger.debug(
                                "%s - Poloniex - Start id { %d } reached - aborting",
                                pair, from_id)
                            break
                    if any(df['dt'] <= from_dt):
                        logger.debug(
                            "%s - Poloniex - Start date { %s } reached - aborting",
                            pair, dt_to_str(from_dt))
                        break

                    series_info = df_series_info(df)
                    window['from_dt'] = max(series_info['from_dt'] - max_delta,
                                            from_dt)
                    window['to_dt'] = series_info['from_dt']
                    continue

            if recording:

                # Synchronize with previous chunk by intersection of their ids
                # ............................................................

                if window['anchor_id'] is not None:
                    # To merge two dataframes, there must be an intersection of ids (anchor)
                    if any(df.index >= window['anchor_id']):
                        df = df[df.index < window['anchor_id']]
                        if df.empty:
                            logger.debug(
                                "%s - Poloniex - Nothing returned - aborting",
                                pair)
                            break
                    else:
                        logger.debug(
                            "%s - Poloniex - Anchor id { %d } is missing - aborting",
                            pair, window['anchor_id'])
                        break

                # If chunk contains start id or date (oldest record) -> finish recording
                # ....................................................................
                if from_id is not None:
                    if any(df.index <= from_id):
                        df = df[df.index > from_id]
                        if df.empty:
                            logger.debug(
                                "%s - Poloniex - Nothing returned - aborting",
                                pair)
                        else:
                            logger.debug("%s - Poloniex - Returned %s - %.2fs",
                                         pair,
                                         series_info_str(df_series_info(df)),
                                         timer() - t2)
                            logger.debug(
                                "%s - Poloniex - Start id { %d } reached - aborting",
                                pair, from_id)
                            if verify_series_df(df):
                                self.mongo_ts.insert_docs(pair, df_to_docs(df))
                                anything_recorded = True
                        break  # escape anyway
                # or at least the approx. date
                elif any(df['dt'] <= from_dt):
                    df = df[df['dt'] >= from_dt]
                    if df.empty:
                        logger.debug(
                            "%s - Poloniex - Nothing returned - aborting",
                            pair)
                    else:
                        logger.debug("%s - Poloniex - Returned %s - %.2fs",
                                     pair, series_info_str(df_series_info(df)),
                                     timer() - t2)
                        logger.debug(
                            "%s - Poloniex - Start date { %s } reached - aborting",
                            pair, dt_to_str(from_dt))
                        if verify_series_df(df):
                            self.mongo_ts.insert_docs(pair, df_to_docs(df))
                            anything_recorded = True
                    break

                # Record data
                # ...........

                # Drop rows with NaNs
                df.dropna(inplace=True)
                if df.empty:
                    logger.debug("%s - Poloniex - Nothing returned - aborting",
                                 pair)
                    break
                # Drop duplicates
                df.drop_duplicates(inplace=True)
                if df.empty:
                    logger.debug("%s - Poloniex - Nothing returned - aborting",
                                 pair)
                    break

                # If none of the start points reached, continue with execution using new window
                logger.debug("%s - Poloniex - Returned %s - %.2fs", pair,
                             series_info_str(df_series_info(df)),
                             timer() - t2)
                # Break on last stored df if the newest chunk is broken
                if not verify_series_df(df):
                    break
                self.mongo_ts.insert_docs(pair, df_to_docs(df))
                anything_recorded = True

                # Continue with next chunk
                # ........................

                series_info = df_series_info(df)
                window['from_dt'] = max(series_info['from_dt'] - max_delta,
                                        from_dt)
                window['to_dt'] = series_info['from_dt']
                window['anchor_id'] = series_info['from_id']

        # Verify collection after recordings
        # ..................................

        if anything_recorded:
            # Generally, series verification always succeeds, because we check each df and sync them properly
            if self.mongo_ts.verify_series(pair):
                logger.debug("%s - Collection - %s - %.2fs", pair,
                             series_info_str(self.mongo_ts.series_info(pair)),
                             timer() - t)
            else:
                raise Exception("%s - Consistency broken - fix required" %
                                pair)
        else:
            logger.debug("%s - Nothing returned - %.2fs", pair, timer() - t)

    def one(self, pair, from_dt=None, to_dt=None, drop=False):
        """
        Grabs data for a pair based on passed params as well as history stored in the underlying collection

        Possible values of from_dt and to_dt:
        * 'oldest' means the from_dt of the collection
        * 'newest' means the to_dt of the collection

        :param pair: pair of symbols
        :param from_dt: date of the start point or command from ['oldest', 'newest']
        :param to_dt: date of the end point or command from ['oldest', 'newest']
        :param drop: delete underlying collection before insert
        :return: None
        """
        t = timer()
        logger.info("%s - ...", pair)

        # Fill dates of collection's bounds
        if self.mongo_ts.col_non_empty(pair):
            series_info = self.mongo_ts.series_info(pair)

            if isinstance(from_dt, str):
                if from_dt == 'oldest':
                    from_dt = series_info['from_dt']
                elif from_dt == 'newest':
                    from_dt = series_info['to_dt']
                else:
                    raise Exception("Unknown command '%s'" % from_dt)
            if isinstance(to_dt, str):
                if to_dt == 'oldest':
                    to_dt = series_info['from_dt']
                elif to_dt == 'newest':
                    to_dt = series_info['to_dt']
                else:
                    raise Exception("Unknown command '%s'" % to_dt)

            # Overwrite means drop completely
            if drop:
                self.mongo_ts.drop_col(pair)

        # If nothing is passed, fetch the widest tail and/or head possible
        if from_dt is None:
            from_dt = begin()
        if to_dt is None:
            to_dt = now()

        if self.mongo_ts.col_non_empty(pair):
            series_info = self.mongo_ts.series_info(pair)

            # Period must be non-zero
            if from_dt >= to_dt:
                raise Exception(
                    "%s - Start date { %s } above end date { %s }" %
                    (pair, dt_to_str(from_dt), dt_to_str(to_dt)))

            if from_dt < series_info['from_dt']:
                logger.debug("%s - Grabbing tail", pair)
                # Collect history up to the oldest record
                self.grab(pair,
                          from_dt=from_dt,
                          to_dt=series_info['from_dt'],
                          to_id=series_info['from_id'])

            if to_dt > series_info['to_dt']:
                logger.debug("%s - Grabbing head", pair)
                # Collect history from the newest record
                self.grab(pair,
                          from_dt=series_info['to_dt'],
                          to_dt=to_dt,
                          from_id=series_info['to_id'])
        else:
            # An empty collection has no oldest or newest bounds
            if isinstance(from_dt, str) or isinstance(to_dt, str):
                raise Exception(
                    "%s - Collection empty - cannot auto-fill dates" % pair)

            logger.debug("%s - Grabbing full", pair)
            self.grab(pair, from_dt=from_dt, to_dt=to_dt)

        logger.info("%s - Finished - %.2fs", pair, timer() - t)

    def row(self, pairs, from_dt=None, to_dt=None, drop=False):
        """
        Grabs data for each pair in a row

        :param pairs: list of pairs or string command from ['db', 'ticker']
        :param from_dt: date of the start point or command from ['oldest', 'newest']
        :param to_dt: date of the end point or command from ['oldest', 'newest']
        :param drop: delete underlying collection before insert
        :return: None
        """
        if isinstance(pairs, str):
            # All pairs in db
            if pairs == 'db':
                pairs = self.mongo_ts.list_cols()
            # All pairs in ticker
            elif pairs == 'ticker':
                pairs = self.ticker_pairs()
            else:
                regex = re.compile(pairs)
                pairs = list(filter(regex.search, self.ticker_pairs()))
        if len(pairs) == 0:
            raise Exception("List of pairs must be non-empty")
        for pair in pairs:
            t = timer()
            self.one(pair, from_dt=from_dt, to_dt=to_dt, drop=drop)

    def ring(self, pairs, every=None):
        """
        Grabs the most recent data for a row of pairs on repeat

        Requires all pairs to be persistent in the database

        :param pairs: list of pairs or 'db' command
        :param every: pause between iterations
        :return: None
        """
        if isinstance(pairs, str):
            # All pairs in db
            if pairs == 'db':
                pairs = self.mongo_ts.list_cols()
            else:
                regex = re.compile(pairs)
                pairs = list(filter(regex.search, self.ticker_pairs()))
        if len(pairs) == 0:
            raise Exception("List of pairs must be non-empty")
        while True:
            # Collect head every time interval
            self.row(pairs, to_dt=now())
            if every is not None:
                sleep(every)
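
A minimal usage sketch for the Grabber above, assuming a pymongo-backed time-series wrapper exposing the mongo_ts interface used here (list_cols, series_info, insert_docs, verify_series, ...); MongoTS, the connection URI and the pair name are placeholders rather than names taken from the original project:

# Hypothetical driver code; MongoTS stands in for the project's actual pymongo wrapper.
mongo_ts = MongoTS('mongodb://localhost:27017/poloniex_trades')
grabber = Grabber(mongo_ts)

# Backfill the last 30 days of trade history for a single pair
grabber.one('USDT_BTC', from_dt=now() - timedelta(days=30), to_dt=now())

# Report how much of the available history has been collected so far
grabber.progress()

# Keep the newest trades for every stored pair up to date, pausing 5 minutes between passes
grabber.ring('db', every=300)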