def consume(topic, follow, fetch_size):
    topic = Topic(StrictRedis(), topic)

    start = time.time()
    cursor = 0
    n = 0
    try:
        while True:
            lower = cursor
            cursor, batch = topic.consume(cursor, fetch_size)
            logger.debug('Retrieved %s items from %s to %s.', len(batch), lower, cursor)
            if not batch:
                if not follow:
                    logger.debug('Retrieved empty batch (end of stream.)')
                    break
                else:
                    logger.debug('Retrieved empty batch.')
                    time.sleep(0.1)
            # Record numbering continues across batches: ``enumerate`` starts
            # from one past the last record printed.
            for n, (offset, item) in enumerate(batch, n + 1):
                print offset, item
    except KeyboardInterrupt:
        pass

    stop = time.time()
    logger.info(
        'Consumed %s records in %s seconds (%s records/second.)',
        n, stop - start, n / (stop - start),
    )
def test_consume_page_sizes(client):
    name = 'example'
    size = 10

    items = []
    generator = itertools.imap(
        lambda i: [i, str(i)],
        itertools.count(),
    )

    topic = Topic(client, name)
    topic.create(size)

    for offset, payload in itertools.islice(generator, size + 1):
        topic.produce((payload,))
        items.append([offset, payload])

    offset, batch = list(topic.consume(0, limit=size))
    assert items[:size] == batch

    offset, batch = list(topic.consume(offset, limit=size))
    assert items[size:] == batch
def test_consume_across_pages(client):
    name = 'example'
    size = 10

    items = []
    generator = itertools.imap(
        lambda i: [i, str(i)],
        itertools.count(),
    )

    topic = Topic(client, name)
    topic.create(size)

    for offset, payload in itertools.islice(generator, size + 1):
        topic.produce((payload,))
        items.append([offset, payload])

    # Check with batches crossing pages.
    offset, batch = topic.consume(5)
    assert batch == items[5:]
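# Both tests above take a ``client`` argument that is not defined in these
# snippets; under pytest it would typically be supplied by a fixture. A minimal
# sketch, assuming redis-py's StrictRedis and a disposable local database (the
# fixture body and database number are assumptions, not taken from the source):
import pytest
from redis import StrictRedis


@pytest.fixture
def client():
    # Connect to a scratch Redis database and clear it so each test starts
    # from an empty keyspace.
    client = StrictRedis(db=9)
    client.flushdb()
    return client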