def test_max_batch_size(self):
    """Verify batches are split so each POST body stays under the 500KB limit.

    With flush_at set far above the size-based cutoff, n_msgs + 2 events
    (just over one maximum-size batch) must be uploaded in exactly 2 POSTs.
    """
    q = Queue()
    consumer = Consumer(q, TEST_API_KEY, flush_at=100000, flush_interval=3)
    track = {
        'type': 'track',
        'event': 'python event',
        'distinct_id': 'distinct_id'
    }
    msg_size = len(json.dumps(track).encode())
    # number of messages in a maximum-size batch
    n_msgs = int(475000 / msg_size)

    def mock_post_fn(_, data, **kwargs):
        res = mock.Mock()
        res.status_code = 200
        self.assertTrue(
            len(data.encode()) < 500000,
            'batch size (%d) exceeds 500KB limit' % len(data.encode()))
        return res

    with mock.patch('posthog.request._session.post',
                    side_effect=mock_post_fn) as mock_post:
        consumer.start()
        for _ in range(0, n_msgs + 2):
            q.put(track)
        q.join()
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual, consistent with the other tests in this file.
        self.assertEqual(mock_post.call_count, 2)
def __init__(self, api_key=None, host=None, debug=False,
             max_queue_size=10000, send=True, on_error=None, flush_at=100,
             flush_interval=0.5, gzip=False, max_retries=3, sync_mode=False,
             timeout=15, thread=1):
    """Create a client backed by a bounded queue and consumer thread(s).

    :param api_key: project API key (required).
    :param host: API host override; None uses the library default.
    :param debug: enable DEBUG-level logging on the client logger.
    :param max_queue_size: capacity of the internal message queue.
    :param send: if False, consumers are created but never started.
    :param on_error: callback invoked by consumers on upload errors.
    :param flush_at: number of messages that triggers an upload.
    :param flush_interval: seconds between time-based flushes.
    :param gzip: compress request bodies.
    :param max_retries: upload retry count passed to each consumer.
    :param sync_mode: if True, no consumer threads are created at all.
    :param timeout: per-request timeout in seconds.
    :param thread: number of consumer threads to run.
    """
    require('api_key', api_key, string_types)

    self.queue = queue.Queue(max_queue_size)
    self.api_key = api_key
    self.on_error = on_error
    self.debug = debug
    self.send = send
    self.sync_mode = sync_mode
    self.host = host
    self.gzip = gzip
    self.timeout = timeout

    if debug:
        self.log.setLevel(logging.DEBUG)

    if sync_mode:
        self.consumers = None
    else:
        # On program exit, allow the consumer thread to exit cleanly.
        # This prevents exceptions and a messy shutdown when the
        # interpreter is destroyed before the daemon thread finishes
        # execution. However, it is *not* the same as flushing the queue!
        # To guarantee all messages have been delivered, you'll still need
        # to call flush().
        if send:
            atexit.register(self.join)

        # BUG FIX: the list was previously re-created inside the loop, so
        # with thread > 1 only the last consumer was retained — earlier
        # threads were started but never tracked (and never join()ed).
        self.consumers = []
        for _ in range(thread):
            consumer = Consumer(
                self.queue, api_key, host=host, on_error=on_error,
                flush_at=flush_at, flush_interval=flush_interval,
                gzip=gzip, retries=max_retries, timeout=timeout,
            )
            self.consumers.append(consumer)

            # if we've disabled sending, just don't start the consumer
            if send:
                consumer.start()
def test_multiple_uploads_per_interval(self):
    """Queueing flush_at * 2 events at once must yield two uploads.

    All events are enqueued up front, then we wait slightly longer than
    one flush interval; the consumer is expected to POST exactly twice.
    """
    pending = Queue()
    interval = 0.5
    batch_size = 10
    consumer = Consumer(pending, TEST_API_KEY,
                        flush_at=batch_size, flush_interval=interval)

    with mock.patch('posthog.consumer.batch_post') as mock_post:
        consumer.start()
        for idx in range(batch_size * 2):
            pending.put({
                'type': 'track',
                'event': 'python event %d' % idx,
                'distinct_id': 'distinct_id'
            })
        time.sleep(interval * 1.1)
        self.assertEqual(mock_post.call_count, 2)
def test_flush_interval(self):
    """Each pause longer than flush_interval must trigger its own upload.

    Three events are enqueued one at a time, sleeping slightly more than
    the flush interval after each; the consumer should POST three times.
    """
    pending = Queue()
    interval = 0.3
    consumer = Consumer(pending, TEST_API_KEY,
                        flush_at=10, flush_interval=interval)

    with mock.patch('posthog.consumer.batch_post') as mock_post:
        consumer.start()
        for idx in range(3):
            pending.put({
                'type': 'track',
                'event': 'python event %d' % idx,
                'distinct_id': 'distinct_id'
            })
            time.sleep(interval * 1.1)
        self.assertEqual(mock_post.call_count, 3)