Example #1
    def test_request_retry(self):
        # we should retry on general errors
        consumer = Consumer(None, TEST_API_KEY)
        self._test_request_retry(consumer, Exception('generic exception'), 2)

        # we should retry on server errors
        consumer = Consumer(None, TEST_API_KEY)
        self._test_request_retry(consumer,
                                 APIError(500, 'Internal Server Error'), 2)

        # we should retry on HTTP 429 errors
        consumer = Consumer(None, TEST_API_KEY)
        self._test_request_retry(consumer, APIError(429, 'Too Many Requests'),
                                 2)

        # we should NOT retry on other client errors
        consumer = Consumer(None, TEST_API_KEY)
        api_error = APIError(400, 'Client Errors')
        try:
            self._test_request_retry(consumer, api_error, 1)
        except APIError:
            pass
        else:
            self.fail('request() should not retry on client errors')

        # test for number of exceptions raised > retries value
        consumer = Consumer(None, TEST_API_KEY, retries=3)
        self._test_request_retry(consumer,
                                 APIError(500, 'Internal Server Error'), 3)
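
The retry policy this test pins down is small enough to state directly: retry on generic exceptions, on server errors (5xx) and on HTTP 429, but give up immediately on any other client error. The sketch below is purely illustrative; the APIError stand-in and the should_retry helper are not part of the library (the real consumer makes this decision inside request()), they simply restate what the assertions above expect.

class APIError(Exception):
    # stand-in for the library's APIError; assumed to carry the HTTP status
    def __init__(self, status, message):
        super(APIError, self).__init__(message)
        self.status = status

def should_retry(exc):
    # Sketch of the retry decision the test above encodes; not library code.
    if not isinstance(exc, APIError):
        return True           # retry generic/network errors
    if exc.status >= 500:
        return True           # retry server errors
    if exc.status == 429:
        return True           # retry rate limiting
    return False              # do not retry other client errors (e.g. 400)

assert should_retry(APIError(429, 'Too Many Requests'))
assert not should_retry(APIError(400, 'Client Errors'))
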
Example #2
    def test_max_batch_size(self):
        q = Queue()
        consumer = Consumer(q, TEST_API_KEY, flush_at=100000, flush_interval=3)
        track = {
            'type': 'track',
            'event': 'python event',
            'distinct_id': 'distinct_id'
        }
        msg_size = len(json.dumps(track).encode())
        # number of messages that fits in one batch while staying safely under the 500KB limit
        n_msgs = int(475000 / msg_size)

        def mock_post_fn(_, data, **kwargs):
            res = mock.Mock()
            res.status_code = 200
            self.assertTrue(
                len(data.encode()) < 500000,
                'batch size (%d) exceeds 500KB limit' % len(data.encode()))
            return res

        with mock.patch('posthog.request._session.post',
                        side_effect=mock_post_fn) as mock_post:
            consumer.start()
            for _ in range(0, n_msgs + 2):
                q.put(track)
            q.join()
            self.assertEqual(mock_post.call_count, 2)
Example #3
 def test_request(self):
     consumer = Consumer(None, TEST_API_KEY)
     track = {
         'type': 'track',
         'event': 'python event',
         'distinct_id': 'distinct_id'
     }
     consumer.request([track])
Example #4
 def test_dropping_oversize_msg(self):
     q = Queue()
     consumer = Consumer(q, '')
     oversize_msg = {'m': 'x' * MAX_MSG_SIZE}
     q.put(oversize_msg)
     next = consumer.next()
     self.assertEqual(next, [])
     self.assertTrue(q.empty())
Example #5
 def test_next_limit(self):
     q = Queue()
     flush_at = 50
     consumer = Consumer(q, '', flush_at)
     for i in range(10000):
         q.put(i)
     next = consumer.next()
     self.assertEqual(next, list(range(flush_at)))
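
Examples #4 and #5 together spell out the contract of Consumer.next(): drain at most flush_at items per call, and silently drop any message whose serialized size exceeds MAX_MSG_SIZE while still letting queue.join() complete. The class below is a minimal sketch that satisfies both tests; it is not PostHog's implementation (the real consumer also honours flush_interval, as the flush tests further down show), and the MAX_MSG_SIZE value is assumed for the sketch.

import json
from queue import Empty

MAX_MSG_SIZE = 32 << 10   # assumed per-message ceiling for this sketch


class SketchConsumer(object):
    def __init__(self, queue, api_key, flush_at=100):
        self.queue = queue
        self.api_key = api_key
        self.flush_at = flush_at

    def next(self):
        # Drain at most flush_at items; drop anything whose JSON encoding
        # exceeds MAX_MSG_SIZE, but still mark it done so queue.join()
        # can return and the queue ends up empty.
        items = []
        while len(items) < self.flush_at:
            try:
                item = self.queue.get(block=False)
            except Empty:
                break
            if len(json.dumps(item).encode()) > MAX_MSG_SIZE:
                self.queue.task_done()
                continue
            items.append(item)
        return items

With flush_at=50 and ten thousand queued integers this returns list(range(50)); a single oversize message yields an empty batch and leaves the queue empty, matching both assertions.
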
Example #6
 def test_request(self):
     consumer = Consumer(None, TEST_API_KEY)
     track = {
         "type": "track",
         "event": "python event",
         "distinct_id": "distinct_id"
     }
     consumer.request([track])
Example #7
 def test_request(self):
     consumer = Consumer(None, 'testsecret')
     track = {
         'type': 'track',
         'event': 'python event',
         'distinct_id': 'distinct_id'
     }
     consumer.request([track])
Example #8
    def __init__(self,
                 api_key=None,
                 host=None,
                 debug=False,
                 max_queue_size=10000,
                 send=True,
                 on_error=None,
                 flush_at=100,
                 flush_interval=0.5,
                 gzip=False,
                 max_retries=3,
                 sync_mode=False,
                 timeout=15,
                 thread=1):
        require('api_key', api_key, string_types)

        self.queue = queue.Queue(max_queue_size)
        self.api_key = api_key
        self.on_error = on_error
        self.debug = debug
        self.send = send
        self.sync_mode = sync_mode
        self.host = host
        self.gzip = gzip
        self.timeout = timeout

        if debug:
            self.log.setLevel(logging.DEBUG)

        if sync_mode:
            self.consumers = None
        else:
            # On program exit, allow the consumer thread to exit cleanly.
            # This prevents exceptions and a messy shutdown when the
            # interpreter is destroyed before the daemon thread finishes
            # execution. However, it is *not* the same as flushing the queue!
            # To guarantee all messages have been delivered, you'll still need
            # to call flush().
            if send:
                atexit.register(self.join)
            self.consumers = []
            for n in range(thread):
                consumer = Consumer(
                    self.queue,
                    api_key,
                    host=host,
                    on_error=on_error,
                    flush_at=flush_at,
                    flush_interval=flush_interval,
                    gzip=gzip,
                    retries=max_retries,
                    timeout=timeout,
                )
                self.consumers.append(consumer)

                # if we've disabled sending, just don't start the consumer
                if send:
                    consumer.start()
Example #9
 def test_upload(self):
     q = Queue()
     consumer = Consumer(q, TEST_API_KEY)
     track = {
         'type': 'track',
         'event': 'python event',
         'distinct_id': 'distinct_id'
     }
     q.put(track)
     success = consumer.upload()
     self.assertTrue(success)
Example #10
 def test_upload(self):
     q = Queue()
     consumer = Consumer(q, TEST_API_KEY)
     track = {
         "type": "track",
         "event": "python event",
         "distinct_id": "distinct_id"
     }
     q.put(track)
     success = consumer.upload()
     self.assertTrue(success)
Example #11
 def test_multiple_uploads_per_interval(self):
     # Put _flush_at*2_ items in the queue at once, then pause for
     # _flush_interval_. The consumer should upload 2 times.
     q = Queue()
     flush_interval = 0.5
     flush_at = 10
     consumer = Consumer(q,
                         TEST_API_KEY,
                         flush_at=flush_at,
                         flush_interval=flush_interval)
     with mock.patch('posthog.consumer.batch_post') as mock_post:
         consumer.start()
         for i in range(0, flush_at * 2):
             track = {
                 'type': 'track',
                 'event': 'python event %d' % i,
                 'distinct_id': 'distinct_id'
             }
             q.put(track)
         time.sleep(flush_interval * 1.1)
         self.assertEqual(mock_post.call_count, 2)
Example #12
 def test_flush_interval(self):
     # Put _n_ items in the queue, pausing a little bit more than
     # _flush_interval_ after each one.
     # The consumer should upload _n_ times.
     q = Queue()
     flush_interval = 0.3
     consumer = Consumer(q,
                         TEST_API_KEY,
                         flush_at=10,
                         flush_interval=flush_interval)
     with mock.patch('posthog.consumer.batch_post') as mock_post:
         consumer.start()
         for i in range(0, 3):
             track = {
                 'type': 'track',
                 'event': 'python event %d' % i,
                 'distinct_id': 'distinct_id'
             }
             q.put(track)
             time.sleep(flush_interval * 1.1)
         self.assertEqual(mock_post.call_count, 3)
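
Examples #11 and #12 only hold if the consumer thread keeps looping: gather a batch of at most flush_at items, wait no longer than flush_interval for it to fill, post it, and immediately go back for more. The loop below is a self-contained sketch of that cadence rather than PostHog's code; post() stands in for the patched batch_post, the timing is approximate, and pause() mirrors what Examples #13 and #15 check.

import time
from queue import Empty


class LoopSketch(object):
    def __init__(self, queue, flush_at=10, flush_interval=0.5):
        self.queue = queue
        self.flush_at = flush_at
        self.flush_interval = flush_interval
        self.running = True

    def run(self):
        # The worker thread's body: keep uploading until pause() clears running.
        while self.running:
            self.upload()

    def pause(self):
        self.running = False

    def upload(self):
        # Gather up to flush_at items, waiting at most flush_interval in
        # total. A burst of 2 * flush_at items therefore produces two
        # back-to-back posts, while a slow trickle produces roughly one
        # post per interval.
        batch, deadline = [], time.monotonic() + self.flush_interval
        while len(batch) < self.flush_at:
            remaining = deadline - time.monotonic()
            if remaining <= 0:
                break
            try:
                batch.append(self.queue.get(timeout=remaining))
            except Empty:
                break
        if batch:
            self.post(batch)
            for _ in batch:
                self.queue.task_done()

    def post(self, batch):
        pass   # placeholder for the real HTTP call
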
Example #13
 def test_pause(self):
     consumer = Consumer(None, TEST_API_KEY)
     consumer.pause()
     self.assertFalse(consumer.running)
Example #14
 def test_next(self):
     q = Queue()
     consumer = Consumer(q, '')
     q.put(1)
     next = consumer.next()
     self.assertEqual(next, [1])
Example #15
 def test_pause(self):
     consumer = Consumer(None, 'testsecret')
     consumer.pause()
     self.assertFalse(consumer.running)