Example #1
    def __init__(self, write_key=None, host=None, debug=False, max_queue_size=10000,
                 send=True, on_error=None, upload_size=100, upload_interval=0.5,
                 gzip=False, max_retries=10):
        require('write_key', write_key, string_types)

        self.queue = queue.Queue(max_queue_size)
        self.consumer = Consumer(self.queue, write_key, host=host, on_error=on_error,
                                 upload_size=upload_size, upload_interval=upload_interval,
                                 gzip=gzip, retries=max_retries)
        self.write_key = write_key
        self.on_error = on_error
        self.debug = debug
        self.send = send

        if debug:
            self.log.setLevel(logging.DEBUG)

        # if we've disabled sending, just don't start the consumer
        if send:
            # On program exit, allow the consumer thread to exit cleanly.
            # This prevents exceptions and a messy shutdown when the interpreter is
            # destroyed before the daemon thread finishes execution. However, it
            # is *not* the same as flushing the queue! To guarantee all messages
            # have been delivered, you'll still need to call flush().
            atexit.register(self.join)
            self.consumer.start()
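
A minimal usage sketch of the constructor above, assuming the surrounding class is the public Client and exposes track() and flush() (those names are assumptions here): the atexit hook only lets the daemon thread exit cleanly, so delivery of queued messages is only guaranteed by an explicit flush().

# Minimal usage sketch (Client, track() and flush() are assumed names).
client = Client(write_key='YOUR_WRITE_KEY', debug=True)
client.track('userId', 'python event')
# atexit.register(self.join) only gives the daemon thread a clean exit;
# to guarantee everything still in the queue has been delivered,
# flush explicitly before shutdown.
client.flush()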
Example #2
    def __init__(self,
                 write_key=None,
                 debug=False,
                 max_queue_size=10000,
                 send=True,
                 on_error=None):
        # We need a different queue type to have a client that works properly across separate processes.
        # This client is meant to be shared and used within a Celery environment
        # (although it should work elsewhere as well).
        # This will hopefully fix https://github.com/segmentio/analytics-python/issues/51,
        # although it is uncertain what exactly causes it.
        require('write_key', write_key, str)

        self.queue = JoinableQueue(max_queue_size)
        self.consumer = Consumer(self.queue, write_key, on_error=on_error)
        self.write_key = write_key
        self.on_error = on_error
        self.debug = debug
        self.send = send

        if debug:
            self.log.setLevel(logging.DEBUG)

        # if we've disabled sending, just don't start the consumer
        if send:
            self.consumer.start()
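
A standalone sketch of the multiprocessing.JoinableQueue handshake this variant depends on, independent of the analytics client (the worker/producer structure is illustrative only):

# Standalone JoinableQueue sketch (illustrative; not the client's code).
from multiprocessing import JoinableQueue, Process

def worker(q):
    while True:
        item = q.get()
        # ... process item ...
        q.task_done()   # lets q.join() unblock once every put() is accounted for

if __name__ == '__main__':
    q = JoinableQueue()
    Process(target=worker, args=(q,), daemon=True).start()
    for i in range(3):
        q.put(i)
    q.join()  # blocks until task_done() has been called for each queued item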
Example #3
 def test_upload(self):
     q = Queue()
     consumer = Consumer(q, 'testsecret')
     track = {'type': 'track', 'event': 'python event', 'userId': 'userId'}
     q.put(track)
     success = consumer.upload()
     self.assertTrue(success)
Example #4
    def test_max_batch_size(self):
        q = Queue()
        consumer = Consumer(q,
                            'testsecret',
                            upload_size=100000,
                            upload_interval=3)
        track = {'type': 'track', 'event': 'python event', 'userId': 'userId'}
        msg_size = len(json.dumps(track).encode())
        # number of messages in a maximum-size batch
        n_msgs = int(475000 / msg_size)

        def mock_post_fn(_, data, **kwargs):
            res = mock.Mock()
            res.status_code = 200
            self.assertTrue(
                len(data.encode()) < 500000,
                'batch size (%d) exceeds 500KB limit' % len(data.encode()))
            return res

        with mock.patch('analytics.request._session.post',
                        side_effect=mock_post_fn) as mock_post:
            consumer.start()
            for _ in range(0, n_msgs + 2):
                q.put(track)
            q.join()
            self.assertEqual(mock_post.call_count, 2)
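
The expected call count follows from simple arithmetic: n_msgs messages fill one batch under the 475000-byte budget, so enqueuing n_msgs + 2 items spills into a second, much smaller batch. A quick check of that arithmetic:

# Back-of-the-envelope check of the batching arithmetic used above.
import json

track = {'type': 'track', 'event': 'python event', 'userId': 'userId'}
msg_size = len(json.dumps(track).encode())   # a few dozen bytes per message
n_msgs = int(475000 / msg_size)              # messages that fit in one batch
total = n_msgs + 2                           # what the test enqueues
batches = -(-total // n_msgs)                # ceiling division -> 2 uploads
print(msg_size, n_msgs, batches)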
Example #5
    def _test_request_retry(self, expected_exception, exception_count):
        def mock_post(*args, **kwargs):
            mock_post.call_count += 1
            if mock_post.call_count <= exception_count:
                raise expected_exception

        mock_post.call_count = 0

        with mock.patch('analytics.consumer.post',
                        mock.Mock(side_effect=mock_post)):
            consumer = Consumer(None, 'testsecret')
            track = {
                'type': 'track',
                'event': 'python event',
                'userId': 'userId'
            }
            # request() should succeed if the number of exceptions raised is
            # less than or equal to the retries parameter.
            if exception_count <= consumer.retries:
                consumer.request([track])
            else:
                # if exceptions are raised more times than the retries parameter,
                # we expect the exception to propagate to the caller.
                try:
                    consumer.request([track])
                except type(expected_exception) as exc:
                    self.assertEqual(exc, expected_exception)
                else:
                    self.fail(
                        "request() should raise an exception if still failing after %d retries"
                        % consumer.retries)
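
The two branches above pin down a retry contract: request() succeeds as long as the number of failures does not exceed consumer.retries, and re-raises the exception otherwise. A generic sketch of that contract (illustrative; call_with_retries is not the library's code):

# Generic retry sketch matching the contract the test asserts (illustrative).
def call_with_retries(fn, retries):
    last_exc = None
    for _ in range(retries + 1):      # one initial attempt plus `retries` retries
        try:
            return fn()
        except Exception as exc:
            last_exc = exc
    raise last_exc                    # still failing after exhausting retries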
Example #6
 def test_dropping_oversize_msg(self):
     q = Queue()
     consumer = Consumer(q, '')
     oversize_msg = {'m': 'x' * MAX_MSG_SIZE}
     q.put(oversize_msg)
     next = consumer.next()
     self.assertEqual(next, [])
     self.assertTrue(q.empty())
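
The drop decision rests on the serialized size of a message relative to MAX_MSG_SIZE; a small sketch of such a check (the 32 KB value is an assumption used only for illustration):

# Sketch of an oversize check (MAX_MSG_SIZE value assumed for illustration).
import json

MAX_MSG_SIZE = 32 << 10   # 32 KB, illustrative

def is_oversize(msg):
    return len(json.dumps(msg).encode()) > MAX_MSG_SIZE

print(is_oversize({'m': 'x' * MAX_MSG_SIZE}))   # True -> such a message is dropped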
Example #7
 def test_next_limit(self):
     q = Queue()
     upload_size = 50
     consumer = Consumer(q, '', upload_size)
     for i in range(10000):
         q.put(i)
     next = consumer.next()
     self.assertEqual(next, list(range(upload_size)))
Example #8
 def test_next_limit(self):
     q = Queue()
     flush_at = 50
     consumer = Consumer(q, '', flush_at)
     for i in range(10000):
         q.put(i)
     next = consumer.next()
     self.assertEqual(next, list(range(flush_at)))
Example #9
 def test_request(cls):
     consumer = Consumer(None, 'testsecret')
     track = {
         'type': 'track',
         'event': 'python event',
         'userId': 'userId'
     }
     consumer.request([track])
Example #10
    def __init__(self,
                 write_key=None,
                 host=None,
                 debug=False,
                 max_queue_size=10000,
                 send=True,
                 on_error=None,
                 flush_at=100,
                 flush_interval=0.5,
                 gzip=False,
                 max_retries=3,
                 sync_mode=False,
                 timeout=15,
                 thread=1):
        require('write_key', write_key, string_types)

        self.queue = queue.Queue(max_queue_size)
        self.write_key = write_key
        self.on_error = on_error
        self.debug = debug
        self.send = send
        self.sync_mode = sync_mode
        self.host = host
        self.gzip = gzip
        self.timeout = timeout

        if debug:
            self.log.setLevel(logging.DEBUG)

        if sync_mode:
            self.consumers = None
        else:
            # On program exit, allow the consumer thread to exit cleanly.
            # This prevents exceptions and a messy shutdown when the
            # interpreter is destroyed before the daemon thread finishes
            # execution. However, it is *not* the same as flushing the queue!
            # To guarantee all messages have been delivered, you'll still need
            # to call flush().
            if send:
                atexit.register(self.join)
            # Build the consumer pool once; resetting the list inside the loop
            # would discard all but the last consumer.
            self.consumers = []
            for n in range(thread):
                consumer = Consumer(
                    self.queue,
                    write_key,
                    host=host,
                    on_error=on_error,
                    flush_at=flush_at,
                    flush_interval=flush_interval,
                    gzip=gzip,
                    retries=max_retries,
                    timeout=timeout,
                )
                self.consumers.append(consumer)

                # if we've disabled sending, just don't start the consumer
                if send:
                    consumer.start()
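
A construction sketch for this multi-consumer variant (the Client name and keyword values are assumptions for illustration):

# Usage sketch for the multi-consumer constructor above (illustrative values).
client = Client(
    write_key='YOUR_WRITE_KEY',
    flush_at=50,          # messages per upload batch
    flush_interval=1.0,   # seconds between time-based flushes
    thread=4,             # four Consumer threads draining the shared queue
    max_retries=3,
)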
Example #11
    def __init__(self, write_key=None, debug=False, max_queue_size=10000,
                 send=True, on_error=None):
        require('write_key', write_key, string_types)

        self.queue = queue.Queue(max_queue_size)
        self.consumer = Consumer(self.queue, write_key, on_error=on_error)
        self.write_key = write_key
        self.on_error = on_error
        self.debug = debug
        self.send = send

        if debug:
            self.log.setLevel(logging.DEBUG)

        # if we've disabled sending, just don't start the consumer
        if send:
            self.consumer.start()
Example #12
 def test_multiple_uploads_per_interval(self):
     # Put _flush_at*2_ items in the queue at once, then pause for
     # _flush_interval_. The consumer should upload 2 times.
     q = Queue()
     flush_interval = 0.5
     flush_at = 10
     consumer = Consumer(q, 'testsecret', flush_at=flush_at,
                         flush_interval=flush_interval)
     with mock.patch('analytics.consumer.post') as mock_post:
         consumer.start()
         for i in range(0, flush_at * 2):
             track = {
                 'type': 'track',
                 'event': 'python event %d' % i,
                 'userId': 'userId'
             }
             q.put(track)
         time.sleep(flush_interval * 1.1)
         self.assertEqual(mock_post.call_count, 2)
Example #13
 def test_flush_interval(self):
     # Put _n_ items in the queue, pausing a little bit more than
     # _flush_interval_ after each one.
     # The consumer should upload _n_ times.
     q = Queue()
     flush_interval = 0.3
     consumer = Consumer(q, 'testsecret', flush_at=10,
                         flush_interval=flush_interval)
     with mock.patch('analytics.consumer.post') as mock_post:
         consumer.start()
         for i in range(0, 3):
             track = {
                 'type': 'track',
                 'event': 'python event %d' % i,
                 'userId': 'userId'
             }
             q.put(track)
             time.sleep(flush_interval * 1.1)
         self.assertEqual(mock_post.call_count, 3)
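
The two timing tests above pin down the same policy: a consumer uploads when either flush_at messages have accumulated or flush_interval seconds have elapsed since the last upload. A small sketch of that trigger (illustrative pseudologic, not the library's implementation):

# "Flush on size or on time" trigger sketched from the two tests above.
import time

def should_flush(buffered_count, last_flush_ts, flush_at, flush_interval):
    too_many = buffered_count >= flush_at
    too_old = (time.monotonic() - last_flush_ts) >= flush_interval
    return too_many or too_old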
Example #14
 def test_proxies(cls):
     consumer = Consumer(None, 'testsecret', proxies='203.243.63.16:80')
     track = {'type': 'track', 'event': 'python event', 'userId': 'userId'}
     consumer.request([track])
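
For reference, requests itself takes proxies as a scheme-to-URL mapping; how the Consumer forwards its proxies argument to the HTTP layer is library-specific, so the snippet below is only a hedged illustration:

# Illustrative proxies mapping in the form requests expects
# (how the Consumer forwards this value is library-specific).
proxies = {
    'http': 'http://203.243.63.16:80',
    'https': 'http://203.243.63.16:80',
}
# e.g. requests.post(url, json=payload, proxies=proxies)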
Example #15
 def test_pause(self):
     consumer = Consumer(None, 'testsecret')
     consumer.pause()
     self.assertFalse(consumer.running)
Example #16
 def test_next(self):
     q = Queue()
     consumer = Consumer(q, '')
     q.put(1)
     next = consumer.next()
     self.assertEqual(next, [1])
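
The tests suggest next() reads up to a batch-size limit from the queue and returns whatever is available; an illustrative drain loop with the same observable behaviour (assumed behaviour, not the library's code):

# Illustrative next()-style drain loop (assumed behaviour based on the
# tests above; not the library's implementation).
import queue

def next_batch(q, flush_at=100):
    items = []
    while len(items) < flush_at:
        try:
            items.append(q.get_nowait())
        except queue.Empty:
            break
    return items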