Пример #1
0
def test_wait():
    """Thread-based producer/consumer smoke test for TopicQueue.wait().

    A producer thread feeds 10 messages with a small delay between puts and
    then shuts the queue down; a consumer thread blocks on wait() and drains
    in batches. The test passes when the consumer received every message in
    order.
    """
    def producer():
        # Feed messages one at a time, then signal end-of-stream.
        for msg in to_produce:
            queue.put(msg)
            time.sleep(sleep_timeout)
        queue.shutdown()

    def consumer():
        # Keep draining until shutdown was requested AND the queue is empty
        # (qsize()[0] is the pending-message count).
        while not queue.to_shutdown or queue.qsize()[0]:
            queue.wait()
            consumed["data"] += list(queue.iter_get(100))

    sleep_timeout = 0.1
    join_timeout = 10
    to_produce = ["%04d" % i for i in range(10)]
    consumed = {"data": []}
    queue = TopicQueue("test_wait")
    p_thread = Thread(target=producer)
    c_thread = Thread(target=consumer)
    # FIX: Thread.setDaemon() is deprecated since Python 3.10 and removed
    # in 3.13 — assign the `daemon` attribute directly instead.
    p_thread.daemon = True
    c_thread.daemon = True
    c_thread.start()
    p_thread.start()
    # Bounded joins so a deadlocked queue cannot hang the test suite forever.
    p_thread.join(join_timeout)
    c_thread.join(join_timeout)
    assert to_produce == consumed["data"]
Пример #2
0
def test_wait_throttling(sleep_timeout):
    """Check that queue.wait(rate=...) throttles the tornado consumer.

    The producer emits `n_writes` messages spaced `sleep_timeout` apart; the
    consumer counts its wakeups. All messages must arrive in order, and the
    number of reads must stay within the rate limit (plus slack of 2).
    """
    @tornado.gen.coroutine
    def producer():
        for item in to_produce:
            queue.put(item)
            yield tornado.gen.sleep(sleep_timeout)
        queue.shutdown()

    @tornado.gen.coroutine
    def consumer():
        # Drain until shutdown is flagged and nothing is left pending.
        while not queue.to_shutdown or queue.qsize()[0]:
            yield queue.wait(rate=rate)
            consumed["data"].extend(queue.iter_get(100))
            consumed["n_reads"] += 1
        io_loop.stop()

    rate = 4
    n_writes = 10
    dt = n_writes * sleep_timeout  # Total production wall time
    to_produce = [f"{i:04d}" for i in range(n_writes)]
    consumed = {"data": [], "n_reads": 0}
    queue = TopicQueue("test_wait")
    io_loop = tornado.ioloop.IOLoop()
    for coro in (producer, consumer):
        io_loop.add_callback(coro)
    io_loop.start()
    assert consumed["data"] == to_produce
    # At most `rate` wakeups per second over `dt` seconds, with slack of 2.
    assert consumed["n_reads"] <= dt * rate + 2
Пример #3
0
def test_wait_throttling(sleep_timeout):
    """Check that queue.wait(rate=...) throttles the asyncio consumer.

    The producer emits `n_writes` messages spaced `sleep_timeout` apart; the
    consumer counts its wakeups. All messages must arrive in order, and the
    wakeup count must stay within the configured rate (plus slack of 2).
    """
    async def producer():
        for item in to_produce:
            queue.put(item)
            await asyncio.sleep(sleep_timeout)
        queue.shutdown()

    async def consumer():
        # Drain until shutdown is flagged and nothing is left pending.
        while not queue.to_shutdown or queue.qsize()[0]:
            await queue.wait(rate=rate)
            consumed["data"].extend(queue.iter_get(100))
            consumed["n_reads"] += 1

    async def test():
        await asyncio.gather(producer(), consumer())

    rate = 4
    n_writes = 10
    dt = n_writes * sleep_timeout  # Total production wall time
    to_produce = [f"{i:04d}" for i in range(n_writes)]
    consumed = {"data": [], "n_reads": 0}
    queue = TopicQueue("test_wait")
    with IOLoopContext() as loop:
        loop.run_until_complete(test())
    assert consumed["data"] == to_produce
    # At most `rate` wakeups per second over `dt` seconds, with slack of 2.
    assert consumed["n_reads"] <= dt * rate + 2
Пример #4
0
def test_metrics():
    """Verify TopicQueue counter metrics across put / get / requeue cycles.

    The pre-existing "other" key must never be modified by apply_metrics.
    """
    queue = TopicQueue("test_metrics")
    k = ("topic", queue.topic)
    metrics = {"other": 1}

    def check(put, put_size, get, get_size, requeued, requeued_size):
        # Refresh the snapshot, then assert the full set of counters.
        queue.apply_metrics(metrics)
        assert metrics.get("other") == 1  # Untouched
        assert metrics.get(("nsq_msg_put", k)) == put
        assert metrics.get(("nsq_msg_put_size", k)) == put_size
        assert metrics.get(("nsq_msg_get", k)) == get
        assert metrics.get(("nsq_msg_get_size", k)) == get_size
        assert metrics.get(("nsq_msg_requeued", k)) == requeued
        assert metrics.get(("nsq_msg_requeued_size", k)) == requeued_size

    # Initial metrics are zeroed
    check(0, 0, 0, 0, 0, 0)
    # Put 100 messages of 10 octets each
    for i in range(100):
        queue.put("%10d" % i)
    check(100, 1000, 0, 0, 0, 0)
    # Get 50 messages of 10 octets each
    msgs = list(queue.iter_get(50))
    check(100, 1000, 50, 500, 0, 0)
    # Return 10 messages back to queue
    queue.return_messages(msgs[:10])
    check(100, 1000, 50, 500, 10, 100)
    # Get 60 messages (50 left + 10 returned)
    list(queue.iter_get(60))
    check(100, 1000, 110, 1100, 10, 100)
Пример #5
0
 def get_topic_queue(self, topic: str) -> TopicQueue:
     """Return the queue for *topic*, creating and registering it on first use.

     Uses double-checked locking: a lock-free fast path for the common case,
     then a re-check under the lock so concurrent callers share one queue.
     """
     existing = self.topic_queues.get(topic)
     if existing:
         return existing
     # Create when necessary
     with self.topic_queue_lock:
         # Re-check: another task may have created it while we waited.
         existing = self.topic_queues.get(topic)
         if existing:
             return existing
         queue = TopicQueue(topic)
         self.topic_queues[topic] = queue
         # Launch the publisher guard for the freshly created queue.
         self.loop.create_task(self.nsq_publisher_guard(queue))
         return queue
Пример #6
0
 def get_topic_queue(self, topic):
     """Return the queue for *topic*, creating and registering it on first use.

     Uses double-checked locking: a lock-free fast path for the common case,
     then a re-check under the lock so concurrent callers share one queue.
     """
     existing = self.topic_queues.get(topic)
     if existing:
         return existing
     # Create when necessary
     with self.topic_queue_lock:
         # Re-check: another task may have created it while we waited.
         existing = self.topic_queues.get(topic)
         if existing:
             return existing
         queue = TopicQueue(topic, io_loop=self.ioloop)
         self.topic_queues[topic] = queue
         # Schedule the publisher guard for the freshly created queue.
         self.ioloop.add_callback(self.nsq_publisher_guard, queue)
         return queue
Пример #7
0
 def get_topic_queue(self, topic):
     """Return the queue for *topic*, creating and registering it on first use.

     Uses double-checked locking: a lock-free fast path for the common case,
     then a re-check under the lock so concurrent callers share one queue.
     Also registers a per-topic shutdown lock and schedules the publisher.
     """
     existing = self.topic_queues.get(topic)
     if existing:
         return existing
     # Create when necessary
     with self.topic_queue_lock:
         # Re-check: another task may have created it while we waited.
         existing = self.topic_queues.get(topic)
         if existing:
             return existing
         queue = TopicQueue(topic)
         self.topic_queues[topic] = queue
         self.topic_shutdown[topic] = tornado.locks.Lock()
         self.ioloop.add_callback(self.nsq_publisher, topic)
         return queue
Пример #8
0
def test_qsize():
    """qsize() must track (message count, total octets) through put and get."""
    queue = TopicQueue("test_qsize")
    assert queue.qsize() == (0, 0)  # Starts empty
    count = 0
    octets = 0
    # Fill with messages of growing length, checking the size after each put.
    for i in range(10):
        msg = "x" * (i + 1)
        octets += len(msg)
        count += 1
        queue.put(msg)
        assert queue.qsize() == (count, octets)
    # Drain one message at a time, checking the size after each get.
    while count > 0:
        msg = list(queue.iter_get())[0]
        count -= 1
        octets -= len(msg)
        assert queue.qsize() == (count, octets)
    assert queue.qsize() == (0, 0)  # Empty again
Пример #9
0
def test_wait():
    """Asyncio producer/consumer smoke test for TopicQueue.wait().

    The producer feeds 10 messages with a small delay, then shuts down; the
    consumer awaits wait(1) and drains in batches. Every message must arrive
    in order.
    """
    async def producer():
        for item in to_produce:
            queue.put(item)
            await asyncio.sleep(sleep_timeout)
        queue.shutdown()

    async def consumer():
        # Drain until shutdown is flagged and nothing is left pending.
        while not queue.to_shutdown or queue.qsize()[0]:
            await queue.wait(1)
            consumed["data"].extend(queue.iter_get(100))

    async def test():
        await asyncio.gather(producer(), consumer())

    sleep_timeout = 0.1
    to_produce = [f"{i:04d}" for i in range(10)]
    consumed = {"data": []}
    queue = TopicQueue("test_wait")
    with IOLoopContext() as loop:
        loop.run_until_complete(test())
    assert consumed["data"] == to_produce
Пример #10
0
def test_wait():
    """Tornado producer/consumer smoke test for TopicQueue.wait().

    The producer feeds 10 messages with a small delay, then shuts down; the
    consumer yields on wait(1), drains in batches, and stops the loop when
    done. Every message must arrive in order.
    """
    @tornado.gen.coroutine
    def producer():
        for item in to_produce:
            queue.put(item)
            yield tornado.gen.sleep(sleep_timeout)
        queue.shutdown()

    @tornado.gen.coroutine
    def consumer():
        # Drain until shutdown is flagged and nothing is left pending.
        while not queue.to_shutdown or queue.qsize()[0]:
            yield queue.wait(1)
            consumed["data"].extend(queue.iter_get(100))
        io_loop.stop()

    sleep_timeout = 0.1
    to_produce = [f"{i:04d}" for i in range(10)]
    consumed = {"data": []}
    queue = TopicQueue("test_wait")
    io_loop = tornado.ioloop.IOLoop()
    for coro in (producer, consumer):
        io_loop.add_callback(coro)
    io_loop.start()
    assert consumed["data"] == to_produce
Пример #11
0
def test_shutdown():
    """After shutdown the queue rejects writes but still allows draining.

    shutdown(), put() and return_messages() must all raise RuntimeError on a
    queue that is already shut down, while previously queued messages remain
    readable.
    """
    queue = TopicQueue("test_shutdown")
    to_produce = ["%04d" % i for i in range(10)]
    # Fill queue with 10 items
    for item in to_produce:
        queue.put(item)
    # Consume the first half
    consumed = list(queue.iter_get(5))
    # Shutdown the queue
    queue.shutdown()
    # A second shutdown must fail
    with pytest.raises(RuntimeError):
        queue.shutdown()
    # Writing after shutdown must fail
    with pytest.raises(RuntimeError):
        queue.put("9999")
    # Returning messages after shutdown must fail
    with pytest.raises(RuntimeError):
        queue.return_messages(["9999"])
    # Draining the remainder is still allowed
    consumed.extend(queue.iter_get(10))
    # Nothing was lost and order is preserved
    assert consumed == to_produce
Пример #12
0
def test_get_limit(input, get_kwargs, expected):
    """Parametrized: iter_get(**get_kwargs) must return exactly `expected`.

    NOTE: the `input` parameter name shadows the builtin, but it is a pytest
    parametrize fixture name and so part of the external interface.
    """
    queue = TopicQueue("test_get_limit")
    for message in input:
        queue.put(message)
    assert list(queue.iter_get(**get_kwargs)) == expected
Пример #13
0
def test_put_order(put_kwargs, expected):
    """Parametrized: messages put with the given kwargs come out as `expected`."""
    queue = TopicQueue("test_put_order")
    for kwargs in put_kwargs:
        queue.put(**kwargs)
    # Drain exactly as many messages as were put
    assert list(queue.iter_get(len(put_kwargs))) == expected
Пример #14
0
def test_is_empty(items, expected):
    """Parametrized: is_empty() must report `expected` after putting `items`."""
    queue = TopicQueue("test_is_empty")
    for message in items:
        queue.put(message)
    assert queue.is_empty() is expected