def test_exclusive_consume(self):
    """Testing exclusive basic_consume"""
    conn = Connection(self.amqp_url)
    first_channel = conn.allocate_channel()
    first_channel.queue_declare('exclusive-consume')
    # Grab the queue exclusively on the first channel.
    first_channel.basic_consume(queue='exclusive-consume', exclusive=True)
    # A second channel must now be refused access to the same queue.
    second_channel = conn.allocate_channel()
    with self.assertRaises(AccessRefused):
        second_channel.basic_consume(queue='exclusive-consume')
def tearDown(self):
    """Best-effort cleanup: delete every queue and exchange a test declared."""
    conn = Connection(self.amqp_url)
    conn.connect()
    ch = conn.allocate_channel()
    for queue in self.declared_queues:
        try:
            ch.queue_delete(queue=queue)
        except Exception:
            # A failed delete closes the channel (AMQP channel-level error);
            # allocate a fresh one and keep cleaning up.
            ch = conn.allocate_channel()
    for exchange in self.declared_exchanges:
        try:
            ch.exchange_delete(exchange=exchange)
        except Exception:
            ch = conn.allocate_channel()
    conn.close()
def test_bug3_loop(self):
    """Regression test: closing the connection from inside a consume callback
    (after the expected number of messages) must let conn.join() return."""
    seen = [0]  # mutable cell so the nested callback can count deliveries
    conn = Connection(self.amqp_url)
    qname = 'test-bug3-%s' % random.random()

    def on_message(msg):
        seen[0] += 1
        msg.ack()
        self.assertEqual(msg.body, 'x')
        if seen[0] == 3:
            # Close from within the callback — the behavior under test.
            conn.close()

    channel = conn.allocate_channel()
    channel.queue_declare(queue=qname)
    for _ in range(3):
        channel.basic_publish(routing_key=qname, body='x')
    channel.basic_consume(queue=qname, callback=on_message)
    conn.join()
    self._epilogue(qname, 0)
def test_greenlet_no_interleaving(self):
    "Messages published at the same time and channel are not garbled."
    conn = Connection(self.amqp_url)
    # Two large payloads so their frames would interleave if the channel
    # did not serialize concurrent publishes.
    messages = ['asdf' * 10000000, 'ghjk' * 10000000]

    def publish_message(msg):
        channel.basic_publish(routing_key='large-messages', body=msg)

    channel = conn.allocate_channel()
    channel.queue_declare(queue='large-messages')
    q = channel.basic_consume(queue='large-messages')

    pool = Pool()
    pool.map_async(publish_message, messages)

    for _ in range(2):
        received = q.get()
        # Each body must match one of the originals exactly — garbling
        # would make remove() fail.
        try:
            messages.remove(received.body)
        except ValueError:
            raise AssertionError("Received unknown message")
        received.ack()
def test_channel_interleaving(self):
    """Messages published on different channels are interleaved.

    We test that if we try to publish a small message while a large
    message is uploading on a different channel, the small message
    will be received first.
    """
    conn = Connection(self.amqp_url)
    # Expected receive order: the small message overtakes the large one.
    expected = ['ghjk', 'asdf' * 10000000]

    def publish_message(body):
        # Each publisher gets its own channel so the frames can interleave.
        with conn.channel() as ch:
            ch.basic_publish(routing_key='large-messages', body=body)

    channel = conn.allocate_channel()
    channel.queue_declare(queue='large-messages')
    q = channel.basic_consume(queue='large-messages')

    # Start the large upload first, then the small publish shortly after.
    gevent.spawn(publish_message, expected[1])
    gevent.spawn_later(0.1, publish_message, expected[0])

    for body in expected:
        msg = q.get()
        assert msg.body == body
        msg.ack()
def test_parallel_queue_declare(self):
    """Concurrent queue_declare calls on one channel each get their own Ok."""
    conn = Connection(self.amqp_url)
    conn.connect()
    channel = conn.allocate_channel()

    def declare(name):
        return channel.queue_declare(queue=name)

    # NOTE(review): `queues` is a free variable not defined in this method —
    # presumably a module-level fixture list; confirm it is in scope.
    group = Group()
    results = group.map(declare, queues)
    assert len(results) == len(queues)
    assert all(isinstance(r, FrameQueueDeclareOk) for r in results)
def test_bug3_loop(self):
    # NOTE(review): this duplicates an earlier test_bug3_loop definition in
    # this file; if both live in the same class, this later def shadows the
    # earlier one and only this one runs. Consider renaming or deleting one.
    count = [0]
    conn = Connection(self.amqp_url)
    qname = 'test-bug3-%s' % random.random()

    def handle(msg):
        count[0] += 1
        msg.ack()
        self.assertEqual(msg.body, 'x')
        if count[0] == 3:
            conn.close()

    channel = conn.allocate_channel()
    channel.queue_declare(queue=qname)
    for _ in range(3):
        channel.basic_publish(routing_key=qname, body='x')
    channel.basic_consume(queue=qname, callback=handle)
    conn.join()
    self._epilogue(qname, 0)