def listen_events(cls, routing_key, exchange=BUS_EXCHANGE_NAME):
    """Declare and purge the class bus queue bound to *routing_key*.

    The freshly purged queue is stored on the class as ``cls.bus_queue``
    so subsequent assertions start from an empty queue.
    """
    bus_exchange = Exchange(exchange, type=BUS_EXCHANGE_TYPE)
    with Connection(BUS_URL) as connection:
        listener_queue = Queue(
            BUS_QUEUE_NAME,
            exchange=bus_exchange,
            routing_key=routing_key,
            channel=connection.channel(),
        )
        listener_queue.declare()
        listener_queue.purge()
        cls.bus_queue = listener_queue
def add_binding(queue_name, routing_key, exchange_name=None):
    """Declare and purge *queue_name* bound to *routing_key*, then register it.

    When *exchange_name* is falsy, the exchange name from the bus
    configuration is used instead. The queue is remembered in ``_queues``
    under its name.
    """
    if not exchange_name:
        exchange_name = world.config['bus']['exchange_name']
    bus_exchange = Exchange(exchange_name, type=world.config['bus']['exchange_type'])
    with Connection(world.config['bus_url']) as connection:
        bound_queue = Queue(
            queue_name,
            exchange=bus_exchange,
            routing_key=routing_key,
            channel=connection.channel(),
        )
        bound_queue.declare()
        bound_queue.purge()
        _queues[queue_name] = bound_queue
def listen_events(self, routing_key, exchange=BUS_EXCHANGE_XIVO):
    """Bind, declare and purge the bus queue for *routing_key*.

    The purged queue is kept on the instance as ``self.bus_queue``.
    """
    with Connection(self._url) as connection:
        channel = connection.channel()
        listener_queue = Queue(
            BUS_QUEUE_NAME,
            exchange=exchange,
            routing_key=routing_key,
            channel=channel,
        )
        listener_queue.declare()
        listener_queue.purge()
        self.bus_queue = listener_queue
def accumulator(self, routing_key, exchange=None):
    """Return a BusMessageAccumulator reading from a fresh, purged queue.

    The queue gets a unique ``test-<uuid>`` name and is bound to
    *routing_key* on *exchange* (the default exchange when none is given).
    """
    target_exchange = exchange or self._default_exchange
    unique_name = 'test-{}'.format(str(uuid.uuid4()))
    with Connection(self._url) as connection:
        fresh_queue = Queue(
            name=unique_name,
            exchange=target_exchange,
            routing_key=routing_key,
            channel=connection.channel(),
        )
        fresh_queue.declare()
        fresh_queue.purge()
        result = BusMessageAccumulator(self._url, fresh_queue)
    return result
def accumulator(self, routing_key=None, exchange=None, headers=None):
    """Return a BusMessageAccumulator reading from a fresh, purged queue.

    The queue gets a unique ``test-<uuid>`` name and is bound either by
    *routing_key* or by *headers* (header-based binding); exactly one of
    the two must be provided.

    Raises:
        ValueError: when neither *routing_key* nor *headers* is given.
    """
    exchange = exchange or self._default_exchange
    queue_name = 'test-{}'.format(str(uuid.uuid4()))
    with Connection(self._url) as conn:
        if routing_key:
            queue = Queue(
                name=queue_name,
                exchange=exchange,
                routing_key=routing_key,
                channel=conn.channel(),
            )
        elif headers:
            queue = Queue(
                name=queue_name,
                exchange=exchange,
                bindings=[binding(exchange=exchange, arguments=headers)],
                channel=conn.channel(),
            )
        else:
            # ValueError pinpoints the bad-argument condition; as a subclass
            # of Exception it stays compatible with existing broad handlers.
            raise ValueError('Need a routing key or a header')
        queue.declare()
        queue.purge()
        accumulator = BusMessageAccumulator(self._url, queue)
    return accumulator
def test_purge(self):
    """purge() must issue a queue_purge on the queue's channel."""
    bound = Queue('foo', self.exchange, 'foo', channel=get_conn().channel())
    bound.purge()
    assert 'queue_purge' in bound.channel
def test_purge(self):
    """purge() must record a queue_purge call on the queue's channel."""
    bound = Queue('foo', self.exchange, 'foo', channel=get_conn().channel())
    bound.purge()
    self.assertIn('queue_purge', bound.channel)
def queue_purge(self, queue_name):
    """Purge all pending messages from *queue_name*.

    Returns the broker's reply from the purge (the number of messages
    removed, as reported by kombu's ``Queue.purge``).
    """
    # Bind a Queue object to a fresh channel just long enough to purge it.
    queue = Queue(queue_name, channel=self._channel())
    # print() call form: works on both Python 2 and 3 (the original used a
    # Python-2-only print statement).
    print("Purging queue %s" % queue)
    return queue.purge()
class DistLock(object):
    """A distributed lock coordinated over RabbitMQ with state in Redis.

    Requests arrive on ``lock_requests_q``; candidates waiting for the lock
    are parked on ``acquire_requests_q``. The current owner's id is kept in
    Redis under ``current_lock_owner_<name>``, and the consumer loop runs
    while ``is_consuming_<name>`` is set.
    """

    def __init__(self, name):
        self.name = name
        self.exchange = Exchange(self.name)
        self.routing_key = 'lock_routing_' + self.name
        self.acquire_requests_routing_key = 'acquire_routing_' + self.name
        # Queue receiving lock/release requests from clients.
        self.lock_requests_q = Queue(
            name='lock_requests_q_' + self.name,
            exchange=self.exchange,
            routing_key=self.routing_key,
            channel=Connection(),
        )
        # Queue of candidates waiting to be granted the lock.
        self.acquire_requests_q = Queue(
            name='acquire_requests_q_' + self.name,
            exchange=self.exchange,
            routing_key=self.acquire_requests_routing_key,
            channel=Connection(),
        )
        self._lock_manager = Consumer(
            Connection(),
            queues=[self.lock_requests_q],
            on_message=self._on_message,
        )
        self._lock_manager.consume()
        self.lock_requests_q.declare()
        self.acquire_requests_q.declare()
        self.redis_connection = redis.StrictRedis()
        self.lock_monitor_thread = threading.Thread(target=self._manage_lock)
        self.consumer_thread = threading.Thread(target=self._consume_messages)
        self.lock_monitor_thread.daemon = True
        self.consumer_thread.daemon = True
        self._start_consumer()

    def _start_consumer(self):
        """Reset broker/Redis state and start the consumer + monitor threads."""
        self.lock_requests_q.purge()
        self.acquire_requests_q.purge()
        self.redis_connection.set('is_consuming_' + self.name, True)
        self.redis_connection.delete('current_lock_owner_' + self.name)
        self.consumer_thread.start()
        self.lock_monitor_thread.start()

    def _stop_consumer(self):
        """Signal the loops to stop, wait for the consumer, and clean up."""
        self.redis_connection.delete('is_consuming_' + self.name)
        self.redis_connection.delete('current_lock_owner_' + self.name)
        self.consumer_thread.join()
        self._lock_manager.connection.release()  ## TODO: look this up
        self.lock_requests_q.purge()
        self.acquire_requests_q.purge()

    def _consume_messages(self):
        # Drain broker events; timeouts are ignored so the loop keeps
        # polling until the connection is released by _stop_consumer().
        for _ in eventloop(self._lock_manager.connection, timeout=1,
                           ignore_timeouts=True):
            pass

    def _on_message(self, message):
        """Handle a lock request: RELEASE frees the lock, anything else enqueues."""
        print(message.payload)
        message.ack()
        p = Producer(
            Connection(),
            exchange=self.exchange,
            auto_declare=True,
        )
        if message.payload.get('request') == 'RELEASE':
            # inform the current lock owner that lock has been released
            p.publish(
                dict(request='RELEASED', id=message.payload.get('id')),
                routing_key=message.payload.get('id'),
                exchange=self.exchange,
            )
            # BUG FIX: the key must include the lock name. The original code
            # deleted the literal key 'current_lock_owner_', which no other
            # method ever sets, so the owner entry was never cleared and the
            # lock could never be handed to the next candidate.
            self.redis_connection.delete('current_lock_owner_' + self.name)
        else:
            p.publish(
                dict(request='ENQUEUE', id=message.payload.get('id')),
                routing_key=self.acquire_requests_routing_key,
                exchange=self.exchange,
            )

    def _manage_lock(self):
        """Grant the lock to the next queued candidate whenever it is free."""
        p = Producer(
            Connection(),
            self.exchange,
            auto_declare=True,
        )
        while self.redis_connection.get('is_consuming_' + self.name):
            if not self.redis_connection.get('current_lock_owner_' + self.name):
                # Get next candidate owner from queue
                message = self.acquire_requests_q.get()
                if not message:
                    continue
                print(message.payload)
                self.redis_connection.set('current_lock_owner_' + self.name,
                                          message.payload.get('id'))
                # Inform the candidate owner that lock has been granted
                # message not deleted until ack'ed
                message.ack()
                p.publish(
                    dict(request='GRANTED', id=message.payload.get('id')),
                    routing_key=message.payload.get('id'),
                    exchange=self.exchange,
                )

    def __del__(self):
        self._stop_consumer()
if channel is None: # connection should provide a default channel channel = conn.default_channel if type(queue) == str: # see if the queue parameter was a name string try: queue = Queue(name=queue, channel=channel, **self._queue_kwargs) except Exception, e: logging.exception(e) if type(queue) is not Queue: raise ValueError('No valid queue available') queue.declare() queue.purge() logging.info("Binding queue '%s' to exchange '%s' with:" % (queue.name, exchange)) routing_keys = routing_keys or ['#'] for rk in routing_keys: try: logging.debug("rk: %s" % rk) queue.bind_to(exchange=exchange, routing_key=rk) except Exception, e: logging.exception(str(e)) logging.info('Done: binding') consumer_tag = '%s::%s::consuming_exchange' % (self.name, queue.name) queue.consume(consumer_tag, callback=self._consumer_producer_callback, no_ack=True) self._queues.append(queue)
class TestVerifyTask(unittest.TestCase):
    """Integration test: publish a verify_dfo task and check the checksum reply.

    Talks to a live RabbitMQ broker described by ``config`` and assumes a
    worker consuming the 'verify' queue is running; replies are read back
    from the 'api' queue.
    """

    def setUp(self):
        # Open connection to RabbitMQ
        self.conn = Connection(config['broker_url'])
        self.channel = self.conn.channel()
        # Declare Verify queue (tasks are published here)
        q = config['queues']['verify']
        self.verifyQ = Queue(q['name'],
                             channel=self.channel,
                             exchange=Exchange(q['name']),
                             routing_key=q['name'],
                             max_priority=q['max_task_priority'])
        self.verifyQ.declare()
        # Declare API queue (results are read back from here)
        q = config['queues']['api']
        self.apiQ = Queue(q['name'],
                          channel=self.channel,
                          exchange=Exchange(q['name']),
                          routing_key=q['name'],
                          max_priority=q['max_task_priority'])
        self.apiQ.declare()

    def tearDown(self):
        # Delete API queue (the original comments here were swapped)
        self.apiQ.delete()
        # Delete Verify queue
        self.verifyQ.delete()
        # Close connection
        self.conn.close()

    def test_verify(self):
        """Send one verify_dfo task per algorithm and assert the reply payload."""
        # Two expected checksums for the same fixture file, one per algorithm.
        data = [{
            'filename': '/var/store/15525119098910.pdf',
            'algorithm': 'md5',
            'checksum': 'ec4e3b91d2e03fdb17db55ff46da43b2'
        }, {
            'filename': '/var/store/15525119098910.pdf',
            'algorithm': 'sha512',
            'checksum': 'bc803d8abccf18d89765d6ae9fb7d490ad07f57a48e4987acc1'
                        '73af4e65f143a4d215ffb59e9eebeb03849baab5a6e016e2806'
                        'a2cd0e84b14c778bdb84afbbf4'
        }]
        # NOTE(review): original indentation was lost; the loop is
        # reconstructed to cover the whole round-trip so both algorithms are
        # exercised (otherwise the md5 entry would be unused) — confirm
        # against upstream history.
        for i in data:
            self.assertTrue(path.exists(i['filename']))
            # Queues cleanup
            self.verifyQ.purge()
            self.apiQ.purge()
            # Random DFO ID
            dfo_id = randint(1, 2147483647)
            # Send task
            q = config['queues']['verify']
            producer = self.conn.Producer()
            producer.publish(
                routing_key=q['name'],
                body=[[dfo_id, i['filename'], 'test', i['algorithm']], {}, {}],
                headers={
                    'task': 'verify_dfo',
                    'id': str(uuid.uuid1())
                })
            # Wait for result message for max 5 seconds
            msg = None
            wait = 0
            while wait <= 5 and msg is None:
                msg = self.apiQ.get(no_ack=False)
                if msg is None:
                    sleep(1)
                    wait += 1
            # Tests
            self.assertFalse(msg is None)
            self.assertTrue(msg.payload[0][0] == dfo_id)
            self.assertTrue(msg.payload[0][1] == i['algorithm'])
            self.assertTrue(msg.payload[0][2] == i['checksum'])