def handle(self, *args, **options):
    """Monitor Celery events and report task successes, failures and
    workers that have gone silent."""
    state = current_app.events.State()
    # BUG FIX: the original used Python 2 `print '...'` statement syntax,
    # which is a SyntaxError in Python 3 and inconsistent with the
    # print() calls used everywhere else in this method.
    print('Current Tasks: %s' % current_app.tasks.keys())

    def announce_succeeded_tasks(event):
        # Feed the event into the state tracker, then report the result.
        state.event(event)
        task_id = event['uuid']
        print('TASK SUCCEEDED: %s[%s] %s' % (
            event['name'], task_id, state[task_id].info(),
        ))

    def announce_failed_tasks(event):
        state.event(event)
        task_id = event['uuid']
        print('TASK FAILED: %s[%s] %s' % (
            event['name'], task_id, state[task_id].info(),
        ))

    def announce_dead_workers(event):
        # Heartbeat events update worker liveness; warn when a worker
        # has missed enough heartbeats to be considered dead.
        state.event(event)
        hostname = event.get('hostname', None)
        print('Event type %s received' % event.get('type', 'undefined'))
        if hostname and not state.workers[hostname].alive:
            print('Worker %s missed heartbeats' % (hostname, ))

    with current_app.connection() as connection:
        recv = current_app.events.Receiver(connection, handlers={
            'task-failed': announce_failed_tasks,
            'task-succeeded': announce_succeeded_tasks,
            'worker-heartbeat': announce_dead_workers,
        })
        # Block indefinitely, dispatching events to the handlers above.
        recv.capture(limit=None, timeout=None, wakeup=True)
def test_info(self):
    """The consumer's info dict exposes the QoS prefetch count and
    reports broker details only once a connection exists."""
    consumer = MyKombuConsumer(self.ready_queue, timer=self.timer)
    consumer.qos = QoS(consumer.task_consumer, 10)

    # Without a live connection the broker entry must be falsy.
    snapshot = consumer.info
    self.assertEqual(snapshot['prefetch_count'], 10)
    self.assertFalse(snapshot['broker'])

    # Once connected, broker details must be populated.
    consumer.connection = current_app.connection()
    snapshot = consumer.info
    self.assertTrue(snapshot['broker'])
def _producer(self):
    """Forever consume (task, args, kwargs, options, receipt) tuples from
    the in-queue, publish each task, and report the async result back
    through its receipt."""
    connection = current_app.connection()
    publisher = current_app.amqp.TaskProducer(connection)
    pending = self.inqueue
    while True:
        task, args, kwargs, options, receipt = pending.get()
        async_result = task.apply_async(
            args, kwargs, publisher=publisher, **options)
        receipt.finished(async_result)
def get_rabbitmq_info(full=False):
    """Return RabbitMQ server properties.

    Args:
        full: when True, return the complete server-properties dict;
            otherwise only the ``version`` key.

    Returns:
        dict: the server properties, or a ``version``/``error`` fallback
        dict when the broker is unreachable.
    """
    try:
        props = current_app.connection().connection.server_properties
        if full:
            return props
        return {'version': props['version']}
    # In Python 3, ConnectionError is a subclass of OSError, so catching
    # OSError alone covers both (the original tuple was redundant).
    except OSError:
        logger.exception("Could not connect to RabbitMQ.")
        return {
            'version': 'unknown',
            'error': 'Error connecting to RabbitMQ. Check the logs for more detail.'
        }
def manage_indexer_queues():
    """Peeks into queues and spawns bulk indexers."""
    channel = current_celery_app.connection().channel()
    indexers = current_indexer_registry.all()
    # Hoisted out of the loop: the consumer cap is loop-invariant
    # (the original re-read the config entry on every iteration).
    max_consumers = current_app.config["INDEXER_MAX_BULK_CONSUMERS"]
    for name, indexer in indexers.items():
        queue = indexer.mq_queue.bind(channel)
        # queue_declare() on an existing queue returns its current
        # message and consumer counts.
        _, num_messages, num_consumers = queue.queue_declare()
        # Only spawn another bulk indexer when there is work queued and
        # the configured consumer cap has not been reached yet.
        if num_messages > 0 and num_consumers < max_consumers:
            process_bulk_queue.delay(indexer_name=name)
def test_active_queues(self):
    """The active_queues panel reports every queue held by the consumer."""
    import kombu
    x = kombu.Consumer(
        current_app.connection(),
        [kombu.Queue("foo", kombu.Exchange("foo"), "foo"),
         kombu.Queue("bar", kombu.Exchange("bar"), "bar")],
        auto_declare=False,
    )
    consumer = Mock()
    consumer.task_consumer = x
    panel = self.create_panel(consumer=consumer)
    r = panel.handle("active_queues")
    # sorted() already returns a list; the original's extra list() wrapper
    # was redundant (flake8-comprehensions C413).
    self.assertListEqual(sorted(q["name"] for q in r), ["bar", "foo"])
def test_active_queues(self):
    """The active_queues panel reports every queue held by the consumer."""
    import kombu
    x = kombu.Consumer(current_app.connection(),
                       [kombu.Queue('foo', kombu.Exchange('foo'), 'foo'),
                        kombu.Queue('bar', kombu.Exchange('bar'), 'bar')],
                       auto_declare=False)
    consumer = Mock()
    consumer.task_consumer = x
    panel = self.create_panel(consumer=consumer)
    r = panel.handle('active_queues')
    # sorted() already returns a list; the original's extra list() wrapper
    # was redundant (flake8-comprehensions C413).
    self.assertListEqual(sorted(q['name'] for q in r), ['bar', 'foo'])
def check_disabled_workers():
    """
    Check if disabled workers have tasks in queue, then re-route them
    """
    from .models import Worker
    from .tasks import route_envelope

    class Counter:
        pass

    # BUG FIX: the connection was originally bound to `c` but referenced
    # as `connection` everywhere below (NameError at runtime); use one
    # consistent name.
    connection = current_app.connection()
    counter = Counter()
    counter.count = 0

    def process_message(body, message):
        identifier = body.get('args', [])[0]
        attempts = body.get('args')[1]
        log.info(
            '[{}] Republishing mail into routing task (attempts={})...'.format(
                identifier, attempts))
        route_envelope.apply_async(body.get('args'))
        message.ack()
        counter.count += 1

    for worker in Worker.objects.filter(enabled=False).only('ip'):
        queues = [
            worker.get_queue(connection),
            worker.get_queue(connection, retry=True)
        ]
        for queue in queues:
            # BUG FIX: the original rebound `queue = worker.get_queue(...)`
            # here, clobbering the loop variable so the retry queue was
            # never drained.
            size = worker.get_queue_size(queue)
            if not size:
                continue
            log.info(
                "{} tasks remaining in disabled queue {}. Republishing them..."
                .format(size, queue.name))
            # BUG FIX: the counter accumulates across queues, but the
            # original compared it against the current queue's size alone;
            # track the cumulative target for this queue instead.
            target = counter.count + size
            task_consumer = current_app.amqp.TaskConsumer(
                connection, queues=[queue], callbacks=[process_message])
            with task_consumer:
                while counter.count < target:
                    try:
                        connection.drain_events(timeout=2)
                    except socket.timeout:
                        # BUG FIX: a timeout previously `return`ed from the
                        # whole function, silently skipping the remaining
                        # queues and workers; move on to the next queue.
                        break
def test_manage_indexer_queues(app, service, identity_simple, input_data):
    """End-to-end: reindexing enqueues one message; managing drains it."""
    # register the indexer
    current_indexer_registry.register(service.indexer, service.id)
    channel = current_celery_app.connection().channel()

    # create a record
    item = service.create(identity_simple, input_data)
    service.record_cls.index.refresh()
    record_uuid = item._record.id

    # send to reindex
    assert service.reindex(identity_simple, es_query=_es_query(record_uuid))

    # exactly one message should now be waiting in the queue
    bound_queue = service.indexer.mq_queue.bind(channel)
    _, queued, _ = bound_queue.queue_declare()
    assert queued == 1

    # manage
    manage_indexer_queues()

    # afterwards the queue must be empty
    _, queued, _ = bound_queue.queue_declare()
    assert queued == 0
def dispatch_queued():
    """
    Reroute all queued tasks

    This task is not used for now but maybe a management command could be
    useful.
    """
    from .tasks import route_envelope

    class Counter:
        pass

    connection = current_app.connection()
    counter = Counter()
    counter.count = 0

    def process_message(body, message):
        identifier = body.get('args', [])[0]
        attempts = body.get('args')[1]
        log.info(
            'Republishing {} mail into routing task (attempts={})...'.format(
                identifier, attempts))
        message.ack()
        route_envelope.apply_async(body.get('args'))
        counter.count += 1

    queue = get_queue(connection, settings.MAILSEND['QUEUED_MAIL_QUEUE'])
    size = get_queue_size(queue)
    if size:
        log.info("Rerouting {} tasks from {} queue...".format(
            size, settings.MAILSEND['QUEUED_MAIL_QUEUE']))
        task_consumer = current_app.amqp.TaskConsumer(
            connection, queues=[queue], callbacks=[process_message])
        with task_consumer:
            # BUG FIX: the original condition `counter.count <= size`
            # drained one extra time after all messages were handled,
            # always paying the 2-second timeout before returning.
            while counter.count < size:
                try:
                    connection.drain_events(timeout=2)
                except socket.timeout:
                    return
def get_rabbitmq_info(full=False):
    """Return RabbitMQ server properties (``version`` only unless *full*).

    Retries the connection up to 5 times; on failure returns a
    ``version``/``error`` fallback dict instead of raising.
    """
    # Shared fallback payload for every connection-failure path
    # (the original duplicated this dict in both except clauses).
    error_info = {
        'version': 'unknown',
        'error': 'Error connecting to RabbitMQ. Check the logs for more detail.'
    }
    try:
        props = current_app.connection(transport_options={
            'max_retries': 5
        }).connection.server_properties
        if full:
            return props
        return {'version': props['version']}
    except OSError:
        logger.exception("Could not connect to RabbitMQ.")
        return error_info
    except kombu_OperationalError as e:
        # Lazy %-args: let logging interpolate only if the record is emitted,
        # instead of eagerly formatting with `%` as the original did.
        logger.exception("Could not connect to RabbitMQ. Error: %s", e)
        return error_info
class Connection(current_app.connection().__class__):
    # Test double: subclasses whatever Connection class the current app's
    # broker transport provides at runtime.
    # `obj` is assigned by the surrounding test to point at the object
    # whose `connection` attribute should be cleared.
    obj = None

    def drain_events(self, **kwargs):
        # Simulate the connection dropping mid-drain by clearing the
        # tracked object's connection reference instead of draining.
        self.obj.connection = None
def get_rabbitmq_info():
    """Return the server-properties dict of the connected RabbitMQ broker."""
    broker_connection = current_app.connection()
    return broker_connection.connection.server_properties
class Connection(current_app.connection().__class__):
    # Test double: subclasses whatever Connection class the current app's
    # broker transport provides at runtime.
    # `obj` is assigned by the surrounding test to point at the object
    # whose `connection` attribute should be cleared.
    obj = None

    def drain_events(self, **kwargs):
        # Simulate a connection failure mid-drain: clear the tracked
        # object's connection reference, then raise a socket error so the
        # caller's error path is exercised.
        self.obj.connection = None
        raise socket.error('foo')
def celery_redis_instance():
    """Build a redis client from Celery's broker connection URI."""
    # include_password=True so the parsed URI still carries credentials.
    uri = current_app.connection().as_uri(include_password=True)
    parsed = urlparse(uri)
    return redis.Redis(
        host=parsed.hostname,
        port=parsed.port,
        password=parsed.password,
    )