def purge_queue() -> None:
    """
    Deletes all MessageLog objects with status `IN_PROGRESS` or `PUBLISHED`, and iterates through and purges all
    RabbitMQ queues
    """
    queued_messages = MessageLog.objects.filter(status__in=['IN_PROGRESS', 'PUBLISHED'])
    queued_messages.delete()

    try:
        carrot_settings = settings.CARROT
    except AttributeError:
        carrot_settings = {
            'default_broker': DEFAULT_BROKER,
        }

    queues = carrot_settings.get('queues', [{'name': 'default', 'host': DEFAULT_BROKER}])

    for queue in queues:
        # the host entry may be either a URL string or a dict of VirtualHost keyword arguments
        if isinstance(queue['host'], str):
            filters = {'url': queue['host']}
        else:
            filters = queue['host']

        host = VirtualHost(**filters)
        channel = host.blocking_connection.channel()
        channel.queue_purge(queue=queue['name'])
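For reference, `purge_queue` only reads the `default_broker` and `queues` keys of `settings.CARROT`. A minimal sketch of a matching settings fragment, with placeholder queue names and broker URL (not taken from any real project):

# Hypothetical settings.py fragment - queue names and broker URL are illustrative;
# only the 'default_broker' and 'queues' keys are read by purge_queue() above.
CARROT = {
    'default_broker': 'amqp://guest:guest@localhost:5672/',
    'queues': [
        {'name': 'default', 'host': 'amqp://guest:guest@localhost:5672/'},            # host as a URL string
        {'name': 'reports', 'host': {'url': 'amqp://guest:guest@localhost:5672/'}},   # host as VirtualHost kwargs
    ],
}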
def get_host_from_name(name: str) -> VirtualHost:
    """
    Gets a host object from a given queue name, based on the Django configuration.

    If no queue name is provided (as may be the case for some callers), this function returns a VirtualHost based
    on the CARROT.default_broker value.

    Raises a CarrotConfigException if the given queue name is not registered in the settings.
    """
    try:
        carrot_settings = settings.CARROT
    except AttributeError:
        carrot_settings = {
            'default_broker': DEFAULT_BROKER,
            'queues': [{'name': 'default', 'host': DEFAULT_BROKER}],
        }

    try:
        if not name:
            try:
                conf = carrot_settings.get('default_broker', {})
            except AttributeError:
                conf = {}

            if not conf:
                conf = {'url': DEFAULT_BROKER}
            elif isinstance(conf, str):
                conf = {'url': conf}

            return VirtualHost(**conf)

        queues = carrot_settings.get('queues', [])
        queue_host = list(filter(lambda queue: queue['name'] == name, queues))[0]['host']

        # the host may be given as a dict of VirtualHost kwargs, or as a plain URL string
        try:
            vhost = VirtualHost(**queue_host)
        except TypeError:
            vhost = VirtualHost(url=queue_host)

        return vhost

    except IndexError:
        raise CarrotConfigException('Cannot find queue called %s in settings.CARROT queue list' % name)
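A usage sketch, assuming a settings fragment like the one shown earlier is in place (the 'reports' queue name is illustrative):

# Hypothetical usage - assumes a 'reports' queue is registered in settings.CARROT
vhost = get_host_from_name('reports')    # resolves the named queue's broker
default_vhost = get_host_from_name('')   # falls back to CARROT['default_broker']

try:
    get_host_from_name('does-not-exist')
except CarrotConfigException as err:
    print(err)  # 'Cannot find queue called does-not-exist in settings.CARROT queue list'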
def runner(options):
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    sys.path.append(BASE_DIR)

    vhost = {
        'host': options.host,
        'port': options.port,
        'name': options.name,
        'username': options.username,
        'password': options.password,
        'secure': options.secure,
    }
    _vhost = VirtualHost(**vhost)

    settings.configure(
        DEBUG=True,
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': 'local',
            }
        },
        ROOT_URLCONF='carrot.urls',
        INSTALLED_APPS=(
            'django.contrib.auth',
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.admin',
            'django.contrib.staticfiles',
            'carrot',
        ),
        CARROT={
            'default_broker': str(_vhost),
        },
        TEMPLATES=[
            {
                'BACKEND': 'django.template.backends.django.DjangoTemplates',
                'DIRS': [os.path.join(BASE_DIR, 'templates')],
                'APP_DIRS': True,
                'OPTIONS': {
                    'context_processors': [
                        'django.template.context_processors.debug',
                        'django.template.context_processors.request',
                        'django.contrib.auth.context_processors.auth',
                        'django.contrib.messages.context_processors.messages',
                    ],
                },
            },
        ],
        STATIC_URL='/static/',
    )

    django.setup()

    from django.test.runner import DiscoverRunner
    test_runner = DiscoverRunner(verbosity=0)

    failures = test_runner.run_tests(['carrot'])
    if failures:
        sys.exit(failures)
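`runner` expects an argparse-style namespace exposing the broker connection attributes it reads (`host`, `port`, `name`, `username`, `password`, `secure`). A minimal sketch of a matching entry point; the flag spellings and defaults are assumptions, only the attribute names come from the code above:

# Hypothetical entry point for runner() - flag names and defaults are illustrative
import argparse

def main():
    parser = argparse.ArgumentParser(description='Run the carrot test suite against a broker')
    parser.add_argument('--host', default='localhost')
    parser.add_argument('--port', type=int, default=5672)
    parser.add_argument('--name', default='test', help='virtual host name')
    parser.add_argument('--username', default='guest')
    parser.add_argument('--password', default='guest')
    parser.add_argument('--secure', action='store_true')
    runner(parser.parse_args())

if __name__ == '__main__':
    main()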
def test_consumer_set(self, *args):
    alt_settings = {
        'queues': [{
            'name': 'test',
            'durable': True,
            'queue_arguments': {'blah': True},
            'exchange_arguments': {'blah': True},
        }]
    }

    with override_settings(CARROT=alt_settings):
        cs = ConsumerSet(VirtualHost('amqp://*****:*****@localhost:5672/test'), 'test', logger)
        cs.start_consuming()
        cs.stop_consuming()
def handle(self, **options):
    """
    The actual handler process. Performs the following actions:

    - Initiates and starts a new :class:`carrot.objects.ScheduledTaskManager`, which schedules all *active*
      :class:`carrot.objects.ScheduledTask` instances to run at the given intervals. This only happens if the
      **--no-scheduler** argument has not been provided - otherwise, the service only creates consumer objects
    - Loops through the queues registered in your Django project's settings module, and starts a new
      :class:`carrot.objects.ConsumerSet` for each of them. Each ConsumerSet will contain **n**
      :class:`carrot.objects.Consumer` objects, where **n** is the concurrency setting for the given queue
      (as defined in the Django settings)
    - Enters an infinite loop which monitors your database for changes - if any changes to the
      :class:`carrot.objects.ScheduledTask` queryset are detected, carrot updates the scheduler accordingly

    On receiving a **KeyboardInterrupt**, **SystemExit** or SIGTERM, the service first turns off each of the
    schedulers in turn (so no new tasks can be published to RabbitMQ), before turning off the Consumers in turn.
    The more Consumers/ScheduledTask objects you have, the longer this will take.

    :param options: provided by **argparse** (see above for the full list of available options)
    """
    signal.signal(signal.SIGTERM, self.terminate)

    # check if the carrot service is already running, and warn the user if so
    running_pids = []
    for q in psutil.process_iter():
        if 'python' in q.name():
            if len(q.cmdline()) > 2 and 'manage.py' in q.cmdline()[1] and 'carrot' in q.cmdline()[2]:
                if not q.pid == os.getpgid(0):
                    running_pids.append(q.pid)

    if running_pids:
        self.stdout.write(self.style.WARNING(
            'WARNING: Carrot service is already running with the following PID. Running more '
            'than one instance of carrot may lead to a memory leak:\n%s'
            % '\n'.join([str(pid) for pid in running_pids])))

    run_scheduler = options['run_scheduler']

    try:
        queues = [q for q in settings.CARROT['queues'] if q.get('consumable', True)]
    except (AttributeError, KeyError):
        queues = [{'name': 'default', 'host': DEFAULT_BROKER}]

    if run_scheduler:
        self.scheduler = ScheduledTaskManager()

    try:
        # scheduler
        if self.scheduler:
            self.scheduler.start()
            self.stdout.write(self.style.SUCCESS('Successfully started scheduler'))

        # logger
        loglevel = getattr(logging, options.get('loglevel', 'DEBUG'))

        logger = logging.getLogger('carrot')
        logger.setLevel(loglevel)

        file_handler = logging.FileHandler(options['logfile'])
        file_handler.setLevel(loglevel)

        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(loglevel)

        formatter = logging.Formatter(LOGGING_FORMAT)
        file_handler.setFormatter(formatter)
        stream_handler.setFormatter(formatter)

        logger.addHandler(file_handler)
        logger.addHandler(stream_handler)

        # consumers
        for queue in queues:
            kwargs = {
                'queue': queue['name'],
                'logger': logger,
                'concurrency': queue.get('concurrency', 1),
            }

            if queue.get('consumer_class', None):
                kwargs['consumer_class'] = queue.get('consumer_class')

            # the host entry may be a dict of VirtualHost kwargs or a plain URL string
            try:
                vhost = VirtualHost(**queue['host'])
            except TypeError:
                vhost = VirtualHost(url=queue['host'])

            c = ConsumerSet(host=vhost, **kwargs)
            c.start_consuming()
            self.active_consumer_sets.append(c)
            self.stdout.write(self.style.SUCCESS('Successfully started %i consumers for queue %s'
                                                 % (c.concurrency, queue['name'])))

        self.stdout.write(self.style.SUCCESS('All queue consumer sets started successfully. Full logs are at %s.'
                                             % options['logfile']))

        qs = ScheduledTask.objects.filter(active=True)
        self.pks = [t.pk for t in qs]

        while True:
            time.sleep(1)
            if not self.run:
                self.terminate()

            if self.scheduler or options['testmode']:
                new_qs = ScheduledTask.objects.filter(active=True)

                if new_qs.count() > len(self.pks):
                    print('New active scheduled tasks have been added to the queryset')
                    new_tasks = new_qs.exclude(pk__in=self.pks) or [ScheduledTask()]
                    for new_task in new_tasks:
                        print('adding new task %s' % new_task)
                        self.scheduler.add_task(new_task)

                    self.pks = [t.pk for t in new_qs]

                elif new_qs.count() < len(self.pks):
                    self.pks = [t.pk for t in new_qs]

            if options['testmode']:
                print('TESTMODE:', options['testmode'])
                raise SystemExit()

    except Exception as err:
        self.stderr.write(self.style.ERROR(err))

    except (KeyboardInterrupt, SystemExit):
        # self.terminate()
        pass
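The queue entries read by this handler support the optional keys used above (`consumable`, `concurrency`, `consumer_class`). A sketch of a settings fragment under those assumptions; the queue names, concurrency values and dotted consumer path are placeholders, only the keys themselves come from the code:

# Hypothetical settings.py fragment for the consumer service - names and values
# are illustrative; the keys are the ones read by handle() above.
CARROT = {
    'default_broker': 'amqp://guest:guest@localhost:5672/',
    'queues': [
        {
            'name': 'default',
            'host': 'amqp://guest:guest@localhost:5672/',
            'concurrency': 2,                                     # number of Consumer objects in the ConsumerSet
        },
        {
            'name': 'slow-tasks',
            'host': {'url': 'amqp://guest:guest@localhost:5672/'},
            'consumer_class': 'myapp.consumers.CustomConsumer',   # assumed custom Consumer subclass
        },
        {
            'name': 'outbound-only',
            'host': 'amqp://guest:guest@localhost:5672/',
            'consumable': False,                                  # skipped by this service
        },
    ],
}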
def test_consumer(self, *args):
    consumer = Consumer(VirtualHost('amqp://*****:*****@localhost:5672/test'), 'test', logger, 'test')
    consumer.task_log = ['blah']

    log = MessageLog.objects.create(task='carrot.tests.test_task', uuid=1234, status='PUBLISHED', task_args='()')

    consumer.get_task_type({'type': 'carrot.tests.test_task'}, None)

    p = Properties()
    self.assertEqual(consumer.get_message_log(p, None), log)

    p.message_id = 4321
    consumer.get_message_log(p, None)

    consumer.fail(log, 'test error')

    consumer.connection = consumer.connect()
    consumer.run()
    consumer.reconnect()
    consumer.on_connection_open(consumer.connection)
    consumer.channel = consumer.connection.channel
    consumer.on_channel_open(consumer.channel)
    consumer.on_exchange_declare()
    consumer.on_queue_declare()
    consumer.on_bind()

    p.message_id = 1234
    consumer.on_message(consumer.channel, p, p, b'{}')

    log.status = 'PUBLISHED'
    log.save()
    consumer.on_message(consumer.channel, p, p, b'{}')
    consumer.on_channel_closed(consumer.channel, 1, 'blah')

    p.headers = {'type': 'carrot.tests.test_task'}
    log.delete()
    log = MessageLog.objects.create(task='carrot.tests.test_task', uuid=1234, status='PUBLISHED', task_args='()')
    self.assertEqual(str(log), 'carrot.tests.test_task')
    consumer.on_message(consumer.channel, p, p, b'{}')
    log.delete()

    p.headers = {'type': 'carrot.tests.dict_task'}
    log = MessageLog.objects.create(task='carrot.tests.dict_task', uuid=1234, status='PUBLISHED', task_args='()')
    consumer.on_message(consumer.channel, p, p, b'{}')
    log.delete()

    p.headers = {'type': 'carrot.tests.failing_task'}
    log = MessageLog.objects.create(task='carrot.tests.failing_task', uuid=1234, status='PUBLISHED', task_args='()')
    consumer.on_message(consumer.channel, p, p, b'{}')
    log.delete()

    log = MessageLog.objects.create(task='carrot.tests.test_task', uuid=1234, status='PUBLISHED', task_args='()')
    consumer.serializer = MessageSerializer()
    consumer.on_message(consumer.channel, p, p, b'{}')
    log.delete()

    log = MessageLog.objects.create(task='carrot.tests.test_task', uuid=1234, status='PUBLISHED', task_args='()')
    consumer.serializer.failing_method = 'get_task'
    consumer.on_message(consumer.channel, p, p, b'{}')

    consumer.active_message_log = log
    consumer.on_consumer_cancelled(1)
    consumer.stop()
    consumer.on_cancel()

    consumer.channel = None
    consumer.stop()
    consumer.close_connection()
    consumer.on_channel_closed(consumer.channel, 1, 'blah')
    consumer.on_connection_closed(consumer.connection)

    consumer.shutdown_requested = True
    consumer.on_channel_closed(consumer.channel, 1, 'blah')
    consumer.on_connection_closed(consumer.connection)