def test_get_all_workers(self):
    """Only workers whose collection config matches the query are returned."""
    default_worker = get_worker()
    test_worker = get_worker('test')
    collections = [
        {'config': {'URL': 'redis://'}, 'all_workers': [default_worker]},
        {'config': {'URL': 'redis://localhost/1'}, 'all_workers': [test_worker]},
    ]
    matched = get_all_workers_by_configuration({'URL': 'redis://'}, collections)
    self.assertEqual(matched, [default_worker])
def test_get_all_workers(self):
    """Workers are selected by exact configuration match."""
    first_worker = get_worker()
    second_worker = get_worker('test')
    collections = [
        {'config': {'some_config': 1}, 'all_workers': [first_worker]},
        {'config': {'some_config': 2}, 'all_workers': [second_worker]},
    ]
    matched = get_all_workers_by_configuration({'some_config': 1}, collections)
    self.assertEqual(matched, [first_worker])
def test_download_sample_item(self):
    """Downloading a sample should return 200. The payload itself cannot be
    fetched here because Nginx redirects to an 'internal' location."""
    # NOTE(review): args is a set literal; reverse() accepts any iterable,
    # but a list/tuple would be more conventional — confirm intent.
    url = reverse('download-canary', args={self.canary.identifier})
    response = self.client.get(url, **self.headers)
    # Drain the queue synchronously so any work enqueued by the request runs.
    get_worker(worker_class=SimpleWorker).work(burst=True)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_workers(self):
    """Worker index page should show workers for a specific queue."""
    queue_index = get_queue_index('django_rq_test')

    # One worker on the queue under test, one on an unrelated queue;
    # only the former should be listed.
    matching_worker = get_worker('django_rq_test', name=uuid.uuid4().hex)
    matching_worker.register_birth()
    other_worker = get_worker('test3')
    other_worker.register_birth()

    response = self.client.get(reverse('rq_workers', args=[queue_index]))
    self.assertEqual(response.context['workers'], [matching_worker])
def test_workers(self):
    """Worker index page should show workers for a specific queue.

    Fix: the original bound ``queue = get_queue('django_rq_test')`` and
    never used it; the dead local is removed.
    """
    queue_index = get_queue_index('django_rq_test')

    # One worker on the queue under test, one on an unrelated queue;
    # only the former should appear in the view's context.
    worker1 = get_worker('django_rq_test', name=uuid.uuid4().hex)
    worker1.register_birth()
    worker2 = get_worker('test3')
    worker2.register_birth()

    response = self.client.get(reverse('rq_workers', args=[queue_index]))
    self.assertEqual(response.context['workers'], [worker1])
def test_action_requeue_jobs(self):
    """Failed jobs can be requeued in bulk through the 'requeue' action."""

    def failing_job():
        raise ValueError

    queue = get_queue('django_rq_test')
    failed_queue_index = get_failed_queue_index('django_rq_test')

    # Enqueue jobs that are guaranteed to fail.
    jobs = []
    job_ids = []
    for _ in range(3):
        job = queue.enqueue(failing_job)
        jobs.append(job)
        job_ids.append(job.id)

    # Run the worker once so every job ends up failed.
    get_worker('django_rq_test').work(burst=True)
    for job in jobs:
        self.assertTrue(job.is_failed)

    # Requeue the failed jobs from the failed queue via the view.
    self.client.post(reverse('rq_actions', args=[failed_queue_index]),
                     {'action': 'requeue', 'job_ids': job_ids})

    # All jobs should have left the failed state.
    for job in jobs:
        self.assertFalse(job.is_failed)
def test_get_worker_default(self):
    """
    By default, ``get_worker`` should return worker for ``default`` queue.
    """
    default_worker = get_worker()
    first_queue = default_worker.queues[0]
    self.assertEqual(first_queue.name, 'default')
def test_get_worker_specified(self):
    """
    A worker created from a single queue name listens on exactly one queue.
    """
    worker = get_worker('test')
    self.assertEqual(len(worker.queues), 1)
def test_get_worker_custom_classes(self):
    """Dotted-path overrides for job, queue and worker classes are resolved."""
    worker = get_worker(
        'test',
        job_class='django_rq.tests.fixtures.DummyJob',
        queue_class='django_rq.tests.fixtures.DummyQueue',
        worker_class='django_rq.tests.fixtures.DummyWorker',
    )
    self.assertIs(worker.job_class, DummyJob)
    self.assertIsInstance(worker.queues[0], DummyQueue)
    self.assertIsInstance(worker, DummyWorker)
def handle(self, *args, **kwargs):
    """Run a blocking worker on the notifications queue.

    Installs the notifications error handler and exits the process
    (SystemExit) if Redis cannot be reached.
    """
    try:
        worker = get_worker(NOTIFICATIONS_QUEUE_NAME)
        # Push the connection onto RQ's LocalStack so jobs that use
        # get_current_job() can find it.
        use_connection(worker.connection)
        worker.push_exc_handler(notifications_error_handler)
        worker.work()
    except ConnectionError as exc:
        raise SystemExit(exc)
def test_get_worker_custom_classes(self):
    """Custom job/queue/worker classes given by dotted path are honored."""
    worker = get_worker(
        'test',
        job_class='django_rq.tests.DummyJob',
        queue_class='django_rq.tests.DummyQueue',
        worker_class='django_rq.tests.DummyWorker',
    )
    self.assertIs(worker.job_class, DummyJob)
    self.assertIsInstance(worker.queues[0], DummyQueue)
    self.assertIsInstance(worker, DummyWorker)
def handle(self, *args, **kwargs):
    """Run a blocking worker on QUEUE_NAME with the notifications
    error handler installed; exit (SystemExit) on Redis failure."""
    try:
        worker = get_worker(QUEUE_NAME)
        # Push the connection onto RQ's LocalStack so jobs that use
        # get_current_job() can find it.
        use_connection(worker.connection)
        worker.push_exc_handler(notifications_error_handler)
        worker.work()
    except ConnectionError as exc:
        raise SystemExit(exc)
def handle(self, *args, **options):
    """Run an RQ worker on the queues given as positional arguments."""
    try:
        worker = get_worker(*args)
        # Call use_connection to push the redis connection into LocalStack
        # without this, jobs using RQ's get_current_job() will fail
        use_connection(worker.connection)
        worker.work(burst=options.get('burst', False))
    except ConnectionError as exc:
        print(exc)
def test_get_worker_specified(self):
    """
    A worker created from a single queue name is bound to exactly that queue.
    """
    worker = get_worker('test')
    self.assertEqual(len(worker.queues), 1)
    only_queue = worker.queues[0]
    self.assertEqual(only_queue.name, 'test')
def test_worker_details(self):
    """Worker details page renders the worker identified by its key.

    Fix: the original bound ``queue = get_queue('django_rq_test')`` and
    never used it; the dead local is removed.
    """
    queue_index = get_queue_index('django_rq_test')
    worker = get_worker('django_rq_test', name=uuid.uuid4().hex)
    worker.register_birth()

    response = self.client.get(
        reverse('rq_worker_details', args=[queue_index, worker.key]))
    self.assertEqual(response.context['worker'], worker)
def handle(self, *args, **options):
    """Start an RQ worker, optionally writing a pidfile and reporting
    crashes to Sentry via sentry-sdk.

    Fix: the original computed ``sentry_dsn`` twice — the first
    ``if sentry_dsn is None`` block was dead code, unconditionally
    overwritten by the later ``or`` expression. Only the effective
    (second) computation is kept.

    Exits with status 1 on Redis connection failure or when Sentry is
    requested but sentry-sdk is not installed.
    """
    pid = options.get('pid')
    if pid:
        # Record our PID so process supervisors can signal/monitor us.
        with open(os.path.expanduser(pid), "w") as fp:
            fp.write(str(os.getpid()))

    # Verbosity is defined by default in BaseCommand for all commands
    verbosity = options.get('verbosity')
    if verbosity >= 2:
        level = 'DEBUG'
    elif verbosity == 0:
        level = 'WARNING'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    # Command-line DSN wins; fall back to settings.SENTRY_DSN.
    sentry_dsn = options.get('sentry-dsn') or getattr(
        settings, 'SENTRY_DSN', None)
    try:
        # Instantiate a worker
        worker_kwargs = {
            'worker_class': options['worker_class'],
            'queue_class': options['queue_class'],
            'job_class': options['job_class'],
            'name': options['name'],
            'default_worker_ttl': options['worker_ttl'],
        }
        w = get_worker(*args, **worker_kwargs)

        # Call use_connection to push the redis connection into LocalStack
        # without this, jobs using RQ's get_current_job() will fail
        use_connection(w.connection)

        # Close any opened DB connection before any fork
        reset_db_connections()

        if sentry_dsn:
            try:
                from rq.contrib.sentry import register_sentry
                register_sentry(sentry_dsn)
            except ImportError:
                self.stdout.write(
                    self.style.ERROR(
                        "Please install sentry-sdk using `pip install sentry-sdk`"
                    ))
                sys.exit(1)

        w.work(burst=options.get('burst', False))
    except ConnectionError as e:
        print(e)
        sys.exit(1)
def handle(self, *args, **options):
    """Start an RQ worker: write an optional pidfile, configure logging
    from --verbosity, and register sentry-sdk reporting when a DSN is
    available. Exits with status 1 on Redis or sentry-sdk problems."""
    pidfile = options.get('pid')
    if pidfile:
        # Record our PID for process supervisors.
        with open(os.path.expanduser(pidfile), "w") as fp:
            fp.write(str(os.getpid()))

    # Command-line DSN takes precedence; fall back to settings.SENTRY_DSN.
    sentry_dsn = options.get('sentry-dsn')
    if sentry_dsn is None:
        sentry_dsn = getattr(settings, 'SENTRY_DSN', None)

    # Verbosity is defined by default in BaseCommand for all commands
    verbosity = options.get('verbosity')
    if verbosity == 0:
        log_level = 'WARNING'
    elif verbosity >= 2:
        log_level = 'DEBUG'
    else:
        log_level = 'INFO'
    setup_loghandlers(log_level)

    try:
        # Instantiate a worker
        worker = get_worker(
            *args,
            worker_class=options['worker_class'],
            queue_class=options['queue_class'],
            job_class=options['job_class'],
            name=options['name'],
            default_worker_ttl=options['worker_ttl'],
        )

        # Call use_connection to push the redis connection into LocalStack
        # without this, jobs using RQ's get_current_job() will fail
        use_connection(worker.connection)
        # Close any opened DB connection before any fork
        reset_db_connections()

        if sentry_dsn:
            try:
                from rq.contrib.sentry import register_sentry
                register_sentry(sentry_dsn)
            except ImportError:
                self.stdout.write(self.style.ERROR("Please install sentry-sdk using `pip install sentry-sdk`"))
                sys.exit(1)

        worker.work(burst=options.get('burst', False))
    except ConnectionError as e:
        print(e)
        sys.exit(1)
def test_requeue_job(self):
    """
    Ensure that a failed job gets requeued when rq_requeue_job is called
    """

    def failing_job():
        raise ValueError

    queue = get_queue('default')
    job = queue.enqueue(failing_job)
    get_worker('default').work(burst=True)

    job.refresh()
    self.assertTrue(job.is_failed)

    self.client.post(
        reverse('rq_requeue_job',
                args=[queue.connection_name, queue.name, job.id]),
        {'requeue': 'Requeue'})
    self.assertIn(job, queue.jobs)
    job.delete()
def test_requeue_job(self):
    """
    Ensure that a failed job gets requeued when rq_requeue_job is called
    """

    def failing_job():
        raise ValueError

    queue = get_queue("default")
    failed_index = get_failed_queue_index("default")
    job = queue.enqueue(failing_job)
    get_worker("default").work(burst=True)

    job.refresh()
    self.assertTrue(job.is_failed)

    self.client.post(reverse("rq_requeue_job", args=[failed_index, job.id]),
                     {"requeue": "Requeue"})
    self.assertIn(job, queue.jobs)
    job.delete()
def test_requeue_job(self):
    """
    Ensure that a failed job gets requeued when rq_requeue_job is called
    """

    def failing_job():
        raise ValueError

    queue = get_queue('default')
    failed_index = get_failed_queue_index('default')
    job = queue.enqueue(failing_job)
    get_worker('default').work(burst=True)

    job.refresh()
    self.assertTrue(job.is_failed)

    self.client.post(reverse('rq_requeue_job', args=[failed_index, job.id]),
                     {'requeue': 'Requeue'})
    self.assertIn(job, queue.jobs)
    job.delete()
def test_requeue_all(self):
    """
    Ensure that requeueing all failed job work properly
    """

    def failing_job():
        raise ValueError

    queue = get_queue('default')
    queue_index = get_queue_index('default')
    job = queue.enqueue(failing_job)
    queue.enqueue(failing_job)
    get_worker('default').work(burst=True)

    # The GET view reports how many failed jobs would be requeued.
    response = self.client.get(reverse('rq_requeue_all', args=[queue_index]))
    self.assertEqual(response.context['total_jobs'], 2)

    # After requeue_all is called, jobs are enqueued
    response = self.client.post(reverse('rq_requeue_all', args=[queue_index]))
    self.assertEqual(len(queue), 2)
def handle(self, *args, **options):
    """Start an RQ worker, optionally writing a pidfile and registering a
    Raven client for Sentry reporting. Exits with status 1 on Redis
    connection failure or a missing raven package."""
    pidfile = options.get('pid')
    if pidfile:
        # Record our PID so process supervisors can find us.
        with open(os.path.expanduser(pidfile), "w") as fp:
            fp.write(str(os.getpid()))

    # Command-line DSN takes precedence over settings.SENTRY_DSN.
    sentry_dsn = options.get('sentry-dsn') or getattr(
        settings, 'SENTRY_DSN', None)
    try:
        # Instantiate a worker
        worker = get_worker(
            *args,
            worker_class=options['worker_class'],
            queue_class=options['queue_class'],
            job_class=options['job_class'],
            name=options['name'],
            default_worker_ttl=options['worker_ttl'],
        )

        # Call use_connection to push the redis connection into LocalStack
        # without this, jobs using RQ's get_current_job() will fail
        use_connection(worker.connection)
        # Close any opened DB connection before any fork
        reset_db_connections()

        if sentry_dsn:
            try:
                from raven import Client
                from raven.transport.http import HTTPTransport
                from rq.contrib.sentry import register_sentry
                register_sentry(Client(sentry_dsn, transport=HTTPTransport),
                                worker)
            except ImportError:
                self.stdout.write(
                    self.style.ERROR(
                        "Please install sentry. For example `pip install raven`"
                    ))
                sys.exit(1)

        worker.work(burst=options.get('burst', False))
    except ConnectionError as e:
        print(e)
        sys.exit(1)
def test_get_statistics(self):
    """get_statistics() returns the right number of workers"""
    queue_configs = [{
        'connection_config': {
            'DB': 0,
            'HOST': 'localhost',
            'PORT': 6379,
        },
        'name': 'async'
    }]
    with patch('django_rq.utils.QUEUES_LIST',
               new_callable=PropertyMock(return_value=queue_configs)):
        birth_worker = get_worker('async', name=uuid4().hex)
        birth_worker.register_birth()

        stats = get_statistics()
        queue_data = stats['queues'][0]
        self.assertEqual(queue_data['name'], 'async')
        self.assertEqual(queue_data['workers'], 1)

        birth_worker.register_death()
def handle(self, *args, **options):
    """Start an RQ worker with optional pidfile and Raven-based Sentry
    reporting; exit with status 1 on Redis or raven import failure."""
    pidfile = options.get('pid')
    if pidfile:
        # Record our PID for process supervisors.
        with open(os.path.expanduser(pidfile), "w") as fp:
            fp.write(str(os.getpid()))

    # CLI DSN wins; otherwise fall back to settings.SENTRY_DSN.
    sentry_dsn = options.get('sentry-dsn') or getattr(settings, 'SENTRY_DSN', None)
    try:
        # Instantiate a worker
        worker = get_worker(
            *args,
            worker_class=options['worker_class'],
            queue_class=options['queue_class'],
            job_class=options['job_class'],
            name=options['name'],
            default_worker_ttl=options['worker_ttl'],
        )

        # Call use_connection to push the redis connection into LocalStack
        # without this, jobs using RQ's get_current_job() will fail
        use_connection(worker.connection)
        # Close any opened DB connection before any fork
        reset_db_connections()

        if sentry_dsn:
            try:
                from raven import Client
                from raven.transport.http import HTTPTransport
                from rq.contrib.sentry import register_sentry
                register_sentry(Client(sentry_dsn, transport=HTTPTransport), worker)
            except ImportError:
                self.stdout.write(self.style.ERROR("Please install sentry. For example `pip install raven`"))
                sys.exit(1)

        worker.work(burst=options.get('burst', False))
    except ConnectionError as e:
        print(e)
        sys.exit(1)
def __process_jobs(self):
    """Synchronously drain the default queue with a burst worker."""
    worker = get_worker()
    worker.work(burst=True)
def handle(self, *args, **options):
    """Start an RQ worker with optional pidfile, log-level setup and
    Raven-based Sentry reporting.

    Fix: the original computed ``sentry_dsn`` twice — the first
    ``if sentry_dsn is None`` block was dead code, unconditionally
    overwritten by the later ``or`` expression. Only the effective
    (second) computation is kept.

    Exits with status 1 on Redis connection failure, a missing raven
    package, or an invalid Sentry DSN.
    """
    pid = options.get('pid')
    if pid:
        # Record our PID so process supervisors can signal/monitor us.
        with open(os.path.expanduser(pid), "w") as fp:
            fp.write(str(os.getpid()))

    # Verbosity is defined by default in BaseCommand for all commands
    verbosity = options.get('verbosity')
    if verbosity >= 2:
        level = 'DEBUG'
    elif verbosity == 0:
        level = 'WARNING'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    # Command-line DSN wins; fall back to settings.SENTRY_DSN.
    sentry_dsn = options.get('sentry-dsn') or getattr(
        settings, 'SENTRY_DSN', None)
    try:
        # Instantiate a worker
        worker_kwargs = {
            'worker_class': options['worker_class'],
            'queue_class': options['queue_class'],
            'job_class': options['job_class'],
            'name': options['name'],
            'default_worker_ttl': options['worker_ttl'],
        }
        w = get_worker(*args, **worker_kwargs)

        # Call use_connection to push the redis connection into LocalStack
        # without this, jobs using RQ's get_current_job() will fail
        use_connection(w.connection)

        # Close any opened DB connection before any fork
        reset_db_connections()

        if sentry_dsn:
            try:
                from raven import Client
                from raven.transport.http import HTTPTransport
                from rq.contrib.sentry import register_sentry
                from raven.exceptions import InvalidDsn
                try:
                    client = Client(sentry_dsn, transport=HTTPTransport)
                    register_sentry(client, w)
                except InvalidDsn:
                    self.stdout.write(
                        self.style.ERROR(
                            "Invalid DSN. If you use `sentry-sdk` package you have to disable the django-rq sentry plugin by setting `--sentry-dsn=\"\"`."
                        ))
                    sys.exit(1)
            except ImportError:
                self.stdout.write(
                    self.style.ERROR(
                        "Please install sentry. For example `pip install raven`"
                    ))
                sys.exit(1)

        w.work(burst=options.get('burst', False))
    except ConnectionError as e:
        print(e)
        sys.exit(1)
def test_custom_class(self):
    """The worker returned for the 'test' queue is a DummyWorker."""
    self.assertIsInstance(get_worker('test'), DummyWorker)
def test_default_worker_class(self):
    """The worker returned for the 'test' queue is a plain Worker."""
    self.assertIsInstance(get_worker('test'), Worker)
def handle(self, *args, **options):
    """Run a blocking RQ worker on the queues given as positional args."""
    try:
        worker = get_worker(*args)
        worker.work()
    except ConnectionError as exc:
        print(exc)