Example #1
class TestDeferredRegistry(RQTestCase):

    def setUp(self):
        super(TestDeferredRegistry, self).setUp()
        self.registry = DeferredJobRegistry(connection=self.testconn)

    def test_key(self):
        self.assertEqual(self.registry.key, 'rq:deferred:default')

    def test_add(self):
        """Adding a job to DeferredJobsRegistry."""
        job = Job()
        self.registry.add(job)
        job_ids = [as_text(job_id) for job_id in
                   self.testconn.zrange(self.registry.key, 0, -1)]
        self.assertEqual(job_ids, [job.id])

    def test_register_dependency(self):
        """Ensure job creation and deletion works properly with DeferredJobRegistry."""
        queue = Queue(connection=self.testconn)
        job = queue.enqueue(say_hello)
        job2 = queue.enqueue(say_hello, depends_on=job)

        registry = DeferredJobRegistry(connection=self.testconn)
        self.assertEqual(registry.get_job_ids(), [job2.id])

        # When deleted, job removes itself from DeferredJobRegistry
        job2.delete()
        self.assertEqual(registry.get_job_ids(), [])
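The key pattern asserted by test_key generalizes: each queue gets its own registry under rq:deferred:<queue name>. A minimal sketch, assuming a local Redis server and the rq package (the queue names are illustrative):

from redis import Redis
from rq.registry import DeferredJobRegistry

conn = Redis()
for name in ('default', 'emails'):
    registry = DeferredJobRegistry(name, connection=conn)
    print(registry.key)  # 'rq:deferred:default', then 'rq:deferred:emails'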
Example #2
    def test_register_dependency(self):
        """Ensure job creation and deletion works properly with DeferredJobRegistry."""
        queue = Queue(connection=self.testconn)
        job = queue.enqueue(say_hello)
        job2 = queue.enqueue(say_hello, depends_on=job)

        registry = DeferredJobRegistry(connection=self.testconn)
        self.assertEqual(registry.get_job_ids(), [job2.id])

        # When deleted, job removes itself from DeferredJobRegistry
        job2.delete()
        self.assertEqual(registry.get_job_ids(), [])
Example #3
    def test_deferred_jobs(self):
        """Ensure that active jobs page works properly."""
        queue = get_queue('django_rq_test')
        queue_index = get_queue_index('django_rq_test')

        job = queue.enqueue(access_self)
        registry = DeferredJobRegistry(queue.name, queue.connection)
        registry.add(job, 2)
        response = self.client.get(
            reverse('rq_deferred_jobs', args=[queue_index])
        )
        self.assertEqual(response.context['jobs'], [job])
Example #4
    def test_register_dependency(self):
        """Ensure dependency registration works properly."""
        origin = 'some_queue'
        registry = DeferredJobRegistry(origin, self.testconn)

        job = Job.create(func=fixtures.say_hello, origin=origin)
        job._dependency_id = 'id'
        job.save()

        self.assertEqual(registry.get_job_ids(), [])
        job.register_dependency()
        self.assertEqual(as_text(self.testconn.spop('rq:job:id:dependents')), job.id)
        self.assertEqual(registry.get_job_ids(), [job.id])
Example #5
class TestDeferredRegistry(RQTestCase):

    def setUp(self):
        super(TestDeferredRegistry, self).setUp()
        self.registry = DeferredJobRegistry(connection=self.testconn)

    def test_add(self):
        """Adding a job to DeferredJobsRegistry."""
        job = Job()
        self.registry.add(job)
        job_ids = [as_text(job_id) for job_id in
                   self.testconn.zrange(self.registry.key, 0, -1)]
        self.assertEqual(job_ids, [job.id])
Example #6
    def test_enqueue_deferred(self):
        """Enqueuing a deferred job"""
        q = Queue('example', is_async=False)  # rq renamed 'async', a Python keyword since 3.7
        j = q.enqueue(say_hello, args=('John',), deferred=True)

        # Job is deferred
        self.assertEqual(j.get_status(), JobStatus.DEFERRED)

        # Job is registered in appropriate registry
        registry = DeferredJobRegistry(q.name, connection=self.testconn)
        self.assertEqual(registry.get_job_ids(), [j.id])

        # Job result isn't available even though we're using a sync queue
        self.assertIsNone(j.result)

        # Perform the deferred job manually
        j.perform()
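Note that the deferred=True flag above does not exist in stock rq and appears to come from a patched build; in unmodified rq, a job becomes deferred when it has an unmet dependency. A minimal sketch of that path, assuming a local Redis server and no worker running (so the parent never finishes):

from redis import Redis
from rq import Queue
from rq.job import JobStatus

def say_hello(name):
    return 'Hi there, %s!' % name

q = Queue('example', connection=Redis())
parent = q.enqueue(say_hello, 'John')
child = q.enqueue(say_hello, 'Jane', depends_on=parent)
print(child.get_status() == JobStatus.DEFERRED)  # True while parent is unfinished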
Example #7
    def test_enqueue_dependents_on_multiple_queues(self):
        """Enqueueing dependent jobs on multiple queues pushes jobs in the queues
        and removes them from DeferredJobRegistry for each different queue."""
        q_1 = Queue("queue_1")
        q_2 = Queue("queue_2")
        parent_job = Job.create(func=say_hello)
        parent_job.save()
        job_1 = q_1.enqueue(say_hello, depends_on=parent_job)
        job_2 = q_2.enqueue(say_hello, depends_on=parent_job)

        # Each queue has its own DeferredJobRegistry
        registry_1 = DeferredJobRegistry(q_1.name, connection=self.testconn)
        self.assertEqual(
            set(registry_1.get_job_ids()),
            set([job_1.id])
        )
        registry_2 = DeferredJobRegistry(q_2.name, connection=self.testconn)
        self.assertEqual(
            set(registry_2.get_job_ids()),
            set([job_2.id])
        )

        # After the dependents are enqueued, job_1 should be in queue_1
        # and job_2 in queue_2
        self.assertEqual(q_1.job_ids, [])
        self.assertEqual(q_2.job_ids, [])
        q_1.enqueue_dependents(parent_job)
        q_2.enqueue_dependents(parent_job)
        self.assertEqual(set(q_1.job_ids), set([job_1.id]))
        self.assertEqual(set(q_2.job_ids), set([job_2.id]))
        self.assertFalse(self.testconn.exists(parent_job.dependents_key))

        # DeferredJobRegistry should also be empty
        self.assertEqual(registry_1.get_job_ids(), [])
        self.assertEqual(registry_2.get_job_ids(), [])
Example #8
def enqueue_job(request, queue_index, job_id):
    """ Enqueue deferred jobs
    """
    queue_index = int(queue_index)
    queue = get_queue_by_index(queue_index)
    job = Job.fetch(job_id, connection=queue.connection)

    if request.method == 'POST':
        # Remove the job from the relevant registry first; enqueue_job()
        # sets the status to QUEUED, so the check must run before enqueueing
        if job.get_status() == JobStatus.DEFERRED:
            registry = DeferredJobRegistry(queue.name, queue.connection)
            registry.remove(job)
        elif job.get_status() == JobStatus.FINISHED:
            registry = FinishedJobRegistry(queue.name, queue.connection)
            registry.remove(job)

        queue.enqueue_job(job)

        messages.info(request, 'You have successfully enqueued %s' % job.id)
        return redirect('rq_job_detail', queue_index, job_id)

    context_data = {
        'queue_index': queue_index,
        'job': job,
        'queue': queue,
    }
    return render(request, 'django_rq/delete_job.html', context_data)
Example #9
    def test_enqueue_dependents(self):
        """Enqueueing dependent jobs pushes all jobs in the depends set to the queue
        and removes them from DeferredJobQueue."""
        q = Queue()
        parent_job = Job.create(func=say_hello)
        parent_job.save()
        job_1 = q.enqueue(say_hello, depends_on=parent_job)
        job_2 = q.enqueue(say_hello, depends_on=parent_job)

        registry = DeferredJobRegistry(q.name, connection=self.testconn)
        self.assertEqual(
            set(registry.get_job_ids()),
            set([job_1.id, job_2.id])
        )
        # After the dependents are enqueued, job_1 and job_2 should be in the queue
        self.assertEqual(q.job_ids, [])
        q.enqueue_dependents(parent_job)
        self.assertEqual(set(q.job_ids), set([job_2.id, job_1.id]))
        self.assertFalse(self.testconn.exists(parent_job.dependents_key))

        # DeferredJobRegistry should also be empty
        self.assertEqual(registry.get_job_ids(), [])
Example #10
def deferred_jobs(request, queue_index):
    queue_index = int(queue_index)
    queue = get_queue_by_index(queue_index)

    registry = DeferredJobRegistry(queue.name, queue.connection)

    items_per_page = 100
    num_jobs = len(registry)
    page = int(request.GET.get('page', 1))
    jobs = []

    if num_jobs > 0:
        last_page = int(ceil(num_jobs / items_per_page))
        page_range = range(1, last_page + 1)
        offset = items_per_page * (page - 1)
        job_ids = registry.get_job_ids(offset, offset + items_per_page - 1)

        for job_id in job_ids:
            try:
                jobs.append(Job.fetch(job_id, connection=queue.connection))
            except NoSuchJobError:
                pass

    else:
        page_range = []

    context_data = {
        'queue': queue,
        'queue_index': queue_index,
        'jobs': jobs,
        'num_jobs': num_jobs,
        'page': page,
        'page_range': page_range,
        'job_status': 'Deferred',
    }
    return render(request, 'django_rq/jobs.html', context_data)
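The offset arithmetic above exists because registry.get_job_ids(start, end) is backed by ZRANGE, whose end index is inclusive. A small sketch of the bounds calculation (the helper name is illustrative):

def page_bounds(page, items_per_page=100):
    """Inclusive (start, end) indices for registry.get_job_ids()."""
    offset = items_per_page * (page - 1)
    return offset, offset + items_per_page - 1

print(page_bounds(1))  # (0, 99)
print(page_bounds(2))  # (100, 199)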
Example #11
def stats(request):
    queues = []
    for index, config in enumerate(QUEUES_LIST):

        queue = get_queue_by_index(index)
        connection = queue.connection

        queue_data = {
            'name': queue.name,
            'jobs': queue.count,
            'index': index,
            'connection_kwargs': connection.connection_pool.connection_kwargs
        }

        if queue.name == 'failed':
            queue_data['workers'] = '-'
            queue_data['finished_jobs'] = '-'
            queue_data['started_jobs'] = '-'
            queue_data['deferred_jobs'] = '-'

        else:
            connection = get_connection(queue.name)
            all_workers = Worker.all(connection=connection)
            queue_workers = [worker for worker in all_workers if queue in worker.queues]
            queue_data['workers'] = len(queue_workers)

            finished_job_registry = FinishedJobRegistry(queue.name, connection)
            started_job_registry = StartedJobRegistry(queue.name, connection)
            deferred_job_registry = DeferredJobRegistry(queue.name, connection)
            queue_data['finished_jobs'] = len(finished_job_registry)
            queue_data['started_jobs'] = len(started_job_registry)
            queue_data['deferred_jobs'] = len(deferred_job_registry)

        queues.append(queue_data)

    context_data = admin.site.each_context(request)
    context_data.update({
        'title': _('Django RQ'),
        'queues': queues
    })
    return render(request, 'django_rq/stats.html', context_data)
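The registry bookkeeping in this view also works outside Django. A minimal sketch of the per-queue counts, assuming a local Redis server:

from redis import Redis
from rq import Queue
from rq.registry import (DeferredJobRegistry, FinishedJobRegistry,
                         StartedJobRegistry)

queue = Queue('default', connection=Redis())
counts = {
    'finished': len(FinishedJobRegistry(queue.name, queue.connection)),
    'started': len(StartedJobRegistry(queue.name, queue.connection)),
    'deferred': len(DeferredJobRegistry(queue.name, queue.connection)),
}
print(counts)  # e.g. {'finished': 0, 'started': 0, 'deferred': 0}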
Example #12
def stats(request):
    queues = []
    workers_collections = collect_workers_by_connection(QUEUES_LIST)
    for index, config in enumerate(QUEUES_LIST):

        queue = get_queue_by_index(index)
        connection = queue.connection

        queue_data = {
            'name': queue.name,
            'jobs': queue.count() if queue.name == 'scheduled' else queue.count,
            'index': index,
            'connection_kwargs': connection.connection_pool.connection_kwargs
        }

        if queue.name in ['failed', 'scheduled']:
            queue_data['workers'] = '-'
            queue_data['finished_jobs'] = '-'
            queue_data['started_jobs'] = '-'
            queue_data['deferred_jobs'] = '-'

        else:
            connection = get_connection(queue.name)
            all_workers = get_all_workers_by_configuration(
                config['connection_config'],
                workers_collections
            )
            queue_workers = [worker for worker in all_workers if queue in worker.queues]
            queue_data['workers'] = len(queue_workers)

            finished_job_registry = FinishedJobRegistry(queue.name, connection)
            started_job_registry = StartedJobRegistry(queue.name, connection)
            deferred_job_registry = DeferredJobRegistry(queue.name, connection)
            queue_data['finished_jobs'] = len(finished_job_registry)
            queue_data['started_jobs'] = len(started_job_registry)
            queue_data['deferred_jobs'] = len(deferred_job_registry)

        queues.append(queue_data)

    context_data = {'queues': queues}
    return render(request, 'django_rq/stats.html', context_data)
Example #13
    def test_job_delete_removes_itself_from_registries(self):
        """job.delete() should remove itself from job registries"""
        connection = self.testconn
        job = Job.create(func=fixtures.say_hello, status=JobStatus.FAILED,
                         connection=self.testconn, origin='default')
        job.save()
        registry = FailedJobRegistry(connection=self.testconn)
        registry.add(job, 500)

        job.delete()
        self.assertFalse(job in registry)

        job = Job.create(func=fixtures.say_hello, status=JobStatus.FINISHED,
                         connection=self.testconn, origin='default')
        job.save()

        registry = FinishedJobRegistry(connection=self.testconn)
        registry.add(job, 500)

        job.delete()
        self.assertFalse(job in registry)

        job = Job.create(func=fixtures.say_hello, status=JobStatus.STARTED,
                         connection=self.testconn, origin='default')
        job.save()

        registry = StartedJobRegistry(connection=self.testconn)
        registry.add(job, 500)

        job.delete()
        self.assertFalse(job in registry)

        job = Job.create(func=fixtures.say_hello, status=JobStatus.DEFERRED,
                         connection=self.testconn, origin='default')
        job.save()

        registry = DeferredJobRegistry(connection=self.testconn)
        registry.add(job, 500)

        job.delete()
        self.assertFalse(job in registry)
Example #14
    def test_enqueue_dependents_on_multiple_queues(self):
        """Enqueueing dependent jobs on multiple queues pushes jobs in the queues
        and removes them from DeferredJobRegistry for each different queue."""
        q_1 = Queue("queue_1")
        q_2 = Queue("queue_2")
        parent_job = Job.create(func=say_hello)
        parent_job.save()
        job_1 = q_1.enqueue(say_hello, depends_on=parent_job)
        job_2 = q_2.enqueue(say_hello, depends_on=parent_job)

        # Each queue has its own DeferredJobRegistry
        registry_1 = DeferredJobRegistry(q_1.name, connection=self.testconn)
        self.assertEqual(
            set(registry_1.get_job_ids()),
            set([job_1.id])
        )
        registry_2 = DeferredJobRegistry(q_2.name, connection=self.testconn)

        parent_job.set_status(JobStatus.FINISHED)

        self.assertEqual(
            set(registry_2.get_job_ids()),
            set([job_2.id])
        )

        # After the dependents are enqueued, job_1 should be in queue_1
        # and job_2 in queue_2
        self.assertEqual(q_1.job_ids, [])
        self.assertEqual(q_2.job_ids, [])
        q_1.enqueue_dependents(parent_job)
        q_2.enqueue_dependents(parent_job)
        self.assertEqual(set(q_1.job_ids), set([job_1.id]))
        self.assertEqual(set(q_2.job_ids), set([job_2.id]))
        self.assertFalse(self.testconn.exists(parent_job.dependents_key))

        # DeferredJobRegistry should also be empty
        self.assertEqual(registry_1.get_job_ids(), [])
        self.assertEqual(registry_2.get_job_ids(), [])
Example #15
def empty_queue(queue_name, registry_name):
    if registry_name == "queued":
        q = Queue(queue_name)
        q.empty()
    elif registry_name == "failed":
        ids = FailedJobRegistry(queue_name).get_job_ids()
        for id in ids:
            delete_job_view(id)
    elif registry_name == "deferred":
        ids = DeferredJobRegistry(queue_name).get_job_ids()
        for id in ids:
            delete_job_view(id)
    elif registry_name == "started":
        ids = StartedJobRegistry(queue_name).get_job_ids()
        for id in ids:
            delete_job_view(id)
    elif registry_name == "finished":
        ids = FinishedJobRegistry(queue_name).get_job_ids()
        for id in ids:
            delete_job_view(id)
    return dict(status="OK")
Example #16
def get_queue_registry_jobs_count(queue_name, registry_name, offset, per_page):
    queue = Queue(queue_name)
    if registry_name != 'queued':
        if per_page >= 0:
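            # registry.get_job_ids() takes an inclusive end index
            # (ZRANGE semantics), so convert the page size into one here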
            per_page = offset + (per_page - 1)

        if registry_name == 'failed':
            current_queue = FailedJobRegistry(queue_name)
        elif registry_name == 'deferred':
            current_queue = DeferredJobRegistry(queue_name)
        elif registry_name == 'started':
            current_queue = StartedJobRegistry(queue_name)
        elif registry_name == 'finished':
            current_queue = FinishedJobRegistry(queue_name)
    else:
        current_queue = queue
    total_items = current_queue.count

    job_ids = current_queue.get_job_ids(offset, per_page)
    current_queue_jobs = [queue.fetch_job(job_id) for job_id in job_ids]
    jobs = [serialize_job(job) for job in current_queue_jobs]

    return (total_items, jobs)
Example #17
def serialize_queues(instance_number, queues):
    if scheduler_is_here:
        result_list = [
            dict(
                name=q.name,
                count=q.count,
                queued_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="queued",
                    per_page="8",
                    page="1",
                ),
                failed_job_registry_count=FailedJobRegistry(q.name).count,
                failed_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="failed",
                    per_page="8",
                    page="1",
                ),
                started_job_registry_count=StartedJobRegistry(q.name).count,
                started_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="started",
                    per_page="8",
                    page="1",
                ),
                deferred_job_registry_count=DeferredJobRegistry(q.name).count,
                deferred_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="deferred",
                    per_page="8",
                    page="1",
                ),
                finished_job_registry_count=FinishedJobRegistry(q.name).count,
                finished_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="finished",
                    per_page="8",
                    page="1",
                ),
                scheduled_job_registry_count=ScheduledJobRegistry(q.name).count,
                scheduled_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="scheduled",
                    per_page="8",
                    page="1",
                ),
            )
            for q in queues
        ]
    else:
        result_list = [
            dict(
                name=q.name,
                count=q.count,
                queued_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="queued",
                    per_page="8",
                    page="1",
                ),
                failed_job_registry_count=FailedJobRegistry(q.name).count,
                failed_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="failed",
                    per_page="8",
                    page="1",
                ),
                started_job_registry_count=StartedJobRegistry(q.name).count,
                started_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="started",
                    per_page="8",
                    page="1",
                ),
                deferred_job_registry_count=DeferredJobRegistry(q.name).count,
                deferred_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="deferred",
                    per_page="8",
                    page="1",
                ),
                finished_job_registry_count=FinishedJobRegistry(q.name).count,
                finished_url=url_for(
                    ".jobs_overview",
                    instance_number=instance_number,
                    queue_name=q.name,
                    registry_name="finished",
                    per_page="8",
                    page="1",
                ),
            )
            for q in queues
        ]
    return result_list
Example #18
def get_statistics():
    queues = []
    workers_collections = collect_workers_by_connection(QUEUES_LIST)
    for index, config in enumerate(QUEUES_LIST):

        queue = get_queue_by_index(index)
        connection = queue.connection
        connection_kwargs = connection.connection_pool.connection_kwargs

        # Raw access to the first item from the left of the redis list.
        # This might not be accurate, since a new job can be added at the
        # front with the `at_front` parameter.
        # Ideally rq should support Queue.oldest_job
        last_job_id = connection.lindex(queue.key, 0)
        last_job = queue.fetch_job(
            last_job_id.decode('utf-8')) if last_job_id else None
        if last_job:
            oldest_job_timestamp = to_localtime(last_job.enqueued_at)\
                .strftime('%Y-%m-%d, %H:%M:%S')
        else:
            oldest_job_timestamp = "-"

        # parser_class is not needed and is not JSON serializable
        try:
            del connection_kwargs['parser_class']
        except KeyError:
            pass

        queue_data = {
            'name': queue.name,
            'jobs': queue.count,
            'oldest_job_timestamp': oldest_job_timestamp,
            'index': index,
            'connection_kwargs': connection_kwargs
        }

        if queue.name == 'failed':
            queue_data['workers'] = '-'
            queue_data['finished_jobs'] = '-'
            queue_data['started_jobs'] = '-'
            queue_data['deferred_jobs'] = '-'

        elif RQ_SCHEDULER_INSTALLED and queue.name == 'scheduled':
            scheduler = get_scheduler(connection=queue.connection)
            queue_data['jobs'] = scheduler.count()
            queue_data['workers'] = '-'
            queue_data['finished_jobs'] = '-'
            queue_data['started_jobs'] = '-'
            queue_data['deferred_jobs'] = '-'

        else:
            connection = get_connection(queue.name)
            all_workers = get_all_workers_by_configuration(
                config['connection_config'], workers_collections)
            queue_workers = [
                worker for worker in all_workers if queue in worker.queues
            ]
            queue_data['workers'] = len(queue_workers)

            finished_job_registry = FinishedJobRegistry(queue.name, connection)
            started_job_registry = StartedJobRegistry(queue.name, connection)
            deferred_job_registry = DeferredJobRegistry(queue.name, connection)
            queue_data['finished_jobs'] = len(finished_job_registry)
            queue_data['started_jobs'] = len(started_job_registry)
            queue_data['deferred_jobs'] = len(deferred_job_registry)

        queues.append(queue_data)
    return {'queues': queues, 'display_scheduled_jobs': RQ_SCHEDULER_INSTALLED}
Example #19
def _get_deferred_count(rq, queue_name='default'):
    reg = DeferredJobRegistry(queue_name,
                              connection=rq.connection)
    return reg.count
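reg.count here and len(registry) in the stats views are equivalent; both read the cardinality of the registry's sorted set. A hedged usage sketch (the queue names are illustrative):

from redis import Redis
from rq.registry import DeferredJobRegistry

conn = Redis()
for name in ('default', 'high', 'low'):
    registry = DeferredJobRegistry(name, connection=conn)
    print(name, registry.count, len(registry))  # the two counts match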
Example #20
 def deferred_job_registry(self):
     """Returns this queue's DeferredJobRegistry."""
     from rq.registry import DeferredJobRegistry
     return DeferredJobRegistry(queue=self, job_class=self.job_class)
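With this property in place, callers no longer need to construct the registry by hand. A minimal usage sketch, assuming a local Redis server:

from redis import Redis
from rq import Queue

queue = Queue('default', connection=Redis())
registry = queue.deferred_job_registry  # built with queue=self, so names stay in sync
print(registry.key)  # 'rq:deferred:default'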
Example #21
    def enqueue_dependents(self, job, pipeline=None):
        """Enqueues all jobs in the given job's dependents set and clears it.

        When called without a pipeline, this method uses WATCH/MULTI/EXEC.
        If you pass a pipeline, only MULTI is called. The rest is up to the
        caller.
        """
        from .registry import DeferredJobRegistry

        pipe = pipeline if pipeline is not None else self.connection.pipeline()
        dependents_key = job.dependents_key

        while True:
            try:
                # if a pipeline is passed, the caller is responsible for calling WATCH
                # to ensure all jobs are enqueued
                if pipeline is None:
                    pipe.watch(dependents_key)

                dependent_job_ids = [as_text(_id)
                                     for _id in pipe.smembers(dependents_key)]

                jobs_to_enqueue = [
                    dependent_job for dependent_job
                    in self.job_class.fetch_many(
                        dependent_job_ids,
                        connection=self.connection,
                        serializer=self.serializer
                    ) if dependent_job.dependencies_are_met(
                        exclude_job_id=job.id,
                        pipeline=pipe
                    )
                ]

                pipe.multi()

                for dependent in jobs_to_enqueue:
                    registry = DeferredJobRegistry(dependent.origin,
                                                   self.connection,
                                                   job_class=self.job_class)
                    registry.remove(dependent, pipeline=pipe)
                    if dependent.origin == self.name:
                        self.enqueue_job(dependent, pipeline=pipe)
                    else:
                        queue = self.__class__(name=dependent.origin, connection=self.connection)
                        queue.enqueue_job(dependent, pipeline=pipe)

                pipe.delete(dependents_key)

                if pipeline is None:
                    pipe.execute()

                break
            except WatchError:
                if pipeline is None:
                    continue
                else:
                    # if the pipeline comes from the caller, we re-raise the
                    # exception as it is the responsibility of the caller to
                    # handle it
                    raise
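The retry loop above is the standard optimistic-locking pattern in redis-py: WATCH the key, read it, switch to MULTI, and start over if EXEC fails because the watched key changed. A stripped-down sketch of the same pattern, independent of rq (the key name is illustrative):

from redis import Redis, WatchError

conn = Redis()

def pop_all_members(key='example:dependents'):
    """Atomically read and delete a set, retrying on concurrent change."""
    with conn.pipeline() as pipe:
        while True:
            try:
                pipe.watch(key)               # pipeline executes immediately from here on
                members = pipe.smembers(key)  # read happens while the key is WATCHed
                pipe.multi()                  # back to buffered, transactional mode
                pipe.delete(key)
                pipe.execute()                # raises WatchError if the key changed
                return members
            except WatchError:
                continue                      # lost the race; retry from the top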
Example #22
 def getDeferredExperiments(cls):
     with Connection(redis.from_url(current_app.config['REDIS_URL'])) as conn:
         registry = DeferredJobRegistry('default', connection=conn)
         return [Job.fetch(id, connection=conn) for id in registry.get_job_ids()]
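Fetching each job individually costs one Redis round trip per job; rq's Job.fetch_many (used in the enqueue_dependents example above) batches the lookups. A hedged alternative sketch:

from redis import Redis
from rq.job import Job
from rq.registry import DeferredJobRegistry

conn = Redis()
registry = DeferredJobRegistry('default', connection=conn)
jobs = [job for job
        in Job.fetch_many(registry.get_job_ids(), connection=conn)
        if job is not None]  # fetch_many yields None for ids with no stored job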