def test_empty_removes_jobs(self):
    """Emptying a queue deletes the associated job objects"""
    queue = Queue('example')
    enqueued = queue.enqueue(say_hello)
    # The job hash must exist right after enqueueing ...
    self.assertTrue(Job.exists(enqueued.id))
    queue.empty()
    # ... and must be gone once the queue has been emptied.
    self.assertFalse(Job.exists(enqueued.id))
def after_create(self, context, resource):
    """Add custom jobs

    Enqueues the mimetype/DC-parameter/SHA256 background jobs for a
    newly created resource, each guarded by a deterministic job id so
    a job is never enqueued twice for the same resource.
    """
    if resource.get("package_id") is None:
        # `resource` is a package dictionary, because we implemented
        # both IResourceController and IPackageController. Stop here.
        # https://github.com/ckan/ckan/issues/2949
        return
    depends_on = []
    # BUG FIX: the original wrapped the raw config value in a list
    # (`[config.get("ckan.plugins")]`), so `"dcor_depot" in extensions`
    # only matched if dcor_depot was the *only* enabled plugin.
    # "ckan.plugins" is conventionally a space-separated string; newer
    # CKAN versions may already hand back a list, so accept both.
    plugins = config.get("ckan.plugins", "")
    extensions = plugins if isinstance(plugins, list) else plugins.split()
    package_job_id = f"{resource['package_id']}_{resource['position']}_"
    # Are we waiting for symlinking (ckanext-dcor_depot)?
    # (This makes wait_for_resource really fast ;)
    if "dcor_depot" in extensions:
        # Wait for the resource to be moved to the depot.
        depends_on.append(package_job_id + "symlink")
    redis_connect = ckan_redis_connect()

    def _enqueue_once(job_func, suffix, title, queue, timeout):
        # Enqueue `job_func` unless a job with this id already exists.
        job_id = package_job_id + suffix
        if not Job.exists(job_id, connection=redis_connect):
            toolkit.enqueue_job(job_func,
                                [resource],
                                title=title,
                                queue=queue,
                                rq_kwargs={
                                    "timeout": timeout,
                                    "job_id": job_id,
                                    # copy so later mutations of
                                    # `depends_on` don't leak in
                                    "depends_on": copy.copy(depends_on)})

    # Add the fast jobs first.
    if resource.get('mimetype') in DC_MIME_TYPES:
        _enqueue_once(jobs.set_format_job, "format",
                      "Set mimetype for resource", "dcor-short", 500)
        _enqueue_once(jobs.set_dc_config_job, "dcparms",
                      "Set DC parameters for resource", "dcor-short", 500)
    # The SHA256 job comes last.
    _enqueue_once(jobs.set_sha256_job, "sha256",
                  "Set SHA256 hash for resource", "dcor-normal", 3600)
def status(self):
    """Return the RQ status of ``self.task_id``, or ``"unknown"``
    if no such job exists in Redis."""
    if not Job.exists(self.task_id, connection=redis_connection):
        return "unknown"
    job = Job.fetch(self.task_id, connection=redis_connection)
    # Make sure we report fresh state, not a stale cached copy.
    job.refresh()
    return job.status
def get_job_state(job_key):
    """Return a status dictionary for the RQ job identified by *job_key*.

    The result always has "message", "job_state" and "content" keys;
    "content" carries the job's args/kwargs plus, depending on state,
    its result or traceback.
    """
    response = {
        "message": JobMessage.NOT_FOUND,
        "job_state": JobState.NOT_FOUND,
        "content": {},
    }
    if not Job.exists(job_key, conn):
        return response
    job = Job.fetch(job_key, connection=conn)
    response["content"].update(args=job.args)
    response["content"].update(kwargs=job.kwargs)
    if job.is_finished:
        response["message"] = JobMessage.FINISHED
        response["job_state"] = JobState.FINISHED
        response["content"].update(result=job.result)
    elif job.is_failed:
        response["message"] = JobMessage.FAILED
        response["job_state"] = JobState.FAILED
        # BUG FIX: the original wrote to response["traceback"], a key that
        # was never created and therefore raised KeyError for every failed
        # job. The traceback belongs with the other job data in "content".
        response["content"].update(traceback=job.exc_info)
    elif job.is_queued:
        response["message"] = JobMessage.WAITING
        response["job_state"] = JobState.WAITING
    else:
        # Neither finished, failed nor queued -> treat as running.
        response["message"] = JobMessage.RUNNING
        response["job_state"] = JobState.RUNNING
    return response
def test_clear_queue(self):
    """Test that the queue clear actually clears the queue."""
    queue = get_queue("django_rq_test")
    index = get_queue_index("django_rq_test")
    enqueued = queue.enqueue(access_self)
    # Trigger the clear view, then verify the job is fully gone.
    clear_url = reverse("rq_clear", args=[index])
    self.client.post(clear_url, {"post": "yes"})
    self.assertFalse(Job.exists(enqueued.id, connection=queue.connection))
    self.assertNotIn(enqueued.id, queue.job_ids)
def message(self):
    """Return the stored task result if present, otherwise the live
    job's "message" meta entry, otherwise ``"unknown"``."""
    stored = self.task_result
    if stored is not None and stored != "":
        return stored
    if not Job.exists(self.task_id, connection=redis_connection):
        return "unknown"
    live_job = Job.fetch(self.task_id, connection=redis_connection)
    return live_job.meta.get("message", None)
def status(self):
    """Return the stored task result if present, otherwise the live RQ
    job status, otherwise ``"unknown"``."""
    stored = self.task_result
    if stored is not None and stored != "":
        return stored
    if not Job.exists(self.task_id, connection=redis_connection):
        return "unknown"
    live_job = Job.fetch(self.task_id, connection=redis_connection)
    return live_job.status
def last_progress_update(self):
    """Return ``"run complete"`` once a result is stored, otherwise the
    live job's "updated" meta entry, otherwise ``"unknown"``."""
    stored = self.task_result
    if stored is not None and stored != "":
        return "run complete"
    if not Job.exists(self.task_id, connection=redis_connection):
        return "unknown"
    live_job = Job.fetch(self.task_id, connection=redis_connection)
    return live_job.meta.get("updated", None)
def progress(self):
    """Return 100 once a result is stored, otherwise the live job's
    "progress" meta entry (default 0), otherwise the string
    ``"unknown"`` — note the mixed int/str return types are kept
    as in the original contract."""
    stored = self.task_result
    if stored is not None and stored != "":
        return 100
    if not Job.exists(self.task_id, connection=redis_connection):
        return "unknown"
    live_job = Job.fetch(self.task_id, connection=redis_connection)
    return live_job.meta.get("progress", 0)
def test_clear_queue(self):
    """Test that the queue clear actually clears the queue."""
    queue = get_queue('django_rq_test')
    index = get_queue_index('django_rq_test')
    enqueued = queue.enqueue(access_self)
    # Post to the clear view, then verify the job is fully gone.
    clear_url = reverse('rq_clear', args=[index])
    self.client.post(clear_url, {'post': 'yes'})
    self.assertFalse(Job.exists(enqueued.id, connection=queue.connection))
    self.assertNotIn(enqueued.id, queue.job_ids)
def cancel_all(queue_name):
    """Cancel every existing job on the named queue and return a dict
    with the number of jobs cancelled."""
    queue = Queue(queue_name)
    cancelled = 0
    for jid in queue.get_job_ids():
        # Ids can linger on the queue after the job hash expired;
        # only cancel jobs that still exist.
        if not Job.exists(jid, queue.connection):
            continue
        cancel_job(jid)
        cancelled += 1
    return dict(status='OK', count=cancelled)
def get_task(task_id):
    """Return the result of a finished task, its status string while it
    is not finished, or ``"No such task."`` for unknown ids."""
    if not Job.exists(task_id, connection=connection):
        return "No such task."
    job = Job.fetch(task_id, connection=connection)
    current = job.get_status()
    return job.result if current == "finished" else current
def fetch(self):
    """Return the cached RQ job for ``self.job_id``, refreshing it if
    already cached, fetching it from Redis otherwise. Returns the
    cached value unchanged (possibly None) when there is no job id or
    the job does not exist."""
    if not self.job_id:
        return self._enqueued_job
    job_id = str(self.job_id)
    if self._enqueued_job:
        # Already cached — just sync its state from Redis.
        self._enqueued_job.refresh()
        return self._enqueued_job
    conn = get_connection(self.queue)
    if RqJob.exists(job_id, connection=conn):
        self._enqueued_job = RqJob.fetch(job_id, connection=conn)
    return self._enqueued_job
def test_delete_job(self):
    """
    In addition to deleting job from Redis, the job id also needs to be
    deleted from Queue.
    """
    queue = get_queue('django_rq_test')
    enqueued = queue.enqueue(access_self)
    delete_url = reverse('rq_delete_job',
                         args=[queue.connection_name, queue.name,
                               enqueued.id])
    self.client.post(delete_url, {'post': 'yes'})
    # Gone from Redis ...
    self.assertFalse(Job.exists(enqueued.id, connection=queue.connection))
    # ... and gone from the queue's id list.
    self.assertNotIn(enqueued.id, queue.job_ids)
def test_delete_job(self):
    """
    In addition to deleting job from Redis, the job id also needs to be
    deleted from Queue.
    """
    queue = get_queue("django_rq_test")
    index = get_queue_index("django_rq_test")
    enqueued = queue.enqueue(access_self)
    delete_url = reverse("rq_delete_job", args=[index, enqueued.id])
    self.client.post(delete_url, {"post": "yes"})
    # Gone from Redis ...
    self.assertFalse(Job.exists(enqueued.id, connection=queue.connection))
    # ... and gone from the queue's id list.
    self.assertNotIn(enqueued.id, queue.job_ids)
def after_create(self, context, resource):
    """Generate condensed dataset"""
    # Only DC resources get a condensed dataset.
    if resource.get('mimetype') not in DC_MIME_TYPES:
        return
    pkg_job_id = f"{resource['package_id']}_{resource['position']}_"
    jid_condense = pkg_job_id + "condense"
    # Deterministic job id guards against enqueueing twice.
    if Job.exists(jid_condense, connection=ckan_redis_connect()):
        return
    toolkit.enqueue_job(generate_condensed_resource_job,
                        [resource],
                        title="Create condensed dataset",
                        queue="dcor-long",
                        rq_kwargs={"timeout": 3600,
                                   "job_id": jid_condense})
def test_delete_job(self):
    """
    In addition to deleting job from Redis, the job id also needs to be
    deleted from Queue.
    """
    queue = get_queue('django_rq_test')
    index = get_queue_index('django_rq_test')
    enqueued = queue.enqueue(access_self)
    delete_url = reverse('rq_delete_job', args=[index, enqueued.id])
    self.client.post(delete_url, {'post': 'yes'})
    # Gone from Redis ...
    self.assertFalse(Job.exists(enqueued.id, connection=queue.connection))
    # ... and gone from the queue's id list.
    self.assertNotIn(enqueued.id, queue.job_ids)
def compute(self):
    """Add any metadata to this object from the model run output"""
    try:
        self.set_trackline()
    except Exception:
        # BUG FIX: this was a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt; narrow it to Exception while
        # keeping the deliberate best-effort behavior.
        app.logger.warning(
            "Could not process trackline results. URL may be invalid?")
    if Job.exists(self.task_id, connection=redis_connection):
        job = Job.fetch(self.task_id, connection=redis_connection)
        # NOTE(review): ``unicode`` is Python-2-only and raises NameError
        # on Python 3 (where ``str`` is the equivalent) — confirm the
        # target interpreter before changing it.
        self.task_result = unicode(job.meta.get("outcome", ""))
    self.save()
def test_action_delete_jobs(self):
    """Bulk-delete several jobs through the actions view."""
    queue = get_queue('django_rq_test')
    queue_index = get_queue_index('django_rq_test')
    # enqueue some jobs
    job_ids = [queue.enqueue(access_self).id for _ in range(0, 3)]
    # remove those jobs using view
    self.client.post(reverse('rq_actions', args=[queue_index]),
                     {'action': 'delete', 'job_ids': job_ids})
    # check if jobs are removed
    for jid in job_ids:
        self.assertFalse(Job.exists(jid, connection=queue.connection))
        self.assertNotIn(jid, queue.job_ids)
def after_create(self, context, resource):
    """Generate preview data

    Enqueues the preview-image job for a newly created DC resource,
    optionally depending on the dcor_depot symlink job.
    """
    if resource.get('mimetype') in DC_MIME_TYPES:
        pkg_job_id = f"{resource['package_id']}_{resource['position']}_"
        depends_on = []
        # BUG FIX: the original wrapped the raw config value in a list
        # (`[config.get("ckan.plugins")]`), so `"dcor_depot" in extensions`
        # only matched if dcor_depot was the *only* enabled plugin.
        # "ckan.plugins" is conventionally a space-separated string; newer
        # CKAN versions may already hand back a list, so accept both.
        plugins = config.get("ckan.plugins", "")
        extensions = plugins if isinstance(plugins, list) else plugins.split()
        # Are we waiting for symlinking (ckanext-dcor_depot)?
        # (This makes wait_for_resource really fast ;)
        if "dcor_depot" in extensions:
            # Wait for the resource to be moved to the depot.
            jid_sl = pkg_job_id + "symlink"
            depends_on.append(jid_sl)
        jid_preview = pkg_job_id + "preview"
        if not Job.exists(jid_preview, connection=ckan_redis_connect()):
            toolkit.enqueue_job(create_preview_job,
                                [resource],
                                title="Create resource preview image",
                                queue="dcor-normal",
                                rq_kwargs={
                                    "timeout": 3600,
                                    "job_id": jid_preview,
                                    "depends_on": copy.copy(depends_on)})