def get_async_response(job_uuid, request, xform, count=0):
    """Return the status of the async export task ``job_uuid``.

    On a broker/database connection error the lookup is retried once
    before giving up with ``ServiceUnavailable``.
    """

    def _get_response():
        # Only called once celery reports SUCCESS, so the Export row
        # for this task id is expected to exist.
        export = get_object_or_404(Export, task_id=job_uuid)
        return export_async_export_response(request, export)

    try:
        job = AsyncResult(job_uuid)
        if job.state == 'SUCCESS':
            resp = _get_response()
        else:
            resp = async_status(celery_state_to_status(job.state))

        # Attach whatever the task reported: a dict merges directly,
        # anything else is exposed as a 'progress' string.
        if job.result:
            task_result = job.result
            if isinstance(task_result, dict):
                resp.update(task_result)
            else:
                resp.update({'progress': str(task_result)})
    except (OperationalError, ConnectionError) as error:
        report_exception("Connection Error", error, sys.exc_info())
        if count > 0:
            raise ServiceUnavailable

        return get_async_response(job_uuid, request, xform, count + 1)
    except BacklogLimitExceeded:
        # most likely still processing
        resp = async_status(celery_state_to_status('PENDING'))

    return resp
def get_async_csv_submission_status(job_uuid):
    """
    Gets CSV Submission progress or result.

    Can be used to poll long running submissions.
    :param str job_uuid: The submission job uuid returned by _submit_csv.delay
    :return: Dict with import progress info (insertions & total)
    :rtype: Dict
    """
    if not job_uuid:
        return async_status(FAILED, u'Empty job uuid')

    job = AsyncResult(job_uuid)
    try:
        # A finished task exposes its return value via ``result``;
        # fall back to the raw celery state string otherwise.
        result = job.result or job.state

        # Task raised: surface the exception message with the mapped
        # status.  NOTE: was ``isinstance(result, (Exception))`` — the
        # parentheses looked like a tuple but were not; plain Exception
        # is equivalent and unambiguous.
        if isinstance(result, Exception):
            return async_status(
                celery_state_to_status(job.state), job.result.message)

        # A bare state string means no progress dict is available yet.
        if isinstance(result, (str, unicode)):
            return async_status(celery_state_to_status(job.state))
    except BacklogLimitExceeded:
        # Broker backlog exceeded — treat the job as still pending.
        return async_status(celery_state_to_status('PENDING'))

    return result
def get_async_csv_submission_status(job_uuid):
    """
    Gets CSV Submission progress or result.

    Can be used to poll long running submissions.
    :param str job_uuid: The submission job uuid returned by _submit_csv.delay
    :return: Dict with import progress info (insertions & total)
    :rtype: Dict
    """
    if not job_uuid:
        return async_status(FAILED, u'Empty job uuid')

    job = AsyncResult(job_uuid)
    try:
        # Still queued/running: report the mapped state plus whatever
        # progress metadata the task attached via ``job.info``.
        if job.state not in ['SUCCESS', 'FAILURE']:
            response = async_status(celery_state_to_status(job.state))
            response.update(job.info)
            return response

        # Failed tasks store the raised exception as ``job.result``.
        if job.state == 'FAILURE':
            return async_status(
                celery_state_to_status(job.state), text(job.result))
    except BacklogLimitExceeded:
        # Broker backlog exceeded — treat the job as still pending.
        return async_status(celery_state_to_status('PENDING'))

    # SUCCESS: return the task's actual result payload.
    return job.get()
def get_async_response(job_uuid, request, xform, count=0):
    """Return the status of the async export task ``job_uuid``.

    :param str job_uuid: celery task id of the export job.
    :param request: the HTTP request, forwarded to the response builder.
    :param xform: the XForm the export belongs to.
    :param int count: internal retry counter for connection errors.
    :raises ServiceUnavailable: when the broker connection fails twice.
    """
    try:
        job = AsyncResult(job_uuid)
        if job.state == 'SUCCESS':
            export_id = job.result
            export = get_object_or_404(Export, id=export_id)
            resp = _export_async_export_response(request, xform, export)
        else:
            resp = async_status(celery_state_to_status(job.state))

        # append task result to the response.  FIX: this used a
        # conditional *expression* purely for its side effects
        # (``resp.update(...) if ... else resp.update(...)``) — a plain
        # if/else statement is the idiomatic, readable form.
        if job.result:
            if isinstance(job.result, dict):
                resp.update(job.result)
            else:
                resp.update({'progress': str(job.result)})
    except ConnectionError as e:
        if count > 0:
            raise ServiceUnavailable(unicode(e))

        return get_async_response(job_uuid, request, xform, count + 1)
    except BacklogLimitExceeded:
        # most likely still processing
        resp = async_status(celery_state_to_status('PENDING'))

    return resp
def get_async_response(job_uuid, request, xform, count=0):
    """Return the status of the async export task ``job_uuid``.

    :param str job_uuid: celery task id of the export job.
    :param request: the HTTP request, forwarded to the response builder.
    :param xform: the XForm the export belongs to.
    :param int count: internal retry counter for connection errors.
    :raises ServiceUnavailable: when the broker connection fails twice.
    """
    try:
        job = AsyncResult(job_uuid)
        if job.state == 'SUCCESS':
            export_id = job.result
            export = Export.objects.get(id=export_id)
            resp = _export_async_export_response(request, xform, export)
        else:
            resp = async_status(celery_state_to_status(job.state))
        # FIX: ``resp`` was computed and then silently discarded — the
        # success path fell off the end and returned None.  Return it,
        # consistent with the other get_async_response variants.
        return resp
    # FIX: ``except ConnectionError, e:`` is Python-2-only syntax and a
    # SyntaxError on Python 3; ``as`` works on 2.6+ and 3.
    except ConnectionError as e:
        if count > 0:
            raise ServiceUnavailable(unicode(e))

        return get_async_response(job_uuid, request, xform, count + 1)
def test_celery_state_to_status(self):
    """Every celery state maps to its async status; unknown -> FAILED."""
    expected_mapping = [
        (async_status.PENDING, states.PENDING),
        (async_status.STARTED, states.STARTED),
        (async_status.RETRY, states.RETRY),
        (async_status.FAILED, states.FAILURE),
        (async_status.SUCCESSFUL, states.SUCCESS),
        # anything unrecognised is reported as FAILED
        (async_status.FAILED, '123456'),
    ]
    for expected, celery_state in expected_mapping:
        self.assertEqual(
            expected, async_status.celery_state_to_status(celery_state))
def test_celery_state_to_status(self):
    """Every celery state maps to its async status; unknown -> FAILED."""
    expected_mapping = [
        (async_status.PENDING, states.PENDING),
        (async_status.STARTED, states.STARTED),
        (async_status.RETRY, states.RETRY),
        (async_status.FAILED, states.FAILURE),
        (async_status.SUCCESSFUL, states.SUCCESS),
        # anything unrecognised is reported as FAILED
        (async_status.FAILED, '123456'),
    ]
    for expected, celery_state in expected_mapping:
        self.assertEqual(
            expected, async_status.celery_state_to_status(celery_state))
resp = _export_async_export_response(request, xform, export) else: resp = async_status(celery_state_to_status(job.state)) # append task result to the response if job.result: resp.update(job.result) if isinstance(job.result, dict) else \ resp.update({'progress': str(job.result)}) except ConnectionError, e: if count > 0: raise ServiceUnavailable(unicode(e)) return get_async_response(job_uuid, request, xform, count + 1) except BacklogLimitExceeded: # most likely still processing resp = async_status(celery_state_to_status('PENDING')) return resp def response_for_format(data, format=None): if format == 'xml': formatted_data = data.xml elif format == 'xls': if not data.xls: raise Http404() formatted_data = data.xls else: formatted_data = json.loads(data.json) return Response(formatted_data)