def recivekeyTask(msg):
    """Split received key material and fan out five DB save tasks.

    ``msg`` is expected to carry ``keyfile`` (a comma-separated key string),
    ``productId`` and ``status``.  On success a type-2 status message is
    pushed onto ``uploadQueue``; on any error the exception object is
    returned (original error-as-return contract kept).
    """
    import time

    try:
        # Decryption currently disabled; keyfile arrives as a plain
        # comma-separated list.
        # keys = decrypt(keyring.get_password("DRMDEMO", 'private'),
        #                a2b_hex(msg['keyfile'])).decode().split(',')
        keys = msg['keyfile'].split(',')  # renamed: original shadowed builtin `list`
    except Exception as e:
        return e
    try:
        save_tasks = (saveDbOneTask, saveDbTwoTask, saveDbThreeTask,
                      saveDbFourTask, saveDbFiveTask)
        # An IndexError from a short key list is caught below, matching the
        # original behaviour of returning the exception to the caller.
        results = [
            AsyncResult(id=save_tasks[i].delay(msg['productId'], keys[i]).id)
            for i in range(5)
        ]
        # Poll until every save task reports success.  The short sleep fixes
        # the original tight busy-wait (which also used bitwise '&' instead
        # of 'and' on booleans), so we no longer burn a full CPU core.
        # NOTE(review): like the original, this never exits if a subtask
        # FAILS (successful() stays False) — confirm whether a failure path
        # is needed.
        while not all(r.successful() for r in results):
            time.sleep(0.1)
    except Exception as e:
        return e
    msg = {'type': 2, 'status': msg['status'], 'data': int(msg['productId'])}
    json_msg = json.dumps(msg)
    try:
        uploadQueue.put(json_msg)
    except Exception as e:
        return e
    return 'allot save db success'
def get_result(self, request):
    """Report an async task's state: JSON for AJAX clients, a page otherwise."""
    from celery.result import AsyncResult
    task = AsyncResult(request.GET.get('async_id'))

    if 'ajax' not in self.request.GET:
        # Non-AJAX: render the final view once done, else keep waiting.
        if task.ready():
            return self.success(task.info) if task.successful() else self.error(task.info)
        return render(request, 'pretixpresale/waiting.html')

    payload = {
        'async_id': task.id,
        'ready': task.ready()
    }
    if task.ready():
        if task.successful():
            success_text = self.get_success_message(task.info)
            if success_text:
                messages.success(self.request, success_text)
            # TODO: Do not store message if the ajax client states that it will
            # not redirect but handles the message itself
            payload.update({
                'redirect': self.get_success_url(task.info),
                'message': self.get_success_message(task.info)
            })
        else:
            messages.error(self.request, self.get_error_message(task.info))
            # TODO: Do not store message if the ajax client states that it will
            # not redirect but handles the message itself
            payload.update({
                'redirect': self.get_error_url(),
                'message': self.get_error_message(task.info)
            })
    return JsonResponse(payload)
def get_result(self, request):
    """Poll an async task and answer either as JSON (AJAX) or as a full page."""
    from celery.result import AsyncResult
    result = AsyncResult(request.GET.get('async_id'))
    is_ajax = 'ajax' in self.request.GET
    done = result.ready()

    if is_ajax:
        data = {'async_id': result.id, 'ready': done}
        if done:
            ok = result.successful()
            if ok:
                smes = self.get_success_message(result.info)
                if smes:
                    messages.success(self.request, smes)
                # TODO: Do not store message if the ajax client states that it
                # will not redirect but handles the message itself
                data['redirect'] = self.get_success_url(result.info)
                data['message'] = self.get_success_message(result.info)
            else:
                messages.error(self.request, self.get_error_message(result.info))
                # TODO: Do not store message if the ajax client states that it
                # will not redirect but handles the message itself
                data['redirect'] = self.get_error_url()
                data['message'] = self.get_error_message(result.info)
        return JsonResponse(data)

    if done:
        if result.successful():
            return self.success(result.info)
        return self.error(result.info)
    return render(request, 'pretixpresale/waiting.html')
def getTasksInfo(tasks, forgetIfFinished=True):
    """Refresh status/result info for a list of task descriptors in place.

    Each descriptor is a dict with at least an ``'id'`` key (and optionally a
    ``'children'`` dict of child descriptors keyed by name).  Returns a tuple
    ``(tasks, hasFinished, errors)`` where ``hasFinished`` reflects only the
    LAST task in the list and ``errors`` collects error strings from every
    failed task or child.  Passing ``None`` returns ``(None, False, None)``.
    """
    if tasks is None:
        return None, False, None
    if isinstance(tasks, str):
        # Descriptors may arrive JSON-encoded.
        tasks = json.loads(tasks)
    errors = []
    for t in range(len(tasks)):
        result = AsyncResult(tasks[t]['id'])
        if result.ready():
            tasks[t]['successful'] = result.successful()
            if tasks[t]['successful']:
                tasks[t]['info'] = None
            else:
                # get() re-raises the task's exception; either way we record
                # its string form as the error message.
                try:
                    error = str(result.get())
                    errors.append(error)
                except Exception as e:
                    error = str(e)
                    errors.append(error)
                tasks[t]['info'] = {}
                tasks[t]['info']['message'] = error
            if forgetIfFinished:
                # Drop the result from the backend once consumed.
                result.forget()
        elif result.info is not None:
            tasks[t]['info'] = result.info
        if result.status is not None:
            tasks[t]['status'] = result.status
        if 'children' in tasks[t]:
            # Same bookkeeping for each child task, plus a finished counter.
            numDone = 0
            for key in tasks[t]['children']:
                cResult = AsyncResult(tasks[t]['children'][key]['id'])
                if cResult.ready():
                    numDone += 1
                    tasks[t]['children'][key][
                        'successful'] = cResult.successful()
                    if tasks[t]['children'][key]['successful']:
                        tasks[t]['children'][key]['info'] = None
                    else:
                        try:
                            error = str(cResult.get())
                            errors.append(error)
                        except Exception as e:
                            error = str(e)
                            errors.append(error)
                        tasks[t]['children'][key]['info'] = {}
                        tasks[t]['children'][key]['info'][
                            'message'] = error
                    if forgetIfFinished:
                        cResult.forget()
                elif cResult.info is not None:
                    tasks[t]['children'][key]['info'] = cResult.info
                if cResult.status is not None:
                    tasks[t]['children'][key]['status'] = cResult.status
            tasks[t]['num_done'] = numDone
    # Overall "finished" is judged by the last task in the list only.
    lastResult = AsyncResult(tasks[-1]['id'])
    hasFinished = lastResult.ready()
    return tasks, hasFinished, errors
def test_successful(self):
    """successful() is True only for the task that completed without error."""
    succeeded = AsyncResult(self.task1["id"])
    failed_a = AsyncResult(self.task3["id"])
    failed_b = AsyncResult(self.task4["id"])
    self.assertTrue(succeeded.successful())
    self.assertFalse(failed_a.successful())
    self.assertFalse(failed_b.successful())
def test_successful(self):
    """successful() is True for completed-OK tasks, False for failed/pending ones."""
    succeeded = AsyncResult(self.task1["id"])
    failed_a = AsyncResult(self.task3["id"])
    failed_b = AsyncResult(self.task4["id"])
    self.assertTrue(succeeded.successful())
    self.assertFalse(failed_a.successful())
    self.assertFalse(failed_b.successful())
    # An id the backend has never seen must not report success either.
    never_run = AsyncResult(gen_unique_id())
    self.assertFalse(never_run.successful())
def test_successful(self):
    """successful() is True for completed-OK tasks, False for failed/pending ones."""
    succeeded = AsyncResult(self.task1['id'])
    failed_a = AsyncResult(self.task3['id'])
    failed_b = AsyncResult(self.task4['id'])
    self.assertTrue(succeeded.successful())
    self.assertFalse(failed_a.successful())
    self.assertFalse(failed_b.successful())
    # A fresh, never-dispatched id must not report success.
    never_run = AsyncResult(uuid())
    self.assertFalse(never_run.successful())
def get(self, request, *args, **kwargs):
    """Return JSON saying whether the task finished and, if so, its result.

    Note: with no task_id in the URL kwargs, ``task_ready`` is None (JSON null),
    matching the original truthiness-based short-circuit.
    """
    task_id = self.kwargs.get('task_id', None)
    outcome = AsyncResult(task_id) if task_id else None
    is_ready = outcome and outcome.ready()
    if not is_ready:
        return JsonResponse({'task_ready': is_ready})
    ok = outcome.successful()
    return JsonResponse({
        'task_ready': is_ready,
        'task_successful': ok,
        'task_result': outcome.result if ok else None
    })
def list(self, request, *args, **kwargs):
    """Answer whether the scheduling task identified by ``uid`` has finished.

    400 without a uid; otherwise ``ready`` plus the schedule id (or None).
    """
    uid = request.query_params.get('uid', False)
    if not uid:
        return Response(status=status.HTTP_400_BAD_REQUEST)
    outcome = AsyncResult(uid, app=app)
    if outcome.successful():
        return Response({
            "ready": outcome.successful(),
            "schedule_id": outcome.get()
        })
    return Response({"ready": outcome.successful(), "schedule_id": None})
def query_results(owner, app_name, job_id):
    """Return "YES"/"NO"/"FAIL" for a job, dispatching on the app's cluster type.

    "single-core" apps are tracked via Celery, "dask" apps via a Dask Future;
    anything else yields a JSON error payload with a 404.
    """
    cluster_type = get_cluster_type(owner, app_name)
    if cluster_type == "single-core":
        result = AsyncResult(job_id)
        print("celery result", result.state)
        if result.ready() and result.successful():
            return "YES"
        if result.failed():
            return "FAIL"
        return "NO"
    if cluster_type == "dask":
        addr = dask_scheduler_address(owner, app_name)
        with Client(addr) as client:
            fut = Future(job_id, client=client)
            print("dask result", fut.status)
            if fut.done() and fut.status != "error":
                return "YES"
            if fut.done() and fut.status in ("error", "cancelled"):
                return "FAIL"
            return "NO"
    return json.dumps({"error": "model does not exist."}), 404
def get(self, request, *args, **kwargs):
    """Report conversion-task progress: 200 with a payload while pending,
    500 on failure, and a download link once the task succeeds."""
    task_id = self.kwargs['task_id']
    outcome = AsyncResult(task_id)
    payload = {
        'task_id': task_id,
        'executed': False,
        'successful': False,
        'download_link': ''
    }
    if not outcome.ready():
        return Response(payload, status=status.HTTP_200_OK)
    if not outcome.successful():
        return Response({'details': 'Internal server error.'},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    task_value = outcome.result
    if isinstance(task_value, TaskError):
        # Application-level error reported through the task's return value.
        return Response(task_value.data, task_value.status)
    payload['executed'] = True
    payload['successful'] = True
    payload['download_link'] = reverse('download', kwargs={
        'youtube_id': task_value['youtube_id'],
        'audio_format': task_value['audio_format'],
    })
    return Response(payload, status=status.HTTP_200_OK)
async def get_result_task(id):
    """
    ### Request the status of a task by its internal identifier

    Returns ``{'res': <result>}`` once the task has succeeded; otherwise the
    implicit ``None`` (serialized as JSON null) is returned, as before.
    """
    outcome = AsyncResult(id=id, app=celery_app)
    if not outcome.successful():
        return None
    return {'res': outcome.get()}
def results(owner, app_name, job_id):
    """Fetch a finished job's results, dispatching on the app's cluster type.

    Returns the serialized result on success, a WORKER_FAILURE payload with
    the traceback on failure, a 202 "not ready" response while running, and
    a 404 JSON error for unknown cluster types.
    """
    cluster_type = get_cluster_type(owner, app_name)
    if cluster_type == "single-core":
        # Celery-backed single-core jobs.
        async_result = AsyncResult(job_id)
        if async_result.ready() and async_result.successful():
            return json.dumps(async_result.result)
        elif async_result.failed():
            print("traceback", async_result.traceback)
            return json.dumps({
                "status": "WORKER_FAILURE",
                "traceback": async_result.traceback
            })
        else:
            return make_response("not ready", 202)
    elif cluster_type == "dask":
        # Dask-backed jobs: look the future up on the app's scheduler.
        addr = dask_scheduler_address(owner, app_name)
        with Client(addr) as client:
            fut = Future(job_id, client=client)
            if fut.done() and fut.status != "error":
                return fut.result()
            elif fut.done() and fut.status in ("error", "cancelled"):
                return json.dumps({
                    "status": "WORKER_FAILURE",
                    "traceback": fut.traceback()
                })
            else:
                return make_response("not ready", 202)
    else:
        return json.dumps({"error": "model does not exist."}), 404
def get(self, request, task_id):
    """Poll a custom-action task; persist its outcome on the matching
    CustomActionExecution row and return a JSON status payload."""
    cae = None
    if task_id is None:
        result = {"status": "error", "message": "Task id not provided"}
    else:
        res = AsyncResult(task_id)
        if res.ready():
            cae = CustomActionExecution.objects.get(task_id=task_id)
            cae.status = "task finished"
            # NOTE(review): naive datetime.now() subtracted from cae.created —
            # confirm both timestamps share the same (naive/aware) convention.
            runtime_delta = datetime.now() - cae.created
            cae.runtime = runtime_delta.seconds
            status = res.status
            if res.failed():
                result = {"status": "error", "message": "Task failed"}
                cae.status = "task failed"
            elif res.successful():
                cae.status = "task succeeded"
                task_result = res.result
                cae.task_result = json.dumps(task_result)
                download_link = self._get_download_link(task_id)
                result = {
                    "status": "completed",
                    "download_link": download_link
                }
            else:
                # Ready but neither failed nor successful (e.g. revoked):
                # surface the raw celery state as the error message.
                result = {"status": "error", "message": status}
        else:
            result = {"status": "waiting"}
    if cae:
        # Persist whatever status/runtime/result fields were set above.
        cae.save()
    return Response(result)
def list(self, request):
    """Return a scan task's output in the requested format, enforcing that only
    staff or the owning user (encoded in the task id) may read it."""
    task_id = request.GET.get('task')
    # Task ids are presumably "<36-char uuid>-<username>", so the username
    # starts at offset 37.
    username = task_id[37:]
    user = self.request.user
    if not user.is_staff and user.username != username:
        return Response(status=status.HTTP_403_FORBIDDEN)
    task = AsyncResult(task_id)
    result_format = request.GET.get('result')
    if task.successful():
        result = task.get()
        if result_format == 'json':
            # Pre-gzipped JSON blob.
            response = HttpResponse(result.json,
                                    content_type="application/json")
            response['Content-Encoding'] = 'gzip'
            return response
        if result_format == 'report':
            return HttpResponse(result.report)
        return HttpResponse(result.detailed_summary)
    if task.failed():
        return Response({'exception': str(task.result)},
                        status=status.HTTP_400_BAD_REQUEST)
    # Still pending/running: report the task id and its current state.
    return Response({'task': task.id, 'state': task.state})
def get(self, request, task_id):
    """Return the task's state as JSON, including its output once successful."""
    outcome = AsyncResult(task_id)
    payload = {'status': outcome.state}
    if outcome.successful():
        payload['output'] = outcome.result
    return JsonResponse(payload)
def check_status(task_id):
    """Map a Celery task's state onto an HTTP response.

    Finished tasks return their JSON payload, a 400 for handled errors the
    task reported, or a 500 with the exception text for unhandled ones.
    Unfinished tasks get a 202 with progress information.
    """
    task = AsyncResult(task_id, app=tasks.celery)
    if task.ready():
        if task.successful():
            # Task finished successfully.  The task returns a
            # (status, value) pair: True means a valid result, False a
            # handled error whose message goes back as plain text.
            status, result_value = task.result
            if status:
                # Valid result
                app.logger.info("result: %s", pprint.pformat(result_value))
                return flask.jsonify(result_value)
            else:
                # Handled exception
                response = flask.make_response(result_value, 400)
                response.mimetype = 'text/plain'
                return response
        else:
            # Unhandled exception: task.result holds the exception object.
            exc = task.result
            response = flask.make_response(
                traceback.format_exception_only(type(exc), exc), 500)
            response.mimetype = 'text/plain'
            return response
    else:  # if task.ready()
        # Not done yet: report PENDING plus any PROGRESS metadata with a 202.
        status = {
            'result_id': task.id,
            'state': 'PENDING',
            'progress': 0,
        }
        if task.state == 'PROGRESS':
            status.update(task.result)
        response = flask.jsonify(status)
        response.status_code = 202
        return response
def status(request, task_id):
    """Return the JSON status of a queued task, restricted to its owner.

    Status codes: TASK_NEW (unknown), TASK_RUNNING (queued/executing),
    TASK_SUCCESSFUL, TASK_FAILED (task returned an error dict) or
    TASK_UNKNOWN for a user who doesn't own the task.
    """
    res = {}
    task_log = TaskAuthentication.objects.get(task_id=task_id)
    if task_log.user_id == get_user_id(request):
        result = AsyncResult(task_id)
        res['status'] = TASK_NEW  # no such tasks in queue
        if result:
            if (result.ready()):
                if (result.successful()):
                    result = result.get()
                    # Tasks signal failure by returning a dict that carries
                    # an 'error_message' key.
                    if isinstance(result, dict):
                        res['status'] = TASK_FAILED
                        res['error_message'] = result['error_message']
                    else:
                        res['status'] = TASK_SUCCESSFUL  # succcessfull
            else:
                res['status'] = TASK_RUNNING  # waiting in queue
    else:
        logging.debug("Unauthorized User")
        res['status'] = TASK_UNKNOWN
    response = json.dumps(res)
    # NOTE(review): `mimetype=` is the pre-Django-1.7 HttpResponse keyword.
    return HttpResponse(response, mimetype="application/json")
def update_tasks():
    """Sync DumpTask rows with their Celery tasks and prune stale entries."""
    # Get all dump tasks in progress: status = 1
    qs = DumpTask.objects.filter(status=1)
    for dumpTask in qs:
        try:
            celeryTask = AsyncResult(dumpTask.id)
            # print('celeryTask', celeryTask.status)
            # If celery task is accomplished, update according data
            if celeryTask.successful():
                result = celeryTask.result
                dumpTask.status = 2  # celery task is ready
                dumpTask.file_name = result["file"]
                dumpTask.file_size = size(os.path.getsize(
                    storagePath + result["file"]))
                dumpTask.elapsed_time = result["elapsedTime"]
                dumpTask.save()
        except:
            # NOTE(review): bare except + delete — any error while reading the
            # celery result (missing keys, missing file) silently removes the
            # row. Confirm this is the intended cleanup policy; the flattened
            # source makes the delete's original indentation ambiguous.
            pass
            dumpTask.delete()
    # Get all ready tasks, delete task if dump file does not exists
    qs = DumpTask.objects.filter(status=2)
    for dumpTask in qs:
        if not os.path.exists(storagePath + dumpTask.file_name):
            dumpTask.delete()
def transcode(request):
    """Kick off (or poll) a preview-generation task for an analysed asset.

    Expects ``url``, ``hash``, ``mimetype`` and ``subname`` GET parameters.
    The analysis job is keyed by the URL itself; previews are keyed by the
    concatenation of all four parameters.
    """
    url = request.GET.get('url', False)
    if not url or not is_oga_url(url):
        return HttpResponse('Not a valid url', status=400)
    file_hash = request.GET.get('hash', False)
    mimetype = request.GET.get('mimetype', False)
    subname = request.GET.get('subname', False)
    # The analysis task uses the page URL as its task id.
    job = AsyncResult(url)
    if job.successful() and job.result.successful():
        found, file_descr, asset = get_asset(job, file_hash, subname, mimetype)
        if found:
            task_id = url + file_hash + mimetype + subname + 'preview'
            # schedule() is presumably a get-or-create wrapper around the
            # preview task — TODO confirm.
            job = schedule(task_id, lambda: tasks.generate_preview(
                (file_descr, asset), {'path': '/tmp/transcoded'},
                task_id=task_id), request)
            if job.state == 'SUCCESS' and job.result:
                data = job.result
            elif job.state == 'FAILURE' and job.result:
                data = str(job.result)
            else:
                data = job.state
            data = {'state': job.state, 'result': data}
            return HttpResponse(json.dumps(data, indent=4),
                                content_type="application/json")
        else:
            data = {'Error': 'Combination of hash, mimetype, subname not found!'}
            if settings.DEBUG:
                # In DEBUG mode, dump the candidate assets to help diagnose
                # the mismatch.
                from thrift.protocol.TJSONProtocol import TSimpleJSONProtocol
                from damn_at.serialization import SerializeThriftMsg
                data['assets'] = [json.loads(SerializeThriftMsg(ass, TSimpleJSONProtocol))
                                  for ass in asset]
            return HttpResponse(json.dumps(data, indent=4),
                                content_type="application/json", status=400)
    else:
        # Analysis not finished (or failed): ask the client to retry.
        return HttpResponse('wait for it!', status=400)
def download(request):
    """Stream a previously analysed file back to the client (Python 2 module).

    Looks up the analysis job by URL, matches the requested file hash among
    the job's descriptions, and streams the file as an attachment.
    """
    url = request.GET.get('url', False)
    if not url or not is_oga_url(url):
        return HttpResponse('Not a valid url', status=400)
    file_hash = request.GET.get('hash', False)
    # The analysis task uses the page URL as its task id.
    job = AsyncResult(url)
    if job.successful() and job.result.successful():
        descriptions = []
        found = False
        for result in job.result:
            descriptions.extend(result.get().values())
        for description in descriptions:
            # NOTE(review): the asset loop variable is unused in the match
            # and `break` only exits the inner loop — confirm intended.
            for asset in description.assets:
                if description.file.hash == file_hash:
                    found = description
                    break
        if found:
            print 'streaming', found.file.filename
            filename = found.file.filename
            wrapper = FileWrapper(file(filename))
            response = HttpResponse(wrapper,
                                    content_type='application/octet-stream')
            response['Content-Length'] = os.path.getsize(filename)
            response['Content-Disposition'] = 'attachment; filename='+os.path.basename(filename)
            return response
        else:
            data = {'Error': 'Hash not found!'}
            if settings.DEBUG:
                # In DEBUG mode, dump the last-seen assets for diagnosis.
                from thrift.protocol.TJSONProtocol import TSimpleJSONProtocol
                from damn_at.serialization import SerializeThriftMsg
                data['assets'] = [json.loads(SerializeThriftMsg(ass, TSimpleJSONProtocol))
                                  for ass in asset]
            return HttpResponse(json.dumps(data, indent=4),
                                content_type="application/json", status=400)
    else:
        return HttpResponse('wait for it!', status=400)
def cost_graph(request, id):
    """Render the cost-graph page for a jury-assignment task.

    404s unless the task belongs to the requesting user's tournament.  Graph
    data stays empty when the task hasn't succeeded or its payload lacks the
    expected keys.
    """
    res = AsyncResult(id)
    get_object_or_404(AssignResult, task_id=res.id,
                      tournament=request.user.profile.tournament)
    if res.successful():
        cost_obj = res.result[1]
    costs = []
    best_costs = []
    total = 0
    dissect = {}
    try:
        # NOTE(review): when the task hasn't succeeded, cost_obj is never
        # bound and the NameError here is swallowed by the bare except,
        # leaving the empty defaults — the bare except appears deliberate.
        costs = cost_obj['cost_graph']['total']
        best_costs = cost_obj['best_cost_graph']
        total = len(costs)
        dissect = cost_obj['cost_graph']
        # 'total' is plotted separately; drop it from the per-key breakdown.
        del dissect['total']
    except:
        pass
    return render(request, "jury/cost_graph.html", context={
        "costs": costs,
        "best_costs": best_costs,
        "total": total,
        "dissect": dissect
    })
def get(self, request, *args, **kwargs):
    """Return a JSON progress report for a row-import task, including an
    HTML log table once the task has finished (Python 2: uses ``unicode``)."""
    result = AsyncResult(kwargs['taskid'])
    log_html = u''
    if result.ready():
        if result.successful():
            rows, failed, remaining = result.result
            log_html = []
            has_error = False
            for i, row in enumerate(rows):
                if i:
                    # Data row: a non-empty last column marks a failed row
                    # (`not not` coerces it to bool).
                    has_error = has_error or not not row[-1]
                    row_tmpl = u'<tr><td>%s</td></tr>'
                    col_join = u'</td><td>'
                else:
                    # First row holds the column headers.
                    row_tmpl = u'<tr><th>%s</th></tr>'
                    col_join = u'</th><th>'
                log_html.append(row_tmpl % col_join.join(escape(x) for x in row))
            log_html = u'<table class="table">%s</table>' % u''.join(log_html)
            if has_error:
                log_html = u'<div class="alert alert-danger" role="alert">At least one row was not transferred. Please see log below for details.</div>' + log_html
            else:
                log_html = u'<div class="alert alert-success" role="alert">All rows successfully added.</div>' + log_html
        else:
            # Task raised: render the exception text in an alert box.
            log_html = u'<div class="alert alert-danger" role="alert">%s</div>' % escape(unicode(result.result))
    context_dict = {
        'completed': result.ready(),
        'log_html': log_html,
    }
    return self.render_json_response(context_dict)
def status(request, task_id):
    """Return the JSON status of a queued task, restricted to its owner.

    Mirrors the task-status contract: TASK_NEW when unknown, TASK_RUNNING
    while queued, TASK_FAILED when the task returned an error dict,
    TASK_SUCCESSFUL otherwise, and TASK_UNKNOWN for non-owners.
    """
    res = {}
    task_log = TaskAuthentication.objects.get(task_id=task_id)
    if task_log.user_id == get_user_id(request):
        result = AsyncResult(task_id)
        res['status'] = TASK_NEW  #no such tasks in queue
        if result:
            if (result.ready()):
                if (result.successful()):
                    result = result.get()
                    # Tasks signal failure by returning a dict carrying
                    # an 'error_message' key.
                    if isinstance(result, dict):
                        res['status'] = TASK_FAILED
                        res['error_message'] = result['error_message']
                    else:
                        res['status'] = TASK_SUCCESSFUL  #succcessfull
            else:
                res['status'] = TASK_RUNNING  #waiting in queue
    else:
        logging.debug("Unauthorized User")
        res['status'] = TASK_UNKNOWN
    response = json.dumps(res)
    # NOTE(review): `mimetype=` is the pre-Django-1.7 HttpResponse keyword.
    return HttpResponse(response, mimetype="application/json")
def get(self, request):
    """List this tournament's assignment runs with live progress/cost data."""
    trn = request.user.profile.tournament
    form = AssignForm()
    results = []
    for result in AssignResult.objects.filter(
            tournament=trn).order_by("-created"):
        res = AsyncResult(result.task_id)
        p = None
        cost = None
        if res.state == "PROGRESS":
            # Percentage derived from the task's progress metadata.
            p = 100 * res.info["current"] / res.info["total"]
        if res.successful():
            # NOTE(review): reconstructed from flattened source — the else
            # binding ("errors" when result[0] is empty) should be confirmed
            # against the original file.
            if len(res.result[0]) > 0:
                cost = res.result[1]
                if type(cost) == dict:
                    cost = cost['best_cost']
            else:
                cost = "errors"
        results.append({
            "task": result,
            "state": res.state,
            "progress": p,
            "cost": cost
        })
    return render(request, "jury/assignments.html", context={
        "assignments": results,
        "form": form
    })
def initialize_in_judge(self):
    """Ensure exactly one judge-initialization task is active for this test case.

    Guarded by a per-testcase distributed cache lock so only one worker at a
    time inspects/requeues the task; if the lock is held elsewhere, returns
    immediately.
    """
    lock = cache.lock("testcase_{}_{}_{}_initialize_in_judge".format(
        self.problem.problem.pk, self.problem.pk, self.pk), timeout=60)
    if lock.acquire(blocking=False):
        try:
            # Re-read our row inside the lock to get the latest flag.
            refreshed_obj = type(self).objects.with_transaction(self._transaction).get(pk=self.pk)
            if refreshed_obj.judge_initialization_successful:
                # Already initialized; nothing to do.
                return
            if self.judge_initialization_task_id:
                result = AsyncResult(self.judge_initialization_task_id)
                if result.failed() or result.successful():
                    # Finished either way without setting the success flag:
                    # clear the id so a fresh task gets queued below.
                    self.judge_initialization_task_id = None
                    self.save()
                elif result.state == "PENDING":
                    # Never picked up: revoke it and requeue.
                    result.revoke()
                    self.judge_initialization_task_id = None
                    self.save()
                else:
                    # Actively running elsewhere — leave it alone.
                    logger.debug("Waiting for task {} in state {}".format(
                        self.judge_initialization_task_id, result.state
                    ))
            if not self.judge_initialization_task_id:
                self.judge_initialization_task_id = TestCaseJudgeInitialization().delay(self).id
                self.save()
        finally:
            lock.release()
def check_tasks(request):
    """ Checking tasks status """
    try:
        tasks = json.loads(request.data['tasks'])
    except:
        return Response({}, status=status.HTTP_400_BAD_REQUEST)
    failed_tasks = []
    success_tasks = []
    ready_tasks = []
    for task in tasks:
        outcome = AsyncResult(task)
        if outcome.failed():
            failed_tasks.append({
                'task': task,
                'message': outcome.info.args[0]
            })
        if outcome.successful():
            success_tasks.append(task)
        if outcome.ready():
            ready_tasks.append(task)
    if len(success_tasks) == len(tasks):
        return Response({'code': 'SUCCESS'})  # all task success
    if len(ready_tasks) != len(tasks):
        return Response({'code': 'WAIT'})
    return Response({'code': 'FAILED', 'tasks': failed_tasks})
def taskReady(jobObj, redirect="error"):
    """Checks if celery task is ready.

    Args:
        jobObj: model instance holding celeryUID and a state flag.
        redirect: page to redirect to on error.

    Returns:
        (True, None): celery task finished successfully.
        (False, HttpResponseRedirect): celery task failed.
        (False, None): celery task is still processing.
    """
    task = AsyncResult(jobObj.celeryUID)
    if task.ready():
        if task.successful():
            return True, None
        return False, HttpResponseRedirect(reverse(redirect))
    # Celery may report PENDING for tasks that already left its result table
    # even though they completed, so fall back to the state flag we keep in
    # our own tables.
    if jobObj.state == symTyperTask.DONE:
        return True, None
    if jobObj.state == symTyperTask.ERROR:
        return False, HttpResponseRedirect(reverse(redirect))
    return False, None
def _get_results(result: AsyncResult,
                 return_keys_only: bool = True,
                 merge_children_results: bool = False) -> dict:
    """Collect a task's results into a dict, optionally merging its children's.

    With ``return_keys_only`` the payload is filtered through
    ``get_task_results``; otherwise the raw result is returned with the
    internal RETURN_KEYS_KEY stripped.  Errors are logged and swallowed,
    yielding an empty dict.
    """
    results = {}
    if not result:
        return results
    try:
        if result.successful():
            # Deep-copy dict payloads so the pop() below can't mutate the
            # cached backend result.
            _results = copy.deepcopy(result.result) if isinstance(
                result.result, dict) else result.result
            if _results:
                if return_keys_only:
                    results = get_task_results(_results)
                else:
                    # Delete the RETURN_KEYS_KEY
                    _results.pop(RETURN_KEYS_KEY, None)
                    results = _results
            if merge_children_results:
                children = result.children
                if children:
                    # Recurse (via the public wrapper) into each child and
                    # fold its results in; later children overwrite earlier
                    # keys.
                    for child in children:
                        child_results = get_results(
                            child,
                            return_keys_only=return_keys_only,
                            merge_children_results=merge_children_results)
                        results.update(child_results)
    except Exception as e:
        logger.error(e)
        logger.error(traceback.format_exc())
    return results
class Progress(object):
    """Summarizes a Celery task's progress for polling clients."""

    def __init__(self, task_id):
        self.task_id = task_id
        self.result = AsyncResult(task_id)

    def get_info(self):
        """Return a dict with 'complete', 'success', 'progress' and, once
        finished, 'result' (the task's return value or the error string)."""
        if self.result.ready():
            success = self.result.successful()
            with allow_join_result():
                return {
                    'complete': True,
                    'success': success,
                    'progress': _get_completed_progress(),
                    # BUG FIX: the original called self.result.get(self.task_id),
                    # passing the task-id string as AsyncResult.get()'s first
                    # positional parameter, which is `timeout`.
                    'result': self.result.get() if success else str(self.result.info),
                }
        elif self.result.state == PROGRESS_STATE:
            return {
                'complete': False,
                'success': None,
                'progress': self.result.info,
            }
        elif self.result.state in ['PENDING', 'STARTED']:
            return {
                'complete': False,
                'success': None,
                'progress': _get_unknown_progress(),
            }
        # Any other state (e.g. RETRY/REVOKED): expose the raw info.
        return self.result.info
def _get_async_result(self, node_name, node_id):  # pylint: disable=invalid-name,redefined-builtin
    """Return the AsyncResult for a node, served from the per-flow cache.

    Cache traffic is traced (GET/HIT/MISS/ADD) and guarded by the flow's
    node-state cache lock; only terminal results (success/failure) are
    cached, since in-flight states would go stale.
    """
    cache = Config.async_result_cache[self._flow_name]
    # Common payload attached to every trace event below.
    trace_msg = {
        'flow_name': self._flow_name,
        'node_args': self._node_args,
        'parent': self._parent,
        'dispatcher_id': self._dispatcher_id,
        'queue': Config.dispatcher_queues[self._flow_name],
        'node_id': node_id,
        'node_name': node_name,
        'selective': self._selective
    }
    with self._node_state_cache_lock.get_lock(self._flow_name):
        try:
            Trace.log(Trace.NODE_STATE_CACHE_GET, trace_msg)
            res = cache.get(node_id)
            Trace.log(Trace.NODE_STATE_CACHE_HIT, trace_msg)
        except CacheMissError:
            Trace.log(Trace.NODE_STATE_CACHE_MISS, trace_msg)
            res = AsyncResult(id=node_id)
            # we can cache only results of tasks that have finished or failed, not the ones that are going to
            # be processed
            if res.successful() or res.failed():
                Trace.log(Trace.NODE_STATE_CACHE_ADD, trace_msg)
                cache.add(node_id, res)
        return res
class Progress(object):
    """Summarizes a Celery task's progress/state for polling clients."""

    def __init__(self, task_id):
        self.task_id = task_id
        self.result = AsyncResult(task_id)

    def get_info(self):
        """Return a dict with 'complete', 'success', 'progress' and 'state'.

        Fix: removed the leftover debug ``print`` statements that dumped
        ``result.info`` and a marker string to stdout on every poll.
        """
        if self.result.ready():
            return {
                'complete': True,
                'success': self.result.successful(),
                'progress': _get_completed_progress(),
                'state': self.result.state
            }
        elif self.result.state == PROGRESS_STATE:
            return {
                'complete': False,
                'success': None,
                'progress': self.result.info,
                'state': self.result.state
            }
        elif self.result.state in ['PENDING', 'STARTED']:
            return {
                'complete': False,
                'success': None,
                'progress': _get_unknown_progress(),
                'state': self.result.state
            }
        # Any other state: expose the raw info.
        return self.result.info
def export_status(self, taskid):
    """Show (or serve) the status of an account-export task.

    Redirects away for unknown/expired tasks and on task errors; counts
    polls in the session and revokes the task after ~10 unproductive
    checks; once finished, ``?d=y`` downloads the CSV payload.
    """
    result = AsyncResult(taskid)
    if result is None or taskid not in session['taskids']:
        msg = _('The task status requested has expired or does not exist')
        flash(msg)
        log.info(msg)
        redirect(url(controller='accounts', action='index'))
    if result.ready():
        finished = True
        flash.pop_messages()
        if isinstance(result.result, Exception):
            # Task raised: show details only to superadmins.
            msg = _('Error occured in processing %s') % result.result
            if c.user.is_superadmin:
                flash_alert(msg)
                log.info(msg)
            else:
                flash_alert(_('Backend error occured during processing.'))
                log.info(msg)
            redirect(url(controller='accounts', action='index'))
        results = dict(
            f=True if not result.result['global_error'] else False,
            id=taskid,
            global_error=result.result['global_error'])
        audit_log(c.user.username, 5, unicode(auditmsgs.ACCOUNTEXPORT_MSG),
                  request.host, request.remote_addr,
                  arrow.utcnow().datetime)
    else:
        # Still running: bump the per-session poll counter.
        try:
            session['acexport-count'] += 1
        except KeyError:
            session['acexport-count'] = 1
        session.save()
        # Give up after 10 polls if the task never left a dead-ish state.
        if (session['acexport-count'] >= 10 and
                result.state in ['PENDING', 'RETRY', 'FAILURE']):
            result.revoke()
            del session['acexport-count']
            session.save()
            msg = _('The export could not be processed, try again later')
            flash_alert(msg)
            log.info(msg)
            redirect(url(controller='accounts', action='index'))
        finished = False
        results = dict(f=None, global_error=None)
    c.finished = finished
    c.results = results
    c.success = result.successful()
    dwn = request.GET.get('d', None)
    if finished and (dwn and dwn == 'y'):
        # Serve the CSV payload produced by the task.
        response.content_type = 'text/csv'
        response.headers['Cache-Control'] = 'max-age=0'
        csvdata = result.result['f']
        disposition = 'attachment; filename=accounts-export-%s.csv' % \
            taskid
        response.headers['Content-Disposition'] = str(disposition)
        response.headers['Content-Length'] = len(csvdata)
        return csvdata
    return self.render('/accounts/exportstatus.html')
def post_ajax(self, request, *args, **kwargs):
    """Handle an AJAX convert request: rate-limit by client IP, run the
    conversion task synchronously, and answer with a download link."""
    url = self.parse_url(request.POST.get('url', '').strip())
    client_ip = get_client_ip(request)
    # Daily per-IP rate limiting based on logged conversions.
    client_convert_count = ActivityLog.objects\
        .get_current_day_convert_count_by_ip(client_ip)
    daily_limit = settings.DAILY_CONVERT_LIMIT
    limit_reached = client_convert_count >= daily_limit
    if url and not limit_reached:
        task = tasks.convert.delay(url, client_ip)
        result = AsyncResult(task.id)
        # TODO: We're tying up resources here as we're waiting for the task
        # to finish. Remove this later and have the AJAX request retry
        # until result.ready().
        result.wait()
        data = {
            'task_id': task.id,
            'is_ready': False,
        }
        if result.successful():
            # A falsy result means the conversion itself failed (bad URL,
            # too long, etc.) even though the task ran to completion.
            if result.result:
                youtube_id = result.result['youtube_id']
                filename = result.result['filename']
                download_link = reverse(
                    'download_view',
                    kwargs={'youtube_id': youtube_id,
                            'filename': filename}
                )
                data['message'] = 'Conversion successful!'
                data['is_ready'] = True
                data['youtube_id'] = youtube_id
                data['title'] = result.result['title']
                data['filename'] = filename
                data['download_link'] = download_link
                return self.render_json_response(data, status=200)
            data['message'] = 'Could not convert the video. Please make ' \
                              'sure the URL you entered is correct and ' \
                              'the video is no more than {} minutes long.'\
                .format(settings.MAX_DURATION_SECONDS / 60)
            return self.render_json_response(data, status=200)
        # Task raised an exception.
        data['message'] = 'Something went wrong :('
        return self.render_json_response(data, status=500)
    if limit_reached:
        logger.warn('Client reached convert limit: %s', client_ip)
        message = "Sorry, but you've reached your daily convert limit " \
                  "of {}. Please try again tomorrow.".format(daily_limit)
        return self.render_json_response({'message': message}, status=200)
    return self.render_json_response({'message': 'Please provide a URL.'},
                                     status=200)
def post_ajax(self, request, *args, **kwargs):
    """Handle an AJAX convert request: rate-limit by client IP, run the
    conversion task synchronously, and answer with a download link."""
    url = self.parse_url(request.POST.get('url', '').strip())
    client_ip = get_client_ip(request)
    # Daily per-IP rate limiting based on logged conversions.
    client_convert_count = ActivityLog.objects\
        .get_current_day_convert_count_by_ip(client_ip)
    daily_limit = settings.DAILY_CONVERT_LIMIT
    limit_reached = client_convert_count >= daily_limit
    if url and not limit_reached:
        task = tasks.convert.delay(url, client_ip)
        result = AsyncResult(task.id)
        # TODO: We're tying up resources here as we're waiting for the task
        # to finish. Remove this later and have the AJAX request retry
        # until result.ready().
        result.wait()
        data = {
            'task_id': task.id,
            'is_ready': False,
        }
        if result.successful():
            # A falsy result means the conversion itself failed even though
            # the task ran to completion.
            if result.result:
                youtube_id = result.result['youtube_id']
                filename = result.result['filename']
                download_link = reverse('download_view', kwargs={
                    'youtube_id': youtube_id,
                    'filename': filename
                })
                data['message'] = 'Conversion successful!'
                data['is_ready'] = True
                data['youtube_id'] = youtube_id
                data['title'] = result.result['title']
                data['filename'] = filename
                data['download_link'] = download_link
                return self.render_json_response(data, status=200)
            data['message'] = 'Could not convert the video. Please make ' \
                              'sure the URL you entered is correct and ' \
                              'the video is no more than {} minutes long.'\
                .format(settings.MAX_DURATION_SECONDS / 60)
            return self.render_json_response(data, status=200)
        # Task raised an exception.
        data['message'] = 'Something went wrong :('
        return self.render_json_response(data, status=500)
    if limit_reached:
        logger.warn('Client reached convert limit: %s', client_ip)
        message = "Sorry, but you've reached your daily convert limit " \
                  "of {}. Please try again tomorrow.".format(daily_limit)
        return self.render_json_response({'message': message}, status=200)
    return self.render_json_response({'message': 'Please provide a URL.'},
                                     status=200)
def is_processing(self):
    """Return True while any registered background task is still running.

    Finished tasks (success or failure) are pruned from the tracking set as
    a side effect; the answer reflects whatever ids remain afterwards.
    """
    if self._background_process_ids:
        # Iterate over a snapshot so remove_processing() can mutate the set.
        for process_id in self._background_process_ids.copy():
            outcome = AsyncResult(process_id)
            done = outcome.successful() or outcome.failed()
            if done:
                self.remove_processing(process_id)
            else:
                return True
    return bool(self._background_process_ids)
def get_result_by_id(task_id):
    """Returns result from a queue """
    outcome = AsyncResult(task_id, backend=celery_app.backend)
    try:
        if outcome.successful():
            return outcome.result
        return None
    except Exception as ex:
        xprint("EXCEPTION DURING TASK PROCESSING {}".format(ex))
        return None
def async_get_task(task_id):
    """Return JSON with a task's status, adding its result once successful."""
    outcome = AsyncResult(task_id)
    payload = {'status': outcome.status}
    if outcome.successful():
        payload['result'] = outcome.result
    return jsonify({'result': payload})
def get_result(self, request):
    """Answer an async-task poll: JSON for AJAX, success/error page otherwise."""
    outcome = AsyncResult(request.GET.get('async_id'))
    if 'ajax' in self.request.GET:
        return JsonResponse(self._return_ajax_result(outcome, timeout=0.25))
    if not outcome.ready():
        return render(request, 'pretixpresale/waiting.html')
    # A "successful" task whose info is an Exception is still an error.
    if outcome.successful() and not isinstance(outcome.info, Exception):
        return self.success(outcome.info)
    return self.error(outcome.info)
def compute_lost_capacity_async():
    """Uses Celery to call the compute_all_lost_capacity_percentages task.

    If the task is already running (according to the cache) but not yet
    successful, nothing happens and the old task is returned. Otherwise, a
    new task is started, its task_id is cached and the task is returned.

    Using the cache for locking isn't ideal, but in my defense, it's also
    what they recommend in the Cookbook section of the Celery manual. It
    forces the cache to be shared between all instances of this site and
    isn't sufficiently persistent.

    If two threads are both trying to start up the task, it's possible that
    this thread is blocked but the task isn't known yet. In that case, the
    function returns None."""
    task_id = cache.get(LOCK_KEY)
    if task_id is not None:
        if task_id == 'pending':
            # Another task is starting, we don't have to
            return None
        logger.debug("found task_id: %s" % (task_id,))
        result = AsyncResult(task_id)
        logger.debug("task state is %s." % (result.state,))
        if not result.ready():
            # We're still busy
            return result
        elif not result.successful():
            logger.critical(
                "lost capacity task finished with an exception: %s" %
                (str(result.result),))
            # Clear the lock so a fresh task can be started below.
            cache.delete(LOCK_KEY)
    else:
        logger.debug("found no task_id.")
    # Now, as far as we know, the cache is empty
    if not cache.add(LOCK_KEY, 'pending'):
        # If add() returns False, the key already exists -- some other thread
        # also added it.
        return
    task = compute_all_lost_capacity_percentages.delay()
    cache.set(LOCK_KEY, task.task_id, DURATION)
    logger.debug("made a new task with id %s." % task.task_id)
    return task
def query_task_status_block(task_id):
    """Block until the Celery task *task_id* succeeds, then print its state.

    Polls every 0.3 seconds; if the task state becomes FAILURE, prints a
    failure message and stops waiting early.
    """
    # Hoisted out of the polling loop (previously `import time` ran on
    # every iteration); prints parenthesized for Py2/Py3 compatibility.
    import time
    from celery.result import AsyncResult
    res = AsyncResult(task_id)
    while not res.successful():
        if res.state.lower() == 'failure':
            print("Task {0} FAILED!".format(task_id))
            break
        time.sleep(0.3)
    print(res.state)
def is_processing(self):
    """Return True while any tracked background task is still running.

    Finished tasks (successful or failed) are pruned from the tracking
    list as a side effect via ``remove_processing``.
    """
    # Iterate over a snapshot since remove_processing mutates the list.
    for process_id in tuple(self._background_process_ids):
        res = AsyncResult(process_id)
        if res.successful() or res.failed():
            # Task finished either way -- stop tracking it.
            self.remove_processing(process_id)
        else:
            return True
    # Anything still tracked after pruning counts as processing.
    return bool(self._background_process_ids)
def get_result(self, request):
    """Report the state of the async task named by the ``async_id`` GET
    parameter: JSON for AJAX clients, a waiting page or success/error
    handler otherwise.
    """
    from celery.result import AsyncResult
    res = AsyncResult(request.GET.get('async_id'))
    if 'ajax' in self.request.GET:
        return JsonResponse(self._return_celery_result(res, timeout=0.25))
    if not res.ready():
        # Task still running -- show the waiting page.
        return render(request, 'pretixpresale/waiting.html')
    if res.successful():
        return self.success(res.info)
    return self.error(res.info)
def test_oga(self):
    """Integration test: scrape an OpenGameArt page via the `oga` task,
    then generate previews for the resulting file descriptions.

    NOTE(review): requires a live local redis (db 0 is flushed!) and a
    running celery worker; busy-waits with no timeout, so it hangs if the
    worker is down -- confirm this is intentional for this test suite.
    """
    import redis
    # Start from a clean redis db so stats collection isn't polluted.
    rs1 = redis.Redis(host="localhost", db=0)
    rs1.flushdb()
    res = tasks.oga('http://opengameart.org/content/fieldstone-fireplace')
    job_id = res.id
    print 'STARTING', res.id
    # Busy-wait until the scrape job reports ready.
    while True:
        stats, ready = tasks.collect_stats(job_id)
        print stats, ready, tasks.has_completed(job_id)
        if ready:
            job = AsyncResult(job_id)
            print 'test_oga1:', job.info, job.state, job.ready(), job.successful(), job.result
            break
    print tasks.collect_stats(job_id)
    if job.successful():
        print 'test_oga2:', [x.result for x in job.result]
    else:
        print 'test_oga3: FAILED'
    # Flatten the per-result file descriptions into a single list.
    # NOTE(review): assumes job.result is iterable of sub-results whose
    # .get() returns a dict -- verify against the `oga` task's return type.
    descriptions = []
    for result in job.result:
        descriptions.extend(result.get().values())
    group = tasks.generate_previews_for_filedescriptions(descriptions)
    group_id = group.id
    # Second busy-wait: until the transcoding group finishes.
    while True:
        stats, ready = tasks.collect_stats_transcoding(group_id)
        print stats, ready
        if ready:
            break
    # No functional assertion -- the test only checks the pipeline runs.
    assert True
def email_status(value):
    """Return the display label of an organization's invitation status.

    If any of the organization's email tasks has completed successfully,
    the status is first updated to 'SNT' (sent) and persisted.
    """
    organization_pk = int(value)
    organization = Organization.objects.get(pk=organization_pk)
    email_tasks = EmailTasks.objects.filter(organization_id=organization_pk)
    for email_task in email_tasks:
        # One successful send is enough to flip the status.
        if AsyncResult(email_task.task_id).successful():
            organization.inv_status = 'SNT'
            organization.save()
            break
    return organization.get_inv_status_display()
def update(request):
    """Advance the transcode pipeline: start tasks for finished torrents
    and mark completed celery tasks' requests as done. Returns an empty
    HTTP response.
    """
    # Requests with no celery task yet: sync their torrent and kick off a
    # transcode once the download has finished.
    for pending in TranscodeRequest.objects.filter(celery_task_id=None):
        torrent = get_trans_torrent(pending.what_torrent)
        torrent.sync_t_torrent()
        if torrent.torrent_done == 1:
            run_transcode_task(pending)
    # Requests with a running task: stamp completion when the task succeeds.
    in_flight = TranscodeRequest.objects.filter(
        date_completed=None).exclude(celery_task_id=None)
    for transcode_request in in_flight:
        if AsyncResult(transcode_request.celery_task_id).successful():
            transcode_request.date_completed = timezone.now()
            transcode_request.save()
    return HttpResponse('')
def audit_export_status(self, taskid):
    "Audit log export status"
    # Renders the export-status page while the task runs; once finished
    # and requested with ?d=y, streams the exported file back.
    result = AsyncResult(taskid)
    # Only allow polling task ids that this session started.
    if result is None or taskid not in session['taskids']:
        msg = _('The task status requested has expired or does not exist')
        flash(msg)
        log.info(msg)
        # NOTE: Pylons redirect() raises, ending the request here.
        redirect(url('status-audit-logs'))
    if result.ready():
        finished = True
        flash.pop_messages()
        # A task that raised stores the exception as its result.
        if isinstance(result.result, Exception):
            msg = _('Error occured in processing %s') % result.result
            if c.user.is_superadmin:
                flash_alert(msg)
                log.info(msg)
            else:
                # Hide internals from ordinary users; log the detail.
                flash_alert(_('Backend error occured during processing.'))
                log.info(msg)
            redirect(url('status-audit-logs'))
    else:
        # Still pending: count poll attempts in the session and give up
        # (revoking the task) after 20 polls in a non-progressing state.
        session['exportauditlog-counter'] += 1
        session.save()
        if (session['exportauditlog-counter'] >= 20 and
                result.state in ['PENDING', 'RETRY', 'FAILURE']):
            result.revoke()
            del session['exportauditlog-counter']
            session.save()
            flash_alert(_('The audit log export failed, try again later'))
            redirect(url('status-audit-logs'))
        finished = False
    c.finished = finished
    c.results = result.result
    c.success = result.successful()
    # ?d=y on a finished export triggers the actual file download.
    dwn = request.GET.get('d', None)
    if finished and (dwn and dwn == 'y'):
        audit_log(c.user.username, 5, unicode(AUDITLOGEXPORT_MSG),
                  request.host, request.remote_addr,
                  arrow.utcnow().datetime)
        # NOTE(review): assumes result.result is a dict with
        # 'content_type', 'f' and 'filename' keys -- set by the export task.
        response.content_type = result.result['content_type']
        response.headers['Cache-Control'] = 'max-age=0'
        respdata = result.result['f']
        disposition = 'attachment; filename=%s' % result.result['filename']
        response.headers['Content-Disposition'] = str(disposition)
        response.headers['Content-Length'] = len(respdata)
        return respdata
    return self.render('/status/auditexportstatus.html')
def get_image_build(username, image_build_id):
    """Return a dict describing an image build owned by *username*,
    synchronizing the DB record with the celery task state if the build
    was previously 'submitted'.

    Raises PhantomWebException when the build does not exist.
    """
    try:
        image_build = ImageBuild.objects.get(id=image_build_id, owner=username)
    except ImageBuild.DoesNotExist:
        raise PhantomWebException("Could not find image build %s. Doesn't exist." % image_build_id)
    ret = {"id": image_build.id, "owner": username, "cloud_name": image_build.cloud_name}
    if image_build.status == "successful":
        ret["ready"] = True
    elif image_build.status == "submitted":
        # Build still tracked by celery: pull the task outcome and fold it
        # back into the DB record (status, returncode, output, artifacts).
        result = AsyncResult(image_build.celery_task_id)
        ready = result.ready()
        ret["ready"] = ready
        if ready:
            if result.successful():
                # NOTE(review): assumes result.result is a dict with
                # 'returncode', 'artifacts' and 'full_output' keys.
                image_build.returncode = result.result["returncode"]
                if image_build.returncode == 0:
                    image_build.status = "successful"
                else:
                    image_build.status = "failed"
                # Persist one artifact row per cloud the build produced.
                for cloud_name in result.result["artifacts"]:
                    image_build_artifact = ImageBuildArtifact.objects.create(
                        image_build_id=image_build.id,
                        cloud_name=cloud_name,
                        image_name=result.result["artifacts"][cloud_name])
                    image_build_artifact.save()
                image_build.full_output = result.result["full_output"]
                image_build.save()
            else:
                # Task raised: mark failed, keep the exception text.
                image_build.status = "failed"
                image_build.returncode = -1
                image_build.full_output = str(result.result)
                image_build.save()
    ret["status"] = image_build.status
    # Only finished builds carry returncode/output/artifacts.
    if image_build.status != "submitted":
        ret["returncode"] = image_build.returncode
        ret["full_output"] = image_build.full_output
        ret["artifacts"] = {}
        try:
            artifacts = ImageBuildArtifact.objects.filter(image_build_id=image_build_id)
            for artifact in artifacts:
                ret["artifacts"][artifact.cloud_name] = artifact.image_name
        except ImageBuildArtifact.DoesNotExist:
            raise PhantomWebException("Could not find image build artifact for image build id %s. Doesn't exist." % image_build_id)
    return ret
def import_status(self, taskid):
    "import domains status"
    # Poll page for a domain-import task; audits completion and aborts
    # (revoking the task and removing the upload) after too many polls.
    result = AsyncResult(taskid)
    # Only allow polling task ids that this session started.
    if result is None or taskid not in session['taskids']:
        msg = _('The task status requested has expired or does not exist')
        flash(msg)
        log.info(msg)
        # NOTE: Pylons redirect() raises, ending the request here.
        redirect(url(controller='organizations', action='index'))
    if result.ready():
        finished = True
        flash.pop_messages()
        # A task that raised stores the exception as its result.
        if isinstance(result.result, Exception):
            msg = _('Error occured in processing %s') % result.result
            if c.user.is_superadmin:
                flash_alert(msg)
                log.info(msg)
            else:
                # Hide internals from ordinary users; log the detail.
                flash_alert(_('Backend error occured during processing.'))
                log.info(msg)
            redirect(url(controller='organizations'))
        # Import succeeded: refresh serials and write an audit entry.
        update_serial.delay()
        info = auditmsgs.IMPORTORG_MSG % dict(o='-')
        audit_log(c.user.username, 3, unicode(info), request.host,
                  request.remote_addr, arrow.utcnow().datetime)
    else:
        # Still pending: count poll attempts; after 10 polls in a
        # non-progressing state, revoke the task and clean up the upload.
        session['dimport-counter'] += 1
        session.save()
        if (session['dimport-counter'] >= 10 and
                result.state in ['PENDING', 'RETRY', 'FAILURE']):
            result.revoke()
            try:
                os.unlink(session['dimport-file'])
            except OSError:
                # Upload already gone -- nothing to clean up.
                pass
            del session['dimport-file']
            del session['dimport-counter']
            session.save()
            flash_alert(_('The import could not be processed,'
                          ' try again later'))
            redirect(url(controller='organizations'))
        finished = False
    c.finished = finished
    c.results = result.result
    c.success = result.successful()
    return self.render('/organizations/importstatus.html')
def export_status(self, taskid):
    "export status"
    # Poll page for a domain CSV export; once finished and requested with
    # ?d=y, streams the CSV back as an attachment.
    result = AsyncResult(taskid)
    # Only allow polling task ids that this session started.
    if result is None or taskid not in session["taskids"]:
        flash(_("The task status requested has expired or does not exist"))
        # NOTE: Pylons redirect() raises, ending the request here.
        redirect(url(controller="domains", action="index"))
    if result.ready():
        finished = True
        flash.pop_messages()
        # A task that raised stores the exception as its result.
        if isinstance(result.result, Exception):
            if c.user.is_superadmin:
                flash_alert(_("Error occured in processing %s") % result.result)
            else:
                flash_alert(_("Backend error occured during processing."))
            redirect(url(controller="domains"))
        # NOTE(review): assumes result.result is a dict with
        # 'global_error' and 'f' keys -- set by the export task.
        results = dict(
            f=True if not result.result["global_error"] else False,
            id=taskid,
            global_error=result.result["global_error"],
        )
    else:
        # Still pending: count poll attempts; after 10 polls in a
        # non-progressing state, revoke the task and bail out.
        session["dexport-count"] += 1
        if session["dexport-count"] >= 10 and result.state in ["PENDING", "RETRY", "FAILURE"]:
            result.revoke()
            flash_alert(_("The export could not be processed,"
                          " try again later"))
            del session["dexport-count"]
            session.save()
            redirect(url(controller="domains"))
        finished = False
        results = dict(f=None, global_error=None)
    c.finished = finished
    c.results = results
    c.success = result.successful()
    # ?d=y on a finished export triggers the actual CSV download.
    d = request.GET.get("d", None)
    if finished and (d and d == "y"):
        info = EXPORTDOM_MSG % dict(d="all")
        audit_log(c.user.username, 5, info, request.host,
                  request.remote_addr, datetime.now())
        response.content_type = "text/csv"
        response.headers["Cache-Control"] = "max-age=0"
        csvdata = result.result["f"]
        disposition = "attachment; filename=domains-export-%s.csv" % taskid
        response.headers["Content-Disposition"] = disposition
        response.headers["Content-Length"] = len(csvdata)
        return csvdata
    return render("/domains/exportstatus.html")
def celery_task_log(request, task_id):
    """Return a JSON snapshot of a celery task: SUCCESS, FAILED (with
    output), the custom 'log' state (with accumulated log lines), or the
    raw celery status otherwise. The task id is echoed back as 'taskId'.
    """
    task = AsyncResult(task_id)
    if task.ready() and task.successful():
        payload = {'status': 'SUCCESS'}
    elif task.ready() and task.failed():
        payload = {'status': 'FAILED', 'output': str(task.result)}
    elif task.status == "log":
        # Custom in-progress state: the task stores its log in result.
        lines = task.result['log'] if 'log' in task.result else []
        payload = {'status': 'log', 'output': lines}
    else:
        payload = {'status': task.status}
    payload['taskId'] = task_id
    return JsonResponse(payload)
def get(self, request, *args, **kwargs):
    """ Get method for celery task state. """
    serializer = self.serializer_class(data=request.DATA)
    if not serializer.is_valid():
        return Response(serializer.errors)
    celery_task = AsyncResult(serializer.data['task_id'])
    # NOTE(review): the token/user lookups are kept even though `user` is
    # unused -- presumably for their raise-on-missing side effect; confirm.
    user_token = Token.objects.get(key=request.auth)
    user = UserInfo.objects.get(user_id=user_token.user.user_id)
    if not celery_task.ready():
        return Response({'state': celery_task.state})
    if celery_task.successful():
        return Response({'success': celery_task.result})
    # Failed task: result holds the serialized exception info.
    return Response({'error': celery_task.result["exc_message"]})
def get(self, task_id):
    """ Get task result by task-id

    **Example request**:

    .. sourcecode:: http

        GET /tasks/result/9ec42ba0-be59-488f-a445-4a007d83b954/ HTTP/1.1
        Accept: application/json
        Host: localhost:8888

    **Example response**:

    .. sourcecode:: http

        HTTP/1.1 200 OK
        Content-Type: application/json; charset=UTF-8

        {
            "result": 3,
            "state": "SUCCESS",
            "task-id": "9ec42ba0-be59-488f-a445-4a007d83b954"
        }

    :statuscode 200: no error
    :statuscode 400: invalid request
    """
    outcome = AsyncResult(task_id, app=self.application.celery_app)
    body = {"task-id": task_id, "state": outcome.state}
    if outcome.ready():
        if outcome.successful():
            body["result"] = outcome.result
        else:
            # Failure: expose the traceback plus the exception value.
            body["traceback"] = outcome.traceback
            body["error"] = outcome.result
    self.write(body)
def create_task_result_dict(initial_data):
    """
    Convert initial data we put in session to dict for REST API. This will
    use the id to look up current data about task to return to user.

    Args:
        task (dict): Initial data about task stored in session.
    Returns:
        dict: Updated data about task.
    """
    initial_state = initial_data['initial_state']
    task_id = initial_data['id']
    task_type = initial_data['task_type']
    task_info = initial_data['task_info']

    # initial_state is a workaround for EagerResult used in testing.
    # In production initial_state should usually be pending.
    async_result = AsyncResult(task_id)
    if initial_state == SUCCESS:
        state, outcome = "success", initial_data['result']
    elif initial_state in (FAILURE, REVOKED):
        state, outcome = "failure", initial_data['result']
    elif async_result.successful():
        state, outcome = "success", async_result.get()
    elif async_result.failed():
        state, outcome = "failure", {'error': str(async_result.result)}
    else:
        state, outcome = "processing", None

    return {
        "id": task_id,
        "status": state,
        "result": outcome,
        "task_type": task_type,
        "task_info": task_info,
    }
def get_task(project_slug, task_id):
    """Get task information.

    Returns a JSON response with the task's id, status and return value.
    The return value is the task result on success, an error description
    on known failures, and None while the task is still running.
    """
    job = AsyncResult(task_id, app=celery)
    # Default to None so retval is always defined, even if a failed job's
    # get() raises neither of the exception types handled below (the
    # original left `result` unbound in that case -> NameError).
    result = None
    if job.successful():
        result = job.get()
    elif job.failed():
        # get() re-raises the task's exception; translate the known ones
        # into a readable retval. Anything else propagates to the caller.
        try:
            job.get()
        except RemoteExecuteError as exception:
            result = exception.message
        except URLError as exception:
            result = 'URLError: ' + str(exception)
    return jsonify(task=dict(
        id=task_id,
        status=job.status,
        retval=result,
    ))