def task_status(task_id): pretask_id = request.args["parent-0"] # i.e. preprocess response = {} if pretask_id == "uploadpred": response = { 'state': 'SUCCESS', 'current': 1, 'total': 1, 'status': '', 'result': '', 'taskid': task_id } else: # see which task is active: task = celery.AsyncResult(pretask_id) # preprocess if task.state == 'SUCCESS': # if preprocess is finished then check prediction task = celery.AsyncResult(task_id) # prediction if task.state == 'PENDING': response = { 'state': task.state, 'current': 0, 'total': 1, 'status': 'Pending...' } task.forget() else: #task.state == 'PROGRESS' or 'SUCCESS'? # to handle worker die # TODO: detect when worker dies and handle if isinstance(task.info, billiard.exceptions.WorkerLostError): print( "Worker is killed for %s, returning an error message to the user..." % str(task_id)) response = { 'state': 'ERROR', 'error': 'There was an error running the job. The webserver authors have recorded the error and will work to address it. Please re-submit your job. We apologize for the inconvenience!' } else: response = { 'state': task.state, 'current': task.info.get('current', 0), 'total': task.info.get('total', 1), 'status': task.info.get('status', '') } if 'error' in task.info: response['error'] = task.info['error'] if 'result' in task.info: response['result'] = task.info['result'] response['taskid'] = task.info['taskid'] # TODO: need to forget task #task.forget() #??? not sure if needed return jsonify(response)
def task_status_json(task_id):
    """
    JSON API endpoint to view the state of a task

    :param task_id: celery task id to inspect
    :return: JSON with state, status and (when available) error/result/data
    """
    task = celery.AsyncResult(task_id)
    if task.state == 'PENDING':
        response = {'state': task.state, 'status': 'Pending...'}
        response["data"] = task.info  # None for unknown/pending tasks
    elif task.state != 'FAILURE':
        # BUG FIX: task.info is only a dict when the worker published custom
        # metadata; guard so states such as STARTED (info=None) don't crash.
        info = task.info if isinstance(task.info, dict) else {}
        response = {'state': task.state, 'status': info.get('status', '')}
        if 'error' in info:
            response['error'] = info['error']
        if 'result' in info:
            response['result'] = info['result']
        # update the response with the result of the task
        response["data"] = task.info
    else:
        # something went wrong in the background job
        # BUG FIX: on FAILURE task.info is the raised exception object; the
        # original also stored it under "data", which jsonify cannot
        # serialize — stringify it instead.
        exc_text = str(task.info)  # this is the exception raised
        response = {
            'state': task.state,
            'status': exc_text,
        }
        response["data"] = exc_text
    return jsonify(response)
def get(self, task_id):
    """
    GET request method for class GetTaskStatus
    URL: '/status/<task_id>'
    :param task_id: task id returned after starting POST request GetUrlContents
    :return: Task status
    """
    result = celery.AsyncResult(task_id)
    state = result.state
    # An unknown id reports PENDING in celery; surface that as MISSING.
    display = 'MISSING' if state == 'PENDING' else state
    return display, RESULT_STATES_CODES.get(state)
def task_status(task_id):
    """Return a small dict describing a task's state and coarse progress."""
    state = celery.AsyncResult(task_id).state
    if state == 'PROGRESS':
        return {'state': 'PROGRESS',
                'progress': celery.AsyncResult(task_id).info['progress']}
    fixed_progress = {'PENDING': "0", 'SUCCESS': "100"}
    if state in fixed_progress:
        return {'state': state, 'progress': fixed_progress[state]}
    # NOTE(review): this branch uses the key 'status' where the others use
    # 'progress' — looks inconsistent; confirm against the client code.
    return {'state': 'FAILURE', 'status': "0"}
def get(self) -> Dict:
    """
    Get status for celery task

    Returns:
        Dict: Status of celery task
    """
    args = PageTaskStatus.parser.parse_args()
    task = celery.AsyncResult(args["task_id"])
    return {"status": task.state}
def raw_progress(self, task_id):
    """Return JSON {'status', 'progress'} with progress as an integer percent.

    :param task_id: celery task id to inspect
    """
    celery_task = celery.AsyncResult(task_id)
    status = celery_task.status
    # progress is a fraction in [0, 1]; it is scaled to a percentage below
    # (the sibling `progress` view treats the worker's 'progress' meta the
    # same way).
    progress = 0
    if status == 'SUCCESS':
        # BUG FIX: was `progress = 100`, which the *100 below turned into
        # a reported 10000 percent.
        progress = 1
    elif status == 'PROGRESS':
        progress = celery_task.result.get('progress', 0)
    return jsonify({'status': status, 'progress': int(progress * 100)})
def task_status(task_id): pretask_id = request.args["parent-0"] # i.e. preprocess response = {} if pretask_id == "uploadpred": response = { 'state': 'SUCCESS', 'current': 1, 'total': 1, 'status': '', 'result': '', 'taskid': task_id } else: # see which task is active: task = celery.AsyncResult(pretask_id) # preprocess if task.state == 'SUCCESS': # if preprocess is finished then check prediction task = celery.AsyncResult(task_id) # prediction if task.state == 'PENDING': response = { 'state': task.state, 'current': 0, 'total': 1, 'status': 'Pending...' } task.forget() else: #task.state == 'PROGRESS' or 'SUCCESS'? response = { 'state': task.state, 'current': task.info.get('current', 0), 'total': task.info.get('total', 1), 'status': task.info.get('status', '') } if 'error' in task.info: response['error'] = task.info['error'] if 'result' in task.info: response['result'] = task.info['result'] response['taskid'] = task.info['taskid'] # TODO: need to forget task #task.forget() #??? not sure if needed return jsonify(response)
def get(self, task_id):
    """Describe a celery task; include exception details for failed states.

    :param task_id: celery task id to inspect
    :return: {'task': {...}} payload with id, status and, on failure,
        the exception class name and traceback
    """
    from app import celery
    result = celery.AsyncResult(task_id)
    state = result.state
    retval = result.result
    payload = {'id': task_id, 'status': state}
    if state in states.EXCEPTION_STATES:
        payload['exc'] = get_full_cls_name(retval.__class__)
        payload['traceback'] = result.traceback
    return {'task': payload}
def test(task_id):
    """Demo endpoint: enqueue add_together when no id is given, else report it.

    :param task_id: existing task id, or None to launch a new task
    :return: JSON with either the new task id or the queried task's state
    """
    from app import celery
    from flask import jsonify
    if task_id is None:
        from app.jobs import add_together
        payload = {'result': add_together.delay(2, 3).id}
    else:
        task = celery.AsyncResult(task_id)
        payload = {'state': task.state}
        if task.ready():
            payload['result'] = task.result
    return jsonify(payload)
def check_if_complete():
    """Poll a celery task and, when finished, return its unpickled plots.

    Reads ``task_id`` from the query string; responds with the task state
    and meta, plus topic/frame plot data once the task is ready.
    """
    task_id = request.args.get('task_id')
    if not task_id:
        # BUG FIX: the original fell through and returned None (a 500 in
        # Flask) when task_id was absent.
        return jsonify(error='task_id query parameter is required'), 400
    async_res = celery.AsyncResult(task_id)
    if async_res.ready():
        print('yay')
        # SECURITY NOTE: pickle.loads on the task payload — only safe because
        # the payload is produced by our own worker, never by user input.
        result = pickle.loads(async_res.get())
        topic_plot = result['topic_plot']
        frame_plot = result['frame_plot']
        return jsonify(state=async_res.state,
                       meta=async_res.info,
                       topic_plot=topic_plot,
                       frame_plot=frame_plot)
    print('nay')
    return jsonify(state=async_res.state, meta=async_res.info)
def proxy(path):
    """Caching proxy endpoint: serve a cached upstream response from redis,
    or schedule/track a celery task that fetches it.

    :param path: upstream request path (captured from the URL rule)
    :return: the cached upstream response, or a JSON status message about
        the fetching task
    """
    method = request.method
    # Cache key is derived from method + path; the same hash doubles as the
    # celery task id so at most one fetch per request signature is in flight.
    hash = utils.get_hash_of_request(method, path)
    response_from_redis = None
    if R_SERVER.get(hash):
        # Cache hit: refresh the TTL and deserialize the stored response.
        # NOTE(review): pickle.loads on redis content — safe only as long as
        # redis is written exclusively by our own worker.
        R_SERVER.expire(hash, utils.EXPIRE_TIME_SEC)
        response_from_redis = pickle.loads(R_SERVER.get(hash))
    else:
        task = celery.AsyncResult(hash)
        if task.status == 'PENDING' or task.status == 'SUCCESS':
            # No task yet (PENDING == unknown id in celery) or a finished one:
            # drop any stale result and enqueue a fresh fetch under this hash.
            task.forget()
            response = make_response(
                jsonify({
                    'PROXY INFO': 'Task was accepted for running',
                }), 200)
            task = celery_tasks.make_request_to_out.apply_async(
                args=(method, path),
                task_id=hash,
            )
        elif task.status == 'SENT':
            # Already queued by a previous request; just report progress.
            response = make_response(
                jsonify({
                    'PROXY INFO': 'Task has already been accepted for running. Wait for proccessing...',
                }), 200)
        else:
            # FAILURE/REVOKED/etc.: clear the result and report the details.
            task.forget()
            response = make_response(
                jsonify({
                    'PROXY WARNING': 'Task finished with another status',
                    'PROXY REQUEST DETAILS': {
                        'PROXY TASK STATUS': task.status,
                        'PROXY METHOD': method,
                        'PROXY PATH': path,
                    }
                }), 200)
    if response_from_redis is not None:
        # Rebuild a Flask response from the cached upstream content.
        response = Response(
            response_from_redis.content,
            response_from_redis.status_code,
        )
    return response
def collector(project_name, network, collector_id, task_id=None): """ Loads the detail / control page for a collector """ # Redirects an admin back to the homepage b/c nothing is loaded into the session yet if g.project is None: flash( u'Please navigate to the New Collector page from your homepage panel.' ) return redirect(url_for('index')) form = ProcessControlForm(request.form) # Loads collector info for the page db = DB() resp = db.get_collector_detail(g.project['project_id'], collector_id, project_name) collector = resp['collector'] # Loads active status resp = db.check_process_status(g.project['project_id'], 'collect', collector_id=collector_id) active_status = resp['message'] # If a start/stop/restart is in progress, display the status task_status = None if task_id: resp = celery.AsyncResult(task_id) if resp.state == 'PENDING': task_status = 'Collector start/shutdown still in progress...' db.set_active_collectors('stop', collector_id, 'config') else: task_status = 'Collector start/shutdown completed.' db.set_active_collectors(session['command'], collector_id, 'config') return render_template('collector.html', collector=collector, active_status=active_status, form=form, task_status=task_status, project_name=project_name, network=network, projectid=g.project['project_id'])
def print_entry(self, result):
    """Build a table row for a download record.

    :param result: record exposing download_name, created_date and task_id
    :return: [download_name, created_date, celery task state]
    """
    # (Removed a large block of commented-out code left over from a
    # different, user-table version of this method.)
    download_name = result.download_name
    created_date = result.created_date
    progress = celery.AsyncResult(result.task_id).state
    return [download_name, created_date, progress]
def progress(self, task_id):
    """Progress page for a background mutation search.

    On SUCCESS, persists the finished search into its dataset (if one was
    requested) and redirects to the results view; otherwise renders the
    progress template.

    :param task_id: celery id of the running search task
    """
    celery_task = celery.AsyncResult(task_id)
    status = celery_task.status
    if status == 'SUCCESS':
        # The task result is a (search, dataset_uri) pair; dataset_uri is
        # falsy when the user did not ask to store results on the server.
        mutation_search, dataset_uri = celery_task.result
        if dataset_uri:
            dataset = UsersMutationsDataset.query.filter_by(
                uri=dataset_uri).one()
            dataset.data = mutation_search
            db.session.commit()
        return redirect(url_for('SearchView:mutations', task_id=task_id))
    # 'progress' meta is a fraction in [0, 1]; scaled to percent for display.
    progress = celery_task.result.get('progress',
                                      0) if status == 'PROGRESS' else 0
    return make_response(
        template('search/progress.html',
                 task=celery_task,
                 progress=int(progress * 100),
                 status=status))
def specific_task(id):
    """Render the status page after refreshing every task's celery state.

    :param id: id of the task to highlight in the template
    """
    tasks = Task.query.all()
    # One AsyncResult lookup per row; acceptable for small task lists.
    for entry in tasks:
        entry.status = celery.AsyncResult(entry.key).status
    return render_template('status.html', id=id, tasks=tasks)
def network_home(project_name, network, task_id=None):
    """ Renders a project account's homepage

    :param project_name: name of the project being viewed
    :param network: network whose collectors/processor/inserter are shown
    :param task_id: optional celery id of an in-flight start/stop/restart
    """
    # Loads project details if an admin
    if g.admin is not None:
        _aload_project(project_name)
    # Grabs collectors for the given network
    if not g.project['collectors']:
        collectors = None
    else:
        collectors = [
            c for c in g.project['collectors'] if c['network'] == network
        ]
    # BUG FIX: guard the iteration and len() — collectors is None when the
    # project has no collectors, which previously raised TypeError.
    for collector in collectors or []:
        collector['num_terms'] = 0
        if collector['terms_list'] is not None:
            collector['num_terms'] = len(collector['terms_list'])
    g.project['num_collectors'] = len(collectors or [])
    processor_form = ProcessControlForm(request.form)
    inserter_form = ProcessControlForm(request.form)
    # Loads processor active status
    db = DB()
    resp = db.check_process_status(g.project['project_id'],
                                   'process',
                                   module=network)
    processor_active_status = resp['message']
    # Loads inserter active status
    resp = db.check_process_status(g.project['project_id'],
                                   'insert',
                                   module=network)
    inserter_active_status = resp['message']
    # Loads count of tweets in the storage DB
    count = db.get_storage_counts(g.project['project_id'], network)
    # If a start/stop/restart is in progress, display the status
    task_status = None
    if task_id:
        resp = celery.AsyncResult(task_id)
        # BUG FIX: these messages were assigned to an unused local
        # (processor_task_status), so the template always received None.
        if resp.state == 'PENDING':
            task_status = 'Processor/Inserter start/shutdown still in progress...'
        else:
            task_status = 'Processor/Inserter start/shutdown completed.'
    return render_template('network_home.html',
                           network=network,
                           collectors=collectors,
                           project_detail=g.project,
                           processor_active_status=processor_active_status,
                           inserter_active_status=inserter_active_status,
                           task_status=task_status,
                           count=count,
                           processor_form=processor_form,
                           inserter_form=inserter_form)
def get_status(self) -> str:
    """Return the cached terminal status, refreshing non-terminal ones.

    SUCCESS/FAILURE are final and served from the model; anything else
    (including an unset status) is re-read from celery and persisted.
    """
    # Simplified: `not self.status` was redundant — falsy values are
    # already excluded by the membership test.
    if self.status not in ('SUCCESS', 'FAILURE'):
        self.status = celery.AsyncResult(self.task_id).state
        db.session.commit()
    return self.status
def mutations(self):
    """Render search form and results (if any) for proteins or mutations

    Handles three entry modes:
    - POST: run a new search, synchronously or via celery depending on the
      USE_CELERY config flag, optionally storing results as a dataset;
    - GET with ?task_id=: show results of a finished celery search;
    - plain GET: show an empty search form.
    """
    task_id = request.args.get('task_id', None)
    use_celery = current_app.config.get('USE_CELERY', False)
    filter_manager = SearchViewFilters()
    if request.method == 'POST':
        textarea_query = request.form.get('mutations', False)
        vcf_file = request.files.get('vcf-file', False)
        store_on_server = request.form.get('store_on_server', False)
        if not use_celery:
            # Synchronous search: results are available immediately below.
            mutation_search = MutationSearch(vcf_file, textarea_query,
                                             filter_manager)
        if store_on_server:
            name = request.form.get('dataset_name', None)
            if not name:
                name = 'Custom Dataset'
            if current_user.is_authenticated:
                user = current_user
            else:
                user = None
                flash(
                    'To browse uploaded mutations easily in the '
                    'future, please <a href="/register">register</a> or <a href="login">log in</a>.',
                    'warning')
            # In celery mode data is filled in later by the task (see the
            # `progress` view); the dataset row is created up front so the
            # task can address it by uri.
            dataset = UsersMutationsDataset(
                name=name,
                data=mutation_search if not use_celery else None,
                owner=user)
            db.session.add(dataset)
            db.session.commit()
        if use_celery:
            mutation_search = search_task.delay(
                # vcf_file is not serializable but list of lines is
                vcf_file.readlines() if vcf_file else None,
                textarea_query,
                filter_manager,
                dataset.uri if store_on_server else None)
            return redirect(
                url_for('SearchView:progress',
                        task_id=mutation_search.task_id))
    elif task_id:
        celery_task = celery.AsyncResult(task_id)
        if celery_task.status == 'PENDING':
            # Unknown id: celery reports PENDING for expired/nonexistent tasks.
            flash(
                'This search either expired or does not exist. Please try specifying a new one',
                'warning')
            return redirect(url_for('SearchView:mutations'))
        mutation_search, dataset_uri = celery_task.result
        if dataset_uri:
            url = url_for('SearchView:user_mutations',
                          uri=dataset_uri,
                          _external=True)
            flash(
                'Your mutations have been saved on the server.'
                '<p>You can access the results later using following URL: '
                '<a href="' + url + '">' + url + '</a></p>', 'success')
        celery_task.forget()
    else:
        # Plain GET: empty search (renders the blank form).
        mutation_search = MutationSearch()
    response = make_response(
        template('search/index.html',
                 target='mutations',
                 mutation_types=Mutation.types,
                 hidden_results_cnt=mutation_search.hidden_results_cnt,
                 results=mutation_search.results,
                 widgets=make_widgets(filter_manager),
                 without_mutations=mutation_search.without_mutations,
                 query=mutation_search.query,
                 badly_formatted=mutation_search.badly_formatted))
    return response
def get(self, result_id):
    """Wait for the task's result and return it as a string.

    :param result_id: celery task id whose result is fetched (blocking)
    """
    async_result = celery.AsyncResult(id=result_id)
    value = str(async_result.get())
    return jsonify({'code': 200, 'result': value})
def check_task(task_id: str) -> str:
    """Return 'PENDING' while the task is queued, else its result as text."""
    result = celery.AsyncResult(task_id)
    if result.state != states.PENDING:
        return str(result.result)
    return result.state