def pause(task_id):
    """Mark the given Celery task as PAUSED and report its current backend state."""
    # Unbound call: the Celery app stands in for ``self`` so we can poke the
    # result backend for an arbitrary task id.
    Task.update_state(self=celery, task_id=task_id, state='PAUSED')
    current_state = AsyncResult(task_id, app=celery).state
    payload = {'task_id': task_id, 'status': str(current_state)}
    return jsonify(payload), 200
def resume_task(task_id):
    """Flip the task back to PROCESSING and re-enqueue the CSV upload where it stopped."""
    Task.update_state(self=celery, task_id=task_id, state='PROCESSING')
    path, row = get_file_info(task_id)
    # Hand the work back to the csv_upload worker, resuming from the last
    # processed row of the original file.
    csv_upload.delay(
        path=path,
        start_row=row,
        resume=True,
        task_id=task_id,
    )
    return jsonify({'task_id': task_id, 'status': 'PROCESSING'}), 200
def revoke_operation(task_id):
    """Set the task's state to REVOKED and echo the resulting state back."""
    celery_task.update_state(self=celery, task_id=task_id, state='REVOKED')
    body = {
        'task_id': task_id,
        'status': AsyncResult(task_id, app=celery).state,
        'message': 'Task Stopped!!',
    }
    return jsonify(body), 200
def pause_operation(task_id):
    """Set the task's state to PAUSING and echo the resulting state back."""
    celery_task.update_state(self=celery, task_id=task_id, state='PAUSING')
    body = {
        'task_id': task_id,
        'status': AsyncResult(task_id, app=celery).state,
        'message': 'Task Paused!!',
    }
    return jsonify(body), 200
async def resume(task_id: str, background_task: BackgroundTasks):
    """Resume a paused Celery task and re-attach the message listener.

    :param task_id: id of the Celery task to resume.
    :param background_task: FastAPI background-task queue the listener is
        added to.
    :return: dict with a human-readable status message.
    """
    import asyncio  # local import: only needed for the polling delay below

    task = AsyncResult(task_id)
    if task.status != "PAUSE":
        # Not paused: nothing to resume, just re-attach the listener.
        background_task.add_task(background_on_message, task)
        return {"message": "task already running"}

    # Unbound call: the Celery app stands in for ``self``.
    Task.update_state(self=test_celery, task_id=task_id, state="RESUME")

    # Poll until the worker acknowledges the resume.  The original version
    # busy-waited here without awaiting, which starved the event loop;
    # yield control between polls instead.
    while True:
        task = AsyncResult(task_id)
        if task.status != "PAUSE":
            break
        await asyncio.sleep(0.1)

    background_task.add_task(background_on_message, task)
    return {"message": "task resumed"}
def test_only_one_running_task(self):
    """The lock rejects a second concurrent run, and any run while the worker stops."""
    celery_task = CeleryTask()
    celery_task.name = "Test Name"

    # While the lock is held, a second acquisition must raise.
    with only_one_running_task(celery_task):
        with self.assertRaises(LockError), only_one_running_task(celery_task):
            pass

    # While the worker is flagged as stopping, acquisition must raise with
    # the specific message.
    with mock.patch.dict(WORKER_STOPPED, {True: True}):
        with self.assertRaisesMessage(LockError, "Worker is stopping"), only_one_running_task(celery_task):
            pass
def __init__(self, *args, **kwargs):
    """Set up per-process task state.

    A task object is initialized once per worker process, not once per
    task invocation.
    """
    Task.__init__(self, *args, **kwargs)

    # Lazily-populated resources; filled in later, not at construction time.
    self.__androscripts = None
    self.__script_hashes = None
    self.__apk_storage = None
    self.__result_database_storage = None

    # Prefetch APKs right before each task run.
    task_prerun.connect(self.prefetch_apk)

    log.debug("%s init", self)
def terminate(task_id):
    """Revoke a task: purge its CSV rows, record the termination, and sync state.

    :param task_id: id of the task to terminate.
    :return: JSON payload with the task id and its post-update Celery state,
        plus HTTP 200.
    """
    # Remove the csv entries produced by the task being terminated.
    delete_rows(task_id)

    # Push REVOKED into the Celery result backend (Task.update_state is
    # invoked unbound, with the app standing in for ``self``).
    Task.update_state(self=celery, task_id=task_id, state='REVOKED')

    # Record the termination in the TerminatedTasks table.
    db.session.add(TerminatedTasks(task_id=task_id))
    db.session.commit()

    # Mirror the state in the Tasks table.  Guard against a missing row so a
    # stale/unknown task_id does not raise AttributeError on ``None``.
    task = Tasks.query.filter_by(id=task_id).first()
    if task is not None:
        task.state = 'REVOKED'
        db.session.commit()

    return jsonify({
        'task_id': task_id,
        'status': str(AsyncResult(task_id, app=celery).state),
    }), 200
def apply_async(self, *args, **kwargs):
    """Dispatch the task, timing the enqueue under the 'jobs.delay' metric."""
    with metrics.timer('jobs.delay', instance=self.name):
        result = Task.apply_async(self, *args, **kwargs)
    return result
def cancel_task(tid):
    """Request cancellation of the long-running task identified by ``tid``."""
    # Unbound call: the task object stands in for ``self``.
    Task.update_state(self=long_task, task_id=tid, state='CANCEL')
    message = 'Your task will be cancelled !'
    return message
def resume_task(tid):
    """Request that the long-running task identified by ``tid`` resume."""
    # Unbound call: the task object stands in for ``self``.
    Task.update_state(self=long_task, task_id=tid, state='RESUME')
    message = 'Your task will be resumed !'
    return message
def pause_task(tid):
    """Request that the long-running task identified by ``tid`` pause."""
    # Unbound call: the task object stands in for ``self``.
    Task.update_state(self=long_task, task_id=tid, state='PAUSING')
    message = 'Your task will be paused !'
    return message
def parse(  # noqa: C901
    task: Task,
    source_name: str,
    contents: str,
    parse_kwargs: Optional[Mapping[str, bool]] = None,
    user_id: Optional[int] = None,
    enrich_citations: bool = True,
):
    """Parse a BEL document and store in the database.

    :param task: the Celery task this function runs as; used to publish the
        STARTED state when not called directly.
    :param source_name: human-readable name of the BEL source document.
    :param contents: the full BEL document text.
    :param parse_kwargs: optional flags forwarded to the Report
        (citation_clearing, infer_origin, identifier_validation, public).
    :param user_id: id of the submitting user, stored on the Report.
    :param enrich_citations: accepted for API compatibility; not referenced
        in this body (TODO confirm whether it should gate
        enrich_pubmed_citations below).
    :return: dict(network_id=..., report_id=...) on success, -1 if the final
        commit fails, or whatever ``finish_parsing`` returns on a
        parse/upload failure.
    """
    from .core import manager

    # Publish progress only when running as a real task, not a direct call.
    if not task.request.called_directly:
        task.update_state(state='STARTED')

    t = time.time()

    _encoding = 'utf-8'
    source_bytes = contents.encode(_encoding)

    # Fill in defaults for the report flags.
    if parse_kwargs is None:
        parse_kwargs = {}
    parse_kwargs.setdefault('citation_clearing', True)
    parse_kwargs.setdefault('infer_origin', False)
    parse_kwargs.setdefault('identifier_validation', True)
    # When private uploads are disallowed, force public; otherwise only
    # default it (the caller may have set public explicitly).
    if current_app.config['DISALLOW_PRIVATE']:
        parse_kwargs['public'] = True
    else:
        parse_kwargs.setdefault('public', True)

    # The report row records the raw source and its hash for provenance.
    report = Report(
        user_id=user_id,
        source_name=source_name,
        source=source_bytes,
        source_hash=hashlib.sha512(source_bytes).hexdigest(),
        encoding=_encoding,
        **parse_kwargs,
    )
    manager.session.add(report)

    try:
        graph = parse_graph(
            report=report,
            manager=manager,
            task=task,
        )
    except (ResourceError, requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
        # Network/resource failures get a dedicated message.
        message = f'Parsing Failed for {source_name}. Connection to resource could not be established: {e}'
        return finish_parsing(manager.session, report, 'Parsing Failed.', message)
    except InconsistentDefinitionError as e:
        message = f'Parsing Failed for {source_name} because {e.definition} was redefined on line {e.line_number}'
        return finish_parsing(manager.session, report, 'Parsing Failed.', message)
    except Exception as e:
        message = f'Parsing Failed for {source_name} from a general error: {e}'
        return finish_parsing(manager.session, report, 'Parsing Failed.', message)

    # Integrity checking: the BEL document must declare a name and version.
    if not graph.name:
        return finish_parsing(
            manager.session,
            report,
            'Parsing Failed.',
            f'Parsing Failed for {source_name} because SET DOCUMENT Name was missing.',
        )
    if not graph.version:
        return finish_parsing(
            manager.session,
            report,
            'Parsing Failed.',
            f'Parsing Failed for {source_name} because SET DOCUMENT Version was missing.',
        )

    # Enrichment
    enrich_pubmed_citations(
        manager, graph)

    # this makes a commit so we need to store the identifier
    report_id = report.id

    if report.infer_origin:
        enrich_protein_and_rna_origins(graph)

    send_graph_summary_mail(graph, report, time.time() - t)

    # TODO split into second task
    celery_logger.info(f'inserting {graph} with {manager.engine.url}')

    try:
        network = manager.insert_graph(graph)
    except IntegrityError as e:
        manager.session.rollback()
        return finish_parsing(
            manager.session,
            report,
            'Upload Failed.',
            f'Upload Failed for {source_name}: {e}',
        )
    except OperationalError:
        manager.session.rollback()
        return finish_parsing(
            manager.session,
            report,
            'Upload Failed.',
            f'Upload Failed for {source_name} because database is locked',
        )
    except Exception as e:
        manager.session.rollback()
        return finish_parsing(
            manager.session,
            report,
            'Upload Failed.',
            f'Upload Failed for {source_name}: {e}',
        )

    # save in a variable because these get thrown away after commit
    network_id = network.id
    celery_logger.info(f'Stored network={network_id}.')

    celery_logger.info(f'Filling report={report_id} for network={network_id}')
    fill_out_report(graph=graph, network=network, report=report)
    report.time = time.time() - t

    celery_logger.info(f'Committing report={report_id} for network={network_id}')
    try:
        manager.session.commit()
    except Exception as e:
        manager.session.rollback()
        message = f'Problem filling out report={report_id} for {source_name}: {e}'
        make_mail(report, 'Filling out report failed', message)
        celery_logger.exception(message)
        return -1
    else:
        make_mail(report, 'Parsing succeeded', f'Parsing succeeded for {source_name}')
        return dict(network_id=network_id, report_id=report_id)
    finally:
        # Always release the session, whether the commit succeeded or not.
        manager.session.close()