def get_download_context(download_id, message=None, require_result=False):
    """Build the template context describing the state of a download task.

    :param download_id: identifier used to look up the ``DownloadBase`` record.
    :param message: optional custom message passed through to the context.
    :param require_result: If set to True, is_ready will not be set to True
        unless result is also available.
    :raises TaskFailedError: if the underlying task reports a failure state.
    :returns: dict of task state flags, progress, and download metadata.
    """
    download_data = DownloadBase.get(download_id)
    if download_data is None:
        # Fall back to a bare stub so the rest of the context can still render.
        download_data = DownloadBase(download_id=download_id)
    task = download_data.task
    task_status = get_task_status(
        task, is_multiple_download_task=isinstance(download_data, MultipleTaskDownload))
    if task_status.failed():
        raise TaskFailedError(task_status.error)
    if require_result:
        is_ready = task_status.success() and task_status.result is not None
    else:
        is_ready = task_status.success()
    return {
        'result': task_status.result,
        'error': task_status.error,
        'is_ready': is_ready,
        'is_alive': is_alive() if heartbeat_enabled() else True,
        'progress': task_status.progress._asdict(),
        'download_id': download_id,
        'allow_dropbox_sync': isinstance(download_data, FileDownload) and download_data.use_transfer,
        # download_data can never be None here (a stub is created above),
        # so the previous `is not None` guard was dead code.
        'has_file': download_data.has_file,
        'custom_message': message,
    }
def rebuild_saved_export(export_instance_id, last_access_cutoff=None, manual=False):
    """Kicks off a celery task to rebuild the export.

    If this is called while another one is already running for the same export
    instance, it will just noop.

    :param export_instance_id: id of the export instance to rebuild.
    :param last_access_cutoff: passed through to the rebuild task.
    :param manual: True when a user requested the rebuild; a pending task is
        cancelled first and the work is routed to the download queue.
    """
    download_data = _get_saved_export_download_data(export_instance_id)
    status = get_task_status(download_data.task)
    if manual:
        if status.not_started() or status.missing():
            # cancel pending task before kicking off a new one; the task
            # handle may be absent (e.g. "missing" status), so guard
            # before calling revoke to avoid an AttributeError on None
            if download_data.task is not None:
                download_data.task.revoke()
        if status.started():
            return  # noop - make the user wait before starting a new one
    else:
        if status.not_started() or status.started():
            return  # noop - one's already on the way

    # associate task with the export instance
    download_data.set_task(
        _start_export_task.apply_async(
            args=[export_instance_id, last_access_cutoff],
            queue=EXPORT_DOWNLOAD_QUEUE if manual else SAVED_EXPORTS_QUEUE,
        ))
def get_download_context(download_id, message=None, require_result=False):
    """Build the template context describing the state of a download task.

    :param download_id: identifier used to look up the ``DownloadBase`` record.
    :param message: optional custom message passed through to the context.
    :param require_result: If set to True, is_ready will not be set to True
        unless result is also available.
    :raises TaskFailedError: if the underlying task reports a failure state;
        carries the wrapped exception's class name when one is available.
    :returns: dict of task state flags, progress, and download metadata.
    """
    download_data = DownloadBase.get(download_id)
    if download_data is None:
        # Fall back to a bare stub so the rest of the context can still render.
        download_data = DownloadBase(download_id=download_id)
    task = download_data.task
    task_status = get_task_status(
        task, is_multiple_download_task=isinstance(download_data, MultipleTaskDownload))
    if task_status.failed():
        # Celery replaces exceptions with a wrapped one that we can't directly import
        # so I think our best choice is to match off the name, even though that's hacky
        # (guard task, which may be None when no task is associated)
        exception_name = (task.result.__class__.__name__
                          if task is not None and isinstance(task.result, Exception)
                          else None)
        raise TaskFailedError(task_status.error, exception_name=exception_name)
    if require_result:
        is_ready = task_status.success() and task_status.result is not None
    else:
        is_ready = task_status.success()
    return {
        'result': task_status.result,
        'error': task_status.error,
        'is_ready': is_ready,
        'is_alive': _is_alive(),
        'progress': task_status.progress._asdict(),
        'download_id': download_id,
        'allow_dropbox_sync': isinstance(download_data, FileDownload) and download_data.use_transfer,
        # download_data can never be None here (a stub is created above),
        # so the previous `is not None` guard was dead code.
        'has_file': download_data.has_file,
        'custom_message': message,
    }
def get_task_status_json(task_id):
    """Return a serializable ``TaskStatus`` for the given task id.

    Falls back to a generic failed status when the task metadata itself
    cannot be read.
    """
    try:
        task_status = get_task_status(get_task(task_id))
    except Exception:
        # There was a period of time where the format of metadata we were setting
        # from the task would cause a celery-internal failure
        notify_exception(None, "Error fetching task")
        return TaskStatus(
            state=STATES.failed,
            progress=None,
            result=TaskStatusResult(
                errors=[TaskStatusResultError(description='Unknown Failure')]),
        )

    progress = TaskStatusProgress(percent=task_status.progress.percent)
    if task_status.state != STATES.failed:
        return TaskStatus(
            state=task_status.state,
            progress=progress,
            result=normalize_task_status_result(task_status.result),
        )

    # A failed task may carry a single error or a sequence; wrap uniformly.
    error_list = task_status.error
    if not isinstance(error_list, (list, tuple)):
        error_list = [error_list]
    return TaskStatus(
        state=task_status.state,
        progress=progress,
        result=TaskStatusResult(
            errors=[TaskStatusResultError(description=err) for err in error_list]),
    )
def get_download_context(download_id, message=None, require_result=False):
    """Build the template context describing the state of a download task.

    :param download_id: identifier used to look up the ``DownloadBase`` record.
    :param message: optional custom message passed through to the context.
    :param require_result: If set to True, is_ready will not be set to True
        unless result is also available.
    :raises TaskFailedError: if the underlying task reports a failure state;
        carries the wrapped exception's class name when one is available.
    :returns: dict of task state flags, progress, and download metadata.
    """
    download_data = DownloadBase.get(download_id)
    if download_data is None:
        # Fall back to a bare stub so the rest of the context can still render.
        download_data = DownloadBase(download_id=download_id)
    task = download_data.task
    task_status = get_task_status(
        task, is_multiple_download_task=isinstance(download_data, MultipleTaskDownload))
    if task_status.failed():
        # Celery replaces exceptions with a wrapped one that we can't directly import
        # so I think our best choice is to match off the name, even though that's hacky
        # (guard task, which may be None when no task is associated)
        exception_name = (task.result.__class__.__name__
                          if task is not None and isinstance(task.result, Exception)
                          else None)
        raise TaskFailedError(task_status.error, exception_name=exception_name)
    if require_result:
        is_ready = task_status.success() and task_status.result is not None
    else:
        is_ready = task_status.success()
    return {
        'result': task_status.result,
        'error': task_status.error,
        'is_ready': is_ready,
        'is_alive': is_alive() if heartbeat_enabled() else True,
        'progress': task_status.progress._asdict(),
        'download_id': download_id,
        'allow_dropbox_sync': isinstance(download_data, FileDownload) and download_data.use_transfer,
        # download_data can never be None here (a stub is created above),
        # so the previous `is not None` guard was dead code.
        'has_file': download_data.has_file,
        'custom_message': message,
    }
def test_not_missing(self):
    """Task metadata in PROGRESS maps to the 'started' state with progress figures."""
    meta = {
        'children': [],
        'date_done': datetime.datetime(2020, 4, 7, 14, 37, 1, 926615),
        'result': {'current': 17076, 'total': 10565489},
        'status': 'PROGRESS',
        'task_args': None,
        'task_id': '2243626c-f725-442e-b257-b018a0860d1b',
        'task_kwargs': None,
        'task_name': None,
        'traceback': None,
    }
    expected = TaskStatus(
        result=None,
        error=None,
        state=STATES.started,
        progress=TaskProgress(
            current=17076,
            total=10565489,
            percent=100 * 17076 // 10565489,
            error=False,
            error_message='',
        ),
    )
    self.assertEqual(get_task_status(self.MockTask(task_meta=meta)), expected)
def get_saved_export_task_status(export_instance_id):
    """Get info on the ongoing rebuild task if one exists.

    (This is built with the assumption that there shouldn't be multiple
    rebuilds in progress for a single export instance)
    """
    task = _get_saved_export_download_data(export_instance_id).task
    return get_task_status(task)
def get_task_status_json(task_id):
    """Look up a task by id and convert its status into a serializable ``TaskStatus``."""
    status = get_task_status(get_task(task_id))
    progress = TaskStatusProgress(percent=status.progress.percent)
    result = normalize_task_status_result(status.result)
    return TaskStatus(state=status.state, progress=progress, result=result)
def get_task_status_json(task_id):
    """Serialize the current status of the task identified by ``task_id``."""
    status = get_task_status(get_task(task_id))
    percent_done = status.progress.percent
    return TaskStatus(
        state=status.state,
        progress=TaskStatusProgress(percent=percent_done),
        result=normalize_task_status_result(status.result),
    )
def get(self, request, *args, **kwargs):
    """Report progress flags for the download named by the ``download_id`` GET param."""
    download_id = request.GET.get('download_id')
    download = DownloadBase.get(download_id)
    if download is None:
        # Unknown download: answer with a null progress payload.
        return json_response({'download_id': download_id, 'progress': None})
    status = get_task_status(download.task)
    payload = {
        'download_id': download_id,
        'success': status.success(),
        'failed': status.failed(),
        'missing': status.missing(),
        'not_started': status.not_started(),
        'progress': status.progress._asdict(),
    }
    return json_response(payload)
def rebuild_saved_export(export_instance_id, manual=False):
    """Kicks off a celery task to rebuild the export.

    If this is called while another one is already running for the same
    export instance, it will just noop.
    """
    download_data = _get_saved_export_download_data(export_instance_id)
    status = get_task_status(download_data.task)

    # A manual rebuild revokes a stale task whose metadata has gone missing.
    if manual and status.missing() and download_data.task:
        download_data.task.revoke(terminate=True)

    # Don't stack rebuilds: bail if one is already queued or running.
    if status.not_started() or status.started():
        return

    # associate task with the export instance
    download_data.set_task(
        _start_export_task.apply_async(
            args=[export_instance_id],
            queue=EXPORT_DOWNLOAD_QUEUE if manual else SAVED_EXPORTS_QUEUE,
        ))
def test_missing(self):
    """PENDING metadata with a bogus task id maps to the 'missing' state."""
    meta = {
        'date_done': None,
        'result': None,
        'status': 'PENDING',
        'task_args': None,
        'task_id': 'obviously fake!',
        'task_kwargs': None,
        'task_name': None,
        'traceback': None,
    }
    expected = TaskStatus(
        result=None,
        error=None,
        state=STATES.missing,
        progress=TaskProgress(
            current=None,
            total=None,
            percent=None,
            error=False,
            error_message='',
        ),
    )
    self.assertEqual(get_task_status(self.MockTask(task_meta=meta)), expected)