def getTaskResult(self, task_id):
    """Return the result of an async (Celery) task as a spec.AsyncResult.

    :param task_id: identifier of a previously enqueued Celery task.
    :returns: ``spec.AsyncResult`` whose ``status`` mirrors the Celery task
        status and whose ``value`` is the JSON-serialized task result.
    :raises spec.ServerError: when the result's exception class has no
        registered errno, or on any unexpected failure.
    """
    async_result = AsyncResult(task_id)
    # Snapshot readiness up front: only a finished task may be forgotten,
    # and the decision must hold even if the handling below raises.
    can_forget = async_result.ready()
    try:
        try:
            result = async_result.result
            if isinstance(result, Exception):
                # The task failed; translate the exception class into a
                # wire-level errno via the ERRNO_NS registry.
                result_cls_name = result.__class__.__name__
                try:
                    errno = ERRNO_NS[result_cls_name]
                except KeyError:
                    LOGGER.error('Undefined errno: %s', result_cls_name)
                    # NOTE(review): this ServerError is itself caught by the
                    # outer `except Exception` below and re-raised after being
                    # logged — presumably intentional, but verify.
                    raise spec.ServerError()
                # NOTE(review): `result.message` is a Python-2-era exception
                # attribute; on Python 3 this would raise AttributeError —
                # confirm target runtime (str(result) would be the 3.x form).
                value = [errno, result.message]
            else:
                value = result
        except Exception as exc:
            # Catch-all boundary: log the traceback, surface a generic error.
            LOGGER.exception(exc)
            raise spec.ServerError()
        # Map the Celery status string onto the spec's ResultStatus enum.
        status = getattr(spec.ResultStatus, async_result.status)
        return spec.AsyncResult(status=status, value=json.dumps(value))
    finally:
        # Forget the stored result exactly once the task is done, whether or
        # not serialization above succeeded, to avoid backend leaks.
        if can_forget:
            async_result.forget()
            LOGGER.info('Forgot the result of task %s', task_id)
def addArticleAsset(self, aid, filename, content, meta):
    """Enqueue asynchronous creation of an asset for article *aid*.

    :param aid: identifier of the owning article.
    :param filename: name of the asset file.
    :param content: raw asset bytes.
    :param meta: object carrying ``owner`` and ``use_license`` attributes.
    :returns: the id of the queued Celery task.
    :raises spec.ServerError: if enqueueing fails for any reason.
    """
    try:
        queued = tasks.create_articleasset_from_bytes.delay(
            aid, filename, content, meta.owner, meta.use_license)
        return queued.id
    except Exception as exc:
        LOGGER.exception(exc)
        raise spec.ServerError()
def addArticle(self, xml_string, overwrite):
    """Enqueue asynchronous creation of an article from an XML document.

    :param xml_string: the article's XML content.
    :param overwrite: when truthy, replace an existing article.
    :returns: the id of the queued Celery task.
    :raises spec.ServerError: if enqueueing fails for any reason.
    """
    try:
        queued = tasks.create_article_from_string.delay(
            xml_string, overwrite_if_exists=overwrite)
        return queued.id
    except Exception as exc:
        LOGGER.exception(exc)
        raise spec.ServerError()
def getCollection(self, collection_id):
    """Fetch a single collection by primary key.

    :param collection_id: primary key of the collection.
    :returns: the collection converted by ``collection_from_model``.
    :raises spec.DoesNotExist: when no collection has *collection_id*.
    :raises spec.ServerError: on any other failure (logged).
    """
    try:
        data = Collection.objects.get(pk=collection_id)
        return collection_from_model(data)
    except Collection.DoesNotExist:
        raise spec.DoesNotExist()
    except Exception as exc:
        # BUG FIX: was `LOGGER.ServerError(exc)` — logging.Logger has no
        # such method, so the handler itself raised AttributeError instead
        # of logging and raising spec.ServerError like its siblings.
        LOGGER.exception(exc)
        raise spec.ServerError()
def getIssue(self, issue_id):
    """Fetch a single issue by primary key.

    :param issue_id: primary key of the issue.
    :returns: the issue converted by ``issue_from_model``.
    :raises spec.DoesNotExist: when no issue has *issue_id*.
    :raises spec.ServerError: on any other failure (logged).
    """
    try:
        data = Issue.objects.get(pk=issue_id)
        return issue_from_model(data)
    except Issue.DoesNotExist:
        raise spec.DoesNotExist()
    except Exception as exc:
        # BUG FIX: was `LOGGER.ServerError(exc)` — logging.Logger has no
        # such method; use LOGGER.exception as the other handlers do.
        LOGGER.exception(exc)
        raise spec.ServerError()
def scanArticles(self, es_dsl_query):
    """Begin a scrolling scan over the articles index.

    :param es_dsl_query: the Elasticsearch DSL query to scan with.
    :raises spec.BadRequestError: when the connector rejects the query.
    :raises spec.TimeoutError: when the connector times out.
    :raises spec.ServerError: on any unexpected failure (logged).
    """
    try:
        scan_handle = ARTICLE_ES_CLIENT.scan(es_dsl_query)
    except connectors.exceptions.BadRequestError:
        raise spec.BadRequestError()
    except connectors.exceptions.TimeoutError:
        raise spec.TimeoutError()
    except Exception as exc:
        LOGGER.exception(exc)
        raise spec.ServerError()
    return scan_handle
def getCollections(self, from_date=None, until_date=None, limit=None, offset=None):
    """List collections, optionally filtered by creation-date range.

    :param from_date: inclusive lower bound (ISO date string) or None.
    :param until_date: inclusive upper bound (ISO date string) or None.
    :param limit: max number of results; defaults to the module LIMIT.
    :param offset: pagination offset; defaults to 0.
    :returns: list of collections converted by ``collection_from_model``.
    :raises spec.ServerError: on any query failure (logged).
    """
    query = {}
    limit = limit or LIMIT
    offset = offset or 0
    if from_date or until_date:
        # Close an open-ended range with the calendar epoch or today's
        # ISO date, so Django's `__range` lookup always gets both ends.
        from_date = from_date or '0001-01-01'
        until_date = until_date or datetime.datetime.now().isoformat()[:10]
        query = {"created__range": [from_date, until_date]}
    try:
        data = [collection_from_model(i)
                for i in Collection.objects.filter(**query)[offset:offset + limit]]
    except Exception as exc:
        # BUG FIX: was `LOGGER.ServerError(exc)` — logging.Logger has no
        # such method; use LOGGER.exception as the other handlers do.
        LOGGER.exception(exc)
        raise spec.ServerError()
    return data
def getJournal(self, journal_id, collection_id=None):
    """Fetch a journal by primary key, optionally with its timeline.

    :param journal_id: primary key of the journal.
    :param collection_id: when given, also populate the journal's
        ``timeline`` with its history in that collection (ordered by
        ``since``); otherwise ``timeline`` is an empty list.
    :returns: the journal converted by ``journal_from_model``.
    :raises spec.DoesNotExist: when no journal has *journal_id*.
    :raises spec.ServerError: on any other failure (logged).
    """
    try:
        journal_model = Journal.objects.get(pk=journal_id)
        journal_struct = journal_from_model(journal_model)
    except Journal.DoesNotExist:
        raise spec.DoesNotExist()
    except Exception as exc:
        # BUG FIX: was `LOGGER.ServerError(exc)` — logging.Logger has no
        # such method; use LOGGER.exception as the other handlers do.
        LOGGER.exception(exc)
        raise spec.ServerError()
    journal_struct.timeline = []
    if collection_id:
        jtl_model = JournalTimeline.objects.filter(
            journal=journal_model, collection=collection_id).order_by('since')
        journal_struct.timeline = journal_timeline_from_model(jtl_model)
    return journal_struct
def getIssues(self, journal_id, from_date=None, until_date=None, limit=None, offset=None):
    """List issues of a journal, optionally filtered by creation-date range.

    :param journal_id: primary key of the owning journal; when falsy the
        filter is not applied and issues of all journals are listed.
    :param from_date: inclusive lower bound (ISO date string) or None.
    :param until_date: inclusive upper bound (ISO date string) or None.
    :param limit: max number of results; defaults to the module LIMIT.
    :param offset: pagination offset; defaults to 0.
    :returns: list of issues converted by ``issue_from_model``.
    :raises spec.ServerError: on any query failure (logged).
    """
    query = {}
    limit = limit or LIMIT
    offset = offset or 0
    if from_date or until_date:
        # Close an open-ended range with the calendar epoch or today's
        # ISO date, so Django's `__range` lookup always gets both ends.
        from_date = from_date or '0001-01-01'
        until_date = until_date or datetime.datetime.now().isoformat()[:10]
        query = {"created__range": [from_date, until_date]}
    if journal_id:
        query['journal__pk'] = journal_id
    try:
        data = [issue_from_model(i)
                for i in Issue.objects.filter(**query)[offset:offset + limit]]
    except Exception as exc:
        # BUG FIX: was `LOGGER.ServerError(exc)` — logging.Logger has no
        # such method; use LOGGER.exception as the other handlers do.
        LOGGER.exception(exc)
        raise spec.ServerError()
    return data
def getScanArticlesBatch(self, batch_id):
    """Fetch the next page of a previously started article scan.

    :param batch_id: scroll/batch token returned by a prior scan call.
    :returns: ``spec.ScanArticlesResults`` with ``articles`` set when the
        page is non-empty and ``next_batch_id`` set when more pages remain.
    :raises spec.BadRequestError: when the connector rejects the token.
    :raises spec.TimeoutError: when the connector times out.
    :raises spec.ServerError: on any unexpected failure (logged).
    """
    try:
        next_id, batch = ARTICLE_ES_CLIENT.scroll(batch_id)
    except connectors.exceptions.BadRequestError:
        raise spec.BadRequestError()
    except connectors.exceptions.TimeoutError:
        raise spec.TimeoutError()
    except Exception as exc:
        LOGGER.exception(exc)
        raise spec.ServerError()
    results = spec.ScanArticlesResults()
    page = [article_from_es(doc) for doc in batch]
    if page:
        results.articles = page
    if next_id:
        results.next_batch_id = next_id
    return results