def export(): """ --- post: summary: Download the results of a search description: >- Downloads all the results of a search as a zip archive; upto a max of 10,000 results. The returned file will contain an Excel document with structured data as well as the binary files from all matching documents. Supports the same query parameters as the search API. responses: '202': description: Accepted tags: - Entity """ require(request.authz.logged_in) parser = SearchQueryParser(request.args, request.authz) tag_request(query=parser.text, prefix=parser.prefix) query = EntitiesQuery(parser) label = gettext("Search: %s") % query.to_text() export = create_export( operation=OP_EXPORT_SEARCH, role_id=request.authz.id, label=label, mime_type=ZIP, meta={"query": query.get_full_query()}, ) job_id = get_session_id() queue_task(None, OP_EXPORT_SEARCH, job_id=job_id, export_id=export.id) return ("", 202)
def export(collection_id): """ --- post: summary: Download cross-reference results description: Download results of cross-referencing as an Excel file parameters: - in: path name: collection_id required: true schema: type: integer responses: '202': description: Accepted tags: - Xref - Collection """ collection = get_db_collection(collection_id, request.authz.READ) label = "%s - Crossreference results" % collection.label export = create_export( operation=OP_EXPORT_XREF_RESULTS, role_id=request.authz.id, label=label, collection=collection, mime_type=XLSX, ) job_id = get_session_id() payload = { "collection_id": collection_id, "export_id": export.id, } queue_task(None, OP_EXPORT_XREF_RESULTS, job_id=job_id, payload=payload) return ("", 202)
def setUp(self):
    super(ExportApiTestCase, self).setUp()
    self.load_fixtures()
    self.email = "*****@*****.**"
    self.role_email = self.create_user("with_email", email=self.email)
    _, self.headers = self.login(foreign_id="with_email")
    csv_path = self.get_fixture_path("experts.csv")
    temp_path = self._create_temporary_copy(csv_path, "experts.csv")
    self.export1 = create_export("TEST", self.role_email.id, "test1")
    complete_export(self.export1.id, temp_path, "exports.csv")
    temp_path = self._create_temporary_copy(csv_path, "experts.csv")
    self.export2 = create_export("TEST", self.role_email.id, "test2")
    self.export2.expires_at = datetime.utcnow() + timedelta(days=-1)
    complete_export(self.export2.id, temp_path, "experts.csv")
def setUp(self):
    super(ExportsTestCase, self).setUp()
    self.load_fixtures()
    self.email = "*****@*****.**"
    self.role_email = self.create_user("with_email", email=self.email)
    csv_path = self.get_fixture_path("experts.csv")
    temp_path = self._create_temporary_copy(csv_path, "experts.csv")
    self.export1 = create_export(
        "TEST",
        self.role_email.id,
        "test1",
        expires_after=Export.DEFAULT_EXPIRATION,
    )
    complete_export(self.export1.id, temp_path)
    temp_path = self._create_temporary_copy(csv_path, "experts.csv")
    self.export2 = create_export(
        "TEST",
        self.role_email.id,
        "test2",
        expires_after=timedelta(days=-1),
    )
    complete_export(self.export2.id, temp_path)
def setUp(self):
    super(ExportsTestCase, self).setUp()
    self.load_fixtures()
    self.email = "*****@*****.**"
    self.role_email = self.create_user("with_email", email=self.email)
    csv_path = self.get_fixture_path("experts.csv")
    temp_path = self._create_temporary_copy(csv_path, "experts.csv")
    self.export1 = create_export("TEST", self.role_email.id, "test1")
    complete_export(self.export1.id, temp_path, "experts.csv")
    temp_path = self._create_temporary_copy(csv_path, "experts.csv")
    self.export2 = create_export("TEST", self.role_email.id, "test2")
    self.export2.expires_at = datetime.utcnow() + timedelta(days=-1)
    complete_export(self.export2.id, temp_path, "experts.csv")
    source_path = self.get_fixture_path("../util.py")
    temp_path = self._create_temporary_copy(source_path, "init.py")
    self.export3 = create_export("TEST", self.role_email.id, "test3")
    self.export3.expires_at = datetime.utcnow() + timedelta(days=-1)
    complete_export(self.export3.id, temp_path, "init.py")
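# The setUp fixtures above all rely on a _create_temporary_copy helper that is
# not shown in this section. A minimal sketch of what such a helper could look
# like, assuming it copies a fixture into a throwaway directory so
# complete_export() can consume the file without touching the original fixture:
import os
import shutil
import tempfile

def _create_temporary_copy(self, source_path, file_name):
    """Copy source_path into a fresh temp directory and return the new path."""
    temp_dir = tempfile.mkdtemp()
    temp_path = os.path.join(temp_dir, file_name)
    shutil.copy2(source_path, temp_path)
    return temp_path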
def export(): """ --- post: summary: Download the results of a search description: >- Downloads all the results of a search as a zip archive; upto a max of 10,000 results. The returned file will contain an Excel document with structured data as well as the binary files from all matching documents. Supports the same query parameters as the search API. responses: '202': description: Accepted tags: - Entity """ require(request.authz.logged_in) parser = SearchQueryParser(request.args, request.authz) parser.limit = MAX_PAGE tag_request(query=parser.text, prefix=parser.prefix) result = EntitiesQuery.handle(request, parser=parser) label = "Search results for query: %s" % parser.text export = create_export( operation=OP_EXPORT_SEARCH_RESULTS, role_id=request.authz.id, label=label, file_path=None, expires_after=Export.DEFAULT_EXPIRATION, collection=None, mime_type=ZIP, ) job_id = get_session_id() payload = { "export_id": export.id, "result": result.to_dict(), } queue_task(None, OP_EXPORT_SEARCH_RESULTS, job_id=job_id, payload=payload) return ("", 202)