def export_data_async(request, filter=None, **kwargs):
    """Kick off an asynchronous export for the tag carried by *request*.

    Pulls ``format``, ``filename`` and ``previous_export`` off the query
    string, resolves the export tag (or errors out via
    ``_export_tag_or_bust``), and delegates to
    ``FakeSavedExportSchema.export_data_async``.
    """
    params = request.GET
    requested_format = params.get("format", Format.XLS_2007)
    schema = FakeSavedExportSchema(index=_export_tag_or_bust(request))
    return schema.export_data_async(
        filter=filter,
        filename=params.get("filename", None),
        previous_export_id=params.get("previous_export", None),
        format=requested_format,
    )
def export_data_async(request, filter=None, **kwargs):
    """Async export entry point.

    Reads export options from the query string, resolves the export tag
    via ``_export_tag_or_bust`` and hands off to the fake saved-export
    schema's async exporter.
    """
    fmt = request.GET.get("format", Format.XLS_2007)
    requested_filename = request.GET.get("filename", None)
    prev_export_id = request.GET.get("previous_export", None)
    tag = _export_tag_or_bust(request)
    export_schema = FakeSavedExportSchema(index=tag)
    return export_schema.export_data_async(
        filter=filter,
        filename=requested_filename,
        previous_export_id=prev_export_id,
        format=fmt,
    )
def export_data_shared(export_tag, format=None, filename=None,
                       previous_export_id=None, filter=None,
                       use_cache=True, max_column_size=2000,
                       separator='|'):
    """
    Shared method for export.

    If there is data, return an HTTPResponse with the appropriate data.
    If there is no data, returns None.
    """
    if previous_export_id and not SavedExportSchema.get_db().doc_exist(
            previous_export_id):
        # BUGFIX: translate first, then interpolate. The original did
        # _('...'.format(...)), which formats the msgid before the catalog
        # lookup, so the message could never be found/translated.
        return HttpResponseNotFound(
            _('No previous export with id "{id}" found').format(
                id=previous_export_id))
    if not filename:
        filename = export_tag
    files = FakeSavedExportSchema(index=export_tag).get_export_files(
        format=format,
        previous_export_id=previous_export_id,
        filter=filter,
        use_cache=use_cache,
        max_column_size=max_column_size,
        separator=separator,
    )
    if files and files.checkpoint:
        return export_response(files.file, format, filename, files.checkpoint)
    else:
        return None
def generate_export_objects(self, export_tags):
    """Build one FakeSavedExportSchema per schema index in *export_tags*
    and store them on ``self.export_objects``.

    Returns an empty list (and builds nothing) when ``self.safe_only``
    is set; otherwise returns None after populating the attribute.
    """
    if self.safe_only:
        return []
    self.export_objects = []
    add_export = self.export_objects.append
    for idx in export_tags:
        add_export(FakeSavedExportSchema(index=idx))
def generate_export_objects(self, export_tags):
    """Populate ``self.export_objects`` with a FakeSavedExportSchema
    (each given a fresh SerializableFunction filter) per schema index.

    When ``self.safe_only`` is set, builds nothing and returns [];
    otherwise returns None after assigning the attribute.
    """
    if self.safe_only:
        return []
    self.export_objects = []
    for schema_idx in export_tags:
        schema = FakeSavedExportSchema(
            index=schema_idx,
            filter_function=SerializableFunction(),
        )
        self.export_objects.append(schema)
def get_export_files(export_tag, format=None, previous_export_id=None,
                     filter=None, use_cache=True, max_column_size=2000,
                     separator='|'):
    """This function only exists for backwards compatibility"""
    schema = FakeSavedExportSchema(index=export_tag)
    return schema.get_export_files(
        format=format,
        previous_export_id=previous_export_id,
        filter=filter,
        use_cache=use_cache,
        max_column_size=max_column_size,
        separator=separator,
    )
def _export_default_or_custom_data(request, domain, export_id=None,
                                   bulk_export=False, safe_only=False):
    """Shared view logic for bulk, custom and default ("fake" schema)
    exports.

    Dispatch order:
      * ``bulk_export``  -> a bulk export helper prepares the download
      * ``export_id``    -> a saved custom export is loaded (404 if gone,
                            403 if safe_only and the export is unsafe)
      * otherwise        -> an on-the-fly FakeSavedExportSchema export,
                            which is only supported asynchronously

    Returns an HTTP response: the export payload, a redirect, or an
    error response (400/403/404).
    """
    # BUGFIX: 'async' is a reserved keyword in Python 3.5+ (hard
    # SyntaxError from 3.7), so the flag gets a different local name.
    is_async = request.GET.get('async') == 'true'
    next_url = request.GET.get("next", "")  # renamed: 'next' shadows builtin
    format = request.GET.get("format", "")
    export_type = request.GET.get("type", "form")
    previous_export_id = request.GET.get("previous_export", None)
    filename = request.GET.get("filename", None)
    max_column_size = int(request.GET.get("max_column_size", 2000))

    filter = util.create_export_filter(request, domain,
                                       export_type=export_type)
    if bulk_export:
        try:
            is_custom = json.loads(request.GET.get("is_custom", "false"))
            export_tags = json.loads(
                request.GET.get("export_tags", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        export_helper = (CustomBulkExportHelper if is_custom
                         else ApplicationBulkExportHelper)(
            domain=domain, safe_only=safe_only)
        return export_helper.prepare_export(export_tags, filter)
    elif export_id:
        # this is a custom export
        try:
            export_object = CustomExportHelper.make(
                request, domain, export_id).custom_export
            if safe_only and not export_object.is_safe:
                return HttpResponseForbidden()
        except ResourceNotFound:
            raise Http404()
    elif safe_only:
        return HttpResponseForbidden()
    else:
        if not is_async:
            # this function doesn't support synchronous export without a
            # custom export object - if we ever want that (i.e. for HTML
            # Preview) then we just need to give FakeSavedExportSchema a
            # download_data function (called below)
            return HttpResponseBadRequest()
        try:
            export_tag = json.loads(
                request.GET.get("export_tag", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        # BUGFIX: was `assert export_tag[0] == domain` — asserts are
        # stripped under -O and a bad/null tag surfaced as a 500 instead
        # of a 400. Validate explicitly.
        if not export_tag or export_tag[0] != domain:
            return HttpResponseBadRequest()
        export_object = FakeSavedExportSchema(index=export_tag)

    if not filename:
        filename = export_object.name
    filename += ' ' + date.today().isoformat()

    if is_async:
        return export_object.export_data_async(
            filter=filter,
            filename=filename,
            previous_export_id=previous_export_id,
            format=format,
            max_column_size=max_column_size,
        )
    else:
        if not next_url:
            next_url = export.ExcelExportReport.get_url(domain=domain)
        resp = export_object.download_data(format, filter=filter)
        if resp:
            return resp
        else:
            messages.error(
                request,
                "Sorry, there was no data found for the tag '%s'."
                % export_object.name)
            return HttpResponseRedirect(next_url)
def _export_default_or_custom_data(request, domain, export_id=None,
                                   bulk_export=False, safe_only=False):
    """Shared export view logic.

    Handles three cases, in order: bulk exports (delegated to a bulk
    export helper), saved custom exports (looked up by ``export_id``),
    and on-the-fly default exports via FakeSavedExportSchema — the last
    of which is only available asynchronously.

    Returns an HTTP response: the export payload, a redirect, or an
    error response (400/403/404).
    """
    # BUGFIX: the flag was stored in a local named 'async', which is a
    # reserved keyword in Python 3.5+ (SyntaxError from 3.7).
    is_async = request.GET.get('async') == 'true'
    redirect_url = request.GET.get("next", "")  # 'next' shadows builtin
    format = request.GET.get("format", "")
    export_type = request.GET.get("type", "form")
    previous_export_id = request.GET.get("previous_export", None)
    filename = request.GET.get("filename", None)
    max_column_size = int(request.GET.get("max_column_size", 2000))

    filter = util.create_export_filter(request, domain,
                                       export_type=export_type)
    if bulk_export:
        try:
            is_custom = json.loads(request.GET.get("is_custom", "false"))
            export_tags = json.loads(
                request.GET.get("export_tags", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        helper_cls = (CustomBulkExportHelper if is_custom
                      else ApplicationBulkExportHelper)
        export_helper = helper_cls(domain=domain, safe_only=safe_only)
        return export_helper.prepare_export(export_tags, filter)
    elif export_id:
        # this is a custom export
        try:
            export_object = CustomExportHelper.make(
                request, domain, export_id).custom_export
            if safe_only and not export_object.is_safe:
                return HttpResponseForbidden()
        except ResourceNotFound:
            raise Http404()
    elif safe_only:
        return HttpResponseForbidden()
    else:
        if not is_async:
            # this function doesn't support synchronous export without a
            # custom export object - if we ever want that (i.e. for HTML
            # Preview) then we just need to give FakeSavedExportSchema a
            # download_data function (called below)
            return HttpResponseBadRequest()
        try:
            export_tag = json.loads(
                request.GET.get("export_tag", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        # BUGFIX: replaced `assert(export_tag[0] == domain)` — asserts
        # disappear under -O, and a null tag raised TypeError (a 500).
        # A tag that doesn't match the domain is a client error: 400.
        if not export_tag or export_tag[0] != domain:
            return HttpResponseBadRequest()
        export_object = FakeSavedExportSchema(index=export_tag)

    if not filename:
        filename = export_object.name
    filename += ' ' + date.today().isoformat()

    if is_async:
        return export_object.export_data_async(
            filter=filter,
            filename=filename,
            previous_export_id=previous_export_id,
            format=format,
            max_column_size=max_column_size,
        )
    else:
        if not redirect_url:
            redirect_url = export.ExcelExportReport.get_url(domain=domain)
        resp = export_object.download_data(format, filter=filter)
        if resp:
            return resp
        else:
            messages.error(
                request,
                "Sorry, there was no data found for the tag '%s'."
                % export_object.name)
            return HttpResponseRedirect(redirect_url)