def _export_default_or_custom_data(request, domain, export_id=None, bulk_export=False, safe_only=False):
    """Dispatch an export request to the right export helper.

    Reads export parameters from POST (if the request is a POST) or GET,
    then routes to a bulk-export helper or a saved custom export.

    :param request: Django HttpRequest carrying the export parameters.
    :param domain: name of the domain the export is scoped to.
    :param export_id: id of a saved custom export, if any.
    :param bulk_export: when True, run a multi-export via a bulk helper.
    :param safe_only: when True, refuse exports not marked de-identified/safe.
    :returns: an HttpResponse (export payload, redirect, 400, 403, or 404).
    """
    req = request.POST if request.method == "POST" else request.GET
    # NOTE: locals renamed from the originals `async`/`next`/`format`/`filter`
    # — `async` is a Python 3 keyword and the rest shadow builtins.
    is_async = req.get("async") == "true"
    next_url = req.get("next", "")
    export_format = req.get("format", "")
    export_type = req.get("type", "form")
    previous_export_id = req.get("previous_export", None)
    filename = req.get("filename", None)
    max_column_size = int(req.get("max_column_size", 2000))
    limit = int(req.get("limit", 0))
    export_filter = util.create_export_filter(request, domain, export_type=export_type)
    if bulk_export:
        try:
            is_custom = json.loads(req.get("is_custom", "false"))
            # "or 'null'" guards against an empty-string parameter value,
            # which json.loads would reject.
            export_tags = json.loads(req.get("export_tags", "null") or "null")
        except ValueError:
            # malformed JSON in the query parameters
            return HttpResponseBadRequest()
        export_helper = (CustomBulkExportHelper if is_custom
                         else ApplicationBulkExportHelper)(
            domain=domain,
            safe_only=safe_only,
        )
        return export_helper.prepare_export(export_tags, export_filter)
    elif export_id:
        # this is a custom export
        try:
            export_object = CustomExportHelper.make(request, export_type, domain, export_id).custom_export
            if safe_only and not export_object.is_safe:
                return HttpResponseForbidden()
        except ResourceNotFound:
            raise Http404()
        except BadExportConfiguration as e:  # was py2-only `except X, e:` syntax
            return HttpResponseBadRequest(str(e))
def _export_default_or_custom_data(request, domain, export_id=None, bulk_export=False, safe_only=False):
    """Dispatch an export request to the right export helper.

    GET-only variant: all export parameters are read from request.GET,
    then the request is routed to a bulk-export helper or a saved custom
    export.

    :param request: Django HttpRequest carrying the export parameters.
    :param domain: name of the domain the export is scoped to.
    :param export_id: id of a saved custom export, if any.
    :param bulk_export: when True, run a multi-export via a bulk helper.
    :param safe_only: when True, refuse exports not marked de-identified/safe.
    :returns: an HttpResponse (export payload, 400, 403, or 404).
    """
    # NOTE: locals renamed from the originals `async`/`next`/`format`/`filter`
    # — `async` is a Python 3 keyword and the rest shadow builtins.
    is_async = request.GET.get('async') == 'true'
    next_url = request.GET.get("next", "")
    export_format = request.GET.get("format", "")
    export_type = request.GET.get("type", "form")
    previous_export_id = request.GET.get("previous_export", None)
    filename = request.GET.get("filename", None)
    max_column_size = int(request.GET.get("max_column_size", 2000))
    limit = int(request.GET.get("limit", 0))
    export_filter = util.create_export_filter(request, domain, export_type=export_type)
    if bulk_export:
        try:
            is_custom = json.loads(request.GET.get("is_custom", "false"))
            # "or 'null'" guards against an empty-string parameter value,
            # which json.loads would reject.
            export_tags = json.loads(request.GET.get("export_tags", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        export_helper = (CustomBulkExportHelper if is_custom
                         else ApplicationBulkExportHelper)(
            domain=domain,
            safe_only=safe_only,
        )
        return export_helper.prepare_export(export_tags, export_filter)
    elif export_id:
        # this is a custom export
        try:
            export_object = CustomExportHelper.make(request, export_type, domain, export_id).custom_export
            if safe_only and not export_object.is_safe:
                return HttpResponseForbidden()
        except ResourceNotFound:
            raise Http404()
        except BadExportConfiguration as e:  # py3-compatible form of `except X, e:`
            return HttpResponseBadRequest(str(e))
def test_create_export_case_filter(self):
    """create_export_filter('case') should emit a single case_users_filter
    entry carrying the requesting user's id and no groups."""
    result = create_export_filter(self.request, DOMAIN, export_type='case')
    expected = (
        '[{"function": "corehq.apps.reports.util.case_users_filter",'
        ' "kwargs": {"users": ["' + self.user.user_id + '"], "groups": []}}]'
    )
    self.assertEqual(result.dumps(), expected)
def test_create_export_form_filter(self):
    """create_export_filter('form') should emit the app/datespan/users
    filter triple, with only the requesting user's id populated."""
    result = create_export_filter(self.request, DOMAIN, export_type='form')
    expected = (
        '[{"function": "corehq.apps.reports.util.app_export_filter", "kwargs": {"app_id": null}},'
        ' {"function": "corehq.apps.reports.util.datespan_export_filter", "kwargs": {"datespan": null}},'
        ' {"function": "corehq.apps.reports.util.users_filter", "kwargs": {"users": ["'
        + self.user.user_id + '"]}}]'
    )
    self.assertEqual(result.dumps(), expected)
def export_data_async(request, domain):
    """
    Download all data for a couchdbkit model

    :param request: Django HttpRequest; reads `export_tag` (JSON list whose
        first element must be the domain) and `type` from the query string.
    :param domain: name of the domain the export is scoped to.
    :returns: the couchexport async-export response, or 400 on bad input.
    """
    try:
        # "or 'null'" guards against an empty-string parameter value,
        # which json.loads would reject.
        export_tag = json.loads(request.GET.get("export_tag", "null") or "null")
        export_type = request.GET.get("type", "form")
    except ValueError:
        return HttpResponseBadRequest()
    # Was `assert(export_tag[0] == domain)`: an assert is stripped under -O
    # and raises TypeError when export_tag is None. Validate explicitly —
    # a mismatched or missing tag is bad client input, not a server bug.
    if not export_tag or export_tag[0] != domain:
        return HttpResponseBadRequest()
    export_filter = util.create_export_filter(request, domain, export_type=export_type)
    return couchexport_views.export_data_async(request, filter=export_filter, type=export_type)
def _export_default_or_custom_data(request, domain, export_id=None, bulk_export=False, safe_only=False):
    """Dispatch an export request to the right export helper.

    Variant supporting a synchronous download fallback: routes to a bulk
    helper, a saved custom export, or a FakeSavedExportSchema built from an
    `export_tag` query parameter (async download, or sync via download_data).

    :param request: Django HttpRequest carrying the export parameters.
    :param domain: name of the domain the export is scoped to.
    :param export_id: id of a saved custom export, if any.
    :param bulk_export: when True, run a multi-export via a bulk helper.
    :param safe_only: when True, refuse exports not marked de-identified/safe.
    :returns: an HttpResponse (payload, redirect, 400, 403, or 404).
    """
    # NOTE: locals renamed from the originals `async`/`next`/`format`/`filter`
    # — `async` is a Python 3 keyword and the rest shadow builtins.
    is_async = request.GET.get('async') == 'true'
    next_url = request.GET.get("next", "")
    export_format = request.GET.get("format", "")
    export_type = request.GET.get("type", "form")
    previous_export_id = request.GET.get("previous_export", None)
    filename = request.GET.get("filename", None)
    max_column_size = int(request.GET.get("max_column_size", 2000))
    export_filter = util.create_export_filter(request, domain, export_type=export_type)
    if bulk_export:
        try:
            is_custom = json.loads(request.GET.get("is_custom", "false"))
            export_tags = json.loads(request.GET.get("export_tags", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        export_helper = (CustomBulkExportHelper if is_custom
                         else ApplicationBulkExportHelper)(
            domain=domain,
            safe_only=safe_only,
        )
        return export_helper.prepare_export(export_tags, export_filter)
    elif export_id:
        # this is a custom export
        try:
            export_object = CustomExportHelper(request, domain, export_id).custom_export
            if safe_only and not export_object.is_safe:
                return HttpResponseForbidden()
        except ResourceNotFound:
            raise Http404()
    elif safe_only:
        # only saved, vetted exports may be used in safe-only mode
        return HttpResponseForbidden()
    else:
        if not is_async:
            # this function doesn't support synchronous export without a custom export object
            # if we ever want that (i.e. for HTML Preview) then we just need to give
            # FakeSavedExportSchema a download_data function (called below)
            return HttpResponseBadRequest()
        try:
            export_tag = json.loads(request.GET.get("export_tag", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        # NOTE(review): assert is stripped under -O and raises TypeError for a
        # missing tag — an explicit 400 check would be safer; kept as-is to
        # match the sibling revisions. TODO confirm before changing.
        assert(export_tag[0] == domain)
        export_object = FakeSavedExportSchema(index=export_tag)
    if is_async:
        return export_object.export_data_async(
            filter=export_filter,
            filename=filename,
            previous_export_id=previous_export_id,
            format=export_format,
            max_column_size=max_column_size,
        )
    else:
        if not next_url:
            next_url = export.ExcelExportReport.get_url(domain)
        resp = export_object.download_data(export_format, filter=export_filter)
        if resp:
            return resp
        else:
            messages.error(request, "Sorry, there was no data found for the tag '%s'." % export_object.name)
            return HttpResponseRedirect(next_url)
def _export_default_or_custom_data(request, domain, export_id=None, bulk_export=False, safe_only=False):
    """Dispatch an export request to the right export helper.

    Variant that date-stamps the default download filename: routes to a
    bulk helper, a saved custom export, or a FakeSavedExportSchema built
    from an `export_tag` query parameter.

    :param request: Django HttpRequest carrying the export parameters.
    :param domain: name of the domain the export is scoped to.
    :param export_id: id of a saved custom export, if any.
    :param bulk_export: when True, run a multi-export via a bulk helper.
    :param safe_only: when True, refuse exports not marked de-identified/safe.
    :returns: an HttpResponse (payload, redirect, 400, 403, or 404).
    """
    # NOTE: locals renamed from the originals `async`/`next`/`format`/`filter`
    # — `async` is a Python 3 keyword and the rest shadow builtins.
    is_async = request.GET.get('async') == 'true'
    next_url = request.GET.get("next", "")
    export_format = request.GET.get("format", "")
    export_type = request.GET.get("type", "form")
    previous_export_id = request.GET.get("previous_export", None)
    filename = request.GET.get("filename", None)
    max_column_size = int(request.GET.get("max_column_size", 2000))
    export_filter = util.create_export_filter(request, domain, export_type=export_type)
    if bulk_export:
        try:
            is_custom = json.loads(request.GET.get("is_custom", "false"))
            export_tags = json.loads(
                request.GET.get("export_tags", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        export_helper = (CustomBulkExportHelper if is_custom
                         else ApplicationBulkExportHelper)(domain=domain, safe_only=safe_only)
        return export_helper.prepare_export(export_tags, export_filter)
    elif export_id:
        # this is a custom export
        try:
            # NOTE(review): a sibling revision passes export_type to
            # CustomExportHelper.make — verify this call's signature.
            export_object = CustomExportHelper.make(request, domain, export_id).custom_export
            if safe_only and not export_object.is_safe:
                return HttpResponseForbidden()
        except ResourceNotFound:
            raise Http404()
    elif safe_only:
        # only saved, vetted exports may be used in safe-only mode
        return HttpResponseForbidden()
    else:
        if not is_async:
            # this function doesn't support synchronous export without a custom export object
            # if we ever want that (i.e. for HTML Preview) then we just need to give
            # FakeSavedExportSchema a download_data function (called below)
            return HttpResponseBadRequest()
        try:
            export_tag = json.loads(
                request.GET.get("export_tag", "null") or "null")
        except ValueError:
            return HttpResponseBadRequest()
        # NOTE(review): assert is stripped under -O and raises TypeError for a
        # missing tag — an explicit 400 check would be safer; kept as-is to
        # match the sibling revisions. TODO confirm before changing.
        assert (export_tag[0] == domain)
        export_object = FakeSavedExportSchema(index=export_tag)
    if not filename:
        # default to the export's name, date-stamped for uniqueness
        filename = export_object.name
    filename += ' ' + date.today().isoformat()
    if is_async:
        return export_object.export_data_async(
            filter=export_filter,
            filename=filename,
            previous_export_id=previous_export_id,
            format=export_format,
            max_column_size=max_column_size,
        )
    else:
        if not next_url:
            next_url = export.ExcelExportReport.get_url(domain=domain)
        resp = export_object.download_data(export_format, filter=export_filter)
        if resp:
            return resp
        else:
            messages.error(
                request,
                "Sorry, there was no data found for the tag '%s'." % export_object.name)
            return HttpResponseRedirect(next_url)