def _send_chart_response(
    self,
    result: Dict[Any, Any],
    form_data: Optional[Dict[str, Any]] = None,
    datasource: Optional[BaseDatasource] = None,
) -> Response:
    """Build the HTTP response for chart data in the requested format.

    :param result: payload holding the ``query_context`` plus the list of
        per-query results under ``result["queries"]``
    :param form_data: chart form data, used only for post-processed results
    :param datasource: chart datasource, used only for post-processed results
    :return: a CSV response (single query), a zip of CSVs (multiple queries),
        a JSON response, or a 400 for unsupported formats
    """
    result_type = result["query_context"].result_type
    result_format = result["query_context"].result_format

    # Post-process the data so it matches the data presented in the chart.
    # This is needed for sending reports based on text charts that do the
    # post-processing of data, eg, the pivot table.
    if result_type == ChartDataResultType.POST_PROCESSED:
        result = apply_post_process(result, form_data, datasource)

    if result_format == ChartDataResultFormat.CSV:
        # Verify user has permission to export CSV file
        if not security_manager.can_access("can_csv", "Superset"):
            return self.response_403()

        if not result["queries"]:
            return self.response_400(_("Empty query result"))

        if len(result["queries"]) == 1:
            # return single query results csv format
            data = result["queries"][0]["data"]
            return CsvResponse(data, headers=generate_download_headers("csv"))

        # return multi-query csv results bundled as a zip file
        encoding = current_app.config["CSV_EXPORT"].get("encoding", "utf-8")
        # Renamed the comprehension variable (previously also ``result``) so
        # it no longer shadows the enclosing ``result`` payload.
        files = {
            f"query_{idx + 1}.csv": query_result["data"].encode(encoding)
            for idx, query_result in enumerate(result["queries"])
        }
        return Response(
            create_zip(files),
            headers=generate_download_headers("zip"),
            mimetype="application/zip",
        )

    if result_format == ChartDataResultFormat.JSON:
        response_data = simplejson.dumps(
            {"result": result["queries"]},
            default=json_int_dttm_ser,
            ignore_nan=True,
        )
        resp = make_response(response_data, 200)
        resp.headers["Content-Type"] = "application/json; charset=utf-8"
        return resp

    return self.response_400(message=f"Unsupported result_format: {result_format}")
def test_export_dataset(self):
    """
    Dataset API: Test export dataset
    """
    birth_names_dataset = self.get_birth_names_dataset()
    argument = [birth_names_dataset.id]
    uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"

    self.login(username="******")
    rv = self.get_assert_metric(uri, "export")
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(
        rv.headers["Content-Disposition"],
        generate_download_headers("yaml")["Content-Disposition"],
    )

    cli_export = export_to_dict(
        session=db.session,
        recursive=True,
        back_references=False,
        include_defaults=False,
    )
    cli_export_tables = cli_export["databases"][0]["tables"]
    # The matched export entry is a dict, so the fallback must be a dict
    # (was ``[]``) to keep the comparison below type-consistent; this also
    # matches the other variant of this test.
    expected_response = {}
    for export_table in cli_export_tables:
        if export_table["table_name"] == "birth_names":
            expected_response = export_table
            break

    ui_export = yaml.safe_load(rv.data.decode("utf-8"))
    self.assertEqual(ui_export[0], expected_response)
def test_export_dataset(self):
    """
    Dataset API: Test export dataset
    """
    birth_names_dataset = self.get_birth_names_dataset()
    # TODO: fix test for presto
    # debug with dump: https://github.com/apache/incubator-superset/runs/1092546855
    if birth_names_dataset.database.backend == "presto":
        return

    dataset_ids = [birth_names_dataset.id]
    uri = f"api/v1/dataset/export/?q={prison.dumps(dataset_ids)}"

    self.login(username="******")
    response = self.get_assert_metric(uri, "export")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        response.headers["Content-Disposition"],
        generate_download_headers("yaml")["Content-Disposition"],
    )

    # Build the expected payload via the CLI export path and pick out the
    # entry for the birth_names table (empty dict when absent).
    cli_export = export_to_dict(
        session=db.session,
        recursive=True,
        back_references=False,
        include_defaults=False,
    )
    expected_response = next(
        (
            table
            for table in cli_export["databases"][0]["tables"]
            if table["table_name"] == "birth_names"
        ),
        {},
    )

    ui_export = yaml.safe_load(response.data.decode("utf-8"))
    self.assertEqual(ui_export[0], expected_response)
def download_dashboards(self) -> FlaskResponse:
    """Serve the dashboards export: the export itself when ``action=go`` is
    passed, otherwise the export landing page."""
    # Guard clause: without an explicit export action, render the form.
    if request.args.get("action") != "go":
        return self.render_template(
            "superset/export_dashboards.html", dashboards_url="/dashboard/list"
        )

    dashboard_ids = request.args.getlist("id")
    return Response(
        DashboardModel.export_dashboards(dashboard_ids),
        headers=generate_download_headers("json"),
        mimetype="application/text",
    )
def test_export(self):
    """
    Dashboard API: Test dashboard export
    """
    self.login(username="******")
    dashboard_ids = [1, 2]
    uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboard_ids)}"

    rv = self.get_assert_metric(uri, "export")

    expected_disposition = generate_download_headers("json")["Content-Disposition"]
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv.headers["Content-Disposition"], expected_disposition)
def test_export(self):
    """
    Dashboard API: Test dashboard export
    """
    self.login(username="******")
    dashboard_ids = [1, 2]
    uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboard_ids)}"

    # freeze time to ensure filename is deterministic
    with freeze_time("2020-01-01T00:00:00Z"):
        response = self.get_assert_metric(uri, "export")
        # Expected header is computed under the same frozen clock, since the
        # download filename embeds the current timestamp.
        expected = generate_download_headers("json")["Content-Disposition"]
        assert response.status_code == 200
        assert response.headers["Content-Disposition"] == expected
def export(self, **kwargs: Any) -> Response:
    """Export datasets
    ---
    get:
      description: >-
        Exports multiple datasets and downloads them as YAML files
      parameters:
      - in: query
        name: q
        content:
          application/json:
            schema:
              type: array
              items:
                type: integer
      responses:
        200:
          description: Dataset export
          content:
            text/plain:
              schema:
                type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        500:
          $ref: '#/components/responses/500'
    """
    requested_ids = kwargs["rison"]
    query = self.datamodel.session.query(SqlaTable).filter(
        SqlaTable.id.in_(requested_ids)
    )
    # Apply base (security) filters so only accessible datasets remain.
    query = self._base_filters.apply_all(query)
    items = query.all()
    ids = [item.id for item in items]
    # 404 unless every requested id survived filtering — no partial exports.
    if len(ids) != len(requested_ids):
        return self.response_404()
    data = [t.export_to_dict() for t in items]
    return Response(
        yaml.safe_dump(data),
        headers=generate_download_headers("yaml"),
        mimetype="application/text",
    )
def export(self, **kwargs: Any) -> Response:
    """Export dashboards
    ---
    get:
      description: >-
        Exports multiple Dashboards and downloads them as a JSON file
      parameters:
      - in: query
        name: q
        content:
          application/json:
            schema:
              type: array
              items:
                type: integer
      responses:
        200:
          description: Dashboard export
          content:
            text/plain:
              schema:
                type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    query = self.datamodel.session.query(Dashboard).filter(
        Dashboard.id.in_(kwargs["rison"])
    )
    # Apply base (security) filters so only accessible dashboards remain.
    query = self._base_filters.apply_all(query)
    ids = [item.id for item in query.all()]
    # NOTE(review): this 404s only when *no* requested dashboard is
    # accessible; a partial match is still exported — confirm intended.
    if not ids:
        return self.response_404()
    export = Dashboard.export_dashboards(ids)
    resp = make_response(export, 200)
    resp.headers["Content-Disposition"] = generate_download_headers("json")[
        "Content-Disposition"
    ]
    return resp
def _send_chart_response(
    self,
    result: Dict[Any, Any],
    form_data: Optional[Dict[str, Any]] = None,
    datasource: Optional[BaseDatasource] = None,
) -> Response:
    """Build the HTTP response for chart data in the requested format.

    :param result: payload holding the ``query_context`` plus the list of
        per-query results under ``result["queries"]``
    :param form_data: chart form data, used only for post-processed results
    :param datasource: chart datasource, used only for post-processed results
    :return: a CSV response, a JSON response, or a 400 for empty results or
        unsupported formats
    """
    result_type = result["query_context"].result_type
    result_format = result["query_context"].result_format

    # Post-process the data so it matches the data presented in the chart.
    # This is needed for sending reports based on text charts that do the
    # post-processing of data, eg, the pivot table.
    if result_type == ChartDataResultType.POST_PROCESSED:
        result = apply_post_process(result, form_data, datasource)

    if result_format == ChartDataResultFormat.CSV:
        # Verify user has permission to export CSV file
        if not security_manager.can_access("can_csv", "Superset"):
            return self.response_403()

        # Guard the [0] lookup below against an empty result set, which
        # would otherwise raise IndexError (same guard as the multi-query
        # variant of this method).
        if not result["queries"]:
            return self.response_400(_("Empty query result"))

        # return the first result
        data = result["queries"][0]["data"]
        return CsvResponse(data, headers=generate_download_headers("csv"))

    if result_format == ChartDataResultFormat.JSON:
        response_data = simplejson.dumps(
            {"result": result["queries"]},
            default=json_int_dttm_ser,
            ignore_nan=True,
        )
        resp = make_response(response_data, 200)
        resp.headers["Content-Type"] = "application/json; charset=utf-8"
        return resp

    return self.response_400(message=f"Unsupported result_format: {result_format}")
def export(self, **kwargs: Any) -> Response:
    """Export dashboards
    ---
    get:
      description: >-
        Exports multiple Dashboards and downloads them as YAML files.
      parameters:
      - in: query
        name: q
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/get_export_ids_schema'
      responses:
        200:
          description: Dashboard export
          content:
            text/plain:
              schema:
                type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    requested_ids = kwargs["rison"]
    # Optional cookie name — presumably set so the client can detect that
    # the download completed; TODO confirm against the frontend caller.
    token = request.args.get("token")

    if is_feature_enabled("VERSIONED_EXPORT"):
        # New-style export: bundle one file per exported asset into a zip
        # whose name embeds the current timestamp.
        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
        root = f"dashboard_export_{timestamp}"
        filename = f"{root}.zip"

        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            try:
                for file_name, file_content in ExportDashboardsCommand(
                    requested_ids).run():
                    with bundle.open(f"{root}/{file_name}", "w") as fp:
                        fp.write(file_content.encode())
            except DashboardNotFoundError:
                # Any missing/inaccessible dashboard aborts the whole export.
                return self.response_404()
        buf.seek(0)

        # NOTE(review): ``attachment_filename`` was renamed to
        # ``download_name`` in Flask 2.0 — confirm the pinned Flask version.
        response = send_file(
            buf,
            mimetype="application/zip",
            as_attachment=True,
            attachment_filename=filename,
        )
        if token:
            response.set_cookie(token, "done", max_age=600)
        return response

    # Legacy export: a single JSON document with the accessible dashboards.
    query = self.datamodel.session.query(Dashboard).filter(
        Dashboard.id.in_(requested_ids))
    query = self._base_filters.apply_all(query)
    ids = [item.id for item in query.all()]
    # Legacy path 404s only when nothing is accessible; partial matches are
    # still exported.
    if not ids:
        return self.response_404()
    export = Dashboard.export_dashboards(ids)
    resp = make_response(export, 200)
    resp.headers["Content-Disposition"] = generate_download_headers(
        "json")["Content-Disposition"]
    if token:
        resp.set_cookie(token, "done", max_age=600)
    return resp
def export(self, **kwargs: Any) -> Response:
    """Export datasets
    ---
    get:
      description: >-
        Exports multiple datasets and downloads them as YAML files
      parameters:
      - in: query
        name: q
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/get_export_ids_schema'
      responses:
        200:
          description: Dataset export
          content:
            text/plain:
              schema:
                type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        500:
          $ref: '#/components/responses/500'
    """
    requested_ids = kwargs["rison"]

    if is_feature_enabled("VERSIONED_EXPORT"):
        # New-style export: one file per dataset bundled into a zip whose
        # name embeds the current timestamp.
        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
        root = f"dataset_export_{timestamp}"
        filename = f"{root}.zip"

        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            try:
                for file_name, file_content in ExportDatasetsCommand(
                    requested_ids).run():
                    with bundle.open(f"{root}/{file_name}", "w") as fp:
                        fp.write(file_content.encode())
            except DatasetNotFoundError:
                # Any missing/inaccessible dataset aborts the whole export.
                return self.response_404()
        buf.seek(0)

        # NOTE(review): ``attachment_filename`` was renamed to
        # ``download_name`` in Flask 2.0 — confirm the pinned Flask version.
        return send_file(
            buf,
            mimetype="application/zip",
            as_attachment=True,
            attachment_filename=filename,
        )

    # Legacy export: single YAML document.
    query = self.datamodel.session.query(SqlaTable).filter(
        SqlaTable.id.in_(requested_ids))
    query = self._base_filters.apply_all(query)
    items = query.all()
    ids = [item.id for item in items]
    # 404 unless every requested id survived filtering — no partial exports.
    if len(ids) != len(requested_ids):
        return self.response_404()
    data = [t.export_to_dict() for t in items]
    return Response(
        yaml.safe_dump(data),
        headers=generate_download_headers("yaml"),
        mimetype="application/text",
    )