def import_(self) -> Response:
    """Import dataset(s) with associated databases
    ---
    post:
      requestBody:
        required: true
        content:
          multipart/form-data:
            schema:
              type: object
              properties:
                formData:
                  type: string
                  format: binary
                passwords:
                  type: string
                overwrite:
                  type: boolean
      responses:
        200:
          description: Dataset import result
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    # NOTE: the docstring above is parsed as the OpenAPI spec for this
    # endpoint; `overwrite` must be declared `boolean` — `bool` is not a
    # valid OpenAPI type and produces an invalid schema.
    upload = request.files.get("formData")
    if not upload:
        return self.response_400()
    with ZipFile(upload) as bundle:
        contents = get_contents_from_bundle(bundle)

    # "passwords" is an optional JSON-encoded map used to restore database
    # credentials that were stripped on export.
    passwords = (
        json.loads(request.form["passwords"])
        if "passwords" in request.form
        else None
    )
    # Form values arrive as strings; only the literal "true" enables overwrite.
    overwrite = request.form.get("overwrite") == "true"

    command = ImportDatasetsCommand(
        contents, passwords=passwords, overwrite=overwrite
    )
    try:
        command.run()
        return self.response(200, message="OK")
    except CommandInvalidError as exc:
        # Validation failure: report field-level messages to the client.
        logger.warning("Import dataset failed")
        return self.response_422(message=exc.normalized_messages())
    except DatasetImportError as exc:
        logger.exception("Import dataset failed")
        return self.response_500(message=str(exc))
def import_(self) -> Response:
    """Import dataset(s) with associated databases
    ---
    post:
      requestBody:
        content:
          application/zip:
            schema:
              type: string
              format: binary
      responses:
        200:
          description: Dataset import result
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    # Reject the request early when no file was uploaded.
    upload = request.files.get("formData")
    if not upload:
        return self.response_400()

    # Map each bundle member (with its root directory stripped) to its
    # decoded text content.
    contents = {}
    with ZipFile(upload) as bundle:
        for member in bundle.namelist():
            contents[remove_root(member)] = bundle.read(member).decode()

    # Optional JSON map of database passwords stripped on export.
    passwords = None
    if "passwords" in request.form:
        passwords = json.loads(request.form["passwords"])

    command = ImportDatasetsCommand(contents, passwords=passwords)
    try:
        command.run()
        return self.response(200, message="OK")
    except CommandInvalidError as exc:
        logger.warning("Import dataset failed")
        return self.response_422(message=exc.normalized_messages())
    except DatasetImportError as exc:
        logger.exception("Import dataset failed")
        return self.response_500(message=str(exc))
def import_datasources(path: str, sync: str, recursive: bool) -> None:
    """Import datasources from YAML files.

    :param path: a single YAML file, or a directory to scan for
        ``*.yaml``/``*.yml`` files.
    :param sync: comma-separated list of fields to sync; recognized values
        are ``"columns"`` and ``"metrics"``.
    :param recursive: when ``path`` is a directory, also scan subdirectories.

    Exits the process with status 1 when the import command fails.
    """
    # pylint: disable=import-outside-toplevel
    from superset.datasets.commands.importers.v0 import ImportDatasetsCommand

    sync_array = sync.split(",")
    sync_columns = "columns" in sync_array
    sync_metrics = "metrics" in sync_array

    path_object = Path(path)
    files: List[Path] = []
    if path_object.is_file():
        files.append(path_object)
    elif path_object.exists():
        # Same two patterns either way; recursive walks the whole tree.
        glob = path_object.rglob if recursive else path_object.glob
        files.extend(glob("*.yaml"))
        files.extend(glob("*.yml"))

    contents = {}
    for path_ in files:
        # Explicit UTF-8: relying on the locale default encoding can break
        # on non-ASCII content in exported YAML.
        contents[path_.name] = path_.read_text(encoding="utf-8")

    try:
        ImportDatasetsCommand(contents, sync_columns, sync_metrics).run()
    except Exception:  # pylint: disable=broad-except
        logger.exception("Error when importing dataset")
        sys.exit(1)
def import_datasources(path: str) -> None:
    """Import datasources from ZIP file

    :param path: path of the file to import; its content is read as text
        and handed to the dispatching import command.
    """
    # pylint: disable=import-outside-toplevel
    from superset.datasets.commands.importers.dispatcher import (
        ImportDatasetsCommand,
    )

    # Fix: the original `open(path).read()` never closed the file handle;
    # use a context manager so it is released deterministically.
    with open(path) as file:
        contents = {path: file.read()}
    try:
        ImportDatasetsCommand(contents).run()
    except Exception:  # pylint: disable=broad-except
        logger.exception(
            "There was an error when importing the dataset(s), please check the "
            "exception traceback in the log"
        )
def import_datasources(path: str) -> None:
    """Import datasources from ZIP file

    :param path: path of the file to import; ZIP bundles are unpacked via
        ``get_contents_from_bundle``, any other file is read whole as text.
    """
    # pylint: disable=import-outside-toplevel
    from superset.commands.importers.v1.utils import get_contents_from_bundle
    from superset.datasets.commands.importers.dispatcher import (
        ImportDatasetsCommand,
    )

    if is_zipfile(path):
        with ZipFile(path) as bundle:
            contents = get_contents_from_bundle(bundle)
    else:
        # Fix: the original `open(path).read()` never closed the file
        # handle; use a context manager so it is released deterministically.
        with open(path) as file:
            contents = {path: file.read()}
    try:
        ImportDatasetsCommand(contents, overwrite=True).run()
    except Exception:  # pylint: disable=broad-except
        logger.exception(
            "There was an error when importing the dataset(s), please check the "
            "exception traceback in the log"
        )
def import_(self) -> Response:
    """Import dataset(s) with associated databases
    ---
    post:
      requestBody:
        required: true
        content:
          multipart/form-data:
            schema:
              type: object
              properties:
                formData:
                  description: upload file (ZIP or YAML)
                  type: string
                  format: binary
                passwords:
                  description: JSON map of passwords for each file
                  type: string
                overwrite:
                  description: overwrite existing datasets?
                  type: boolean
      responses:
        200:
          description: Dataset import result
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    # No file part in the request -> bad request.
    upload = request.files.get("formData")
    if not upload:
        return self.response_400()

    # ZIP bundles are expanded into a name -> content mapping; any other
    # upload (e.g. a single YAML file) is read whole. is_zipfile() advances
    # the stream, so rewind before reading in the non-zip branch.
    if not is_zipfile(upload):
        upload.seek(0)
        contents = {upload.filename: upload.read()}
    else:
        with ZipFile(upload) as bundle:
            contents = get_contents_from_bundle(bundle)

    if not contents:
        raise NoValidFilesFoundError()

    # Optional JSON map of database passwords stripped on export.
    passwords = None
    if "passwords" in request.form:
        passwords = json.loads(request.form["passwords"])
    # Form values are strings; only the literal "true" enables overwrite.
    overwrite = request.form.get("overwrite") == "true"

    command = ImportDatasetsCommand(
        contents, passwords=passwords, overwrite=overwrite
    )
    command.run()
    return self.response(200, message="OK")