def _create_table_in_superset(
    self, table_name: str, database: Database, schema
) -> SqlaTable:
    """
    Create (or refresh) the SqlaTable dataset itself.

    Keyword arguments:
    table_name -- the name of the table to create
    database -- the database object which will be used
    schema -- the schema of table

    Returns the existing dataset (with refreshed metadata) when one already
    matches (table_name, schema, database), otherwise a newly created one.

    Raises:
    TableCreationException:
        1. If the Table object could not be created
        2. If the Table could not be created in the database
    """
    try:
        table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=table_name, schema=schema, database_id=database.id
            )
            .one_or_none()
        )
        if table:
            # Dataset already registered: just refresh its column metadata.
            table.fetch_metadata()
        if not table:
            table = SqlaTable(table_name=table_name)
            table.database = database
            table.database_id = database.id
            # NOTE(review): assumes an authenticated user is in the request
            # context (g.user) — confirm for non-interactive callers.
            table.user_id = g.user.id
            table.schema = schema
            table.fetch_metadata()
            db.session.add(table)
        db.session.commit()
        return table
    except Exception as ex:
        # Chain the original exception (`from ex`) so the root cause is not
        # lost; the exception is also passed as an arg for backward compat.
        raise TableCreationException(
            f"Table {table_name} could not be created.", ex
        ) from ex
def post_add(self, item: models.SqlaTable) -> None:
    """Post-insert hook for a dataset: refresh its column metadata and
    register the access permissions (datasource, and schema when set)."""
    item.fetch_metadata()
    grant = security_manager.add_permission_view_menu
    grant("datasource_access", item.get_perm())
    if item.schema:
        grant("schema_access", item.schema_perm)
def test_with_virtual_table_with_colons_as_datasource(self):
    """
    Chart data API: test query with literal colon characters in query, metrics,
    where clause and filters
    """
    owner = self.get_user("admin")
    # Virtual dataset whose SQL, columns and values all contain ':' so that
    # the colons must be treated as literals, not bind-parameter markers.
    table = SqlaTable(
        table_name="virtual_table_1",
        schema=get_example_default_schema(),
        owners=[owner],
        database=get_example_database(),
        sql="select ':foo' as foo, ':bar:' as bar, state, num from birth_names",
    )
    db.session.add(table)
    db.session.commit()
    table.fetch_metadata()

    request_payload = self.query_context_payload
    request_payload["datasource"] = {
        "type": "table",
        "id": table.id,
    }
    request_payload["queries"][0]["columns"] = ["foo", "bar", "state"]
    request_payload["queries"][0]["where"] = "':abc' != ':xyz:qwerty'"
    request_payload["queries"][0]["orderby"] = None
    request_payload["queries"][0]["metrics"] = [
        {
            "expressionType": AdhocMetricExpressionType.SQL,
            "sqlExpression": "sum(case when state = ':asdf' then 0 else 1 end)",
            "label": "count",
        }
    ]
    request_payload["queries"][0]["filters"] = [
        {
            "col": "foo",
            "op": "!=",
            "val": ":qwerty:",
        }
    ]
    rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
    # Fixture is removed before the assertions so a failing assert does not
    # leave the virtual table behind.
    db.session.delete(table)
    db.session.commit()

    assert rv.status_code == 200
    result = rv.json["result"][0]
    data = result["data"]
    assert {col for col in data[0].keys()} == {"foo", "bar", "state", "count"}
    # make sure results and query parameters are unescaped
    assert {row["foo"] for row in data} == {":foo"}
    assert {row["bar"] for row in data} == {":bar:"}
    assert "':asdf'" in result["query"]
    assert "':xyz:qwerty'" in result["query"]
    assert "':qwerty:'" in result["query"]
def insert_dataset(
    table_name: str, schema: str, owners: List[int], database: Database
) -> SqlaTable:
    """Create and persist a SqlaTable dataset, resolving *owners* (user ids)
    to user objects, then refresh its column metadata and return it."""
    user_model = security_manager.user_model
    resolved_owners = [
        db.session.query(user_model).get(owner_id) for owner_id in owners
    ]
    dataset = SqlaTable(
        table_name=table_name,
        schema=schema,
        owners=resolved_owners,
        database=database,
    )
    db.session.add(dataset)
    db.session.commit()
    dataset.fetch_metadata()
    return dataset
def _create_world_bank_dashboard(table: SqlaTable) -> Dashboard:
    """Build the "World Bank's Data" example dashboard on top of *table*,
    wiring the example slices into the layout positions."""
    from superset.examples.helpers import update_slice_ids
    from superset.examples.world_bank import dashboard_positions

    positions = dashboard_positions
    slices = update_slice_ids(positions)

    table.fetch_metadata()

    dashboard = create_dashboard(
        "world_health", "World Bank's Data", json.dumps(positions), slices
    )
    dashboard.json_metadata = '{"mock_key": "mock_value"}'
    db.session.commit()
    return dashboard
def test_fetch_metadata_for_updated_virtual_table(self):
    """fetch_metadata() on a virtual table must sync the column list with
    the current SQL: stale columns are dropped, new physical columns added,
    and a calculated column shadowed by a physical one loses its expression."""
    table = SqlaTable(
        table_name="updated_sql_table",
        database=get_example_database(),
        sql="select 123 as intcol, 'abc' as strcol, 'abc' as mycase",
    )
    # Pre-existing columns: one physical with a stale type (intcol), one no
    # longer present in the SQL (oldcol), and two calculated columns — one
    # of which (mycase) collides with a physical column in the SQL above.
    TableColumn(column_name="intcol", type="FLOAT", table=table)
    TableColumn(column_name="oldcol", type="INT", table=table)
    TableColumn(
        column_name="expr",
        expression="case when 1 then 1 else 0 end",
        type="INT",
        table=table,
    )
    TableColumn(
        column_name="mycase",
        expression="case when 1 then 1 else 0 end",
        type="INT",
        table=table,
    )

    # make sure the columns have been mapped properly
    assert len(table.columns) == 4
    table.fetch_metadata(commit=False)

    # assert that the removed column has been dropped and
    # the physical and calculated columns are present
    assert {col.column_name for col in table.columns} == {
        "intcol",
        "strcol",
        "mycase",
        "expr",
    }
    cols: Dict[str, TableColumn] = {col.column_name: col for col in table.columns}
    # assert that the type for intcol has been updated (asserting CI types)
    backend = get_example_database().backend
    assert VIRTUAL_TABLE_INT_TYPES[backend].match(cols["intcol"].type)
    # assert that the expression has been replaced with the new physical column
    assert cols["mycase"].expression == ""
    assert VIRTUAL_TABLE_STRING_TYPES[backend].match(cols["mycase"].type)
    assert cols["expr"].expression == "case when 1 then 1 else 0 end"
    # commit=False above means nothing was flushed; discard the fixture
    db.session.delete(table)
def form_post(  # pylint: disable=too-many-locals
    self, form: ColumnarToDatabaseForm
) -> Response:
    """Handle a columnar (parquet) upload: read the file(s) — optionally a
    zip archive of files — into a DataFrame, write it to the target database
    via the engine spec, and register/refresh the matching SqlaTable so it
    can be explored. Flashes a message and redirects in every branch."""
    database = form.con.data
    columnar_table = Table(table=form.name.data, schema=form.schema.data)
    files = form.columnar_file.data
    # Set of file extensions among the uploaded files.
    file_type = {file.filename.split(".")[-1] for file in files}

    if file_type == {"zip"}:
        # A single zip archive: replace `files` with in-memory buffers of its
        # members and recompute the extension set from the member names.
        zipfile_ob = zipfile.ZipFile(  # pylint: disable=consider-using-with
            form.columnar_file.data[0]
        )  # pylint: disable=consider-using-with
        file_type = {filename.split(".")[-1] for filename in zipfile_ob.namelist()}
        files = [
            io.BytesIO((zipfile_ob.open(filename).read(), filename)[0])
            for filename in zipfile_ob.namelist()
        ]

    if len(file_type) > 1:
        # Mixed extensions cannot be concatenated into one DataFrame.
        message = _(
            "Multiple file extensions are not allowed for columnar uploads."
            " Please make sure all files are of the same extension.",
        )
        flash(message, "danger")
        return redirect("/columnartodatabaseview/form")

    read = pd.read_parquet
    kwargs = {
        "columns": form.usecols.data if form.usecols.data else None,
    }

    if not schema_allows_csv_upload(database, columnar_table.schema):
        message = _(
            'Database "%(database_name)s" schema "%(schema_name)s" '
            "is not allowed for columnar uploads. "
            "Please contact your Superset Admin.",
            database_name=database.database_name,
            schema_name=columnar_table.schema,
        )
        flash(message, "danger")
        return redirect("/columnartodatabaseview/form")

    if "." in columnar_table.table and columnar_table.schema:
        # A dotted table name already encodes a namespace; refuse ambiguity.
        message = _(
            "You cannot specify a namespace both in the name of the table: "
            '"%(columnar_table.table)s" and in the schema field: '
            '"%(columnar_table.schema)s". Please remove one',
            table=columnar_table.table,
            schema=columnar_table.schema,
        )
        flash(message, "danger")
        return redirect("/columnartodatabaseview/form")

    try:
        chunks = [read(file, **kwargs) for file in files]
        df = pd.concat(chunks)

        # Re-fetch the database by the id submitted in the form.
        database = (
            db.session.query(models.Database)
            .filter_by(id=form.data.get("con").data.get("id"))
            .one()
        )

        database.db_engine_spec.df_to_sql(
            database,
            columnar_table,
            df,
            to_sql_kwargs={
                "chunksize": 1000,
                "if_exists": form.if_exists.data,
                "index": form.index.data,
                "index_label": form.index_label.data,
            },
        )

        # Connect table to the database that should be used for exploration.
        # E.g. if hive was used to upload a csv, presto will be a better option
        # to explore the table.
        expore_database = database
        explore_database_id = database.explore_database_id
        if explore_database_id:
            expore_database = (
                db.session.query(models.Database)
                .filter_by(id=explore_database_id)
                .one_or_none()
                or database
            )

        sqla_table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=columnar_table.table,
                schema=columnar_table.schema,
                database_id=expore_database.id,
            )
            .one_or_none()
        )

        if sqla_table:
            sqla_table.fetch_metadata()
        if not sqla_table:
            sqla_table = SqlaTable(table_name=columnar_table.table)
            sqla_table.database = expore_database
            # NOTE(review): database_id is set from the *upload* database while
            # .database points at the explore database — confirm intended.
            sqla_table.database_id = database.id
            sqla_table.user_id = g.user.get_id()
            sqla_table.schema = columnar_table.schema
            sqla_table.fetch_metadata()
            db.session.add(sqla_table)
        db.session.commit()
    except Exception as ex:  # pylint: disable=broad-except
        # Any failure rolls the session back and reports the error to the UI.
        db.session.rollback()
        message = _(
            'Unable to upload Columnar file "%(filename)s" to table '
            '"%(table_name)s" in database "%(db_name)s". '
            "Error message: %(error_msg)s",
            filename=[file.filename for file in form.columnar_file.data],
            table_name=form.name.data,
            db_name=database.database_name,
            error_msg=str(ex),
        )
        flash(message, "danger")
        stats_logger.incr("failed_columnar_upload")
        return redirect("/columnartodatabaseview/form")

    # Go back to welcome page / splash screen
    message = _(
        'Columnar file "%(columnar_filename)s" uploaded to table "%(table_name)s" '
        'in database "%(db_name)s"',
        columnar_filename=[file.filename for file in form.columnar_file.data],
        table_name=str(columnar_table),
        db_name=sqla_table.database.database_name,
    )
    flash(message, "info")
    stats_logger.incr("successful_columnar_upload")
    return redirect("/tablemodelview/list/")
def form_post(self, form: ExcelToDatabaseForm) -> Response:
    """Handle an Excel upload: stream the file to a temp path, load it with
    pandas, write the DataFrame to the target database via the engine spec,
    and register/refresh the matching SqlaTable for exploration."""
    database = form.con.data
    excel_table = Table(table=form.name.data, schema=form.schema.data)

    if not schema_allows_csv_upload(database, excel_table.schema):
        message = _(
            'Database "%(database_name)s" schema "%(schema_name)s" '
            "is not allowed for excel uploads. Please contact your Superset Admin.",
            database_name=database.database_name,
            schema_name=excel_table.schema,
        )
        flash(message, "danger")
        return redirect("/exceltodatabaseview/form")

    if "." in excel_table.table and excel_table.schema:
        # A dotted table name already encodes a namespace; refuse ambiguity.
        message = _(
            "You cannot specify a namespace both in the name of the table: "
            '"%(excel_table.table)s" and in the schema field: '
            '"%(excel_table.schema)s". Please remove one',
            table=excel_table.table,
            schema=excel_table.schema,
        )
        flash(message, "danger")
        return redirect("/exceltodatabaseview/form")

    # delete=False so the temp file survives the context; it is removed
    # explicitly in the error handler below (but not on success — TODO confirm).
    uploaded_tmp_file_path = tempfile.NamedTemporaryFile(  # pylint: disable=consider-using-with
        dir=app.config["UPLOAD_FOLDER"],
        suffix=os.path.splitext(form.excel_file.data.filename)[1].lower(),
        delete=False,
    ).name

    try:
        utils.ensure_path_exists(config["UPLOAD_FOLDER"])
        upload_stream_write(form.excel_file.data, uploaded_tmp_file_path)

        # Note: reads from the form's stream (io=), not the temp path.
        df = pd.read_excel(
            header=form.header.data if form.header.data else 0,
            index_col=form.index_col.data,
            io=form.excel_file.data,
            keep_default_na=not form.null_values.data,
            mangle_dupe_cols=form.mangle_dupe_cols.data,
            na_values=form.null_values.data if form.null_values.data else None,
            parse_dates=form.parse_dates.data,
            skiprows=form.skiprows.data,
            sheet_name=form.sheet_name.data if form.sheet_name.data else 0,
        )

        # Re-fetch the database by the id submitted in the form.
        database = (
            db.session.query(models.Database)
            .filter_by(id=form.data.get("con").data.get("id"))
            .one()
        )

        database.db_engine_spec.df_to_sql(
            database,
            excel_table,
            df,
            to_sql_kwargs={
                "chunksize": 1000,
                "if_exists": form.if_exists.data,
                "index": form.index.data,
                "index_label": form.index_label.data,
            },
        )

        # Connect table to the database that should be used for exploration.
        # E.g. if hive was used to upload a excel, presto will be a better option
        # to explore the table.
        expore_database = database
        explore_database_id = database.explore_database_id
        if explore_database_id:
            expore_database = (
                db.session.query(models.Database)
                .filter_by(id=explore_database_id)
                .one_or_none()
                or database
            )

        sqla_table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=excel_table.table,
                schema=excel_table.schema,
                database_id=expore_database.id,
            )
            .one_or_none()
        )
        if sqla_table:
            sqla_table.fetch_metadata()
        if not sqla_table:
            sqla_table = SqlaTable(table_name=excel_table.table)
            sqla_table.database = expore_database
            # NOTE(review): database_id is set from the *upload* database while
            # .database points at the explore database — confirm intended.
            sqla_table.database_id = database.id
            sqla_table.user_id = g.user.get_id()
            sqla_table.schema = excel_table.schema
            sqla_table.fetch_metadata()
            db.session.add(sqla_table)
        db.session.commit()
    except Exception as ex:  # pylint: disable=broad-except
        # Any failure rolls the session back and reports the error to the UI.
        db.session.rollback()
        message = _(
            'Unable to upload Excel file "%(filename)s" to table '
            '"%(table_name)s" in database "%(db_name)s". '
            "Error message: %(error_msg)s",
            filename=form.excel_file.data.filename,
            table_name=form.name.data,
            db_name=database.database_name,
            error_msg=str(ex),
        )
        flash(message, "danger")
        stats_logger.incr("failed_excel_upload")
        return redirect("/exceltodatabaseview/form")

    # Go back to welcome page / splash screen
    message = _(
        'Excel file "%(excel_filename)s" uploaded to table "%(table_name)s" in '
        'database "%(db_name)s"',
        excel_filename=form.excel_file.data.filename,
        table_name=str(excel_table),
        db_name=sqla_table.database.database_name,
    )
    flash(message, "info")
    stats_logger.incr("successful_excel_upload")
    return redirect("/tablemodelview/list/")
def form_post(self, form: CsvToDatabaseForm) -> Response:
    """Handle a CSV upload: stream-parse the file with pandas in chunks,
    write the DataFrame to the target database via the engine spec, and
    register/refresh the matching SqlaTable for exploration."""
    database = form.con.data
    csv_table = Table(table=form.name.data, schema=form.schema.data)

    if not schema_allows_csv_upload(database, csv_table.schema):
        message = _(
            'Database "%(database_name)s" schema "%(schema_name)s" '
            "is not allowed for csv uploads. Please contact your Superset Admin.",
            database_name=database.database_name,
            schema_name=csv_table.schema,
        )
        flash(message, "danger")
        return redirect("/csvtodatabaseview/form")

    if "." in csv_table.table and csv_table.schema:
        # A dotted table name already encodes a namespace; refuse ambiguity.
        message = _(
            "You cannot specify a namespace both in the name of the table: "
            '"%(csv_table.table)s" and in the schema field: '
            '"%(csv_table.schema)s". Please remove one',
            table=csv_table.table,
            schema=csv_table.schema,
        )
        flash(message, "danger")
        return redirect("/csvtodatabaseview/form")

    try:
        # Chunked, iterator-based read keeps memory bounded; pd.concat
        # reassembles the chunks into one DataFrame.
        df = pd.concat(
            pd.read_csv(
                chunksize=1000,
                encoding="utf-8",
                filepath_or_buffer=form.csv_file.data,
                header=form.header.data if form.header.data else 0,
                index_col=form.index_col.data,
                infer_datetime_format=form.infer_datetime_format.data,
                iterator=True,
                keep_default_na=not form.null_values.data,
                mangle_dupe_cols=form.mangle_dupe_cols.data,
                usecols=form.usecols.data if form.usecols.data else None,
                na_values=form.null_values.data if form.null_values.data else None,
                nrows=form.nrows.data,
                parse_dates=form.parse_dates.data,
                sep=form.sep.data,
                skip_blank_lines=form.skip_blank_lines.data,
                skipinitialspace=form.skipinitialspace.data,
                skiprows=form.skiprows.data,
            )
        )

        # Re-fetch the database by the id submitted in the form.
        database = (
            db.session.query(models.Database)
            .filter_by(id=form.data.get("con").data.get("id"))
            .one()
        )

        database.db_engine_spec.df_to_sql(
            database,
            csv_table,
            df,
            to_sql_kwargs={
                "chunksize": 1000,
                "if_exists": form.if_exists.data,
                "index": form.index.data,
                "index_label": form.index_label.data,
            },
        )

        # Connect table to the database that should be used for exploration.
        # E.g. if hive was used to upload a csv, presto will be a better option
        # to explore the table.
        expore_database = database
        explore_database_id = database.explore_database_id
        if explore_database_id:
            expore_database = (
                db.session.query(models.Database)
                .filter_by(id=explore_database_id)
                .one_or_none()
                or database
            )

        sqla_table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=csv_table.table,
                schema=csv_table.schema,
                database_id=expore_database.id,
            )
            .one_or_none()
        )
        if sqla_table:
            sqla_table.fetch_metadata()
        if not sqla_table:
            sqla_table = SqlaTable(table_name=csv_table.table)
            sqla_table.database = expore_database
            # NOTE(review): database_id is set from the *upload* database while
            # .database points at the explore database — confirm intended.
            sqla_table.database_id = database.id
            sqla_table.user_id = g.user.get_id()
            sqla_table.schema = csv_table.schema
            sqla_table.fetch_metadata()
            db.session.add(sqla_table)
        db.session.commit()
    except Exception as ex:  # pylint: disable=broad-except
        # Any failure rolls the session back and reports the error to the UI.
        db.session.rollback()
        message = _(
            'Unable to upload CSV file "%(filename)s" to table '
            '"%(table_name)s" in database "%(db_name)s". '
            "Error message: %(error_msg)s",
            filename=form.csv_file.data.filename,
            table_name=form.name.data,
            db_name=database.database_name,
            error_msg=str(ex),
        )
        flash(message, "danger")
        stats_logger.incr("failed_csv_upload")
        return redirect("/csvtodatabaseview/form")

    # Go back to welcome page / splash screen
    message = _(
        'CSV file "%(csv_filename)s" uploaded to table "%(table_name)s" in '
        'database "%(db_name)s"',
        csv_filename=form.csv_file.data.filename,
        table_name=str(csv_table),
        db_name=sqla_table.database.database_name,
    )
    flash(message, "info")
    stats_logger.incr("successful_csv_upload")
    return redirect("/tablemodelview/list/")
def form_post(self, form):
    """Handle a CSV upload (legacy path): save the file under the upload
    folder, delegate table creation to the engine spec's
    create_table_from_csv, then register/refresh the SqlaTable dataset.
    The saved file is removed on both success and failure."""
    database = form.con.data
    schema_name = form.schema.data or ""

    if not schema_allows_csv_upload(database, schema_name):
        message = _(
            'Database "%(database_name)s" schema "%(schema_name)s" '
            "is not allowed for csv uploads. Please contact your Superset Admin.",
            database_name=database.database_name,
            schema_name=schema_name,
        )
        flash(message, "danger")
        return redirect("/csvtodatabaseview/form")

    csv_file = form.csv_file.data
    # Sanitize the client-supplied filename before using it as a path.
    form.csv_file.data.filename = secure_filename(form.csv_file.data.filename)
    csv_filename = form.csv_file.data.filename
    path = os.path.join(config["UPLOAD_FOLDER"], csv_filename)
    try:
        utils.ensure_path_exists(config["UPLOAD_FOLDER"])
        csv_file.save(path)
        table_name = form.name.data

        # Re-fetch the database by the id submitted in the form.
        con = form.data.get("con")
        database = (
            db.session.query(models.Database).filter_by(id=con.data.get("id")).one()
        )
        database.db_engine_spec.create_table_from_csv(form, database)

        table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=table_name,
                schema=form.schema.data,
                database_id=database.id,
            )
            .one_or_none()
        )
        if table:
            # Existing dataset: refresh its column metadata.
            table.fetch_metadata()
        if not table:
            table = SqlaTable(table_name=table_name)
            table.database = database
            table.database_id = database.id
            table.user_id = g.user.id
            table.schema = form.schema.data
            table.fetch_metadata()
            db.session.add(table)
        db.session.commit()
    except Exception as e:  # pylint: disable=broad-except
        # Roll back, best-effort clean up the saved file, and report.
        db.session.rollback()
        try:
            os.remove(path)
        except OSError:
            pass
        message = _(
            'Unable to upload CSV file "%(filename)s" to table '
            '"%(table_name)s" in database "%(db_name)s". '
            "Error message: %(error_msg)s",
            filename=csv_filename,
            table_name=form.name.data,
            db_name=database.database_name,
            error_msg=str(e),
        )
        flash(message, "danger")
        stats_logger.incr("failed_csv_upload")
        return redirect("/csvtodatabaseview/form")

    os.remove(path)
    # Go back to welcome page / splash screen
    message = _(
        'CSV file "%(csv_filename)s" uploaded to table "%(table_name)s" in '
        'database "%(db_name)s"',
        csv_filename=csv_filename,
        table_name=form.name.data,
        db_name=table.database.database_name,
    )
    flash(message, "info")
    stats_logger.incr("successful_csv_upload")
    return redirect("/tablemodelview/list/")
def form_post(self, form: ExcelToDatabaseForm) -> Response:
    """Handle an Excel upload (create_table_from_excel path): stream the
    file to a temp path, delegate the load to the engine spec, then
    register/refresh the SqlaTable dataset for exploration.

    Fix: the success flash previously said 'CSV file …' for an Excel upload;
    it now says 'Excel file …'.
    """
    database = form.con.data
    excel_table = Table(table=form.name.data, schema=form.schema.data)

    if not schema_allows_csv_upload(database, excel_table.schema):
        message = _(
            'Database "%(database_name)s" schema "%(schema_name)s" '
            "is not allowed for excel uploads. Please contact your Superset Admin.",
            database_name=database.database_name,
            schema_name=excel_table.schema,
        )
        flash(message, "danger")
        return redirect("/exceltodatabaseview/form")

    if "." in excel_table.table and excel_table.schema:
        # A dotted table name already encodes a namespace; refuse ambiguity.
        message = _(
            "You cannot specify a namespace both in the name of the table: "
            '"%(excel_table.table)s" and in the schema field: '
            '"%(excel_table.schema)s". Please remove one',
            table=excel_table.table,
            schema=excel_table.schema,
        )
        flash(message, "danger")
        return redirect("/exceltodatabaseview/form")

    # delete=False so the temp file survives the context manager; it is
    # removed explicitly below on both success and failure.
    uploaded_tmp_file_path = tempfile.NamedTemporaryFile(
        dir=app.config["UPLOAD_FOLDER"],
        suffix=os.path.splitext(form.excel_file.data.filename)[1].lower(),
        delete=False,
    ).name

    try:
        utils.ensure_path_exists(config["UPLOAD_FOLDER"])
        upload_stream_write(form.excel_file.data, uploaded_tmp_file_path)

        # Re-fetch the database by the id submitted in the form.
        con = form.data.get("con")
        database = (
            db.session.query(models.Database).filter_by(id=con.data.get("id")).one()
        )

        # some params are not supported by pandas.read_excel (e.g. chunksize).
        # More can be found here:
        # https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_excel.html
        excel_to_df_kwargs = {
            "header": form.header.data if form.header.data else 0,
            "index_col": form.index_col.data,
            "mangle_dupe_cols": form.mangle_dupe_cols.data,
            "skiprows": form.skiprows.data,
            "nrows": form.nrows.data,
            "sheet_name": form.sheet_name.data if form.sheet_name.data else 0,
            "parse_dates": form.parse_dates.data,
        }
        if form.null_values.data:
            excel_to_df_kwargs["na_values"] = form.null_values.data
            excel_to_df_kwargs["keep_default_na"] = False

        df_to_sql_kwargs = {
            "name": excel_table.table,
            "if_exists": form.if_exists.data,
            "index": form.index.data,
            "index_label": form.index_label.data,
            "chunksize": 1000,
        }
        database.db_engine_spec.create_table_from_excel(
            uploaded_tmp_file_path,
            excel_table,
            database,
            excel_to_df_kwargs,
            df_to_sql_kwargs,
        )

        # Connect table to the database that should be used for exploration.
        # E.g. if hive was used to upload a excel, presto will be a better option
        # to explore the table.
        expore_database = database
        explore_database_id = database.explore_database_id
        if explore_database_id:
            expore_database = (
                db.session.query(models.Database)
                .filter_by(id=explore_database_id)
                .one_or_none()
                or database
            )

        sqla_table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=excel_table.table,
                schema=excel_table.schema,
                database_id=expore_database.id,
            )
            .one_or_none()
        )
        if sqla_table:
            sqla_table.fetch_metadata()
        if not sqla_table:
            sqla_table = SqlaTable(table_name=excel_table.table)
            sqla_table.database = expore_database
            # NOTE(review): database_id is set from the *upload* database while
            # .database points at the explore database — confirm intended.
            sqla_table.database_id = database.id
            sqla_table.user_id = g.user.id
            sqla_table.schema = excel_table.schema
            sqla_table.fetch_metadata()
            db.session.add(sqla_table)
        db.session.commit()
    except Exception as ex:  # pylint: disable=broad-except
        # Roll back, best-effort clean up the temp file, and report.
        db.session.rollback()
        try:
            os.remove(uploaded_tmp_file_path)
        except OSError:
            pass
        message = _(
            'Unable to upload Excel file "%(filename)s" to table '
            '"%(table_name)s" in database "%(db_name)s". '
            "Error message: %(error_msg)s",
            filename=form.excel_file.data.filename,
            table_name=form.name.data,
            db_name=database.database_name,
            error_msg=str(ex),
        )
        flash(message, "danger")
        stats_logger.incr("failed_excel_upload")
        return redirect("/exceltodatabaseview/form")

    os.remove(uploaded_tmp_file_path)
    # Go back to welcome page / splash screen
    message = _(
        'Excel file "%(excel_filename)s" uploaded to table "%(table_name)s" in '
        'database "%(db_name)s"',
        excel_filename=form.excel_file.data.filename,
        table_name=str(excel_table),
        db_name=sqla_table.database.database_name,
    )
    flash(message, "info")
    stats_logger.incr("successful_excel_upload")
    return redirect("/tablemodelview/list/")
def _set_table_metadata(datasource: SqlaTable, database: "Database") -> None:
    """Attach *database* to the example datasource, mark "ds" as its main
    temporal column, enable filter selects, and refresh column metadata."""
    datasource.database = database
    datasource.main_dttm_col = "ds"
    datasource.filter_select_enabled = True
    datasource.fetch_metadata()
def form_post(self, form: CsvToDatabaseForm) -> FlaskResponse:
    """Handle a CSV upload (create_table_from_csv path): stream the file to
    a temp path, delegate the load to the engine spec, then register/refresh
    the SqlaTable dataset. The temp file is removed on success and failure."""
    database = form.con.data
    csv_table = Table(table=form.name.data, schema=form.schema.data)

    if not schema_allows_csv_upload(database, csv_table.schema):
        message = _(
            'Database "%(database_name)s" schema "%(schema_name)s" '
            "is not allowed for csv uploads. Please contact your Superset Admin.",
            database_name=database.database_name,
            schema_name=csv_table.schema,
        )
        flash(message, "danger")
        return redirect("/csvtodatabaseview/form")

    if "." in csv_table.table and csv_table.schema:
        # A dotted table name already encodes a namespace; refuse ambiguity.
        message = _(
            "You cannot specify a namespace both in the name of the table: "
            '"%(csv_table.table)s" and in the schema field: '
            '"%(csv_table.schema)s". Please remove one',
            table=csv_table.table,
            schema=csv_table.schema,
        )
        flash(message, "danger")
        return redirect("/csvtodatabaseview/form")

    # delete=False so the temp file survives the context manager; it is
    # removed explicitly below on both success and failure.
    uploaded_tmp_file_path = tempfile.NamedTemporaryFile(
        dir=app.config["UPLOAD_FOLDER"],
        suffix=os.path.splitext(form.csv_file.data.filename)[1].lower(),
        delete=False,
    ).name

    try:
        utils.ensure_path_exists(config["UPLOAD_FOLDER"])
        upload_stream_write(form.csv_file.data, uploaded_tmp_file_path)

        # Re-fetch the database by the id submitted in the form.
        con = form.data.get("con")
        database = (
            db.session.query(models.Database).filter_by(id=con.data.get("id")).one()
        )

        csv_to_df_kwargs = {
            "sep": form.sep.data,
            "header": form.header.data if form.header.data else 0,
            "index_col": form.index_col.data,
            "mangle_dupe_cols": form.mangle_dupe_cols.data,
            "skipinitialspace": form.skipinitialspace.data,
            "skiprows": form.skiprows.data,
            "nrows": form.nrows.data,
            "skip_blank_lines": form.skip_blank_lines.data,
            "parse_dates": form.parse_dates.data,
            "infer_datetime_format": form.infer_datetime_format.data,
            "chunksize": 1000,
        }
        df_to_sql_kwargs = {
            "name": csv_table.table,
            "if_exists": form.if_exists.data,
            "index": form.index.data,
            "index_label": form.index_label.data,
            "chunksize": 1000,
        }
        database.db_engine_spec.create_table_from_csv(
            uploaded_tmp_file_path,
            csv_table,
            database,
            csv_to_df_kwargs,
            df_to_sql_kwargs,
        )

        # Connect table to the database that should be used for exploration.
        # E.g. if hive was used to upload a csv, presto will be a better option
        # to explore the table.
        expore_database = database
        explore_database_id = database.get_extra().get("explore_database_id", None)
        if explore_database_id:
            expore_database = (
                db.session.query(models.Database)
                .filter_by(id=explore_database_id)
                .one_or_none()
                or database
            )

        sqla_table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=csv_table.table,
                schema=csv_table.schema,
                database_id=expore_database.id,
            )
            .one_or_none()
        )
        if sqla_table:
            sqla_table.fetch_metadata()
        if not sqla_table:
            sqla_table = SqlaTable(table_name=csv_table.table)
            sqla_table.database = expore_database
            # NOTE(review): database_id is set from the *upload* database while
            # .database points at the explore database — confirm intended.
            sqla_table.database_id = database.id
            sqla_table.user_id = g.user.id
            sqla_table.schema = csv_table.schema
            sqla_table.fetch_metadata()
            db.session.add(sqla_table)
        db.session.commit()
    except Exception as ex:  # pylint: disable=broad-except
        # Roll back, best-effort clean up the temp file, and report.
        db.session.rollback()
        try:
            os.remove(uploaded_tmp_file_path)
        except OSError:
            pass
        message = _(
            'Unable to upload CSV file "%(filename)s" to table '
            '"%(table_name)s" in database "%(db_name)s". '
            "Error message: %(error_msg)s",
            filename=form.csv_file.data.filename,
            table_name=form.name.data,
            db_name=database.database_name,
            error_msg=str(ex),
        )
        flash(message, "danger")
        stats_logger.incr("failed_csv_upload")
        return redirect("/csvtodatabaseview/form")

    os.remove(uploaded_tmp_file_path)
    # Go back to welcome page / splash screen
    message = _(
        'CSV file "%(csv_filename)s" uploaded to table "%(table_name)s" in '
        'database "%(db_name)s"',
        csv_filename=form.csv_file.data.filename,
        table_name=str(csv_table),
        db_name=sqla_table.database.database_name,
    )
    flash(message, "info")
    stats_logger.incr("successful_csv_upload")
    return redirect("/tablemodelview/list/")
def form_post(self, form):
    """Handle a CSV upload (variant that also archives a copy of the upload
    under SAVE_FOLDER): stream the file to a temp path, delegate table
    creation to the engine spec, then register/refresh the SqlaTable."""
    database = form.con.data
    schema_name = form.schema.data or ""

    if not schema_allows_csv_upload(database, schema_name):
        message = _(
            'Database "%(database_name)s" schema "%(schema_name)s" '
            "is not allowed for csv uploads. Please contact your Superset Admin.",
            database_name=database.database_name,
            schema_name=schema_name,
        )
        flash(message, "danger")
        return redirect("/csvtodatabaseview/form")

    csv_filename = form.csv_file.data.filename
    extension = os.path.splitext(csv_filename)[1].lower()
    # delete=False so the temp file survives the context manager; it is
    # removed explicitly below on both success and failure.
    path = tempfile.NamedTemporaryFile(
        dir=app.config["UPLOAD_FOLDER"], suffix=extension, delete=False
    ).name
    # Permanent archive location: SAVE_FOLDER/<filename-without-ext>/<filename>
    check_path = (
        app.config["SAVE_FOLDER"]
        + str(os.path.splitext(form.csv_file.data.filename)[0])
        + "/"
    )
    save_path = check_path + form.csv_file.data.filename
    # NOTE(review): filename is repointed at the temp path — presumably so
    # create_table_from_csv(form, …) reads from it; confirm downstream use.
    form.csv_file.data.filename = path

    try:
        utils.ensure_path_exists(config["UPLOAD_FOLDER"])
        upload_stream_write(form.csv_file.data, path)
        utils.ensure_path_exists(config["SAVE_FOLDER"])
        Path(check_path).mkdir(parents=True, exist_ok=True)
        # Keep a permanent copy of the uploaded file.
        shutil.copy(path, save_path)
        table_name = form.name.data

        # Re-fetch the database by the id submitted in the form.
        con = form.data.get("con")
        database = (
            db.session.query(models.Database).filter_by(id=con.data.get("id")).one()
        )
        database.db_engine_spec.create_table_from_csv(form, database)

        table = (
            db.session.query(SqlaTable)
            .filter_by(
                table_name=table_name,
                schema=form.schema.data,
                database_id=database.id,
            )
            .one_or_none()
        )
        if table:
            # Existing dataset: refresh its column metadata.
            table.fetch_metadata()
        if not table:
            table = SqlaTable(table_name=table_name)
            table.database = database
            table.database_id = database.id
            table.user_id = g.user.id
            table.schema = form.schema.data
            table.fetch_metadata()
            db.session.add(table)
        db.session.commit()
    except Exception as ex:  # pylint: disable=broad-except
        # Roll back, best-effort clean up the temp file, and report.
        db.session.rollback()
        try:
            os.remove(path)
        except OSError:
            pass
        message = _(
            'Unable to upload CSV file "%(filename)s" to table '
            '"%(table_name)s" in database "%(db_name)s". '
            "Error message: %(error_msg)s",
            filename=csv_filename,
            table_name=form.name.data,
            db_name=database.database_name,
            error_msg=str(ex),
        )
        flash(message, "danger")
        stats_logger.incr("failed_csv_upload")
        return redirect("/csvtodatabaseview/form")

    os.remove(path)
    # Go back to welcome page / splash screen
    message = _(
        'CSV file "%(csv_filename)s" uploaded to table "%(table_name)s" in '
        'database "%(db_name)s"',
        csv_filename=csv_filename,
        table_name=form.name.data,
        db_name=table.database.database_name,
    )
    flash(message, "info")
    stats_logger.incr("successful_csv_upload")
    return redirect("/tablemodelview/list/")