def test_import_csv_explore_database(self):
    """Upload a CSV into a database whose ``explore_database_id`` extra points
    at another database, and verify the resulting table is attached to that
    explore database rather than the upload database.

    Fix over the original: cleanup (dropping the table, the helper database
    and the CSV file) now runs in a ``finally`` block, matching the other CSV
    tests, so a failed assertion no longer leaks state into later tests.
    """
    if utils.get_example_database().backend == "sqlite":
        # sqlite doesn't support schema / database creation
        return
    explore_db_id = utils.get_example_database().id

    # create a throwaway upload database whose extra redirects exploration
    upload_db = utils.get_or_create_db(
        "csv_explore_db", app.config["SQLALCHEMY_DATABASE_URI"]
    )
    upload_db_id = upload_db.id
    extra = upload_db.get_extra()
    extra["explore_database_id"] = explore_db_id
    upload_db.extra = json.dumps(extra)
    db.session.commit()

    self.login(username="******")
    self.enable_csv_upload(DatasetDAO.get_database_by_id(upload_db_id))
    table_name = "".join(random.choice(string.ascii_uppercase) for _ in range(5))

    f = "testCSV.csv"
    self.create_sample_csvfile(f, ["a,b", "john,1", "paul,2"])
    try:
        # initial upload with fail mode
        resp = self.upload_csv(f, table_name)
        self.assertIn(f'CSV file "{f}" uploaded to table "{table_name}"', resp)
        table = self.get_table_by_name(table_name)
        # the table must land in the explore database, not the upload one
        self.assertEqual(table.database_id, explore_db_id)
    finally:
        # cleanup — guard the table lookup in case the upload itself failed
        table = self.get_table_by_name(table_name)
        if table:
            db.session.delete(table)
        db.session.delete(DatasetDAO.get_database_by_id(upload_db_id))
        db.session.commit()
        os.remove(f)
def test_explore_database_id(self):
    """``explore_database_id`` falls back to the database's own id and honors
    an explicit value stored in the ``extra`` blob."""
    main_db = utils.get_example_database()
    target_db = utils.get_example_database()

    # test that explore_database_id is the regular database
    # id if none is set in the extra
    self.assertEqual(main_db.explore_database_id, main_db.id)

    # test that explore_database_id is correct if the extra is set
    extra = main_db.get_extra()
    extra["explore_database_id"] = target_db.id
    main_db.extra = json.dumps(extra)
    self.assertEqual(main_db.explore_database_id, target_db.id)
def test_import_csv_enforced_schema(self):
    """CSV uploads must respect the schemas allowed for the database: no
    schema and a disallowed schema fail, the allowed schema succeeds for
    both ``append`` and ``replace`` modes.

    Fix over the original: the success message interpolated the literal
    string ``(unknown)`` instead of the uploaded filename, so the success
    assertions could never match the real response text.
    """
    if utils.get_example_database().backend == "sqlite":
        # sqlite doesn't support schema / database creation
        return
    self.login(username="******")
    table_name = "".join(
        random.choice(string.ascii_lowercase) for _ in range(5))
    full_table_name = f"admin_database.{table_name}"
    filename = "testCSV.csv"
    self.create_sample_csvfile(filename, ["a,b", "john,1", "paul,2"])
    try:
        self.enable_csv_upload(utils.get_example_database())

        # no schema specified, fail upload
        resp = self.upload_csv(filename, table_name)
        self.assertIn(
            'Database "examples" schema "None" is not allowed for csv uploads',
            resp)

        # user specified schema matches the expected schema, append
        success_msg = f'CSV file "{filename}" uploaded to table "{full_table_name}"'
        resp = self.upload_csv(
            filename,
            table_name,
            extra={
                "schema": "admin_database",
                "if_exists": "append"
            },
        )
        self.assertIn(success_msg, resp)

        resp = self.upload_csv(
            filename,
            table_name,
            extra={
                "schema": "admin_database",
                "if_exists": "replace"
            },
        )
        self.assertIn(success_msg, resp)

        # user specified schema doesn't match, fail
        resp = self.upload_csv(filename, table_name, extra={"schema": "gold"})
        self.assertIn(
            'Database "examples" schema "gold" is not allowed for csv uploads',
            resp,
        )
    finally:
        os.remove(filename)
def test_extra_table_metadata(self):
    """Smoke-test the extra_table_metadata endpoint for birth_names."""
    self.login("admin")
    example_db = utils.get_example_database()
    # presto keeps the examples in "default"; other backends use "superset"
    if example_db.backend == "presto":
        schema = "default"
    else:
        schema = "superset"
    url = f"/superset/extra_table_metadata/{example_db.id}/birth_names/{schema}/"
    self.get_json_resp(url)
def test_get_superset_tables_substr(self):
    """Substring table search should return exactly the ab_role table."""
    example_db = utils.get_example_database()
    if example_db.backend in {"presto", "hive"}:
        # TODO: change table to the real table that is in examples.
        return
    self.login(username="******")
    schema_name = self.default_schema_backend_map[example_db.backend]

    rv = self.client.get(f"superset/tables/{example_db.id}/{schema_name}/ab_role/")
    self.assertEqual(rv.status_code, 200)

    payload = json.loads(rv.data.decode("utf-8"))
    option = {
        "label": "ab_role",
        "schema": schema_name,
        "title": "ab_role",
        "type": "table",
        "value": "ab_role",
        "extra": None,
    }
    self.assertEqual(payload, {"options": [option], "tableLength": 1})
def test_testconn(self, username="******"):
    """Exercise /superset/testconn with both the password-masked and the
    decrypted SQLAlchemy URI of the examples database.

    Fix over the original: the two identical POST/assert stanzas are folded
    into a single loop over both URI flavors.
    """
    # need to temporarily allow sqlite dbs, teardown will undo this
    app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
    self.login(username=username)
    database = utils.get_example_database()

    # the endpoint must accept the masked and the decrypted URI alike
    for uri in (database.safe_sqlalchemy_uri(), database.sqlalchemy_uri_decrypted):
        data = json.dumps({
            "uri": uri,
            "name": "examples",
            "impersonate_user": False,
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 200
        assert response.headers["Content-Type"] == "application/json"
def test_get_superset_tables_not_allowed(self):
    """Requesting a table name that is not exposed yields a 404."""
    example_db = utils.get_example_database()
    schema_name = self.default_schema_backend_map[example_db.backend]
    self.login(username="******")
    response = self.client.get(
        f"superset/tables/{example_db.id}/{schema_name}/undefined/"
    )
    self.assertEqual(response.status_code, 404)
def test_templated_sql_json(self):
    """SQL Lab should evaluate the Jinja expression '{{ 1+1 }}' to 2."""
    # NOTE(review): another method with this exact name appears later in the
    # file; inside one class the later definition would shadow this one —
    # confirm whether both are meant to exist.
    if utils.get_example_database().backend == "presto":
        # TODO: make it work for presto
        return
    self.login()
    result = self.run_sql("SELECT '{{ 1+1 }}' as test", "fdaklj3ws")
    self.assertEqual(result["data"][0]["test"], "2")
def test_get_select_star_not_allowed(self):
    """
    Database API: Test get select star not allowed
    """
    self.login(username="******")
    example_db = utils.get_example_database()
    url = f"/superset/select_star/{example_db.id}/birth_names"
    response = self.client.get(url)
    self.assertEqual(response.status_code, 404)
def test_templated_sql_json(self):
    """SQL Lab should render the datetime() Jinja helper to an ISO string."""
    if utils.get_example_database().backend == "presto":
        # TODO: make it work for presto
        return
    self.login()
    query = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}' as test"
    result = self.run_sql(query, "fdaklj3ws")
    self.assertEqual(result["data"][0]["test"], "2017-01-01T00:00:00")
def test_custom_template_processors_ignored(self) -> None:
    """Test custom template processor is ignored for a difference backend database."""
    maindb = utils.get_example_database()
    sql = "SELECT '$DATE()'"
    processor = jinja_context.get_template_processor(database=maindb)
    # the $DATE() macro belongs to another backend, so it must pass through
    assert processor.process_template(sql) == sql
def test_process_template(self):
    """The template processor should evaluate the datetime() macro."""
    maindb = utils.get_example_database()
    if maindb.backend == "presto":
        # TODO: make it work for presto
        return
    processor = jinja_context.get_template_processor(database=maindb)
    rendered = processor.process_template(
        "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
    )
    self.assertEqual("SELECT '2017-01-01T00:00:00'", rendered)
def test_custom_template_processors_ignored(self) -> None:
    """Test custom template processor is ignored for a difference backend database."""
    maindb = utils.get_example_database()
    # presto has no $DATE() macro; use a plain literal there instead
    if maindb.backend != "presto":
        sql = "SELECT '$DATE()'"
    else:
        sql = f"SELECT '{datetime.date.today().isoformat()}'"
    processor = jinja_context.get_template_processor(database=maindb)
    assert processor.process_template(sql) == sql
def test_template_kwarg(self):
    """Keyword args passed to process_template become Jinja variables."""
    maindb = utils.get_example_database()
    if maindb.backend == "presto":
        # TODO: make it work for presto
        return
    processor = jinja_context.get_template_processor(database=maindb)
    self.assertEqual("bar", processor.process_template("{{ foo }}", foo="bar"))
def test_databaseview_edit(self, username="******"):
    """Posting a password-masked URI to the edit view must not overwrite the
    stored decrypted SQLAlchemy URI."""
    self.login(username=username)
    database = utils.get_example_database()
    uri_before = database.sqlalchemy_uri_decrypted

    # re-submit the current field values, but with the masked URI
    payload = {
        field: database.__getattribute__(field)
        for field in DatabaseView.add_columns
    }
    payload["sqlalchemy_uri"] = database.safe_sqlalchemy_uri()
    self.client.post("databaseview/edit/{}".format(database.id), data=payload)

    database = utils.get_example_database()
    self.assertEqual(uri_before, database.sqlalchemy_uri_decrypted)

    # Need to clean up after ourselves
    database.impersonate_user = False
    database.allow_dml = False
    database.allow_run_async = False
    db.session.commit()
def test_import_csv(self):
    """End-to-end CSV upload flow against the examples database.

    Covers: initial upload in ``fail`` mode, re-upload failing in ``fail``
    mode, re-upload succeeding in ``append`` and ``replace`` modes, and
    appending/replacing with a second file whose column schema differs.
    """
    self.login(username="******")
    # random table name so repeated runs don't collide
    table_name = "".join(
        random.choice(string.ascii_uppercase) for _ in range(5))

    f1 = "testCSV.csv"
    self.create_sample_csvfile(f1, ["a,b", "john,1", "paul,2"])
    # f2 has a different column set (b,c,d) to exercise schema mismatches
    f2 = "testCSV2.csv"
    self.create_sample_csvfile(f2, ["b,c,d", "john,1,x", "paul,2,y"])
    self.enable_csv_upload(utils.get_example_database())

    try:
        success_msg_f1 = f'CSV file "{f1}" uploaded to table "{table_name}"'

        # initial upload with fail mode
        resp = self.upload_csv(f1, table_name)
        self.assertIn(success_msg_f1, resp)

        # upload again with fail mode; should fail
        fail_msg = f'Unable to upload CSV file "{f1}" to table "{table_name}"'
        resp = self.upload_csv(f1, table_name)
        self.assertIn(fail_msg, resp)

        # upload again with append mode
        resp = self.upload_csv(f1, table_name, extra={"if_exists": "append"})
        self.assertIn(success_msg_f1, resp)

        # upload again with replace mode
        resp = self.upload_csv(f1, table_name, extra={"if_exists": "replace"})
        self.assertIn(success_msg_f1, resp)

        # try to append to table from file with different schema
        resp = self.upload_csv(f2, table_name, extra={"if_exists": "append"})
        fail_msg_f2 = f'Unable to upload CSV file "{f2}" to table "{table_name}"'
        self.assertIn(fail_msg_f2, resp)

        # replace table from file with different schema
        resp = self.upload_csv(f2, table_name, extra={"if_exists": "replace"})
        success_msg_f2 = f'CSV file "{f2}" uploaded to table "{table_name}"'
        self.assertIn(success_msg_f2, resp)
        table = self.get_table_by_name(table_name)
        # make sure the new column name is reflected in the table metadata
        self.assertIn("d", table.column_names)
    finally:
        # always remove the temp CSV files, even on assertion failure
        os.remove(f1)
        os.remove(f2)
def test_custom_password_store(self):
    """A registered ``custom_password_store`` must override the password in
    the decrypted SQLAlchemy URI.

    Fix over the original: the module-level monkeypatch is now reset in a
    ``finally`` block, so a failed assertion no longer leaks the password
    store into later tests.
    """
    database = utils.get_example_database()
    conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)

    def custom_password_store(uri):
        return "password_store_test"

    models.custom_password_store = custom_password_store
    try:
        conn = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
        # only meaningful when the URI actually carries a password (sqlite doesn't)
        if conn_pre.password:
            assert conn.password == "password_store_test"
            assert conn.password != conn_pre.password
    finally:
        # Disable for password store for later tests
        models.custom_password_store = None
def upload_csv(
    self, filename: str, table_name: str, extra: Optional[Dict[str, str]] = None
):
    """POST ``filename`` to the csv-to-database upload form and return the
    response body.

    :param filename: path of the CSV file to upload
    :param table_name: target table name for the upload form
    :param extra: optional form-field overrides (e.g. ``if_exists``, ``schema``)

    Fix over the original: the file handle is opened in a ``with`` block so
    it is always closed after the request instead of being leaked.
    """
    with open(filename, "rb") as csv_file:
        form_data = {
            "csv_file": csv_file,
            "sep": ",",
            "name": table_name,
            "con": utils.get_example_database().id,
            "if_exists": "fail",
            "index_label": "test_label",
            "mangle_dupe_cols": False,
        }
        if extra:
            form_data.update(extra)
        return self.get_resp("/csvtodatabaseview/form", data=form_data)
def test_virtual_table_explore_visibility(self):
    """``allows_virtual_table_explore`` defaults to True, follows the extra
    flag when set, and stays True for unrecognized (trash) values."""
    database = utils.get_example_database()

    # test that default visibility it set to True
    self.assertEqual(database.allows_virtual_table_explore, True)

    # explicit False disables, explicit True enables, garbage is ignored
    for flag, expected in ((False, False), (True, True), ("trash value", True)):
        extra = database.get_extra()
        extra["allows_virtual_table_explore"] = flag
        database.extra = json.dumps(extra)
        self.assertEqual(database.allows_virtual_table_explore, expected)
def test_select_star(self):
    """select_star on birth_names should mention the gender column."""
    self.login(username="******")
    examples_db = utils.get_example_database()
    url = f"/superset/select_star/{examples_db.id}/birth_names"
    self.assertIn("gender", self.get_resp(url))
def test_process_template(self):
    """The datetime() macro renders to an ISO-format timestamp."""
    maindb = utils.get_example_database()
    processor = jinja_context.get_template_processor(database=maindb)
    rendered = processor.process_template(
        "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
    )
    self.assertEqual("SELECT '2017-01-01T00:00:00'", rendered)
def test_template_kwarg(self):
    """Extra kwargs to process_template are exposed as Jinja variables."""
    maindb = utils.get_example_database()
    processor = jinja_context.get_template_processor(database=maindb)
    self.assertEqual("bar", processor.process_template("{{ foo }}", foo="bar"))
def test_extra_table_metadata(self):
    """Smoke-test the extra_table_metadata endpoint (superset schema)."""
    self.login("admin")
    example_db_id = utils.get_example_database().id
    url = f"/superset/extra_table_metadata/{example_db_id}/birth_names/superset/"
    self.get_json_resp(url)