Example #1
0
def setup_sample_data() -> Any:
    """Session-level fixture: load sample users and CSS templates, yield to
    the tests, then drop any SQLAlchemy-declared sample tables."""
    # TODO(john-bodley): Determine a cleaner way of setting up the sample data without
    # relying on `tests.integration_tests.test_app.app` leveraging an `app` fixture
    # which is purposely scoped to the function level to ensure tests remain idempotent.
    with app.app_context():
        setup_presto_if_needed()

        from superset.cli.test import load_test_users_run

        load_test_users_run()

        from superset.examples.css_templates import load_css_templates

        load_css_templates()

    yield

    with app.app_context():
        engine = get_example_database().get_sqla_engine()

        # Drop SQLAlchemy-declared tables.
        db.session.commit()
        from sqlalchemy.ext import declarative

        sqla_base = declarative.declarative_base()
        # NOTE(review): a freshly created declarative base has empty metadata, so
        # this loop is a no-op unless models register on it — verify intent.
        # Iterate in REVERSE dependency order so foreign-key constraints are not
        # violated when dropping, and drop against the example-database engine.
        # The original called `table.__table__.drop()`, but `sorted_tables`
        # already yields `Table` objects (which have no `__table__` attribute)
        # and bound no engine — it would raise if the loop ever executed.
        for table in reversed(sqla_base.metadata.sorted_tables):
            table.drop(engine, checkfirst=True)
        db.session.commit()
def load_birth_names_data():
    """Create the ``birth_names`` example table, then drop it on teardown."""
    with app.app_context():
        example_db = get_example_database()
        frame = _get_dataframe(example_db)
        # Presto cannot ingest a native DateTime here; fall back to a string.
        column_types = {
            "ds": String(255) if example_db.backend == "presto" else DateTime,
            "gender": String(16),
            "state": String(10),
            "name": String(255),
        }

        frame.to_sql(
            BIRTH_NAMES_TBL_NAME,
            example_db.get_sqla_engine(),
            if_exists="replace",
            chunksize=500,
            dtype=column_types,
            index=False,
            method="multi",
            schema=get_example_default_schema(),
        )
    yield
    with app.app_context():
        get_example_database().get_sqla_engine().execute(
            "DROP TABLE IF EXISTS birth_names"
        )
Example #3
0
def load_unicode_dashboard_with_position(load_unicode_data):
    """Yield with a unicode dashboard (empty position JSON) created, then
    remove the dashboard and its slice."""
    dashboard_slice_name = "Unicode Cloud"
    empty_position = "{}"
    with app.app_context():
        dashboard = _create_unicode_dashboard(dashboard_slice_name, empty_position)
        yield
        _cleanup(dashboard, dashboard_slice_name)
Example #4
0
def test_df_to_sql_if_exists_replace_with_schema(mock_upload_to_s3, mock_g):
    """``df_to_sql`` with ``if_exists="replace"`` and a schema must drop the
    schema-qualified table before uploading."""
    config = app.config.copy()
    # BUG FIX: the original used `:` here — a bare annotation statement that
    # never actually set the config key; `=` performs the intended assignment.
    app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"] = lambda *args: ""
    mock_upload_to_s3.return_value = "mock-location"
    mock_g.user = True
    mock_database = mock.MagicMock()
    mock_database.get_df.return_value.empty = False
    mock_execute = mock.MagicMock(return_value=True)
    mock_database.get_sqla_engine.return_value.execute = mock_execute
    table_name = "foobar"
    schema = "schema"

    with app.app_context():
        HiveEngineSpec.df_to_sql(
            mock_database,
            Table(table=table_name, schema=schema),
            pd.DataFrame(),
            {
                "if_exists": "replace",
                "header": 1,
                "na_values": "mock",
                "sep": "mock"
            },
        )

    mock_execute.assert_any_call(f"DROP TABLE IF EXISTS {schema}.{table_name}")
    # Restore the original config so other tests are unaffected.
    app.config = config
Example #5
0
def create_annotation_layers():
    """
    Create ANNOTATION_LAYERS_COUNT - 1 empty annotation layers plus a final
    layer holding ANNOTATIONS_COUNT annotations; delete everything afterwards.
    :return: the created layers (yielded)
    """
    with app.app_context():
        layers = [
            _insert_annotation_layer(name=f"name{idx}", descr=f"descr{idx}")
            for idx in range(ANNOTATION_LAYERS_COUNT - 1)
        ]
        populated_layer = _insert_annotation_layer("layer_with_annotations")
        layers.append(populated_layer)
        annotations = [
            _insert_annotation(
                populated_layer,
                short_descr=f"short_descr{idx}",
                long_descr=f"long_descr{idx}",
                start_dttm=get_start_dttm(idx),
                end_dttm=get_end_dttm(idx),
            )
            for idx in range(ANNOTATIONS_COUNT)
        ]
        yield layers

        # rollback changes
        for layer in layers:
            db.session.delete(layer)
        for annotation in annotations:
            db.session.delete(annotation)
        db.session.commit()
Example #6
0
def _load_data():
    """Load the birth-names table, build its slices and dashboard, and return
    ``(dash_id, slice_ids)`` so callers can delete them later."""
    table_name = "birth_names"

    with app.app_context():
        example_db = get_example_database()
        frame = _get_dataframe(example_db)
        # Presto cannot ingest a native DateTime here; fall back to a string.
        column_types = {
            "ds": String(255) if example_db.backend == "presto" else DateTime,
            "gender": String(16),
            "state": String(10),
            "name": String(255),
        }
        table = _create_table(
            df=frame,
            table_name=table_name,
            database=example_db,
            dtype=column_types,
            fetch_values_predicate="123 = 123",
        )

        from superset.examples.birth_names import create_dashboard, create_slices

        slices, _ = create_slices(table, admin_owner=False)
        dashboard = create_dashboard(slices)
        return dashboard.id, [slc.id for slc in slices]
Example #7
0
    def load_dashboard(self):
        """Create one published and one hidden dashboard around the public
        ``energy_usage`` table; remove both and revoke access afterwards."""
        with app.app_context():
            energy_table = (
                db.session.query(SqlaTable)
                .filter_by(table_name="energy_usage")
                .one()
            )
            # get a slice from the allowed table
            sankey_slice = (
                db.session.query(Slice)
                .filter_by(slice_name="Energy Sankey")
                .one()
            )

            self.grant_public_access_to_table(energy_table)

            pytest.hidden_dash_slug = f"hidden_dash_{random()}"
            pytest.published_dash_slug = f"published_dash_{random()}"

            # Create a published and hidden dashboard and add them to the database
            published_dash = Dashboard()
            published_dash.dashboard_title = "Published Dashboard"
            published_dash.slug = pytest.published_dash_slug
            published_dash.slices = [sankey_slice]
            published_dash.published = True

            hidden_dash = Dashboard()
            hidden_dash.dashboard_title = "Hidden Dashboard"
            hidden_dash.slug = pytest.hidden_dash_slug
            hidden_dash.slices = [sankey_slice]
            hidden_dash.published = False

            db.session.merge(published_dash)
            db.session.merge(hidden_dash)
            yield db.session.commit()

            self.revoke_public_access_to_table(energy_table)
            db.session.delete(published_dash)
            db.session.delete(hidden_dash)
            db.session.commit()
Example #8
0
def create_old_role(pvm_map: PvmMigrationMapType, external_pvms):
    """Create a "Dummy Role" holding the legacy pvms plus any external pvms,
    yield it, then tear the role and legacy permissions back down."""
    with app.app_context():
        permissions = []
        for legacy_pvm in pvm_map:
            permissions.append(
                security_manager.add_permission_view_menu(
                    legacy_pvm.permission, legacy_pvm.view
                )
            )
        for external_pvm in external_pvms:
            permissions.append(
                security_manager.find_permission_view_menu(
                    external_pvm.permission, external_pvm.view
                )
            )

        new_role = Role(name="Dummy Role", permissions=permissions)
        db.session.add(new_role)
        db.session.commit()

        yield new_role

        new_role = (
            db.session.query(Role).filter(Role.name == "Dummy Role").one_or_none()
        )
        new_role.permissions = []
        db.session.merge(new_role)
        for legacy_pvm, migrated_pvms in pvm_map.items():
            security_manager.del_permission_view_menu(
                legacy_pvm.permission, legacy_pvm.view
            )
            for migrated_pvm in migrated_pvms:
                security_manager.del_permission_view_menu(
                    migrated_pvm.permission, migrated_pvm.view
                )

        db.session.delete(new_role)
        db.session.commit()
def load_energy_table_with_slice():
    """Create the ``energy_usage`` table and its slice; clean up afterwards."""
    energy_df = _get_dataframe()
    with app.app_context():
        _create_energy_table(energy_df, "energy_usage")
        yield
        _cleanup()
Example #10
0
    def load_energy_charts(self):
        """Insert five charts against the ``energy_usage`` table and delete
        them all once the test using this fixture finishes."""
        with app.app_context():
            admin = self.get_user("admin")
            energy_table = (
                db.session.query(SqlaTable)
                .filter_by(table_name="energy_usage")
                .one_or_none()
            )
            # Fall back to id 1 if the example table has not been loaded.
            table_id = energy_table.id if energy_table else 1
            charts = [
                self.insert_chart("foo_a", [admin.id], table_id,
                                  description="ZY_bar"),
                self.insert_chart("zy_foo", [admin.id], table_id,
                                  description="desc1"),
                self.insert_chart("foo_b", [admin.id], table_id,
                                  description="desc1zy_"),
                self.insert_chart("foo_c", [admin.id], table_id,
                                  viz_type="viz_zy_"),
                self.insert_chart("bar", [admin.id], table_id,
                                  description="foo"),
            ]

            yield
            # rollback changes
            for chart in charts:
                db.session.delete(chart)
            db.session.commit()
Example #11
0
        def __call__(self) -> Database:
            """Return the example database, resolving and caching it on first use."""
            with app.app_context():
                if self._db is not None:
                    return self._db
                self._db = get_example_database()
                self._load_lazy_data_to_decouple_from_session()
                return self._db
Example #12
0
def setup_csv_upload():
    """Provision a database for the file-upload tests; drop every uploaded
    table and the database itself on teardown."""
    with app.app_context():
        login(test_client, username="******")

        upload_db = superset.utils.database.get_or_create_db(
            CSV_UPLOAD_DATABASE, app.config["SQLALCHEMY_EXAMPLES_URI"]
        )
        extra = upload_db.get_extra()
        extra["explore_database_id"] = (
            superset.utils.database.get_example_database().id
        )
        upload_db.extra = json.dumps(extra)
        upload_db.allow_file_upload = True
        db.session.commit()

        yield

        upload_db = get_upload_db()
        engine = upload_db.get_sqla_engine()
        for upload_table in (
            EXCEL_UPLOAD_TABLE,
            CSV_UPLOAD_TABLE,
            PARQUET_UPLOAD_TABLE,
            CSV_UPLOAD_TABLE_W_SCHEMA,
            CSV_UPLOAD_TABLE_W_EXPLORE,
        ):
            engine.execute(f"DROP TABLE IF EXISTS {upload_table}")
        db.session.delete(upload_db)
        db.session.commit()
Example #13
0
def dashboard() -> Generator[Dashboard, None, None]:
    """Yield a freshly created test dashboard (with its own database,
    datasource and slice), then delete everything it created.

    NOTE(review): exceptions during setup and teardown are only printed, so a
    partially built fixture may leave rows behind — presumably deliberate
    best-effort cleanup; confirm before tightening.
    """
    dashboard: Dashboard
    slice_: Slice
    datasource: SqlaTable
    database: Database
    session: Session
    try:
        with app.app_context() as ctx:
            dashboard_owner_user = security_manager.find_user(
                DASHBOARD_OWNER_USERNAME)
            database = create_database("test_database_filter_sets")
            datasource = create_datasource_table(name="test_datasource",
                                                 database=database,
                                                 owners=[dashboard_owner_user])
            slice_ = create_slice(datasource=datasource,
                                  name="test_slice",
                                  owners=[dashboard_owner_user])
            dashboard = create_dashboard(
                dashboard_title="test_dashboard",
                published=True,
                slices=[slice_],
                owners=[dashboard_owner_user],
            )
            session = ctx.app.appbuilder.get_session
            session.add(dashboard)
            session.commit()
        yield dashboard
    except Exception as ex:
        print(str(ex))
    finally:
        with app.app_context() as ctx:
            session = ctx.app.appbuilder.get_session
            try:
                # Detach owner relationships first so the rows can be deleted
                # without association-table conflicts, then delete the objects.
                dashboard.owners = []
                slice_.owners = []
                datasource.owners = []
                session.merge(dashboard)
                session.merge(slice_)
                session.merge(datasource)
                session.commit()
                session.delete(dashboard)
                session.delete(slice_)
                session.delete(datasource)
                session.delete(database)
                session.commit()
            except Exception as ex:
                print(str(ex))
Example #14
0
def test_where_latest_partition_no_columns_no_values(mock_method):
    """With no partition columns or values, where_latest_partition is None."""
    mock_method.return_value = ("01-01-19", None)
    database = mock.Mock()
    with app.app_context():
        outcome = HiveEngineSpec.where_latest_partition(
            "test_table", "test_schema", database, select()
        )
    assert outcome is None
Example #15
0
def load_unicode_data():
    """Create the ``unicode_test`` example table; drop it on teardown."""
    with app.app_context():
        frame = _get_dataframe()
        frame.to_sql(
            UNICODE_TBL_NAME,
            get_example_database().get_sqla_engine(),
            if_exists="replace",
            chunksize=500,
            dtype={"phrase": String(500)},
            index=False,
            method="multi",
            schema=get_example_default_schema(),
        )

    yield
    with app.app_context():
        get_example_database().get_sqla_engine().execute(
            "DROP TABLE IF EXISTS unicode_test"
        )
Example #16
0
def test_upload_to_s3_no_bucket_path():
    """upload_to_s3 must raise when no upload bucket is configured."""
    expected = "No upload bucket specified. You can specify one in the config file."
    with app.app_context(), pytest.raises(Exception, match=expected):
        upload_to_s3("filename", "prefix", Table("table"))
def create_dashboard_for_loaded_data():
    """Build the World Bank dashboard and slices; return ``(dash_id,
    slice_ids)`` so callers can delete them later."""
    with app.app_context():
        table = create_table_metadata(WB_HEALTH_POPULATION, get_example_database())
        slices = _create_world_bank_slices(table)
        dashboard = _create_world_bank_dashboard(table, slices)
        return dashboard.id, [slc.id for slc in slices]
Example #18
0
 def setUpClass(cls):
     """Register every role this test class relies on."""
     with app.app_context():
         for role_name in (
             "override_me",
             TEST_ROLE_1,
             TEST_ROLE_2,
             DB_ACCESS_ROLE,
             SCHEMA_ACCESS_ROLE,
         ):
             security_manager.add_role(role_name)
         db.session.commit()
Example #19
0
 def delete_imports(cls):
     """Remove every SqlaTable created by import tests (identified by the
     DBREF marker in its params)."""
     with app.app_context():
         # Imported data clean up
         session = db.session
         imported_tables = [
             table for table in session.query(SqlaTable)
             if DBREF in table.params_dict
         ]
         for table in imported_tables:
             session.delete(table)
         session.commit()
Example #20
0
 def tearDownClass(cls):
     """Drop the roles created in setUpClass."""
     with app.app_context():
         for role_name in (
             "override_me",
             TEST_ROLE_1,
             TEST_ROLE_2,
             DB_ACCESS_ROLE,
             SCHEMA_ACCESS_ROLE,
         ):
             db.session.delete(security_manager.find_role(role_name))
         db.session.commit()
Example #21
0
def test_time_grain_denylist():
    """A grain listed in TIME_GRAIN_DENYLIST must not be exposed by the spec."""
    config = app.config.copy()
    app.config["TIME_GRAIN_DENYLIST"] = ["PT1M"]

    with app.app_context():
        time_grain_functions = SqliteEngineSpec.get_time_grain_expressions()
        # `not in` is the idiomatic (PEP 8) form of `not x in y`.
        assert "PT1M" not in time_grain_functions

    # Restore the original config so other tests are unaffected.
    app.config = config
Example #22
0
def public_role_like_test_role():
    """Sync the Public role to mirror TestRole for the duration of the test;
    strip its permissions again afterwards."""
    with app.app_context():
        app.config["PUBLIC_ROLE_LIKE"] = "TestRole"
        security_manager.sync_role_definitions()

        yield

        public_role = security_manager.get_public_role()
        public_role.permissions = []
        db.session.commit()
Example #23
0
def load_energy_table_data():
    """Create the ``energy_usage`` example table; drop it on teardown."""
    with app.app_context():
        example_db = get_example_database()
        _get_dataframe().to_sql(
            ENERGY_USAGE_TBL_NAME,
            example_db.get_sqla_engine(),
            if_exists="replace",
            chunksize=500,
            index=False,
            dtype={"source": String(255), "target": String(255), "value": Float()},
            method="multi",
            schema=get_example_default_schema(),
        )
    yield
    with app.app_context():
        get_example_database().get_sqla_engine().execute(
            "DROP TABLE IF EXISTS energy_usage"
        )
def load_unicode_dashboard_with_position():
    """Create a unicode dashboard with an empty position JSON, yield, then
    remove the dashboard and its slice."""
    target_table = "unicode_test"
    dashboard_slice_name = "Unicode Cloud"
    frame = _get_dataframe()
    empty_position = "{}"
    with app.app_context():
        dashboard = _create_unicode_dashboard(
            frame, target_table, dashboard_slice_name, empty_position
        )
        yield
        _cleanup(dashboard, dashboard_slice_name)
Example #25
0
    def add_schedule_slice_and_dashboard(self):
        """Attach email schedules to a sample slice and dashboard, yield, then
        delete both schedules."""
        with app.app_context():
            self.common_data = dict(
                active=True,
                crontab="* * * * *",
                recipients=self.RECIPIENTS,
                deliver_as_group=True,
                delivery_type=EmailDeliveryType.inline,
            )
            # Pick up a sample slice and dashboard
            sample_slice = (
                db.session.query(Slice).filter_by(slice_name="Region Filter").one()
            )
            sample_dashboard = (
                db.session.query(Dashboard)
                .filter_by(dashboard_title="World Bank's Data")
                .one()
            )

            dashboard_schedule = DashboardEmailSchedule(**self.common_data)
            dashboard_schedule.dashboard_id = sample_dashboard.id
            dashboard_schedule.user_id = 1
            db.session.add(dashboard_schedule)

            slice_schedule = SliceEmailSchedule(**self.common_data)
            slice_schedule.slice_id = sample_slice.id
            slice_schedule.user_id = 1
            slice_schedule.email_format = SliceEmailReportFormat.data
            slice_schedule.slack_channel = "#test_channel"

            db.session.add(slice_schedule)
            db.session.commit()

            # Remember only the ids; the ORM objects die with the app context.
            self.slice_schedule = slice_schedule.id
            self.dashboard_schedule = dashboard_schedule.id

        yield

        with app.app_context():
            db.session.query(SliceEmailSchedule).filter_by(
                id=self.slice_schedule
            ).delete()
            db.session.query(DashboardEmailSchedule).filter_by(
                id=self.dashboard_schedule
            ).delete()
            db.session.commit()
Example #26
0
def test_upload_to_s3_success(client):
    """With a bucket configured, upload_to_s3 returns the s3a:// location."""
    config = app.config.copy()
    app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"] = "bucket"
    client.return_value.upload_file.return_value = True

    with app.app_context():
        location = upload_to_s3("filename", "prefix", Table("table"))
        # Plain literal: the original used an f-string with no placeholders
        # (flake8 F541), which is misleading and serves no purpose.
        assert location == "s3a://bucket/prefix/table"

    # Restore the original config so other tests are unaffected.
    app.config = config
Example #27
0
def test_cron_schedule_window(current_dttm: str, cron: str,
                              excepted: List[FakeDatetime]):
    """
    Reports scheduler: Test cron schedule window
    """
    with app.app_context(), freeze_time(current_dttm):
        assert list(cron_schedule_window(cron)) == excepted
Example #28
0
def test_where_latest_partition_super_method_exception(mock_method):
    """If the base implementation raises, where_latest_partition returns None."""
    mock_method.side_effect = Exception()
    database = mock.Mock()
    partition_columns = [{"name": "ds"}, {"name": "hour"}]
    with app.app_context():
        outcome = HiveEngineSpec.where_latest_partition(
            "test_table", "test_schema", database, select(), partition_columns
        )
    assert outcome is None
    mock_method.assert_called()
Example #29
0
def tabbed_dashboard():
    """Yield a dashboard whose layout is a single TABS container holding two
    empty tabs, built from a hand-written position JSON."""
    # Layout tree: ROOT -> GRID -> TABS-IpViLohnyP -> (Tab 1, Tab 2).
    position_json = {
        "DASHBOARD_VERSION_KEY": "v2",
        "GRID_ID": {
            "children": ["TABS-IpViLohnyP"],
            "id": "GRID_ID",
            "parents": ["ROOT_ID"],
            "type": "GRID",
        },
        "HEADER_ID": {
            "id": "HEADER_ID",
            "meta": {
                "text": "tabbed dashboard"
            },
            "type": "HEADER",
        },
        "ROOT_ID": {
            "children": ["GRID_ID"],
            "id": "ROOT_ID",
            "type": "ROOT"
        },
        "TAB-j53G4gtKGF": {
            "children": [],
            "id": "TAB-j53G4gtKGF",
            "meta": {
                "defaultText": "Tab title",
                "placeholder": "Tab title",
                "text": "Tab 1",
            },
            "parents": ["ROOT_ID", "GRID_ID", "TABS-IpViLohnyP"],
            "type": "TAB",
        },
        "TAB-nerWR09Ju": {
            "children": [],
            "id": "TAB-nerWR09Ju",
            "meta": {
                "defaultText": "Tab title",
                "placeholder": "Tab title",
                "text": "Tab 2",
            },
            "parents": ["ROOT_ID", "GRID_ID", "TABS-IpViLohnyP"],
            "type": "TAB",
        },
        "TABS-IpViLohnyP": {
            "children": ["TAB-j53G4gtKGF", "TAB-nerWR09Ju"],
            "id": "TABS-IpViLohnyP",
            "meta": {},
            "parents": ["ROOT_ID", "GRID_ID"],
            "type": "TABS",
        },
    }
    # NOTE(review): no teardown after the yield — the dashboard appears to be
    # left in place; confirm callers clean it up.
    with app.app_context():
        dash = create_dashboard("tabbed-dash-test", "Tabbed Dash Test",
                                json.dumps(position_json), [])
    yield dash
Example #30
0
def test_where_latest_partition(mock_method):
    """where_latest_partition should emit an equality filter for every
    partition column using the latest partition values."""
    mock_method.return_value = ("01-01-19", 1)
    database = mock.Mock()
    database.get_indexes = mock.Mock(return_value=[{"column_names": ["ds", "hour"]}])
    database.get_extra = mock.Mock(return_value={})
    database.get_df = mock.Mock()
    partition_columns = [{"name": "ds"}, {"name": "hour"}]
    with app.app_context():
        outcome = HiveEngineSpec.where_latest_partition(
            "test_table", "test_schema", database, select(), partition_columns
        )
    compiled = str(outcome.compile(compile_kwargs={"literal_binds": True}))
    assert compiled == "SELECT  \nWHERE ds = '01-01-19' AND hour = 1"