def create_alert_slack_chart_grace():
    """Yield an alert-type Slack report schedule sitting in GRACE state.

    The schedule is backed by the first available chart and carries one
    SUCCESS execution log dated 2020-01-01; everything is cleaned up after
    the test consumes the fixture.
    """
    with app.app_context():
        first_chart = db.session.query(Slice).first()
        schedule = create_report_notification(
            slack_channel="slack_channel",
            chart=first_chart,
            report_type=ReportScheduleType.ALERT,
        )
        schedule.last_state = ReportState.GRACE
        schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0)

        # One successful run, all timestamps pinned to the evaluation time.
        eval_dttm = schedule.last_eval_dttm
        execution_log = ReportExecutionLog(
            report_schedule=schedule,
            state=ReportState.SUCCESS,
            start_dttm=eval_dttm,
            end_dttm=eval_dttm,
            scheduled_dttm=eval_dttm,
        )
        db.session.add(execution_log)
        db.session.commit()

        yield schedule

        cleanup_report_schedule(schedule)
def setup_csv_upload():
    """Configure the CSV-upload database for the test session.

    Enables CSV upload and points exploration at the example database;
    after the test, drops every table the upload tests may have created
    and removes the upload database itself.
    """
    with app.app_context():
        login(test_client, username="******")

        database = utils.get_or_create_db(
            CSV_UPLOAD_DATABASE, app.config["SQLALCHEMY_EXAMPLES_URI"]
        )
        extra_config = database.get_extra()
        extra_config["explore_database_id"] = utils.get_example_database().id
        database.extra = json.dumps(extra_config)
        database.allow_csv_upload = True
        db.session.commit()

        yield

        # Teardown: best-effort drop of every table the upload tests create.
        database = get_upload_db()
        sqla_engine = database.get_sqla_engine()
        for table_name in (
            EXCEL_UPLOAD_TABLE,
            CSV_UPLOAD_TABLE,
            CSV_UPLOAD_TABLE_W_SCHEMA,
            CSV_UPLOAD_TABLE_W_EXPLORE,
        ):
            sqla_engine.execute(f"DROP TABLE IF EXISTS {table_name}")
        db.session.delete(database)
        db.session.commit()
def create_old_role(pvm_map: PvmMigrationMapType, external_pvms):
    """Yield a "Dummy Role" built from old-style and external permissions.

    Old permissions (the keys of ``pvm_map``) are created fresh; external
    ones are looked up. On teardown the role is stripped, every old and
    new permission from the map is deleted, and the role itself removed.
    """
    with app.app_context():
        permissions = [
            security_manager.add_permission_view_menu(old.permission, old.view)
            for old in pvm_map
        ]
        permissions.extend(
            security_manager.find_permission_view_menu(ext.permission, ext.view)
            for ext in external_pvms
        )

        role = Role(name="Dummy Role", permissions=permissions)
        db.session.add(role)
        db.session.commit()

        yield role

        # Teardown: re-fetch the role in case the test detached/altered it.
        # NOTE(review): one_or_none() could return None if a test deleted the
        # role itself — presumed not to happen here; confirm with callers.
        role = (
            db.session.query(Role).filter(Role.name == "Dummy Role").one_or_none()
        )
        role.permissions = []
        db.session.merge(role)
        for old_pvm, new_pvms in pvm_map.items():
            security_manager.del_permission_view_menu(old_pvm.permission, old_pvm.view)
            for new_pvm in new_pvms:
                security_manager.del_permission_view_menu(
                    new_pvm.permission, new_pvm.view
                )

        db.session.delete(role)
        db.session.commit()
Example #4
0
def logged_in_admin():
    """Run the test inside an app context with the admin user logged in.

    Logs the admin out again once the test body has finished.
    """
    ctx = app.app_context()
    with ctx:
        login(test_client, username="******")
        yield
        # Teardown: end the admin session.
        test_client.get("/logout/", follow_redirects=True)
Example #5
0
def create_no_alert_email_chart(request):
    """Parametrized fixture: email alert schedules whose condition never fires.

    Each ``request.param`` key selects a (sql, validator_type,
    validator_config_json) combination that evaluates to "no alert"
    (threshold not met, or NOT_NULL on an empty/NULL result).
    """
    # (sql, validator type, validator config JSON) per parametrization key.
    _OP = ReportScheduleValidatorType.OPERATOR
    _NOT_NULL = ReportScheduleValidatorType.NOT_NULL
    alert_specs = {
        "alert1": ("SELECT 10 as metric", _OP, '{"op": "<", "threshold": 10}'),
        "alert2": ("SELECT 10 as metric", _OP, '{"op": ">=", "threshold": 11}'),
        "alert3": ("SELECT 10 as metric", _OP, '{"op": "<", "threshold": 10}'),
        "alert4": ("SELECT 10 as metric", _OP, '{"op": "<=", "threshold": 9}'),
        "alert5": ("SELECT 10 as metric", _OP, '{"op": "!=", "threshold": 10}'),
        "alert6": ("SELECT first from test_table where 1=0", _NOT_NULL, "{}"),
        "alert7": (
            "SELECT first from test_table where 1=0",
            _OP,
            '{"op": ">", "threshold": 0}',
        ),
        "alert8": ("SELECT Null as metric", _NOT_NULL, "{}"),
        "alert9": ("SELECT Null as metric", _OP, '{"op": ">", "threshold": 0}'),
    }
    param_config = {
        key: {
            "sql": sql,
            "validator_type": validator_type,
            "validator_config_json": validator_config_json,
        }
        for key, (sql, validator_type, validator_config_json) in alert_specs.items()
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):
            config = param_config[request.param]
            report_schedule = create_report_notification(
                email_target="*****@*****.**",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=config["sql"],
                validator_type=config["validator_type"],
                validator_config_json=config["validator_config_json"],
            )
            yield report_schedule

            cleanup_report_schedule(report_schedule)
 def setUpClass(cls):
     """Remove any leftover Query rows so this class starts from a clean slate."""
     with app.app_context():
         session = db.session
         session.query(Query).delete()
         session.commit()
Example #7
0
 def test_time_grain_denylist(self):
     """A denylisted grain must be absent from SQLite's time grain expressions."""
     with app.app_context():
         app.config["TIME_GRAIN_DENYLIST"] = ["PT1M"]
         expressions = SqliteEngineSpec.get_time_grain_expressions()
         self.assertNotIn("PT1M", expressions)
def get_table_by_name(name: str) -> SqlaTable:
    """Return the SqlaTable whose table_name equals *name*.

    Raises if there is not exactly one match (``Query.one()`` semantics).
    """
    with app.app_context():
        query = db.session.query(SqlaTable).filter_by(table_name=name)
        return query.one()
Example #9
0
def app_context():
    """Run the test body inside a Flask application context."""
    ctx = app.app_context()
    with ctx:
        yield
Example #10
0
def teardown_module():
    """Module teardown: purge all alert logs and alerts created by these tests.

    AlertLog rows are deleted first so foreign keys to Alert are not
    violated. The final commit is required: SQLAlchemy bulk deletes only
    take effect once the session transaction is committed (the sibling
    ``setUpClass`` teardown above commits after its bulk delete for the
    same reason).
    """
    with app.app_context():
        db.session.query(AlertLog).delete()
        db.session.query(Alert).delete()
        db.session.commit()
Example #11
0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
from datetime import datetime
from unittest import mock

import pytest
import pandas as pd
from sqlalchemy.sql import select
from tests.test_app import app

# NOTE(review): imported inside an application context — presumably the hive
# engine-spec module touches the Flask app/config at import time; confirm.
with app.app_context():
    from superset.db_engine_specs.hive import HiveEngineSpec, upload_to_s3
from superset.exceptions import SupersetException
from superset.sql_parse import Table, ParsedQuery


def test_0_progress():
    """Log lines containing only compile/parse PERFLOG entries report 0 progress."""
    log_lines = """
        17/02/07 18:26:27 INFO log.PerfLogger: <PERFLOG method=compile from=org.apache.hadoop.hive.ql.Driver>
        17/02/07 18:26:27 INFO log.PerfLogger: <PERFLOG method=parse from=org.apache.hadoop.hive.ql.Driver>
    """.split("\n")
    assert HiveEngineSpec.progress(log_lines) == 0

Example #12
0
def load_birth_names_dashboard_with_slices_module_scope():
    """Load the birth-names dashboard data; remove dashboard and slices after."""
    dashboard_id, slice_ids = _load_data()
    yield
    with app.app_context():
        _cleanup(dashboard_id, slice_ids)
Example #13
0
 def test_time_grain_blacklist(self):
     """A blacklisted grain must be absent from SQLite's time grain functions."""
     with app.app_context():
         app.config["TIME_GRAIN_BLACKLIST"] = ["PT1M"]
         grains = SqliteEngineSpec.get_time_grain_functions()
         self.assertNotIn("PT1M", grains)
Example #14
0
def load_world_bank_dashboard_with_slices():
    """Load the world-bank dashboard data; remove dashboard and slices after."""
    dashboard_id, slice_ids = _load_data()
    yield
    with app.app_context():
        _cleanup(dashboard_id, slice_ids)