Example #1
    def __init__(self) -> None:
        if is_feature_enabled("THUMBNAILS"):
            self.include_route_methods = self.include_route_methods | {"thumbnail"}
        super().__init__()
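
    # For context, a minimal sketch of where such flags come from: feature
    # flags are driven by the FEATURE_FLAGS dict in superset_config.py.
    # The values below are illustrative, not Superset defaults:
    #
    #     FEATURE_FLAGS = {
    #         "THUMBNAILS": True,
    #         "SIP_38_VIZ_REARCHITECTURE": False,
    #     }
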
    def get_sqla_query(  # sqla
        self,
        metrics: List[Metric],
        granularity: str,
        from_dttm: Optional[datetime],
        to_dttm: Optional[datetime],
        columns: Optional[List[str]] = None,
        groupby: Optional[List[str]] = None,
        filter: Optional[List[Dict[str, Any]]] = None,
        is_timeseries: bool = True,
        timeseries_limit: int = 15,
        timeseries_limit_metric: Optional[Metric] = None,
        row_limit: Optional[int] = None,
        row_offset: Optional[int] = None,
        inner_from_dttm: Optional[datetime] = None,
        inner_to_dttm: Optional[datetime] = None,
        orderby: Optional[List[Tuple[ColumnElement, bool]]] = None,
        extras: Optional[Dict[str, Any]] = None,
        order_desc: bool = True,
    ) -> SqlaQuery:
        """Querying any sqla table from this common interface"""
        template_kwargs = {
            "from_dttm": from_dttm,
            "groupby": groupby,
            "metrics": metrics,
            "row_limit": row_limit,
            "row_offset": row_offset,
            "to_dttm": to_dttm,
            "filter": filter,
            "columns": {col.column_name: col
                        for col in self.columns},
        }
        is_sip_38 = is_feature_enabled("SIP_38_VIZ_REARCHITECTURE")
        template_kwargs.update(self.template_params_dict)
        extra_cache_keys: List[Any] = []
        template_kwargs["extra_cache_keys"] = extra_cache_keys
        template_processor = self.get_template_processor(**template_kwargs)
        db_engine_spec = self.database.db_engine_spec
        prequeries: List[str] = []

        orderby = orderby or []

        # For backward compatibility
        if granularity not in self.dttm_cols:
            granularity = self.main_dttm_col

        # Database spec supports join-free timeslot grouping
        time_groupby_inline = db_engine_spec.time_groupby_inline

        cols: Dict[str, Column] = {col.column_name: col for col in self.columns}
        metrics_dict: Dict[str, SqlMetric] = {m.metric_name: m for m in self.metrics}

        if not granularity and is_timeseries:
            raise Exception(
                _("Datetime column not provided as part of table configuration "
                  "and is required by this type of chart"))
        if (not metrics and not columns
                and (is_sip_38 or (not is_sip_38 and not groupby))):
            raise Exception(_("Empty query?"))
        metrics_exprs: List[ColumnElement] = []
        for m in metrics:
            if utils.is_adhoc_metric(m):
                assert isinstance(m, dict)
                metrics_exprs.append(self.adhoc_metric_to_sqla(m, cols))
            elif isinstance(m, str) and m in metrics_dict:
                metrics_exprs.append(metrics_dict[m].get_sqla_col())
            else:
                raise Exception(
                    _("Metric '%(metric)s' does not exist", metric=m))
        if metrics_exprs:
            main_metric_expr = metrics_exprs[0]
        else:
            main_metric_expr, label = literal_column("COUNT(*)"), "ccount"
            main_metric_expr = self.make_sqla_column_compatible(
                main_metric_expr, label)

        select_exprs: List[Column] = []
        groupby_exprs_sans_timestamp = OrderedDict()

        if (is_sip_38 and metrics and columns) or (not is_sip_38 and groupby):
            # dedup columns while preserving order
            columns_ = columns if is_sip_38 else groupby
            assert columns_
            groupby = list(dict.fromkeys(columns_))
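            # e.g. ["a", "b", "a"] -> ["a", "b"]; dicts preserve insertion order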

            select_exprs = []
            for s in groupby:
                if s in cols:
                    outer = cols[s].get_sqla_col()
                else:
                    outer = literal_column(f"({s})")
                    outer = self.make_sqla_column_compatible(outer, s)

                groupby_exprs_sans_timestamp[outer.name] = outer
                select_exprs.append(outer)
        elif columns:
            for s in columns:
                select_exprs.append(
                    cols[s].get_sqla_col() if s in cols
                    else self.make_sqla_column_compatible(literal_column(s)))
            metrics_exprs = []

        assert extras is not None
        time_range_endpoints = extras.get("time_range_endpoints")
        groupby_exprs_with_timestamp = OrderedDict(
            groupby_exprs_sans_timestamp.items())
        if granularity:
            dttm_col = cols[granularity]
            time_grain = extras.get("time_grain_sqla")
            time_filters = []

            if is_timeseries:
                timestamp = dttm_col.get_timestamp_expression(time_grain)
                select_exprs += [timestamp]
                groupby_exprs_with_timestamp[timestamp.name] = timestamp

            # Use main dttm column to support index with secondary dttm columns.
            if (db_engine_spec.time_secondary_columns
                    and self.main_dttm_col in self.dttm_cols
                    and self.main_dttm_col != dttm_col.column_name):
                time_filters.append(cols[self.main_dttm_col].get_time_filter(
                    from_dttm, to_dttm, time_range_endpoints))
            time_filters.append(
                dttm_col.get_time_filter(from_dttm, to_dttm,
                                         time_range_endpoints))

        select_exprs += metrics_exprs

        labels_expected = [c._df_label_expected for c in select_exprs]

        select_exprs = db_engine_spec.make_select_compatible(
            groupby_exprs_with_timestamp.values(), select_exprs)
        qry = sa.select(select_exprs)

        tbl = self.get_from_clause(template_processor)

        if (is_sip_38 and metrics) or (not is_sip_38 and not columns):
            qry = qry.group_by(*groupby_exprs_with_timestamp.values())

        where_clause_and = []
        having_clause_and = []

        for flt in filter or []:
            if not all([flt.get(s) for s in ["col", "op"]]):
                continue
            col = flt["col"]
            op = flt["op"].upper()
            col_obj = cols.get(col)
            if col_obj:
                is_list_target = op in (
                    utils.FilterOperator.IN.value,
                    utils.FilterOperator.NOT_IN.value,
                )
                eq = self.filter_values_handler(
                    values=flt.get("val"),
                    target_column_is_numeric=col_obj.is_numeric,
                    is_list_target=is_list_target,
                )
                if op in (
                        utils.FilterOperator.IN.value,
                        utils.FilterOperator.NOT_IN.value,
                ):
                    cond = col_obj.get_sqla_col().in_(eq)
                    if isinstance(eq, str) and NULL_STRING in eq:
                        # `== None` (not `is None`) so SQLAlchemy emits IS NULL
                        cond = or_(cond, col_obj.get_sqla_col() == None)
                    if op == utils.FilterOperator.NOT_IN.value:
                        cond = ~cond
                    where_clause_and.append(cond)
                else:
                    if col_obj.is_numeric:
                        eq = utils.cast_to_num(flt["val"])
                    if op == utils.FilterOperator.EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() == eq)
                    elif op == utils.FilterOperator.NOT_EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() != eq)
                    elif op == utils.FilterOperator.GREATER_THAN.value:
                        where_clause_and.append(col_obj.get_sqla_col() > eq)
                    elif op == utils.FilterOperator.LESS_THAN.value:
                        where_clause_and.append(col_obj.get_sqla_col() < eq)
                    elif op == utils.FilterOperator.GREATER_THAN_OR_EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() >= eq)
                    elif op == utils.FilterOperator.LESS_THAN_OR_EQUALS.value:
                        where_clause_and.append(col_obj.get_sqla_col() <= eq)
                    elif op == utils.FilterOperator.LIKE.value:
                        where_clause_and.append(
                            col_obj.get_sqla_col().like(eq))
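                    # `== None`/`!= None` below are deliberate: SQLAlchemy
                    # needs them to emit IS NULL / IS NOT NULL.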
                    elif op == utils.FilterOperator.IS_NULL.value:
                        where_clause_and.append(col_obj.get_sqla_col() == None)
                    elif op == utils.FilterOperator.IS_NOT_NULL.value:
                        where_clause_and.append(col_obj.get_sqla_col() != None)
                    else:
                        raise Exception(
                            _("Invalid filter operation type: %(op)s", op=op))
        if config["ENABLE_ROW_LEVEL_SECURITY"]:
            where_clause_and += self._get_sqla_row_level_filters(
                template_processor)
        if extras:
            where = extras.get("where")
            if where:
                where = template_processor.process_template(where)
                where_clause_and += [sa.text("({})".format(where))]
            having = extras.get("having")
            if having:
                having = template_processor.process_template(having)
                having_clause_and += [sa.text("({})".format(having))]
        if granularity:
            qry = qry.where(and_(*(time_filters + where_clause_and)))
        else:
            qry = qry.where(and_(*where_clause_and))
        qry = qry.having(and_(*having_clause_and))

        if not orderby and ((is_sip_38 and metrics) or
                            (not is_sip_38 and not columns)):
            orderby = [(main_metric_expr, not order_desc)]

        # To ensure correct handling of the ORDER BY labeling we need to reference the
        # metric instance if defined in the SELECT clause.
        metrics_exprs_by_label = {m._label: m for m in metrics_exprs}

        for col, ascending in orderby:
            direction = asc if ascending else desc
            if utils.is_adhoc_metric(col):
                col = self.adhoc_metric_to_sqla(col, cols)
            elif col in cols:
                col = cols[col].get_sqla_col()

            if isinstance(col, Label) and col._label in metrics_exprs_by_label:
                col = metrics_exprs_by_label[col._label]

            qry = qry.order_by(direction(col))

        if row_limit:
            qry = qry.limit(row_limit)
        if row_offset:
            qry = qry.offset(row_offset)

        if (is_timeseries and timeseries_limit and not time_groupby_inline
                and ((is_sip_38 and columns) or (not is_sip_38 and groupby))):
            if self.database.db_engine_spec.allows_joins:
                # some sql dialects require for order by expressions
                # to also be in the select clause -- others, e.g. vertica,
                # require a unique inner alias
                inner_main_metric_expr = self.make_sqla_column_compatible(
                    main_metric_expr, "mme_inner__")
                inner_groupby_exprs = []
                inner_select_exprs = []
                for gby_name, gby_obj in groupby_exprs_sans_timestamp.items():
                    inner = self.make_sqla_column_compatible(
                        gby_obj, gby_name + "__")
                    inner_groupby_exprs.append(inner)
                    inner_select_exprs.append(inner)

                inner_select_exprs += [inner_main_metric_expr]
                subq = select(inner_select_exprs).select_from(tbl)
                inner_time_filter = dttm_col.get_time_filter(
                    inner_from_dttm or from_dttm,
                    inner_to_dttm or to_dttm,
                    time_range_endpoints,
                )
                subq = subq.where(
                    and_(*(where_clause_and + [inner_time_filter])))
                subq = subq.group_by(*inner_groupby_exprs)

                ob = inner_main_metric_expr
                if timeseries_limit_metric:
                    ob = self._get_timeseries_orderby(timeseries_limit_metric,
                                                      metrics_dict, cols)
                direction = desc if order_desc else asc
                subq = subq.order_by(direction(ob))
                subq = subq.limit(timeseries_limit)

                on_clause = []
                for gby_name, gby_obj in groupby_exprs_sans_timestamp.items():
                    # in this case the column name, not the alias, needs to be
                    # conditionally mutated, as it refers to the column alias in
                    # the inner query
                    col_name = db_engine_spec.make_label_compatible(gby_name +
                                                                    "__")
                    on_clause.append(gby_obj == column(col_name))

                tbl = tbl.join(subq.alias(), and_(*on_clause))
            else:
                if timeseries_limit_metric:
                    orderby = [(
                        self._get_timeseries_orderby(timeseries_limit_metric,
                                                     metrics_dict, cols),
                        False,
                    )]

                # run prequery to get top groups
                prequery_obj = {
                    "is_timeseries": False,
                    "row_limit": timeseries_limit,
                    "metrics": metrics,
                    "granularity": granularity,
                    "from_dttm": inner_from_dttm or from_dttm,
                    "to_dttm": inner_to_dttm or to_dttm,
                    "filter": filter,
                    "orderby": orderby,
                    "extras": extras,
                    "columns": columns,
                    "order_desc": True,
                }
                if not is_sip_38:
                    prequery_obj["groupby"] = groupby

                result = self.query(prequery_obj)
                prequeries.append(result.query)
                dimensions = [
                    c for c in result.df.columns
                    if c not in metrics and c in groupby_exprs_sans_timestamp
                ]
                top_groups = self._get_top_groups(
                    result.df, dimensions, groupby_exprs_sans_timestamp)
                qry = qry.where(top_groups)
        return SqlaQuery(
            extra_cache_keys=extra_cache_keys,
            labels_expected=labels_expected,
            sqla_query=qry.select_from(tbl),
            prequeries=prequeries,
        )
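
A hypothetical call site for the method above, for orientation only; the metric, column, and extras values are illustrative and not taken from Superset:

query = table.get_sqla_query(
    metrics=["count"],
    granularity="ds",
    from_dttm=datetime(2020, 1, 1),
    to_dttm=datetime(2020, 2, 1),
    groupby=["gender"],
    filter=[],
    extras={"time_range_endpoints": None, "time_grain_sqla": "P1D"},
)
sql = str(query.sqla_query)  # compile the generated SELECT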
Example #3
    def test_nonexistent_feature_flags(self):
        self.assertFalse(is_feature_enabled("FOO"))
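
A complementary sketch: a flag can be forced on for a single test by patching the feature-flag registry, the same mock.patch.dict pattern used in Example #5 below (FOO is an arbitrary flag name):

    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        {"FOO": True},
        clear=True,
    )
    def test_existing_feature_flag(self):
        self.assertTrue(is_feature_enabled("FOO"))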
Example #4
    def export(self, **kwargs: Any) -> Response:
        """Export datasets
        ---
        get:
          description: >-
            Exports multiple datasets and downloads them as YAML files
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_export_ids_schema'
          responses:
            200:
              description: Dataset export
              content:
                text/plain:
                  schema:
                    type: string
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        requested_ids = kwargs["rison"]

        if is_feature_enabled("VERSIONED_EXPORT"):
            timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
            root = f"dataset_export_{timestamp}"
            filename = f"{root}.zip"

            buf = BytesIO()
            with ZipFile(buf, "w") as bundle:
                try:
                    for file_name, file_content in ExportDatasetsCommand(
                            requested_ids).run():
                        with bundle.open(f"{root}/{file_name}", "w") as fp:
                            fp.write(file_content.encode())
                except DatasetNotFoundError:
                    return self.response_404()
            buf.seek(0)

            return send_file(
                buf,
                mimetype="application/zip",
                as_attachment=True,
                attachment_filename=filename,
            )

        query = self.datamodel.session.query(SqlaTable).filter(
            SqlaTable.id.in_(requested_ids))
        query = self._base_filters.apply_all(query)
        items = query.all()
        ids = [item.id for item in items]
        if len(ids) != len(requested_ids):
            return self.response_404()

        data = [t.export_to_dict() for t in items]
        return Response(
            yaml.safe_dump(data),
            headers=generate_download_headers("yaml"),
            mimetype="application/text",
        )
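
A hypothetical client call for the handler above, assuming it is mounted at the standard dataset export route and using the prison package to rison-encode the id list (the ids and the client fixture are illustrative):

import prison

ids = [1, 2, 3]
rv = client.get(f"api/v1/dataset/export/?q={prison.dumps(ids)}")
assert rv.status_code == 200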
Example #5
class CoreTests(SupersetTestCase):
    def __init__(self, *args, **kwargs):
        super(CoreTests, self).__init__(*args, **kwargs)

    def setUp(self):
        db.session.query(Query).delete()
        db.session.query(DatasourceAccessRequest).delete()
        db.session.query(models.Log).delete()
        self.table_ids = {
            tbl.table_name: tbl.id
            for tbl in (db.session.query(SqlaTable).all())
        }
        self.original_unsafe_db_setting = app.config[
            "PREVENT_UNSAFE_DB_CONNECTIONS"]

    def tearDown(self):
        db.session.query(Query).delete()
        app.config[
            "PREVENT_UNSAFE_DB_CONNECTIONS"] = self.original_unsafe_db_setting

    def test_login(self):
        resp = self.get_resp("/login/",
                             data=dict(username="******", password="******"))
        self.assertNotIn("User confirmation needed", resp)

        resp = self.get_resp("/logout/", follow_redirects=True)
        self.assertIn("User confirmation needed", resp)

        resp = self.get_resp("/login/",
                             data=dict(username="******",
                                       password="******"))
        self.assertIn("User confirmation needed", resp)

    def test_dashboard_endpoint(self):
        resp = self.client.get("/superset/dashboard/-1/")
        assert resp.status_code == 404

    def test_slice_endpoint(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)
        resp = self.get_resp("/superset/slice/{}/".format(slc.id))
        assert "Time Column" in resp
        assert "List Roles" in resp

        # Testing overrides
        resp = self.get_resp("/superset/slice/{}/?standalone=true".format(
            slc.id))
        assert '<div class="navbar' not in resp

        resp = self.client.get("/superset/slice/-1/")
        assert resp.status_code == 404

    def _get_query_context_dict(self) -> Dict[str, Any]:
        self.login(username="******")
        slc = self.get_slice("Girl Name Cloud", db.session)
        return {
            "datasource": {
                "id": slc.datasource_id,
                "type": slc.datasource_type
            },
            "queries": [{
                "granularity": "ds",
                "groupby": ["name"],
                "metrics": [{
                    "label": "sum__num"
                }],
                "filters": [],
                "row_limit": 100,
            }],
        }

    def test_viz_cache_key(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)

        viz = slc.viz
        qobj = viz.query_obj()
        cache_key = viz.cache_key(qobj)
        self.assertEqual(cache_key, viz.cache_key(qobj))

        qobj["groupby"] = []
        self.assertNotEqual(cache_key, viz.cache_key(qobj))

    def test_cache_key_changes_when_datasource_is_updated(self):
        qc_dict = self._get_query_context_dict()

        # construct baseline cache_key
        query_context = QueryContext(**qc_dict)
        query_object = query_context.queries[0]
        cache_key_original = query_context.cache_key(query_object)

        # make temporary change and revert it to refresh the changed_on property
        datasource = ConnectorRegistry.get_datasource(
            datasource_type=qc_dict["datasource"]["type"],
            datasource_id=qc_dict["datasource"]["id"],
            session=db.session,
        )
        description_original = datasource.description
        datasource.description = "temporary description"
        db.session.commit()
        datasource.description = description_original
        db.session.commit()

        # create new QueryContext with unchanged attributes and extract new cache_key
        query_context = QueryContext(**qc_dict)
        query_object = query_context.queries[0]
        cache_key_new = query_context.cache_key(query_object)

        # the new cache_key should be different due to updated datasource
        self.assertNotEqual(cache_key_original, cache_key_new)

    def test_query_context_time_range_endpoints(self):
        query_context = QueryContext(**self._get_query_context_dict())
        query_object = query_context.queries[0]
        extras = query_object.to_dict()["extras"]
        self.assertTrue("time_range_endpoints" in extras)

        self.assertEqual(
            extras["time_range_endpoints"],
            (utils.TimeRangeEndpoint.INCLUSIVE,
             utils.TimeRangeEndpoint.EXCLUSIVE),
        )

    def test_get_superset_tables_not_allowed(self):
        example_db = utils.get_example_database()
        schema_name = self.default_schema_backend_map[example_db.backend]
        self.login(username="******")
        uri = f"superset/tables/{example_db.id}/{schema_name}/undefined/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    def test_get_superset_tables_substr(self):
        example_db = utils.get_example_database()
        self.login(username="******")
        schema_name = self.default_schema_backend_map[example_db.backend]
        uri = f"superset/tables/{example_db.id}/{schema_name}/ab_role/"
        rv = self.client.get(uri)
        response = json.loads(rv.data.decode("utf-8"))
        self.assertEqual(rv.status_code, 200)

        expected_response = {
            "options": [{
                "label": "ab_role",
                "schema": schema_name,
                "title": "ab_role",
                "type": "table",
                "value": "ab_role",
            }],
            "tableLength":
            1,
        }
        self.assertEqual(response, expected_response)

    def test_get_superset_tables_not_found(self):
        self.login(username="******")
        uri = f"superset/tables/invalid/public/undefined/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    def test_api_v1_query_endpoint(self):
        self.login(username="******")
        qc_dict = self._get_query_context_dict()
        data = json.dumps(qc_dict)
        resp = json.loads(
            self.get_resp("/api/v1/query/", {"query_context": data}))
        self.assertEqual(resp[0]["rowcount"], 100)

    def test_old_slice_json_endpoint(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)

        json_endpoint = "/superset/explore_json/{}/{}/".format(
            slc.datasource_type, slc.datasource_id)
        resp = self.get_resp(json_endpoint,
                             {"form_data": json.dumps(slc.viz.form_data)})
        assert '"Jennifer"' in resp

    def test_slice_json_endpoint(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)
        resp = self.get_resp(slc.explore_json_url)
        assert '"Jennifer"' in resp

    def test_old_slice_csv_endpoint(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)

        csv_endpoint = "/superset/explore_json/{}/{}/?csv=true".format(
            slc.datasource_type, slc.datasource_id)
        resp = self.get_resp(csv_endpoint,
                             {"form_data": json.dumps(slc.viz.form_data)})
        assert "Jennifer," in resp

    def test_slice_csv_endpoint(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)

        csv_endpoint = "/superset/explore_json/?csv=true"
        resp = self.get_resp(csv_endpoint,
                             {"form_data": json.dumps({"slice_id": slc.id})})
        assert "Jennifer," in resp

    def test_admin_only_permissions(self):
        def assert_admin_permission_in(role_name, assert_func):
            role = security_manager.find_role(role_name)
            permissions = [p.permission.name for p in role.permissions]
            assert_func("can_sync_druid_source", permissions)
            assert_func("can_approve", permissions)

        assert_admin_permission_in("Admin", self.assertIn)
        assert_admin_permission_in("Alpha", self.assertNotIn)
        assert_admin_permission_in("Gamma", self.assertNotIn)

    def test_admin_only_menu_views(self):
        def assert_admin_view_menus_in(role_name, assert_func):
            role = security_manager.find_role(role_name)
            view_menus = [p.view_menu.name for p in role.permissions]
            assert_func("ResetPasswordView", view_menus)
            assert_func("RoleModelView", view_menus)
            assert_func("Security", view_menus)
            assert_func("SQL Lab", view_menus)

        assert_admin_view_menus_in("Admin", self.assertIn)
        assert_admin_view_menus_in("Alpha", self.assertNotIn)
        assert_admin_view_menus_in("Gamma", self.assertNotIn)

    def test_save_slice(self):
        self.login(username="******")
        slice_name = f"Energy Sankey"
        slice_id = self.get_slice(slice_name, db.session).id
        copy_name = f"Test Sankey Save_{random.random()}"
        tbl_id = self.table_ids.get("energy_usage")
        new_slice_name = f"Test Sankey Overwrite_{random.random()}"

        url = ("/superset/explore/table/{}/?slice_name={}&"
               "action={}&datasource_name=energy_usage")

        form_data = {
            "viz_type": "sankey",
            "groupby": "target",
            "metric": "sum__value",
            "row_limit": 5000,
            "slice_id": slice_id,
            "time_range_endpoints": ["inclusive", "exclusive"],
        }
        # Changing name and save as a new slice
        resp = self.client.post(
            url.format(tbl_id, copy_name, "saveas"),
            data={"form_data": json.dumps(form_data)},
        )
        db.session.expunge_all()
        new_slice_id = resp.json["form_data"]["slice_id"]
        slc = db.session.query(Slice).filter_by(id=new_slice_id).one()

        self.assertEqual(slc.slice_name, copy_name)
        form_data.pop("slice_id")  # We don't save the slice id when saving as
        self.assertEqual(slc.viz.form_data, form_data)

        form_data = {
            "viz_type": "sankey",
            "groupby": "source",
            "metric": "sum__value",
            "row_limit": 5000,
            "slice_id": new_slice_id,
            "time_range": "now",
            "time_range_endpoints": ["inclusive", "exclusive"],
        }
        # Setting the name back to its original name by overwriting new slice
        self.client.post(
            url.format(tbl_id, new_slice_name, "overwrite"),
            data={"form_data": json.dumps(form_data)},
        )
        db.session.expunge_all()
        slc = db.session.query(Slice).filter_by(id=new_slice_id).one()
        self.assertEqual(slc.slice_name, new_slice_name)
        self.assertEqual(slc.viz.form_data, form_data)

        # Cleanup
        db.session.delete(slc)
        db.session.commit()

    def test_filter_endpoint(self):
        self.login(username="******")
        slice_name = "Energy Sankey"
        slice_id = self.get_slice(slice_name, db.session).id
        db.session.commit()
        tbl_id = self.table_ids.get("energy_usage")
        table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id).one()
        table.filter_select_enabled = True
        url = (
            "/superset/filter/table/{}/target/?viz_type=sankey&groupby=source"
            "&metric=sum__value&flt_col_0=source&flt_op_0=in&flt_eq_0=&"
            "slice_id={}&datasource_name=energy_usage&"
            "datasource_id=1&datasource_type=table")

        # Changing name
        resp = self.get_resp(url.format(tbl_id, slice_id))
        assert len(resp) > 0
        assert "Carbon Dioxide" in resp

    def test_slice_data(self):
        # slice data should have some required attributes
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)
        slc_data_attributes = slc.data.keys()
        assert "changed_on" in slc_data_attributes
        assert "modified" in slc_data_attributes

    def test_slices(self):
        # Testing by hitting the two supported end points for all slices
        self.login(username="******")
        Slc = Slice
        urls = []
        for slc in db.session.query(Slc).all():
            urls += [
                (slc.slice_name, "explore", slc.slice_url),
                (slc.slice_name, "explore_json", slc.explore_json_url),
            ]
        for name, method, url in urls:
            logger.info(f"[{name}]/[{method}]: {url}")
            print(f"[{name}]/[{method}]: {url}")
            resp = self.client.get(url)
            self.assertEqual(resp.status_code, 200)

    def test_tablemodelview_list(self):
        self.login(username="******")

        url = "/tablemodelview/list/"
        resp = self.get_resp(url)

        # assert that a table is listed
        table = db.session.query(SqlaTable).first()
        assert table.name in resp
        assert "/superset/explore/table/{}".format(table.id) in resp

    def test_add_slice(self):
        self.login(username="******")
        # assert that /chart/add responds with 200
        url = "/chart/add"
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)

    def test_get_user_slices(self):
        self.login(username="******")
        userid = security_manager.find_user("admin").id
        url = f"/sliceasync/api/read?_flt_0_created_by={userid}"
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)

    def test_slices_V2(self):
        # Add explore-v2-beta role to admin user
        # Test all slice urls as a user with the explore-v2-beta role
        security_manager.add_role("explore-v2-beta")

        security_manager.add_user(
            "explore_beta",
            "explore_beta",
            " user",
            "*****@*****.**",
            security_manager.find_role("explore-v2-beta"),
            password="******",
        )
        self.login(username="******", password="******")

        Slc = Slice
        urls = []
        for slc in db.session.query(Slc).all():
            urls += [(slc.slice_name, "slice_url", slc.slice_url)]
        for name, method, url in urls:
            print(f"[{name}]/[{method}]: {url}")
            self.client.get(url)

    def test_doctests(self):
        modules = [utils, models, sql_lab]
        for mod in modules:
            failed, tests = doctest.testmod(mod)
            if failed:
                raise Exception("Failed a doctest")

    def test_misc(self):
        assert self.get_resp("/health") == "OK"
        assert self.get_resp("/healthcheck") == "OK"
        assert self.get_resp("/ping") == "OK"

    def test_testconn(self, username="******"):
        # need to temporarily allow sqlite dbs, teardown will undo this
        app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
        self.login(username=username)
        database = utils.get_example_database()
        # validate that the endpoint works with the password-masked sqlalchemy uri
        data = json.dumps({
            "uri": database.safe_sqlalchemy_uri(),
            "name": "examples",
            "impersonate_user": False,
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 200
        assert response.headers["Content-Type"] == "application/json"

        # validate that the endpoint works with the decrypted sqlalchemy uri
        data = json.dumps({
            "uri": database.sqlalchemy_uri_decrypted,
            "name": "examples",
            "impersonate_user": False,
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 200
        assert response.headers["Content-Type"] == "application/json"

    def test_testconn_failed_conn(self, username="******"):
        self.login(username=username)

        data = json.dumps({
            "uri": "broken://url",
            "name": "examples",
            "impersonate_user": False
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 400
        assert response.headers["Content-Type"] == "application/json"
        response_body = json.loads(response.data.decode("utf-8"))
        expected_body = {"error": "Could not load database driver: broken"}
        assert response_body == expected_body, "%s != %s" % (
            response_body,
            expected_body,
        )

    def test_testconn_unsafe_uri(self, username="******"):
        self.login(username=username)
        app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True

        response = self.client.post(
            "/superset/testconn",
            data=json.dumps({
                "uri": "sqlite:///home/superset/unsafe.db",
                "name": "unsafe",
                "impersonate_user": False,
            }),
            content_type="application/json",
        )
        self.assertEqual(400, response.status_code)
        response_body = json.loads(response.data.decode("utf-8"))
        expected_body = {
            "error": "SQLite database cannot be used as a data source for security reasons."
        }
        self.assertEqual(expected_body, response_body)

    def test_custom_password_store(self):
        database = utils.get_example_database()
        conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)

        def custom_password_store(uri):
            return "password_store_test"

        models.custom_password_store = custom_password_store
        conn = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
        if conn_pre.password:
            assert conn.password == "password_store_test"
            assert conn.password != conn_pre.password
        # Disable the password store for later tests
        models.custom_password_store = None

    def test_databaseview_edit(self, username="******"):
        # validate that sending a password-masked uri does not over-write the decrypted
        # uri
        self.login(username=username)
        database = utils.get_example_database()
        sqlalchemy_uri_decrypted = database.sqlalchemy_uri_decrypted
        url = "databaseview/edit/{}".format(database.id)
        data = {k: getattr(database, k) for k in DatabaseView.add_columns}
        data["sqlalchemy_uri"] = database.safe_sqlalchemy_uri()
        self.client.post(url, data=data)
        database = utils.get_example_database()
        self.assertEqual(sqlalchemy_uri_decrypted,
                         database.sqlalchemy_uri_decrypted)

        # Need to clean up after ourselves
        database.impersonate_user = False
        database.allow_dml = False
        database.allow_run_async = False
        db.session.commit()

    def test_warm_up_cache(self):
        slc = self.get_slice("Girls", db.session)
        data = self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(
            slc.id))
        self.assertEqual(data, [{
            "slice_id": slc.id,
            "slice_name": slc.slice_name
        }])

        data = self.get_json_resp(
            "/superset/warm_up_cache?table_name=energy_usage&db_name=main")
        assert len(data) > 0

    def test_shortner(self):
        self.login(username="******")
        data = (
            "//superset/explore/table/1/?viz_type=sankey&groupby=source&"
            "groupby=target&metric=sum__value&row_limit=5000&where=&having=&"
            "flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id=78&slice_name="
            "Energy+Sankey&collapsed_fieldsets=&action=&datasource_name="
            "energy_usage&datasource_id=1&datasource_type=table&"
            "previous_viz_type=sankey")
        resp = self.client.post("/r/shortner/", data=dict(data=data))
        assert re.search(r"\/r\/[0-9]+", resp.data.decode("utf-8"))

    @skipUnless((is_feature_enabled("KV_STORE")),
                "skipping as /kv/ endpoints are not enabled")
    def test_kv(self):
        self.login(username="******")

        resp = self.client.get("/kv/10001/")
        self.assertEqual(404, resp.status_code)

        value = json.dumps({"data": "this is a test"})
        resp = self.client.post("/kv/store/", data=dict(data=value))
        self.assertEqual(resp.status_code, 200)
        kv = db.session.query(models.KeyValue).first()
        kv_value = kv.value
        self.assertEqual(json.loads(value), json.loads(kv_value))

        resp = self.client.get("/kv/{}/".format(kv.id))
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(value),
                         json.loads(resp.data.decode("utf-8")))

    def test_gamma(self):
        self.login(username="******")
        assert "Charts" in self.get_resp("/chart/list/")
        assert "Dashboards" in self.get_resp("/dashboard/list/")

    def test_csv_endpoint(self):
        self.login("admin")
        sql = """
            SELECT name
            FROM birth_names
            WHERE name = 'James'
            LIMIT 1
        """
        client_id = "{}".format(random.getrandbits(64))[:10]
        self.run_sql(sql, client_id, raise_on_error=True)

        resp = self.get_resp("/superset/csv/{}".format(client_id))
        data = csv.reader(io.StringIO(resp))
        expected_data = csv.reader(io.StringIO("name\nJames\n"))

        client_id = "{}".format(random.getrandbits(64))[:10]
        self.run_sql(sql, client_id, raise_on_error=True)

        resp = self.get_resp("/superset/csv/{}".format(client_id))
        data = csv.reader(io.StringIO(resp))
        expected_data = csv.reader(io.StringIO("name\nJames\n"))

        self.assertEqual(list(expected_data), list(data))
        self.logout()

    def test_extra_table_metadata(self):
        self.login("admin")
        dbid = utils.get_example_database().id
        self.get_json_resp(
            f"/superset/extra_table_metadata/{dbid}/birth_names/superset/")

    def test_process_template(self):
        maindb = utils.get_example_database()
        sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
        tp = jinja_context.get_template_processor(database=maindb)
        rendered = tp.process_template(sql)
        self.assertEqual("SELECT '2017-01-01T00:00:00'", rendered)

    def test_get_template_kwarg(self):
        maindb = utils.get_example_database()
        s = "{{ foo }}"
        tp = jinja_context.get_template_processor(database=maindb, foo="bar")
        rendered = tp.process_template(s)
        self.assertEqual("bar", rendered)

    def test_template_kwarg(self):
        maindb = utils.get_example_database()
        s = "{{ foo }}"
        tp = jinja_context.get_template_processor(database=maindb)
        rendered = tp.process_template(s, foo="bar")
        self.assertEqual("bar", rendered)

    def test_templated_sql_json(self):
        self.login("admin")
        sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}' as test"
        data = self.run_sql(sql, "fdaklj3ws")
        self.assertEqual(data["data"][0]["test"], "2017-01-01T00:00:00")

    def test_fetch_datasource_metadata(self):
        self.login(username="******")
        url = "/superset/fetch_datasource_metadata?" "datasourceKey=1__table"
        resp = self.get_json_resp(url)
        keys = [
            "name",
            "type",
            "order_by_choices",
            "granularity_sqla",
            "time_grain_sqla",
            "id",
        ]
        for k in keys:
            self.assertIn(k, resp.keys())

    def test_user_profile(self, username="******"):
        self.login(username=username)
        slc = self.get_slice("Girls", db.session)

        # Setting some faves
        url = "/superset/favstar/Slice/{}/select/".format(slc.id)
        resp = self.get_json_resp(url)
        self.assertEqual(resp["count"], 1)

        dash = db.session.query(Dashboard).filter_by(slug="births").first()
        url = "/superset/favstar/Dashboard/{}/select/".format(dash.id)
        resp = self.get_json_resp(url)
        self.assertEqual(resp["count"], 1)

        userid = security_manager.find_user("admin").id
        resp = self.get_resp("/superset/profile/admin/")
        self.assertIn('"app"', resp)
        data = self.get_json_resp(
            "/superset/recent_activity/{}/".format(userid))
        self.assertNotIn("message", data)
        data = self.get_json_resp(
            "/superset/created_slices/{}/".format(userid))
        self.assertNotIn("message", data)
        data = self.get_json_resp(
            "/superset/created_dashboards/{}/".format(userid))
        self.assertNotIn("message", data)
        data = self.get_json_resp("/superset/fave_slices/{}/".format(userid))
        self.assertNotIn("message", data)
        data = self.get_json_resp(
            "/superset/fave_dashboards/{}/".format(userid))
        self.assertNotIn("message", data)
        data = self.get_json_resp(
            "/superset/fave_dashboards_by_username/{}/".format(username))
        self.assertNotIn("message", data)

    def test_slice_id_is_always_logged_correctly_on_web_request(self):
        # superset/explore case
        slc = db.session.query(Slice).filter_by(slice_name="Girls").one()
        qry = db.session.query(models.Log).filter_by(slice_id=slc.id)
        self.get_resp(slc.slice_url, {"form_data": json.dumps(slc.form_data)})
        self.assertEqual(1, qry.count())

    def test_slice_id_is_always_logged_correctly_on_ajax_request(self):
        # superset/explore_json case
        self.login(username="******")
        slc = db.session.query(Slice).filter_by(slice_name="Girls").one()
        qry = db.session.query(models.Log).filter_by(slice_id=slc.id)
        slc_url = slc.slice_url.replace("explore", "explore_json")
        self.get_json_resp(slc_url, {"form_data": json.dumps(slc.form_data)})
        self.assertEqual(1, qry.count())

    def test_slice_query_endpoint(self):
        # API endpoint for query string
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)
        resp = self.get_resp("/superset/slice_query/{}/".format(slc.id))
        assert "query" in resp
        assert "language" in resp
        self.logout()

    def test_import_csv(self):
        self.login(username="******")
        table_name = "".join(
            random.choice(string.ascii_uppercase) for _ in range(5))

        filename_1 = "testCSV.csv"
        test_file_1 = open(filename_1, "w+")
        test_file_1.write("a,b\n")
        test_file_1.write("john,1\n")
        test_file_1.write("paul,2\n")
        test_file_1.close()

        filename_2 = "testCSV2.csv"
        test_file_2 = open(filename_2, "w+")
        test_file_2.write("b,c,d\n")
        test_file_2.write("john,1,x\n")
        test_file_2.write("paul,2,y\n")
        test_file_2.close()

        example_db = utils.get_example_database()
        example_db.allow_csv_upload = True
        db_id = example_db.id
        db.session.commit()
        form_data = {
            "csv_file": open(filename_1, "rb"),
            "sep": ",",
            "name": table_name,
            "con": db_id,
            "if_exists": "fail",
            "index_label": "test_label",
            "mangle_dupe_cols": False,
        }
        url = "/databaseview/list/"
        add_datasource_page = self.get_resp(url)
        self.assertIn("Upload a CSV", add_datasource_page)

        url = "/csvtodatabaseview/form"
        form_get = self.get_resp(url)
        self.assertIn("CSV to Database configuration", form_get)

        try:
            # initial upload with fail mode
            resp = self.get_resp(url, data=form_data)
            self.assertIn(
                f'CSV file "{filename_1}" uploaded to table "{table_name}"',
                resp)

            # upload again with fail mode; should fail
            form_data["csv_file"] = open(filename_1, "rb")
            resp = self.get_resp(url, data=form_data)
            self.assertIn(
                f'Unable to upload CSV file "{filename_1}" to table "{table_name}"',
                resp,
            )

            # upload again with append mode
            form_data["csv_file"] = open(filename_1, "rb")
            form_data["if_exists"] = "append"
            resp = self.get_resp(url, data=form_data)
            self.assertIn(
                f'CSV file "{filename_1}" uploaded to table "{table_name}"',
                resp)

            # upload again with replace mode
            form_data["csv_file"] = open(filename_1, "rb")
            form_data["if_exists"] = "replace"
            resp = self.get_resp(url, data=form_data)
            self.assertIn(
                f'CSV file "{filename_1}" uploaded to table "{table_name}"',
                resp)

            # try to append to table from file with different schema
            form_data["csv_file"] = open(filename_2, "rb")
            form_data["if_exists"] = "append"
            resp = self.get_resp(url, data=form_data)
            self.assertIn(
                f'Unable to upload CSV file "{filename_2}" to table "{table_name}"',
                resp,
            )

            # replace table from file with different schema
            form_data["csv_file"] = open(filename_2, "rb")
            form_data["if_exists"] = "replace"
            resp = self.get_resp(url, data=form_data)
            self.assertIn(
                f'CSV file "{filename_2}" uploaded to table "{table_name}"',
                resp)
            table = (db.session.query(SqlaTable).filter_by(
                table_name=table_name, database_id=db_id).first())
            # make sure the new column name is reflected in the table metadata
            self.assertIn("d", table.column_names)
        finally:
            os.remove(filename_1)
            os.remove(filename_2)

    def test_dataframe_timezone(self):
        tz = pytz.FixedOffset(60)
        data = [
            (datetime.datetime(2017, 11, 18, 21, 53, 0, 219225, tzinfo=tz), ),
            (datetime.datetime(2017, 11, 18, 22, 6, 30, tzinfo=tz), ),
        ]
        results = SupersetResultSet(list(data), [["data"]], BaseEngineSpec)
        df = results.to_pandas_df()
        data = dataframe.df_to_records(df)
        json_str = json.dumps(data,
                              default=utils.pessimistic_json_iso_dttm_ser)
        self.assertDictEqual(
            data[0],
            {"data": pd.Timestamp("2017-11-18 21:53:00.219225+0100", tz=tz)})
        self.assertDictEqual(
            data[1], {"data": pd.Timestamp("2017-11-18 22:06:30+0100", tz=tz)})
        self.assertEqual(
            json_str,
            '[{"data": "2017-11-18T21:53:00.219225+01:00"}, {"data": "2017-11-18T22:06:30+01:00"}]',
        )

    def test_mssql_engine_spec_pymssql(self):
        # Test for case when tuple is returned (pymssql)
        data = [
            (1, 1, datetime.datetime(2017, 10, 19, 23, 39, 16, 660000)),
            (2, 2, datetime.datetime(2018, 10, 19, 23, 39, 16, 660000)),
        ]
        results = SupersetResultSet(list(data), [["col1"], ["col2"], ["col3"]],
                                    MssqlEngineSpec)
        df = results.to_pandas_df()
        data = dataframe.df_to_records(df)
        self.assertEqual(len(data), 2)
        self.assertEqual(
            data[0],
            {
                "col1": 1,
                "col2": 1,
                "col3": pd.Timestamp("2017-10-19 23:39:16.660000")
            },
        )

    def test_comments_in_sqlatable_query(self):
        clean_query = "SELECT '/* val 1 */' as c1, '-- val 2' as c2 FROM tbl"
        commented_query = "/* comment 1 */" + clean_query + "-- comment 2"
        table = SqlaTable(table_name="test_comments_in_sqlatable_query_table",
                          sql=commented_query)
        rendered_query = str(table.get_from_clause())
        self.assertEqual(clean_query, rendered_query)

    def test_slice_payload_no_data(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)
        json_endpoint = "/superset/explore_json/"
        form_data = slc.form_data
        form_data.update({
            "adhoc_filters": [{
                "clause": "WHERE",
                "comparator": "NA",
                "expressionType": "SIMPLE",
                "operator": "==",
                "subject": "gender",
            }]
        })
        data = self.get_json_resp(json_endpoint,
                                  {"form_data": json.dumps(form_data)})
        self.assertEqual(data["status"], utils.QueryStatus.SUCCESS)
        self.assertEqual(data["error"], "No data")

    def test_slice_payload_invalid_query(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)
        form_data = slc.form_data
        form_data.update({"groupby": ["N/A"]})

        data = self.get_json_resp("/superset/explore_json/",
                                  {"form_data": json.dumps(form_data)})
        self.assertEqual(data["status"], utils.QueryStatus.FAILED)

    def test_slice_payload_no_datasource(self):
        self.login(username="******")
        data = self.get_json_resp("/superset/explore_json/",
                                  raise_on_error=False)

        self.assertEqual(
            data["error"],
            "The datasource associated with this chart no longer exists")

    @mock.patch(
        "superset.security.SupersetSecurityManager.schemas_accessible_by_user")
    @mock.patch("superset.security.SupersetSecurityManager.database_access")
    @mock.patch(
        "superset.security.SupersetSecurityManager.all_datasource_access")
    def test_schemas_access_for_csv_upload_endpoint(self,
                                                    mock_all_datasource_access,
                                                    mock_database_access,
                                                    mock_schemas_accessible):
        self.login(username="******")
        dbobj = self.create_fake_db()
        mock_all_datasource_access.return_value = False
        mock_database_access.return_value = False
        mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"]
        data = self.get_json_resp(
            url="/superset/schemas_access_for_csv_upload?db_id={db_id}".format(
                db_id=dbobj.id))
        assert data == ["this_schema_is_allowed_too"]
        self.delete_fake_db()

    def test_select_star(self):
        self.login(username="******")
        examples_db = utils.get_example_database()
        resp = self.get_resp(
            f"/superset/select_star/{examples_db.id}/birth_names")
        self.assertIn("gender", resp)

    def test_get_select_star_not_allowed(self):
        """
            Database API: Test get select star not allowed
        """
        self.login(username="******")
        example_db = utils.get_example_database()
        resp = self.client.get(
            f"/superset/select_star/{example_db.id}/birth_names")
        self.assertEqual(resp.status_code, 404)

    @mock.patch("superset.views.core.results_backend_use_msgpack", False)
    @mock.patch("superset.views.core.results_backend")
    @mock.patch("superset.views.core.db")
    def test_display_limit(self, mock_superset_db, mock_results_backend):
        query_mock = mock.Mock()
        query_mock.sql = "SELECT *"
        query_mock.database = 1
        query_mock.schema = "superset"
        mock_superset_db.session.query().filter_by().one_or_none.return_value = query_mock

        data = [{"col_0": i} for i in range(100)]
        payload = {
            "status": utils.QueryStatus.SUCCESS,
            "query": {
                "rows": 100
            },
            "data": data,
        }
        # do not apply msgpack serialization
        use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"]
        app.config["RESULTS_BACKEND_USE_MSGPACK"] = False
        serialized_payload = sql_lab._serialize_payload(payload, False)
        compressed = utils.zlib_compress(serialized_payload)
        mock_results_backend.get.return_value = compressed

        # get all results
        result = json.loads(self.get_resp("/superset/results/key/"))
        expected = {"status": "success", "query": {"rows": 100}, "data": data}
        self.assertEqual(result, expected)

        # limit results to 1
        limited_data = data[:1]
        result = json.loads(self.get_resp("/superset/results/key/?rows=1"))
        expected = {
            "status": "success",
            "query": {
                "rows": 100
            },
            "data": limited_data,
            "displayLimitReached": True,
        }
        self.assertEqual(result, expected)

        app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack

    def test_results_default_deserialization(self):
        use_new_deserialization = False
        data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
        cursor_descr = (
            ("a", "string"),
            ("b", "int"),
            ("c", "float"),
            ("d", "datetime"),
        )
        db_engine_spec = BaseEngineSpec()
        results = SupersetResultSet(data, cursor_descr, db_engine_spec)
        query = {
            "database_id": 1,
            "sql": "SELECT * FROM birth_names LIMIT 100",
            "status": utils.QueryStatus.PENDING,
        }
        (
            serialized_data,
            selected_columns,
            all_columns,
            expanded_columns,
        ) = sql_lab._serialize_and_expand_data(results, db_engine_spec,
                                               use_new_deserialization)
        payload = {
            "query_id": 1,
            "status": utils.QueryStatus.SUCCESS,
            "state": utils.QueryStatus.SUCCESS,
            "data": serialized_data,
            "columns": all_columns,
            "selected_columns": selected_columns,
            "expanded_columns": expanded_columns,
            "query": query,
        }

        serialized_payload = sql_lab._serialize_payload(
            payload, use_new_deserialization)
        self.assertIsInstance(serialized_payload, str)

        query_mock = mock.Mock()
        deserialized_payload = views._deserialize_results_payload(
            serialized_payload, query_mock, use_new_deserialization)

        self.assertDictEqual(deserialized_payload, payload)
        query_mock.assert_not_called()

    def test_results_msgpack_deserialization(self):
        use_new_deserialization = True
        data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
        cursor_descr = (
            ("a", "string"),
            ("b", "int"),
            ("c", "float"),
            ("d", "datetime"),
        )
        db_engine_spec = BaseEngineSpec()
        results = SupersetResultSet(data, cursor_descr, db_engine_spec)
        query = {
            "database_id": 1,
            "sql": "SELECT * FROM birth_names LIMIT 100",
            "status": utils.QueryStatus.PENDING,
        }
        (
            serialized_data,
            selected_columns,
            all_columns,
            expanded_columns,
        ) = sql_lab._serialize_and_expand_data(results, db_engine_spec,
                                               use_new_deserialization)
        payload = {
            "query_id": 1,
            "status": utils.QueryStatus.SUCCESS,
            "state": utils.QueryStatus.SUCCESS,
            "data": serialized_data,
            "columns": all_columns,
            "selected_columns": selected_columns,
            "expanded_columns": expanded_columns,
            "query": query,
        }

        serialized_payload = sql_lab._serialize_payload(
            payload, use_new_deserialization)
        self.assertIsInstance(serialized_payload, bytes)

        with mock.patch.object(
                db_engine_spec, "expand_data",
                wraps=db_engine_spec.expand_data) as expand_data:
            query_mock = mock.Mock()
            query_mock.database.db_engine_spec.expand_data = expand_data

            deserialized_payload = views._deserialize_results_payload(
                serialized_payload, query_mock, use_new_deserialization)
            df = results.to_pandas_df()
            payload["data"] = dataframe.df_to_records(df)

            self.assertDictEqual(deserialized_payload, payload)
            expand_data.assert_called_once()

    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        {"FOO": lambda x: 1},
        clear=True,
    )
    def test_feature_flag_serialization(self):
        """
        Functions in feature flags don't break bootstrap data serialization.
        """
        self.login()

        encoded = json.dumps(
            {
                "FOO": lambda x: 1,
                "super": "set"
            },
            default=utils.pessimistic_json_iso_dttm_ser,
        )
        # cgi.escape() was removed in Python 3.8; the stdlib equivalent is
        # html.escape(..., quote=False) (import: from html import escape as html_escape)
        html = html_escape(encoded, quote=False).replace("'", "&#39;").replace(
            '"', "&#34;")

        urls = [
            "/superset/sqllab",
            "/superset/welcome",
            "/superset/dashboard/1/",
            "/superset/profile/admin/",
            "/superset/explore/table/1",
        ]
        for url in urls:
            data = self.get_resp(url)
            self.assertTrue(html in data)
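
This test passes because pessimistic_json_iso_dttm_ser never raises: a callable flag value such as the FOO lambda degrades to a placeholder string instead of breaking bootstrap-data serialization. A rough, simplified sketch of such a pessimistic default:

def pessimistic_default(obj):
    # datetimes serialize normally; anything unserializable (e.g. a lambda)
    # becomes a placeholder rather than raising TypeError
    try:
        return obj.isoformat()
    except AttributeError:
        return f"Unserializable [{type(obj)}]"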

    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        {"SQLLAB_BACKEND_PERSISTENCE": True},
        clear=True,
    )
    def test_sqllab_backend_persistence_payload(self):
        username = "******"
        self.login(username)
        user_id = security_manager.find_user(username).id

        # create a tab
        data = {
            "queryEditor": json.dumps({
                "title": "Untitled Query 1",
                "dbId": 1,
                "schema": None,
                "autorun": False,
                "sql": "SELECT ...",
                "queryLimit": 1000,
            })
        }
        resp = self.get_json_resp("/tabstateview/", data=data)
        tab_state_id = resp["id"]

        # run a query in the created tab
        self.run_sql(
            "SELECT name FROM birth_names",
            "client_id_1",
            user_name=username,
            raise_on_error=True,
            sql_editor_id=tab_state_id,
        )
        # run an orphan query (no tab)
        self.run_sql(
            "SELECT name FROM birth_names",
            "client_id_2",
            user_name=username,
            raise_on_error=True,
        )

        # we should have only 1 query returned, since the second one is not
        # associated with any tabs
        payload = views.Superset._get_sqllab_payload(user_id=user_id)
        self.assertEqual(len(payload["queries"]), 1)
Example #6
class Dashboard(Model, AuditMixinNullable, ImportExportMixin):
    """The dashboard object!"""

    __tablename__ = "dashboards"
    id = Column(Integer, primary_key=True)
    dashboard_title = Column(String(500))
    position_json = Column(utils.MediumText())
    description = Column(Text)
    css = Column(Text)
    certified_by = Column(Text)
    certification_details = Column(Text)
    json_metadata = Column(Text)
    slug = Column(String(255), unique=True)
    slices = relationship(Slice,
                          secondary=dashboard_slices,
                          backref="dashboards")
    owners = relationship(security_manager.user_model,
                          secondary=dashboard_user)
    published = Column(Boolean, default=False)
    roles = relationship(security_manager.role_model, secondary=DashboardRoles)
    _filter_sets = relationship("FilterSet",
                                back_populates="dashboard",
                                cascade="all, delete")
    export_fields = [
        "dashboard_title",
        "position_json",
        "json_metadata",
        "description",
        "css",
        "slug",
    ]

    def __repr__(self) -> str:
        return f"Dashboard<{self.id or self.slug}>"

    @property
    def url(self) -> str:
        return f"/superset/dashboard/{self.slug or self.id}/"

    @property
    def datasources(self) -> Set[BaseDatasource]:
        # Verbose but efficient database enumeration of dashboard datasources.
        datasources_by_cls_model: Dict[Type["BaseDatasource"],
                                       Set[int]] = defaultdict(set)

        for slc in self.slices:
            datasources_by_cls_model[slc.cls_model].add(slc.datasource_id)

        return {
            datasource
            for cls_model, datasource_ids in datasources_by_cls_model.items()
            for datasource in db.session.query(cls_model).filter(
                cls_model.id.in_(datasource_ids)).all()
        }

    @property
    def filter_sets(self) -> Dict[int, FilterSet]:
        return {fs.id: fs for fs in self._filter_sets}

    @property
    def filter_sets_lst(self) -> Dict[int, FilterSet]:
        if is_user_admin():
            # return a dict keyed by id, consistent with the annotation
            return self.filter_sets
        current_user = g.user.id
        filter_sets_by_owner_type: Dict[str, List[Any]] = {
            "Dashboard": [],
            "User": []
        }
        for fs in self._filter_sets:
            filter_sets_by_owner_type[fs.owner_type].append(fs)
        user_filter_sets = list(
            filter(
                lambda filter_set: filter_set.owner_id == current_user,
                filter_sets_by_owner_type["User"],
            ))
        return {
            fs.id: fs
            for fs in user_filter_sets + filter_sets_by_owner_type["Dashboard"]
        }

    @property
    def charts(self) -> List[str]:
        return [slc.chart for slc in self.slices]

    @property
    def sqla_metadata(self) -> None:
        # pylint: disable=no-member
        meta = MetaData(bind=self.get_sqla_engine())
        meta.reflect()

    @property
    def status(self) -> utils.DashboardStatus:
        if self.published:
            return utils.DashboardStatus.PUBLISHED
        return utils.DashboardStatus.DRAFT

    @renders("dashboard_title")
    def dashboard_link(self) -> Markup:
        title = escape(self.dashboard_title or "<empty>")
        return Markup(f'<a href="{self.url}">{title}</a>')

    @property
    def digest(self) -> str:
        """
        Returns an MD5 hex digest that makes this dashboard unique
        """
        unique_string = f"{self.position_json}.{self.css}.{self.json_metadata}"
        return md5_sha_from_str(unique_string)

    @property
    def thumbnail_url(self) -> str:
        """
        Returns a thumbnail URL with a hex digest so the browser cache is
        bypassed when the dashboard changes
        """
        return f"/api/v1/dashboard/{self.id}/thumbnail/{self.digest}/"

    @property
    def changed_by_name(self) -> str:
        if not self.changed_by:
            return ""
        return str(self.changed_by)

    @property
    def changed_by_url(self) -> str:
        if not self.changed_by:
            return ""
        return f"/superset/profile/{self.changed_by.username}"

    @property
    def data(self) -> Dict[str, Any]:
        positions = self.position_json
        if positions:
            positions = json.loads(positions)
        return {
            "id": self.id,
            "metadata": self.params_dict,
            "certified_by": self.certified_by,
            "certification_details": self.certification_details,
            "css": self.css,
            "dashboard_title": self.dashboard_title,
            "published": self.published,
            "slug": self.slug,
            "slices": [slc.data for slc in self.slices],
            "position_json": positions,
            "last_modified_time": self.changed_on.replace(microsecond=0).timestamp(),
        }

    @cache_manager.cache.memoize(
        # manage cache version manually
        make_name=lambda fname: f"{fname}-v1.0",
        unless=lambda: not is_feature_enabled("DASHBOARD_CACHE"),
    )
    def datasets_trimmed_for_slices(self) -> List[Dict[str, Any]]:
        # Verbose but efficient database enumeration of dashboard datasources.
        slices_by_datasource: Dict[Tuple[Type["BaseDatasource"], int],
                                   Set[Slice]] = defaultdict(set)

        for slc in self.slices:
            slices_by_datasource[(slc.cls_model, slc.datasource_id)].add(slc)

        result: List[Dict[str, Any]] = []

        for (cls_model, datasource_id), slices in slices_by_datasource.items():
            datasource = (db.session.query(cls_model).filter_by(
                id=datasource_id).one_or_none())

            if datasource:
                # Filter out unneeded fields from the datasource payload
                result.append(datasource.data_for_slices(slices))

        return result

    @property  # type: ignore
    def params(self) -> str:  # type: ignore
        return self.json_metadata

    @params.setter
    def params(self, value: str) -> None:
        self.json_metadata = value

    @property
    def position(self) -> Dict[str, Any]:
        if self.position_json:
            return json.loads(self.position_json)
        return {}

    def update_thumbnail(self) -> None:
        url = get_url_path("Superset.dashboard", dashboard_id_or_slug=self.id)
        cache_dashboard_thumbnail.delay(url, self.digest, force=True)

    @debounce(0.1)
    def clear_cache(self) -> None:
        cache_manager.cache.delete_memoized(
            Dashboard.datasets_trimmed_for_slices, self)

    @classmethod
    @debounce(0.1)
    def clear_cache_for_slice(cls, slice_id: int) -> None:
        filter_query = select(
            [dashboard_slices.c.dashboard_id],
            distinct=True).where(dashboard_slices.c.slice_id == slice_id)
        for (dashboard_id, ) in db.engine.execute(filter_query):
            cls(id=dashboard_id).clear_cache()

    @classmethod
    @debounce(0.1)
    def clear_cache_for_datasource(cls, datasource_id: int) -> None:
        filter_query = select(
            [dashboard_slices.c.dashboard_id],
            distinct=True,
        ).select_from(
            join(
                dashboard_slices,
                Slice,
                (Slice.id == dashboard_slices.c.slice_id)
                & (Slice.datasource_id == datasource_id),
            ))
        for (dashboard_id, ) in db.engine.execute(filter_query):
            cls(id=dashboard_id).clear_cache()

    @classmethod
    def export_dashboards(  # pylint: disable=too-many-locals
            cls, dashboard_ids: List[int]) -> str:
        copied_dashboards = []
        datasource_ids = set()
        for dashboard_id in dashboard_ids:
            # make sure that dashboard_id is an integer
            dashboard_id = int(dashboard_id)
            dashboard = (db.session.query(Dashboard).options(
                subqueryload(
                    Dashboard.slices)).filter_by(id=dashboard_id).first())
            # remove ids and relations (like owners, created by, slices, ...)
            copied_dashboard = dashboard.copy()
            for slc in dashboard.slices:
                datasource_ids.add((slc.datasource_id, slc.datasource_type))
                copied_slc = slc.copy()
                # save original id into json
                # we need it to update dashboard's json metadata on import
                copied_slc.id = slc.id
                # add extra params for the import
                copied_slc.alter_params(
                    remote_id=slc.id,
                    datasource_name=slc.datasource.datasource_name,
                    schema=slc.datasource.schema,
                    database_name=slc.datasource.database.name,
                )
                # set slices without creating ORM relations
                slices = copied_dashboard.__dict__.setdefault("slices", [])
                slices.append(copied_slc)

            json_metadata = json.loads(dashboard.json_metadata)
            native_filter_configuration: List[Dict[
                str, Any]] = json_metadata.get("native_filter_configuration",
                                               [])
            for native_filter in native_filter_configuration:
                session = db.session()
                for target in native_filter.get("targets", []):
                    id_ = target.get("datasetId")
                    if id_ is None:
                        continue
                    datasource = ConnectorRegistry.get_datasource_by_id(
                        session, id_)
                    datasource_ids.add((datasource.id, datasource.type))

            copied_dashboard.alter_params(remote_id=dashboard_id)
            copied_dashboards.append(copied_dashboard)

        eager_datasources = []
        for datasource_id, datasource_type in datasource_ids:
            eager_datasource = ConnectorRegistry.get_eager_datasource(
                db.session, datasource_type, datasource_id)
            copied_datasource = eager_datasource.copy()
            copied_datasource.alter_params(
                remote_id=eager_datasource.id,
                database_name=eager_datasource.database.name,
            )
            datasource_class = copied_datasource.__class__
            for field_name in datasource_class.export_children:
                field_val = getattr(eager_datasource, field_name).copy()
                # set children without creating ORM relations
                copied_datasource.__dict__[field_name] = field_val
            eager_datasources.append(copied_datasource)

        return json.dumps(
            {
                "dashboards": copied_dashboards,
                "datasources": eager_datasources
            },
            cls=utils.DashboardEncoder,
            indent=4,
        )

    @classmethod
    def get(cls, id_or_slug: str) -> Dashboard:
        session = db.session()
        qry = session.query(Dashboard).filter(id_or_slug_filter(id_or_slug))
        return qry.one_or_none()

    def is_actor_owner(self) -> bool:
        if g.user is None or g.user.is_anonymous or not g.user.is_authenticated:
            return False
        return g.user.id in set(map(lambda user: user.id, self.owners))
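
The digest and thumbnail_url properties above derive a stable fingerprint from the dashboard's layout, CSS, and JSON metadata. Assuming md5_sha_from_str is a thin wrapper around hashlib.md5 (a sketch of the helper, not necessarily the exact utility):

import hashlib

def md5_sha_from_str(value: str) -> str:
    # any change to position_json/css/json_metadata yields a new hex digest,
    # and therefore a new thumbnail URL that bypasses the browser cache
    return hashlib.md5(value.encode("utf-8")).hexdigest()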
Example #7
    def apply(self, query: Query, value: Any) -> Query:
        if is_user_admin():
            return query

        datasource_perms = security_manager.user_view_menu_names("datasource_access")
        schema_perms = security_manager.user_view_menu_names("schema_access")

        is_rbac_disabled_filter = []
        dashboard_has_roles = Dashboard.roles.any()
        if is_feature_enabled("DASHBOARD_RBAC"):
            is_rbac_disabled_filter.append(~dashboard_has_roles)

        datasource_perm_query = (
            db.session.query(Dashboard.id)
            .join(Dashboard.slices)
            .filter(
                and_(
                    Dashboard.published.is_(True),
                    *is_rbac_disabled_filter,
                    or_(
                        Slice.perm.in_(datasource_perms),
                        Slice.schema_perm.in_(schema_perms),
                        security_manager.can_access_all_datasources(),
                    ),
                )
            )
        )

        users_favorite_dash_query = db.session.query(FavStar.obj_id).filter(
            and_(
                FavStar.user_id == security_manager.user_model.get_user_id(),
                FavStar.class_name == "Dashboard",
            )
        )
        owner_ids_query = (
            db.session.query(Dashboard.id)
            .join(Dashboard.owners)
            .filter(
                security_manager.user_model.id
                == security_manager.user_model.get_user_id()
            )
        )

        feature_flagged_filters = []
        if is_feature_enabled("DASHBOARD_RBAC"):
            roles_based_query = (
                db.session.query(Dashboard.id)
                .join(Dashboard.roles)
                .filter(
                    and_(
                        Dashboard.published.is_(True),
                        dashboard_has_roles,
                        Role.id.in_([x.id for x in security_manager.get_user_roles()]),
                    ),
                )
            )

            feature_flagged_filters.append(Dashboard.id.in_(roles_based_query))

        if is_feature_enabled("EMBEDDED_SUPERSET") and security_manager.is_guest_user(
            g.user
        ):

            guest_user: GuestUser = g.user
            embedded_dashboard_ids = [
                r["id"]
                for r in guest_user.resources
                if r["type"] == GuestTokenResourceType.DASHBOARD.value
            ]

            # TODO (embedded): only use uuid filter once uuids are rolled out
            condition = (
                Dashboard.embedded.any(
                    EmbeddedDashboard.uuid.in_(embedded_dashboard_ids)
                )
                if any(is_uuid(id_) for id_ in embedded_dashboard_ids)
                else Dashboard.id.in_(embedded_dashboard_ids)
            )

            feature_flagged_filters.append(condition)

        query = query.filter(
            or_(
                Dashboard.id.in_(owner_ids_query),
                Dashboard.id.in_(datasource_perm_query),
                Dashboard.id.in_(users_favorite_dash_query),
                *feature_flagged_filters,
            )
        )

        return query
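
This filter only takes effect once it is attached to a model API. Example #11 below wires it in through base_filters on DashboardRestApi; a one-line sketch of that hookup (the trailing lambda is Flask-AppBuilder's extra-arguments factory):

    base_filters = [["id", DashboardAccessFilter, lambda: []]]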
Example #8
    def ensure_alert_reports_enabled(self) -> Optional[Response]:
        if not is_feature_enabled("ALERT_REPORTS"):
            return self.response_404()
        return None
Example #9
from urllib import parse

import sqlalchemy as sqla
from flask_appbuilder import Model
from flask_appbuilder.models.decorators import renders
from markupsafe import escape, Markup
from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text
from sqlalchemy.orm import make_transient, relationship

from superset import ConnectorRegistry, db, is_feature_enabled, security_manager
from superset.legacy import update_time_range
from superset.models.helpers import AuditMixinNullable, ImportMixin
from superset.models.tags import ChartUpdater
from superset.utils import core as utils

if is_feature_enabled("SIP_38_VIZ_REARCHITECTURE"):
    from superset.viz_sip38 import BaseViz, viz_types  # type: ignore
else:
    from superset.viz import BaseViz, viz_types  # type: ignore

if TYPE_CHECKING:
    # pylint: disable=unused-import
    from superset.connectors.base.models import BaseDatasource

metadata = Model.metadata  # pylint: disable=no-member
slice_user = Table(
    "slice_user",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("user_id", Integer, ForeignKey("ab_user.id")),
    Column("slice_id", Integer, ForeignKey("slices.id")),
Example #10
    def annotation(self, pk: int) -> FlaskResponse:  # pylint: disable=unused-argument
        if not is_feature_enabled("ENABLE_REACT_CRUD_VIEWS"):
            return super().list()

        return super().render_app_template()
Example #11
class DashboardRestApi(BaseSupersetModelRestApi):
    datamodel = SQLAInterface(Dashboard)

    @before_request(only=["thumbnail"])
    def ensure_thumbnails_enabled(self) -> Optional[Response]:
        if not is_feature_enabled("THUMBNAILS"):
            return self.response_404()
        return None

    include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
        RouteMethod.EXPORT,
        RouteMethod.IMPORT,
        RouteMethod.RELATED,
        "bulk_delete",  # not using RouteMethod since locally defined
        "favorite_status",
        "get_charts",
        "get_datasets",
        "thumbnail",
    }
    resource_name = "dashboard"
    allow_browser_login = True

    class_permission_name = "Dashboard"
    method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP

    list_columns = [
        "id",
        "published",
        "status",
        "slug",
        "url",
        "css",
        "position_json",
        "json_metadata",
        "thumbnail_url",
        "certified_by",
        "certification_details",
        "changed_by.first_name",
        "changed_by.last_name",
        "changed_by.username",
        "changed_by.id",
        "changed_by_name",
        "changed_by_url",
        "changed_on_utc",
        "changed_on_delta_humanized",
        "created_by.first_name",
        "created_by.id",
        "created_by.last_name",
        "dashboard_title",
        "owners.id",
        "owners.username",
        "owners.first_name",
        "owners.last_name",
        "roles.id",
        "roles.name",
    ]
    list_select_columns = list_columns + ["changed_on", "changed_by_fk"]
    order_columns = [
        "changed_by.first_name",
        "changed_on_delta_humanized",
        "created_by.first_name",
        "dashboard_title",
        "published",
    ]

    add_columns = [
        "certified_by",
        "certification_details",
        "dashboard_title",
        "slug",
        "owners",
        "roles",
        "position_json",
        "css",
        "json_metadata",
        "published",
    ]
    edit_columns = add_columns

    search_columns = (
        "created_by",
        "changed_by",
        "dashboard_title",
        "id",
        "owners",
        "published",
        "roles",
        "slug",
    )
    search_filters = {
        "dashboard_title": [DashboardTitleOrSlugFilter],
        "id": [DashboardFavoriteFilter, DashboardCertifiedFilter],
    }
    base_order = ("changed_on", "desc")

    add_model_schema = DashboardPostSchema()
    edit_model_schema = DashboardPutSchema()
    chart_entity_response_schema = ChartEntityResponseSchema()
    dashboard_get_response_schema = DashboardGetResponseSchema()
    dashboard_dataset_schema = DashboardDatasetSchema()

    base_filters = [["id", DashboardAccessFilter, lambda: []]]

    order_rel_fields = {
        "slices": ("slice_name", "asc"),
        "owners": ("first_name", "asc"),
        "roles": ("name", "asc"),
    }
    related_field_filters = {
        "owners": RelatedFieldFilter("first_name", FilterRelatedOwners),
        "roles": RelatedFieldFilter("name", FilterRelatedRoles),
        "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners),
    }
    allowed_rel_fields = {"owners", "roles", "created_by"}

    openapi_spec_tag = "Dashboards"
    """ Override the name set for this collection of endpoints """
    openapi_spec_component_schemas = (
        ChartEntityResponseSchema,
        DashboardGetResponseSchema,
        DashboardDatasetSchema,
        GetFavStarIdsSchema,
    )
    apispec_parameter_schemas = {
        "get_delete_ids_schema": get_delete_ids_schema,
        "get_export_ids_schema": get_export_ids_schema,
        "thumbnail_query_schema": thumbnail_query_schema,
        "get_fav_star_ids_schema": get_fav_star_ids_schema,
    }
    openapi_spec_methods = openapi_spec_methods_override
    """ Overrides GET methods OpenApi descriptions """

    def __repr__(self) -> str:
        """Deterministic string representation of the API instance for etag_cache."""
        return "Superset.dashboards.api.DashboardRestApi@v{}{}".format(
            self.appbuilder.app.config["VERSION_STRING"],
            self.appbuilder.app.config["VERSION_SHA"],
        )

    @etag_cache(
        get_last_modified=lambda _self, id_or_slug: (
            DashboardDAO.get_dashboard_changed_on(id_or_slug)
        ),
        max_age=0,
        raise_for_access=lambda _self, id_or_slug: (
            DashboardDAO.get_by_id_or_slug(id_or_slug)
        ),
        skip=lambda _self, id_or_slug: not is_feature_enabled("DASHBOARD_CACHE"),
    )
    @expose("/<id_or_slug>", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
        log_to_statsd=False,  # pylint: disable=arguments-renamed
    )
    def get(self, id_or_slug: str) -> Response:
        """Gets a dashboard
        ---
        get:
          description: >-
            Get a dashboard
          parameters:
          - in: path
            schema:
              type: string
            name: id_or_slug
            description: Either the id of the dashboard, or its slug
          responses:
            200:
              description: Dashboard
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      result:
                        $ref: '#/components/schemas/DashboardGetResponseSchema'
            302:
              description: Redirects to the current digest
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
        """
        # pylint: disable=arguments-differ
        try:
            dash = DashboardDAO.get_by_id_or_slug(id_or_slug)
            result = self.dashboard_get_response_schema.dump(dash)
            return self.response(200, result=result)
        except DashboardAccessDeniedError:
            return self.response_403()
        except DashboardNotFoundError:
            return self.response_404()

    @etag_cache(
        get_last_modified=lambda _self, id_or_slug: (
            DashboardDAO.get_dashboard_and_datasets_changed_on(id_or_slug)
        ),
        max_age=0,
        raise_for_access=lambda _self, id_or_slug: (
            DashboardDAO.get_by_id_or_slug(id_or_slug)
        ),
        skip=lambda _self, id_or_slug: not is_feature_enabled("DASHBOARD_CACHE"),
    )
    @expose("/<id_or_slug>/datasets", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs:
        f"{self.__class__.__name__}.get_datasets",
        log_to_statsd=False,
    )
    def get_datasets(self, id_or_slug: str) -> Response:
        """Gets a dashboard's datasets
        ---
        get:
          description: >-
            Returns a list of a dashboard's datasets. Each dataset includes only
            the information necessary to render the dashboard's charts.
          parameters:
          - in: path
            schema:
              type: string
            name: id_or_slug
            description: Either the id of the dashboard, or its slug
          responses:
            200:
              description: Dashboard dataset definitions
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      result:
                        type: array
                        items:
                          $ref: '#/components/schemas/DashboardDatasetSchema'
            302:
              description: Redirects to the current digest
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
        """
        try:
            datasets = DashboardDAO.get_datasets_for_dashboard(id_or_slug)
            result = [
                self.dashboard_dataset_schema.dump(dataset)
                for dataset in datasets
            ]
            return self.response(200, result=result)
        except DashboardAccessDeniedError:
            return self.response_403()
        except DashboardNotFoundError:
            return self.response_404()

    @etag_cache(
        get_last_modified=lambda _self, id_or_slug: (
            DashboardDAO.get_dashboard_and_slices_changed_on(id_or_slug)
        ),
        max_age=0,
        raise_for_access=lambda _self, id_or_slug: (
            DashboardDAO.get_by_id_or_slug(id_or_slug)
        ),
        skip=lambda _self, id_or_slug: not is_feature_enabled("DASHBOARD_CACHE"),
    )
    @expose("/<id_or_slug>/charts", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs:
        f"{self.__class__.__name__}.get_charts",
        log_to_statsd=False,
    )
    def get_charts(self, id_or_slug: str) -> Response:
        """Gets the chart definitions for a given dashboard
        ---
        get:
          description: >-
            Get the chart definitions for a given dashboard
          parameters:
          - in: path
            schema:
              type: string
            name: id_or_slug
          responses:
            200:
              description: Dashboard chart definitions
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      result:
                        type: array
                        items:
                          $ref: '#/components/schemas/ChartEntityResponseSchema'
            302:
              description: Redirects to the current digest
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
        """
        try:
            charts = DashboardDAO.get_charts_for_dashboard(id_or_slug)
            result = [
                self.chart_entity_response_schema.dump(chart)
                for chart in charts
            ]

            if is_feature_enabled("REMOVE_SLICE_LEVEL_LABEL_COLORS"):
                # dashboard metadata has dashboard-level label_colors,
                # so remove slice-level label_colors from its form_data
                for chart in result:
                    form_data = chart.get("form_data")
                    form_data.pop("label_colors", None)

            return self.response(200, result=result)
        except DashboardAccessDeniedError:
            return self.response_403()
        except DashboardNotFoundError:
            return self.response_404()

    @expose("/", methods=["POST"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.post",
        log_to_statsd=False,
    )
    @requires_json
    def post(self) -> Response:
        """Creates a new Dashboard
        ---
        post:
          description: >-
            Create a new Dashboard.
          requestBody:
            description: Dashboard schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
          responses:
            201:
              description: Dashboard added
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
            302:
              description: Redirects to the current digest
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            item = self.add_model_schema.load(request.json)
        # This validates custom Schema with custom validations
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = CreateDashboardCommand(g.user, item).run()
            return self.response(201, id=new_model.id, result=item)
        except DashboardInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except DashboardCreateFailedError as ex:
            logger.error(
                "Error creating model %s: %s",
                self.__class__.__name__,
                str(ex),
                exc_info=True,
            )
            return self.response_422(message=str(ex))

    @expose("/<pk>", methods=["PUT"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.put",
        log_to_statsd=False,
    )
    @requires_json
    def put(self, pk: int) -> Response:
        """Changes a Dashboard
        ---
        put:
          description: >-
            Changes a Dashboard.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          requestBody:
            description: Dashboard schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
          responses:
            200:
              description: Dashboard changed
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
                      last_modified_time:
                        type: number
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            item = self.edit_model_schema.load(request.json)
        # This validates custom Schema with custom validations
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            changed_model = UpdateDashboardCommand(g.user, pk, item).run()
            last_modified_time = changed_model.changed_on.replace(
                microsecond=0).timestamp()
            response = self.response(
                200,
                id=changed_model.id,
                result=item,
                last_modified_time=last_modified_time,
            )
        except DashboardNotFoundError:
            response = self.response_404()
        except DashboardForbiddenError:
            response = self.response_403()
        except DashboardInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except DashboardUpdateFailedError as ex:
            logger.error(
                "Error updating model %s: %s",
                self.__class__.__name__,
                str(ex),
                exc_info=True,
            )
            response = self.response_422(message=str(ex))
        return response

    @expose("/<pk>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs:
        f"{self.__class__.__name__}.delete",
        log_to_statsd=False,
    )
    def delete(self, pk: int) -> Response:
        """Deletes a Dashboard
        ---
        delete:
          description: >-
            Deletes a Dashboard.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          responses:
            200:
              description: Dashboard deleted
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            DeleteDashboardCommand(g.user, pk).run()
            return self.response(200, message="OK")
        except DashboardNotFoundError:
            return self.response_404()
        except DashboardForbiddenError:
            return self.response_403()
        except DashboardDeleteFailedError as ex:
            logger.error(
                "Error deleting model %s: %s",
                self.__class__.__name__,
                str(ex),
                exc_info=True,
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_delete_ids_schema)
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs:
        f"{self.__class__.__name__}.bulk_delete",
        log_to_statsd=False,
    )
    def bulk_delete(self, **kwargs: Any) -> Response:
        """Delete bulk Dashboards
        ---
        delete:
          description: >-
            Deletes multiple Dashboards in a bulk operation.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_delete_ids_schema'
          responses:
            200:
              description: Dashboard bulk delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        item_ids = kwargs["rison"]
        try:
            BulkDeleteDashboardCommand(g.user, item_ids).run()
            return self.response(
                200,
                message=ngettext(
                    "Deleted %(num)d dashboard",
                    "Deleted %(num)d dashboards",
                    num=len(item_ids),
                ),
            )
        except DashboardNotFoundError:
            return self.response_404()
        except DashboardForbiddenError:
            return self.response_403()
        except DashboardBulkDeleteFailedError as ex:
            return self.response_422(message=str(ex))

    @expose("/export/", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_export_ids_schema)
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs:
        f"{self.__class__.__name__}.export",
        log_to_statsd=False,
    )  # pylint: disable=too-many-locals
    def export(self, **kwargs: Any) -> Response:
        """Export dashboards
        ---
        get:
          description: >-
            Exports multiple Dashboards and downloads them as YAML files.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_export_ids_schema'
          responses:
            200:
              description: Dashboard export
              content:
                text/plain:
                  schema:
                    type: string
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        requested_ids = kwargs["rison"]
        token = request.args.get("token")

        if is_feature_enabled("VERSIONED_EXPORT"):
            timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
            root = f"dashboard_export_{timestamp}"
            filename = f"{root}.zip"

            buf = BytesIO()
            with ZipFile(buf, "w") as bundle:
                try:
                    for file_name, file_content in ExportDashboardsCommand(
                            requested_ids).run():
                        with bundle.open(f"{root}/{file_name}", "w") as fp:
                            fp.write(file_content.encode())
                except DashboardNotFoundError:
                    return self.response_404()
            buf.seek(0)

            response = send_file(
                buf,
                mimetype="application/zip",
                as_attachment=True,
                attachment_filename=filename,
            )
            if token:
                response.set_cookie(token, "done", max_age=600)
            return response

        query = self.datamodel.session.query(Dashboard).filter(
            Dashboard.id.in_(requested_ids))
        query = self._base_filters.apply_all(query)
        ids = [item.id for item in query.all()]
        if not ids:
            return self.response_404()
        export = Dashboard.export_dashboards(ids)
        resp = make_response(export, 200)
        resp.headers["Content-Disposition"] = generate_download_headers(
            "json")["Content-Disposition"]
        if token:
            resp.set_cookie(token, "done", max_age=600)
        return resp

    @expose("/<pk>/thumbnail/<digest>/", methods=["GET"])
    @protect()
    @safe
    @rison(thumbnail_query_schema)
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs:
        f"{self.__class__.__name__}.thumbnail",
        log_to_statsd=False,
    )
    def thumbnail(self, pk: int, digest: str,
                  **kwargs: Any) -> WerkzeugResponse:
        """Get Dashboard thumbnail
        ---
        get:
          description: >-
            Compute async or get already computed dashboard thumbnail from cache.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          - in: path
            name: digest
            description: A hex digest that makes this dashboard unique
            schema:
              type: string
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/thumbnail_query_schema'
          responses:
            200:
              description: Dashboard thumbnail image
              content:
               image/*:
                 schema:
                   type: string
                   format: binary
            202:
              description: Thumbnail does not exist on cache, fired async to compute
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        dashboard = self.datamodel.get(pk, self._base_filters)
        if not dashboard:
            return self.response_404()

        dashboard_url = get_url_path("Superset.dashboard",
                                     dashboard_id_or_slug=dashboard.id)
        # If force, request a screenshot from the workers
        if kwargs["rison"].get("force", False):
            cache_dashboard_thumbnail.delay(dashboard_url,
                                            dashboard.digest,
                                            force=True)
            return self.response(202, message="OK Async")
        # fetch the dashboard screenshot using the current user and cache if set
        screenshot = DashboardScreenshot(
            dashboard_url,
            dashboard.digest).get_from_cache(cache=thumbnail_cache)
        # If the screenshot does not exist, request one from the workers
        if not screenshot:
            self.incr_stats("async", self.thumbnail.__name__)
            cache_dashboard_thumbnail.delay(dashboard_url,
                                            dashboard.digest,
                                            force=True)
            return self.response(202, message="OK Async")
        # If the digest in the URL is stale, redirect to the current digest
        if dashboard.digest != digest:
            self.incr_stats("redirect", self.thumbnail.__name__)
            return redirect(
                url_for(
                    f"{self.__class__.__name__}.thumbnail",
                    pk=pk,
                    digest=dashboard.digest,
                ))
        self.incr_stats("from_cache", self.thumbnail.__name__)
        return Response(FileWrapper(screenshot),
                        mimetype="image/png",
                        direct_passthrough=True)

    @expose("/favorite_status/", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_fav_star_ids_schema)
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
        f".favorite_status",
        log_to_statsd=False,
    )
    def favorite_status(self, **kwargs: Any) -> Response:
        """Favorite Stars for Dashboards
        ---
        get:
          description: >-
            Check favorited dashboards for current user
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_fav_star_ids_schema'
          responses:
            200:
              description: Favorite star status for the requested dashboards
              content:
                application/json:
                  schema:
                    $ref: "#/components/schemas/GetFavStarIdsSchema"
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        requested_ids = kwargs["rison"]
        dashboards = DashboardDAO.find_by_ids(requested_ids)
        if not dashboards:
            return self.response_404()
        favorited_dashboard_ids = DashboardDAO.favorited_ids(
            dashboards, g.user.get_id())
        res = [{
            "id": request_id,
            "value": request_id in favorited_dashboard_ids
        } for request_id in requested_ids]
        return self.response(200, result=res)

    @expose("/import/", methods=["POST"])
    @protect()
    @statsd_metrics
    @event_logger.log_this_with_context(
        action=lambda self, *args, **kwargs:
        f"{self.__class__.__name__}.import_",
        log_to_statsd=False,
    )
    @requires_form_data
    def import_(self) -> Response:
        """Import dashboard(s) with associated charts/datasets/databases
        ---
        post:
          requestBody:
            required: true
            content:
              multipart/form-data:
                schema:
                  type: object
                  properties:
                    formData:
                      description: upload file (ZIP or JSON)
                      type: string
                      format: binary
                    passwords:
                      description: JSON map of passwords for each file
                      type: string
                    overwrite:
                      description: overwrite existing databases?
                      type: boolean
          responses:
            200:
              description: Dashboard import result
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        upload = request.files.get("formData")
        if not upload:
            return self.response_400()
        if is_zipfile(upload):
            with ZipFile(upload) as bundle:
                contents = get_contents_from_bundle(bundle)
        else:
            upload.seek(0)
            contents = {upload.filename: upload.read()}

        if not contents:
            raise NoValidFilesFoundError()

        passwords = (json.loads(request.form["passwords"])
                     if "passwords" in request.form else None)
        overwrite = request.form.get("overwrite") == "true"

        command = ImportDashboardsCommand(contents,
                                          passwords=passwords,
                                          overwrite=overwrite)
        command.run()
        return self.response(200, message="OK")
Example #12
import sqlalchemy as sqla
from flask_appbuilder import Model
from flask_appbuilder.models.decorators import renders
from markupsafe import escape, Markup
from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text
from sqlalchemy.orm import make_transient, relationship

from superset import ConnectorRegistry, db, is_feature_enabled, security_manager
from superset.legacy import update_time_range
from superset.models.helpers import AuditMixinNullable, ImportMixin
from superset.models.tags import ChartUpdater
from superset.tasks.thumbnails import cache_chart_thumbnail
from superset.utils import core as utils

if is_feature_enabled("SIP_38_VIZ_REARCHITECTURE"):
    from superset.viz_sip38 import BaseViz, viz_types  # type: ignore
else:
    from superset.viz import BaseViz, viz_types  # type: ignore

if TYPE_CHECKING:
    # pylint: disable=unused-import
    from superset.connectors.base.models import BaseDatasource

metadata = Model.metadata  # pylint: disable=no-member
slice_user = Table(
    "slice_user",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("user_id", Integer, ForeignKey("ab_user.id")),
    Column("slice_id", Integer, ForeignKey("slices.id")),
Example #13
    def test_feature_flags(self):
        self.assertEqual(is_feature_enabled('foo'), 'bar')
        self.assertEqual(is_feature_enabled('super'), 'set')
Example #14
def execute_sql_statement(  # pylint: disable=too-many-arguments,too-many-statements
    sql_statement: str,
    query: Query,
    session: Session,
    cursor: Any,
    log_params: Optional[Dict[str, Any]],
    apply_ctas: bool = False,
) -> SupersetResultSet:
    """Executes a single SQL statement"""
    database: Database = query.database
    db_engine_spec = database.db_engine_spec

    parsed_query = ParsedQuery(sql_statement)
    if is_feature_enabled("RLS_IN_SQLLAB"):
        # Insert any applicable RLS predicates
        parsed_query = ParsedQuery(
            str(
                insert_rls(
                    parsed_query._parsed[0],  # pylint: disable=protected-access
                    database.id,
                    query.schema,
                )))

    sql = parsed_query.stripped()
    # Probe whether the query is limited (by the UI dropdown or by the SQL
    # itself): request one extra row and check if more rows exist than the limit.
    increased_limit = None if query.limit is None else query.limit + 1

    if not db_engine_spec.is_readonly_query(
            parsed_query) and not database.allow_dml:
        raise SupersetErrorException(
            SupersetError(
                message=__(
                    "Only SELECT statements are allowed against this database."
                ),
                error_type=SupersetErrorType.DML_NOT_ALLOWED_ERROR,
                level=ErrorLevel.ERROR,
            ))
    if apply_ctas:
        if not query.tmp_table_name:
            start_dttm = datetime.fromtimestamp(query.start_time)
            query.tmp_table_name = "tmp_{}_table_{}".format(
                query.user_id, start_dttm.strftime("%Y_%m_%d_%H_%M_%S"))
        sql = parsed_query.as_create_table(
            query.tmp_table_name,
            schema_name=query.tmp_schema_name,
            method=query.ctas_method,
        )
        query.select_as_cta_used = True

    # Do not apply limit to the CTA queries when SQLLAB_CTAS_NO_LIMIT is set to true
    if db_engine_spec.is_select_query(parsed_query) and not (
            query.select_as_cta_used and SQLLAB_CTAS_NO_LIMIT):
        if SQL_MAX_ROW and (not query.limit or query.limit > SQL_MAX_ROW):
            query.limit = SQL_MAX_ROW
        sql = apply_limit_if_exists(database, increased_limit, query, sql)

    # Hook to allow environment-specific mutation (usually comments) to the SQL
    sql = SQL_QUERY_MUTATOR(
        sql,
        user_name=get_username(),  # TODO(john-bodley): Deprecate in 3.0.
        security_manager=security_manager,
        database=database,
    )
    try:
        query.executed_sql = sql
        if log_query:
            log_query(
                query.database.sqlalchemy_uri,
                query.executed_sql,
                query.schema,
                get_username(),
                __name__,
                security_manager,
                log_params,
            )
        session.commit()
        with stats_timing("sqllab.query.time_executing_query", stats_logger):
            logger.debug("Query %d: Running query: %s", query.id, sql)
            db_engine_spec.execute(cursor, sql, async_=True)
            logger.debug("Query %d: Handling cursor", query.id)
            db_engine_spec.handle_cursor(cursor, query, session)

        with stats_timing("sqllab.query.time_fetching_results", stats_logger):
            logger.debug(
                "Query %d: Fetching data for query object: %s",
                query.id,
                str(query.to_dict()),
            )
            data = db_engine_spec.fetch_data(cursor, increased_limit)
            if query.limit is None or len(data) <= query.limit:
                query.limiting_factor = LimitingFactor.NOT_LIMITED
            else:
                # drop the extra probe row (we fetched limit + 1)
                data = data[:-1]
    except SoftTimeLimitExceeded as ex:
        query.status = QueryStatus.TIMED_OUT

        logger.warning("Query %d: Time limit exceeded", query.id)
        logger.debug("Query %d: %s", query.id, ex)
        raise SupersetErrorException(
            SupersetError(
                message=__(
                    "The query was killed after %(sqllab_timeout)s seconds. It might "
                    "be too complex, or the database might be under heavy load.",
                    sqllab_timeout=SQLLAB_TIMEOUT,
                ),
                error_type=SupersetErrorType.SQLLAB_TIMEOUT_ERROR,
                level=ErrorLevel.ERROR,
            )) from ex
    except Exception as ex:
        # query is stopped in another thread/worker
        # stopping raises expected exceptions which we should skip
        session.refresh(query)
        if query.status == QueryStatus.STOPPED:
            raise SqlLabQueryStoppedException() from ex

        logger.error("Query %d: %s", query.id, type(ex), exc_info=True)
        logger.debug("Query %d: %s", query.id, ex)
        raise SqlLabException(db_engine_spec.extract_error_message(ex)) from ex

    logger.debug("Query %d: Fetching cursor description", query.id)
    cursor_description = cursor.description
    return SupersetResultSet(data, cursor_description, db_engine_spec)
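Note how the fetch above asks the database for one row more than the user's limit (increased_limit) so it can distinguish a result that merely fills the limit from one that was truncated. A standalone sketch of that pattern, with run_query as a hypothetical stand-in for the cursor call:

from typing import Any, Callable, List, Optional, Tuple

def fetch_with_truncation_flag(
        run_query: Callable[[Optional[int]], List[Any]],
        limit: Optional[int]) -> Tuple[List[Any], bool]:
    # Ask for limit + 1 rows; getting the full batch back means the
    # result was cut off by the limit rather than naturally exhausted.
    increased_limit = None if limit is None else limit + 1
    rows = run_query(increased_limit)
    if limit is None or len(rows) <= limit:
        return rows, False  # analogous to LimitingFactor.NOT_LIMITED
    return rows[:-1], True  # drop the sentinel row and flag truncation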
Example #15
 def ensure_thumbnails_enabled(self) -> Optional[Response]:
     if not is_feature_enabled("THUMBNAILS"):
         return self.response_404()
     return None
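A guard like this is designed to be called first in each thumbnail handler. A hedged sketch of the intended call pattern (send_thumbnail is a hypothetical method, not Superset's API):

 def thumbnail(self, pk: int) -> Response:
     guard = self.ensure_thumbnails_enabled()
     if guard is not None:  # feature flag off: propagate the 404 response
         return guard
     return self.send_thumbnail(pk)  # send_thumbnail is illustrative only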
Example #16
 def test_nonexistent_feature_flags(self):
     self.assertFalse(is_feature_enabled('FOO'))
Example #17
class TestCore(SupersetTestCase):
    def setUp(self):
        db.session.query(Query).delete()
        db.session.query(DatasourceAccessRequest).delete()
        db.session.query(models.Log).delete()
        self.table_ids = {
            tbl.table_name: tbl.id
            for tbl in (db.session.query(SqlaTable).all())
        }
        self.original_unsafe_db_setting = app.config[
            "PREVENT_UNSAFE_DB_CONNECTIONS"]

    def tearDown(self):
        db.session.query(Query).delete()
        app.config[
            "PREVENT_UNSAFE_DB_CONNECTIONS"] = self.original_unsafe_db_setting

    def test_login(self):
        resp = self.get_resp("/login/",
                             data=dict(username="******", password="******"))
        self.assertNotIn("User confirmation needed", resp)

        resp = self.get_resp("/logout/", follow_redirects=True)
        self.assertIn("User confirmation needed", resp)

        resp = self.get_resp("/login/",
                             data=dict(username="******",
                                       password="******"))
        self.assertIn("User confirmation needed", resp)

    def test_dashboard_endpoint(self):
        self.login()
        resp = self.client.get("/superset/dashboard/-1/")
        assert resp.status_code == 404

    def test_slice_endpoint(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)
        resp = self.get_resp("/superset/slice/{}/".format(slc.id))
        assert "Time Column" in resp
        assert "List Roles" in resp

        # Testing overrides
        resp = self.get_resp("/superset/slice/{}/?standalone=true".format(
            slc.id))
        assert '<div class="navbar' not in resp

        resp = self.client.get("/superset/slice/-1/")
        assert resp.status_code == 404

    def test_viz_cache_key(self):
        self.login(username="******")
        slc = self.get_slice("Girls", db.session)

        viz = slc.viz
        qobj = viz.query_obj()
        cache_key = viz.cache_key(qobj)

        qobj["groupby"] = []
        cache_key_with_groupby = viz.cache_key(qobj)
        self.assertNotEqual(cache_key, cache_key_with_groupby)

        self.assertNotEqual(viz.cache_key(qobj),
                            viz.cache_key(qobj, time_compare="12 weeks"))

        self.assertNotEqual(
            viz.cache_key(qobj, time_compare="28 days"),
            viz.cache_key(qobj, time_compare="12 weeks"),
        )

        qobj["inner_from_dttm"] = datetime.datetime(1901, 1, 1)

        self.assertEqual(cache_key_with_groupby, viz.cache_key(qobj))

    def test_get_superset_tables_not_allowed(self):
        example_db = utils.get_example_database()
        schema_name = self.default_schema_backend_map[example_db.backend]
        self.login(username="******")
        uri = f"superset/tables/{example_db.id}/{schema_name}/undefined/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    def test_get_superset_tables_substr(self):
        example_db = utils.get_example_database()
        if example_db.backend in {"presto", "hive"}:
            # TODO: change table to the real table that is in examples.
            return
        self.login(username="******")
        schema_name = self.default_schema_backend_map[example_db.backend]
        uri = f"superset/tables/{example_db.id}/{schema_name}/ab_role/"
        rv = self.client.get(uri)
        response = json.loads(rv.data.decode("utf-8"))
        self.assertEqual(rv.status_code, 200)

        expected_response = {
            "options": [{
                "label": "ab_role",
                "schema": schema_name,
                "title": "ab_role",
                "type": "table",
                "value": "ab_role",
                "extra": None,
            }],
            "tableLength":
            1,
        }
        self.assertEqual(response, expected_response)

    def test_get_superset_tables_not_found(self):
        self.login(username="******")
        uri = f"superset/tables/invalid/public/undefined/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    def test_annotation_json_endpoint(self):
        # Set up an annotation layer and annotation
        layer = AnnotationLayer(name="foo", descr="bar")
        db.session.add(layer)
        db.session.commit()

        annotation = Annotation(
            layer_id=layer.id,
            short_descr="my_annotation",
            start_dttm=datetime.datetime(2020, 5, 20, 18, 21, 51),
            end_dttm=datetime.datetime(2020, 5, 20, 18, 31, 51),
        )

        db.session.add(annotation)
        db.session.commit()

        self.login()
        resp_annotations = json.loads(
            self.get_resp("annotationlayermodelview/api/read"))
        # the UI needs id and name to function
        self.assertIn("id", resp_annotations["result"][0])
        self.assertIn("name", resp_annotations["result"][0])

        response = self.get_resp(
            f"/superset/annotation_json/{layer.id}?form_data=" +
            quote(json.dumps({"time_range": "100 years ago : now"})))
        assert "my_annotation" in response

        # Rollback changes
        db.session.delete(annotation)
        db.session.delete(layer)
        db.session.commit()

    def test_admin_only_permissions(self):
        def assert_admin_permission_in(role_name, assert_func):
            role = security_manager.find_role(role_name)
            permissions = [p.permission.name for p in role.permissions]
            assert_func("can_sync_druid_source", permissions)
            assert_func("can_approve", permissions)

        assert_admin_permission_in("Admin", self.assertIn)
        assert_admin_permission_in("Alpha", self.assertNotIn)
        assert_admin_permission_in("Gamma", self.assertNotIn)

    def test_admin_only_menu_views(self):
        def assert_admin_view_menus_in(role_name, assert_func):
            role = security_manager.find_role(role_name)
            view_menus = [p.view_menu.name for p in role.permissions]
            assert_func("ResetPasswordView", view_menus)
            assert_func("RoleModelView", view_menus)
            assert_func("Security", view_menus)
            assert_func("SQL Lab", view_menus)

        assert_admin_view_menus_in("Admin", self.assertIn)
        assert_admin_view_menus_in("Alpha", self.assertNotIn)
        assert_admin_view_menus_in("Gamma", self.assertNotIn)

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_save_slice(self):
        self.login(username="******")
        slice_name = f"Energy Sankey"
        slice_id = self.get_slice(slice_name, db.session).id
        copy_name_prefix = "Test Sankey"
        copy_name = f"{copy_name_prefix}[save]{random.random()}"
        tbl_id = self.table_ids.get("energy_usage")
        new_slice_name = f"{copy_name_prefix}[overwrite]{random.random()}"

        url = ("/superset/explore/table/{}/?slice_name={}&"
               "action={}&datasource_name=energy_usage")

        form_data = {
            "adhoc_filters": [],
            "viz_type": "sankey",
            "groupby": ["target"],
            "metric": "sum__value",
            "row_limit": 5000,
            "slice_id": slice_id,
            "time_range_endpoints": ["inclusive", "exclusive"],
        }
        # Changing name and save as a new slice
        resp = self.client.post(
            url.format(tbl_id, copy_name, "saveas"),
            data={"form_data": json.dumps(form_data)},
        )
        db.session.expunge_all()
        new_slice_id = resp.json["form_data"]["slice_id"]
        slc = db.session.query(Slice).filter_by(id=new_slice_id).one()

        self.assertEqual(slc.slice_name, copy_name)
        form_data.pop("slice_id")  # We don't save the slice id when saving as
        self.assertEqual(slc.viz.form_data, form_data)

        form_data = {
            "adhoc_filters": [],
            "viz_type": "sankey",
            "groupby": ["source"],
            "metric": "sum__value",
            "row_limit": 5000,
            "slice_id": new_slice_id,
            "time_range": "now",
            "time_range_endpoints": ["inclusive", "exclusive"],
        }
        # Setting the name back to its original name by overwriting new slice
        self.client.post(
            url.format(tbl_id, new_slice_name, "overwrite"),
            data={"form_data": json.dumps(form_data)},
        )
        db.session.expunge_all()
        slc = db.session.query(Slice).filter_by(id=new_slice_id).one()
        self.assertEqual(slc.slice_name, new_slice_name)
        self.assertEqual(slc.viz.form_data, form_data)

        # Cleanup
        slices = (db.session.query(Slice).filter(
            Slice.slice_name.like(copy_name_prefix + "%")).all())
        for slc in slices:
            db.session.delete(slc)
        db.session.commit()

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_filter_endpoint(self):
        self.login(username="******")
        slice_name = "Energy Sankey"
        slice_id = self.get_slice(slice_name, db.session).id
        db.session.commit()
        tbl_id = self.table_ids.get("energy_usage")
        table = db.session.query(SqlaTable).filter(
            SqlaTable.id == tbl_id).one()
        table.filter_select_enabled = True
        url = (
            "/superset/filter/table/{}/target/?viz_type=sankey&groupby=source"
            "&metric=sum__value&flt_col_0=source&flt_op_0=in&flt_eq_0=&"
            "slice_id={}&datasource_name=energy_usage&"
            "datasource_id=1&datasource_type=table")

        # Changing name
        resp = self.get_resp(url.format(tbl_id, slice_id))
        assert len(resp) > 0
        assert "energy_target0" in resp

    def test_slice_data(self):
        # slice data should have some required attributes
        self.login(username="******")
        slc = self.get_slice(slice_name="Girls",
                             session=db.session,
                             expunge_from_session=False)
        slc_data_attributes = slc.data.keys()
        assert "changed_on" in slc_data_attributes
        assert "modified" in slc_data_attributes
        assert "owners" in slc_data_attributes

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_slices(self):
        # Testing by hitting the two supported end points for all slices
        self.login(username="******")
        Slc = Slice
        urls = []
        for slc in db.session.query(Slc).all():
            urls += [
                (slc.slice_name, "explore", slc.slice_url),
            ]
        for name, method, url in urls:
            logger.info(f"[{name}]/[{method}]: {url}")
            print(f"[{name}]/[{method}]: {url}")
            resp = self.client.get(url)
            self.assertEqual(resp.status_code, 200)

    def test_tablemodelview_list(self):
        self.login(username="******")

        url = "/tablemodelview/list/"
        resp = self.get_resp(url)

        # assert that a table is listed
        table = db.session.query(SqlaTable).first()
        assert table.name in resp
        assert "/superset/explore/table/{}".format(table.id) in resp

    def test_add_slice(self):
        self.login(username="******")
        # assert that /chart/add responds with 200
        url = "/chart/add"
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)

    def test_get_user_slices_for_owners(self):
        self.login(username="******")
        user = security_manager.find_user("alpha")
        slice_name = "Girls"

        # ensure user is not owner of any slices
        url = f"/superset/user_slices/{user.id}/"
        resp = self.client.get(url)
        data = json.loads(resp.data)
        self.assertEqual(data, [])

        # make user owner of slice and verify that endpoint returns said slice
        slc = self.get_slice(slice_name=slice_name,
                             session=db.session,
                             expunge_from_session=False)
        slc.owners = [user]
        db.session.merge(slc)
        db.session.commit()
        url = f"/superset/user_slices/{user.id}/"
        resp = self.client.get(url)
        data = json.loads(resp.data)
        self.assertEqual(len(data), 1)
        self.assertEqual(data[0]["title"], slice_name)

        # remove ownership and ensure user no longer gets slice
        slc = self.get_slice(slice_name=slice_name,
                             session=db.session,
                             expunge_from_session=False)
        slc.owners = []
        db.session.merge(slc)
        db.session.commit()
        url = f"/superset/user_slices/{user.id}/"
        resp = self.client.get(url)
        data = json.loads(resp.data)
        self.assertEqual(data, [])

    def test_get_user_slices(self):
        self.login(username="******")
        userid = security_manager.find_user("admin").id
        url = f"/sliceasync/api/read?_flt_0_created_by={userid}"
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_slices_V2(self):
        # Add explore-v2-beta role to admin user
        # Test all slice urls as user with with explore-v2-beta role
        security_manager.add_role("explore-v2-beta")

        security_manager.add_user(
            "explore_beta",
            "explore_beta",
            " user",
            "*****@*****.**",
            security_manager.find_role("explore-v2-beta"),
            password="******",
        )
        self.login(username="******", password="******")

        Slc = Slice
        urls = []
        for slc in db.session.query(Slc).all():
            urls += [(slc.slice_name, "slice_url", slc.slice_url)]
        for name, method, url in urls:
            print(f"[{name}]/[{method}]: {url}")
            self.client.get(url)

    def test_doctests(self):
        modules = [utils, models, sql_lab]
        for mod in modules:
            failed, tests = doctest.testmod(mod)
            if failed:
                raise Exception("Failed a doctest")

    def test_misc(self):
        assert self.get_resp("/health") == "OK"
        assert self.get_resp("/healthcheck") == "OK"
        assert self.get_resp("/ping") == "OK"

    def test_testconn(self, username="******"):
        # need to temporarily allow sqlite dbs, teardown will undo this
        app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
        self.login(username=username)
        database = utils.get_example_database()
        # validate that the endpoint works with the password-masked sqlalchemy uri
        data = json.dumps({
            "uri": database.safe_sqlalchemy_uri(),
            "name": "examples",
            "impersonate_user": False,
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 200
        assert response.headers["Content-Type"] == "application/json"

        # validate that the endpoint works with the decrypted sqlalchemy uri
        data = json.dumps({
            "uri": database.sqlalchemy_uri_decrypted,
            "name": "examples",
            "impersonate_user": False,
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 200
        assert response.headers["Content-Type"] == "application/json"

    def test_testconn_failed_conn(self, username="******"):
        self.login(username=username)

        data = json.dumps({
            "uri": "broken://url",
            "name": "examples",
            "impersonate_user": False
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 400
        assert response.headers["Content-Type"] == "application/json"
        response_body = json.loads(response.data.decode("utf-8"))
        expected_body = {"error": "Could not load database driver: broken"}
        assert response_body == expected_body, "%s != %s" % (
            response_body,
            expected_body,
        )

        data = json.dumps({
            "uri": "mssql+pymssql://url",
            "name": "examples",
            "impersonate_user": False,
        })
        response = self.client.post("/superset/testconn",
                                    data=data,
                                    content_type="application/json")
        assert response.status_code == 400
        assert response.headers["Content-Type"] == "application/json"
        response_body = json.loads(response.data.decode("utf-8"))
        expected_body = {
            "error": "Could not load database driver: mssql+pymssql"
        }
        assert response_body == expected_body, "%s != %s" % (
            response_body,
            expected_body,
        )

    def test_testconn_unsafe_uri(self, username="******"):
        self.login(username=username)
        app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True

        response = self.client.post(
            "/superset/testconn",
            data=json.dumps({
                "uri": "sqlite:///home/superset/unsafe.db",
                "name": "unsafe",
                "impersonate_user": False,
            }),
            content_type="application/json",
        )
        self.assertEqual(400, response.status_code)
        response_body = json.loads(response.data.decode("utf-8"))
        expected_body = {
            "error":
            "SQLite database cannot be used as a data source for security reasons."
        }
        self.assertEqual(expected_body, response_body)

    def test_custom_password_store(self):
        database = utils.get_example_database()
        conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)

        def custom_password_store(uri):
            return "password_store_test"

        models.custom_password_store = custom_password_store
        conn = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
        if conn_pre.password:
            assert conn.password == "password_store_test"
            assert conn.password != conn_pre.password
        # Disable the custom password store for later tests
        models.custom_password_store = None

    def test_databaseview_edit(self, username="******"):
        # validate that sending a password-masked uri does not over-write the decrypted
        # uri
        self.login(username=username)
        database = utils.get_example_database()
        sqlalchemy_uri_decrypted = database.sqlalchemy_uri_decrypted
        url = "databaseview/edit/{}".format(database.id)
        data = {
            k: database.__getattribute__(k)
            for k in DatabaseView.add_columns
        }
        data["sqlalchemy_uri"] = database.safe_sqlalchemy_uri()
        self.client.post(url, data=data)
        database = utils.get_example_database()
        self.assertEqual(sqlalchemy_uri_decrypted,
                         database.sqlalchemy_uri_decrypted)

        # Need to clean up after ourselves
        database.impersonate_user = False
        database.allow_dml = False
        database.allow_run_async = False
        db.session.commit()

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_warm_up_cache(self):
        self.login()
        slc = self.get_slice("Girls", db.session)
        data = self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(
            slc.id))
        self.assertEqual(data, [{
            "slice_id": slc.id,
            "viz_error": None,
            "viz_status": "success"
        }])

        data = self.get_json_resp(
            "/superset/warm_up_cache?table_name=energy_usage&db_name=main")
        assert len(data) > 0

        dashboard = self.get_dash_by_slug("births")

        assert self.get_json_resp(
            f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}"
        ) == [{
            "slice_id": slc.id,
            "viz_error": None,
            "viz_status": "success"
        }]

        assert self.get_json_resp(
            f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters="
            + quote(
                json.dumps([{
                    "col": "name",
                    "op": "in",
                    "val": ["Jennifer"]
                }]))) == [{
                    "slice_id": slc.id,
                    "viz_error": None,
                    "viz_status": "success"
                }]

    def test_cache_logging(self):
        girls_slice = self.get_slice("Girls", db.session)
        self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(
            girls_slice.id))
        ck = db.session.query(CacheKey).order_by(CacheKey.id.desc()).first()
        assert ck.datasource_uid == f"{girls_slice.table.id}__table"

    def test_shortner(self):
        self.login(username="******")
        data = (
            "//superset/explore/table/1/?viz_type=sankey&groupby=source&"
            "groupby=target&metric=sum__value&row_limit=5000&where=&having=&"
            "flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id=78&slice_name="
            "Energy+Sankey&collapsed_fieldsets=&action=&datasource_name="
            "energy_usage&datasource_id=1&datasource_type=table&"
            "previous_viz_type=sankey")
        resp = self.client.post("/r/shortner/", data=dict(data=data))
        assert re.search(r"\/r\/[0-9]+", resp.data.decode("utf-8"))

    @skipUnless((is_feature_enabled("KV_STORE")),
                "skipping as /kv/ endpoints are not enabled")
    def test_kv(self):
        self.login(username="******")

        resp = self.client.get("/kv/10001/")
        self.assertEqual(404, resp.status_code)

        value = json.dumps({"data": "this is a test"})
        resp = self.client.post("/kv/store/", data=dict(data=value))
        self.assertEqual(resp.status_code, 200)
        kv = db.session.query(models.KeyValue).first()
        kv_value = kv.value
        self.assertEqual(json.loads(value), json.loads(kv_value))

        resp = self.client.get("/kv/{}/".format(kv.id))
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(value),
                         json.loads(resp.data.decode("utf-8")))

    def test_gamma(self):
        self.login(username="******")
        assert "Charts" in self.get_resp("/chart/list/")
        assert "Dashboards" in self.get_resp("/dashboard/list/")

    def test_csv_endpoint(self):
        self.login()
        sql = """
            SELECT name
            FROM birth_names
            WHERE name = 'James'
            LIMIT 1
        """
        client_id = "{}".format(random.getrandbits(64))[:10]
        self.run_sql(sql, client_id, raise_on_error=True)

        resp = self.get_resp("/superset/csv/{}".format(client_id))
        data = csv.reader(io.StringIO(resp))
        expected_data = csv.reader(io.StringIO("name\nJames\n"))

        client_id = "{}".format(random.getrandbits(64))[:10]
        self.run_sql(sql, client_id, raise_on_error=True)

        resp = self.get_resp("/superset/csv/{}".format(client_id))
        data = csv.reader(io.StringIO(resp))
        expected_data = csv.reader(io.StringIO("name\nJames\n"))

        self.assertEqual(list(expected_data), list(data))
        self.logout()

    def test_extra_table_metadata(self):
        self.login()
        example_db = utils.get_example_database()
        schema = "default" if example_db.backend in {"presto", "hive"
                                                     } else "superset"
        self.get_json_resp(
            f"/superset/extra_table_metadata/{example_db.id}/birth_names/{schema}/"
        )

    def test_templated_sql_json(self):
        if utils.get_example_database().backend == "presto":
            # TODO: make it work for presto
            return
        self.login()
        sql = "SELECT '{{ 1+1 }}' as test"
        data = self.run_sql(sql, "fdaklj3ws")
        self.assertEqual(data["data"][0]["test"], "2")

    @mock.patch("tests.superset_test_custom_template_processors.datetime")
    @mock.patch("superset.sql_lab.get_sql_results")
    def test_custom_templated_sql_json(self, sql_lab_mock, mock_dt) -> None:
        """Test sqllab receives macros expanded query."""
        mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1))
        self.login()
        sql = "SELECT '$DATE()' as test"
        resp = {
            "status": utils.QueryStatus.SUCCESS,
            "query": {
                "rows": 1
            },
            "data": [{
                "test": "'1970-01-01'"
            }],
        }
        sql_lab_mock.return_value = resp

        dbobj = self.create_fake_db_for_macros()
        json_payload = dict(database_id=dbobj.id, sql=sql)
        self.get_json_resp("/superset/sql_json/",
                           raise_on_error=False,
                           json_=json_payload)
        assert sql_lab_mock.called
        self.assertEqual(sql_lab_mock.call_args[0][1],
                         "SELECT '1970-01-01' as test")

        self.delete_fake_db_for_macros()

    def test_fetch_datasource_metadata(self):
        self.login(username="******")
        url = "/superset/fetch_datasource_metadata?" "datasourceKey=1__table"
        resp = self.get_json_resp(url)
        keys = [
            "name",
            "type",
            "order_by_choices",
            "granularity_sqla",
            "time_grain_sqla",
            "id",
        ]
        for k in keys:
            self.assertIn(k, resp.keys())

    def test_user_profile(self, username="******"):
        self.login(username=username)
        slc = self.get_slice("Girls", db.session)

        # Setting some faves
        url = f"/superset/favstar/Slice/{slc.id}/select/"
        resp = self.get_json_resp(url)
        self.assertEqual(resp["count"], 1)

        dash = db.session.query(Dashboard).filter_by(slug="births").first()
        url = f"/superset/favstar/Dashboard/{dash.id}/select/"
        resp = self.get_json_resp(url)
        self.assertEqual(resp["count"], 1)

        userid = security_manager.find_user("admin").id
        resp = self.get_resp(f"/superset/profile/{username}/")
        self.assertIn('"app"', resp)
        data = self.get_json_resp(f"/superset/recent_activity/{userid}/")
        self.assertNotIn("message", data)
        data = self.get_json_resp(f"/superset/created_slices/{userid}/")
        self.assertNotIn("message", data)
        data = self.get_json_resp(f"/superset/created_dashboards/{userid}/")
        self.assertNotIn("message", data)
        data = self.get_json_resp(f"/superset/fave_slices/{userid}/")
        self.assertNotIn("message", data)
        data = self.get_json_resp(f"/superset/fave_dashboards/{userid}/")
        self.assertNotIn("message", data)
        data = self.get_json_resp(f"/superset/user_slices/{userid}/")
        self.assertNotIn("message", data)
        data = self.get_json_resp(
            f"/superset/fave_dashboards_by_username/{username}/")
        self.assertNotIn("message", data)

    def test_slice_id_is_always_logged_correctly_on_web_request(self):
        # superset/explore case
        slc = db.session.query(Slice).filter_by(slice_name="Girls").one()
        qry = db.session.query(models.Log).filter_by(slice_id=slc.id)
        self.get_resp(slc.slice_url, {"form_data": json.dumps(slc.form_data)})
        self.assertEqual(1, qry.count())

    def create_sample_csvfile(self, filename: str, content: List[str]) -> None:
        with open(filename, "w+") as test_file:
            for line in content:
                test_file.write(f"{line}\n")

    def create_sample_excelfile(self, filename: str,
                                content: Dict[str, str]) -> None:
        pd.DataFrame(content).to_excel(filename)

    def enable_csv_upload(self, database: models.Database) -> None:
        """Enables csv upload in the given database."""
        database.allow_csv_upload = True
        db.session.commit()
        add_datasource_page = self.get_resp("/databaseview/list/")
        self.assertIn("Upload a CSV", add_datasource_page)

        form_get = self.get_resp("/csvtodatabaseview/form")
        self.assertIn("CSV to Database configuration", form_get)

    def test_dataframe_timezone(self):
        tz = pytz.FixedOffset(60)
        data = [
            (datetime.datetime(2017, 11, 18, 21, 53, 0, 219225, tzinfo=tz), ),
            (datetime.datetime(2017, 11, 18, 22, 6, 30, tzinfo=tz), ),
        ]
        results = SupersetResultSet(list(data), [["data"]], BaseEngineSpec)
        df = results.to_pandas_df()
        data = dataframe.df_to_records(df)
        json_str = json.dumps(data,
                              default=utils.pessimistic_json_iso_dttm_ser)
        self.assertDictEqual(
            data[0],
            {"data": pd.Timestamp("2017-11-18 21:53:00.219225+0100", tz=tz)})
        self.assertDictEqual(
            data[1], {"data": pd.Timestamp("2017-11-18 22:06:30+0100", tz=tz)})
        self.assertEqual(
            json_str,
            '[{"data": "2017-11-18T21:53:00.219225+01:00"}, {"data": "2017-11-18T22:06:30+01:00"}]',
        )

    def test_mssql_engine_spec_pymssql(self):
        # Test for case when tuple is returned (pymssql)
        data = [
            (1, 1, datetime.datetime(2017, 10, 19, 23, 39, 16, 660000)),
            (2, 2, datetime.datetime(2018, 10, 19, 23, 39, 16, 660000)),
        ]
        results = SupersetResultSet(list(data), [["col1"], ["col2"], ["col3"]],
                                    MssqlEngineSpec)
        df = results.to_pandas_df()
        data = dataframe.df_to_records(df)
        self.assertEqual(len(data), 2)
        self.assertEqual(
            data[0],
            {
                "col1": 1,
                "col2": 1,
                "col3": pd.Timestamp("2017-10-19 23:39:16.660000")
            },
        )

    def test_comments_in_sqlatable_query(self):
        clean_query = "SELECT '/* val 1 */' as c1, '-- val 2' as c2 FROM tbl"
        commented_query = "/* comment 1 */" + clean_query + "-- comment 2"
        table = SqlaTable(
            table_name="test_comments_in_sqlatable_query_table",
            sql=commented_query,
            database=get_example_database(),
        )
        rendered_query = str(table.get_from_clause())
        self.assertEqual(clean_query, rendered_query)

    def test_slice_payload_no_datasource(self):
        self.login(username="******")
        data = self.get_json_resp("/superset/explore_json/",
                                  raise_on_error=False)

        self.assertEqual(
            data["errors"][0]["message"],
            "The datasource associated with this chart no longer exists",
        )

    @mock.patch(
        "superset.security.SupersetSecurityManager.get_schemas_accessible_by_user"
    )
    @mock.patch("superset.security.SupersetSecurityManager.can_access_database"
                )
    @mock.patch(
        "superset.security.SupersetSecurityManager.can_access_all_datasources")
    def test_schemas_access_for_csv_upload_endpoint(
        self,
        mock_can_access_all_datasources,
        mock_can_access_database,
        mock_schemas_accessible,
    ):
        self.login(username="******")
        dbobj = self.create_fake_db()
        mock_can_access_all_datasources.return_value = False
        mock_can_access_database.return_value = False
        mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"]
        data = self.get_json_resp(
            url="/superset/schemas_access_for_csv_upload?db_id={db_id}".format(
                db_id=dbobj.id))
        assert data == ["this_schema_is_allowed_too"]
        self.delete_fake_db()

    def test_select_star(self):
        self.login(username="******")
        examples_db = utils.get_example_database()
        resp = self.get_resp(
            f"/superset/select_star/{examples_db.id}/birth_names")
        self.assertIn("gender", resp)

    def test_get_select_star_not_allowed(self):
        """
        Database API: Test get select star not allowed
        """
        self.login(username="******")
        example_db = utils.get_example_database()
        resp = self.client.get(
            f"/superset/select_star/{example_db.id}/birth_names")
        self.assertEqual(resp.status_code, 404)

    @mock.patch("superset.views.core.results_backend_use_msgpack", False)
    @mock.patch("superset.views.core.results_backend")
    def test_display_limit(self, mock_results_backend):
        self.login()

        data = [{"col_0": i} for i in range(100)]
        payload = {
            "status": utils.QueryStatus.SUCCESS,
            "query": {
                "rows": 100
            },
            "data": data,
        }
        # limit results to 1
        expected_key = {
            "status": "success",
            "query": {
                "rows": 100
            },
            "data": data
        }
        limited_data = data[:1]
        expected_limited = {
            "status": "success",
            "query": {
                "rows": 100
            },
            "data": limited_data,
            "displayLimitReached": True,
        }

        query_mock = mock.Mock()
        query_mock.sql = "SELECT *"
        query_mock.database = 1
        query_mock.schema = "superset"

        # do not apply msgpack serialization
        use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"]
        app.config["RESULTS_BACKEND_USE_MSGPACK"] = False
        serialized_payload = sql_lab._serialize_payload(payload, False)
        compressed = utils.zlib_compress(serialized_payload)
        mock_results_backend.get.return_value = compressed

        with mock.patch("superset.views.core.db") as mock_superset_db:
            mock_superset_db.session.query().filter_by(
            ).one_or_none.return_value = query_mock
            # get all results
            result_key = json.loads(self.get_resp("/superset/results/key/"))
            result_limited = json.loads(
                self.get_resp("/superset/results/key/?rows=1"))

        self.assertEqual(result_key, expected_key)
        self.assertEqual(result_limited, expected_limited)

        app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack

    def test_results_default_deserialization(self):
        use_new_deserialization = False
        data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
        cursor_descr = (
            ("a", "string"),
            ("b", "int"),
            ("c", "float"),
            ("d", "datetime"),
        )
        db_engine_spec = BaseEngineSpec()
        results = SupersetResultSet(data, cursor_descr, db_engine_spec)
        query = {
            "database_id": 1,
            "sql": "SELECT * FROM birth_names LIMIT 100",
            "status": utils.QueryStatus.PENDING,
        }
        (
            serialized_data,
            selected_columns,
            all_columns,
            expanded_columns,
        ) = sql_lab._serialize_and_expand_data(results, db_engine_spec,
                                               use_new_deserialization)
        payload = {
            "query_id": 1,
            "status": utils.QueryStatus.SUCCESS,
            "state": utils.QueryStatus.SUCCESS,
            "data": serialized_data,
            "columns": all_columns,
            "selected_columns": selected_columns,
            "expanded_columns": expanded_columns,
            "query": query,
        }

        serialized_payload = sql_lab._serialize_payload(
            payload, use_new_deserialization)
        self.assertIsInstance(serialized_payload, str)

        query_mock = mock.Mock()
        deserialized_payload = superset.views.utils._deserialize_results_payload(
            serialized_payload, query_mock, use_new_deserialization)

        self.assertDictEqual(deserialized_payload, payload)
        query_mock.assert_not_called()

    def test_results_msgpack_deserialization(self):
        use_new_deserialization = True
        data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
        cursor_descr = (
            ("a", "string"),
            ("b", "int"),
            ("c", "float"),
            ("d", "datetime"),
        )
        db_engine_spec = BaseEngineSpec()
        results = SupersetResultSet(data, cursor_descr, db_engine_spec)
        query = {
            "database_id": 1,
            "sql": "SELECT * FROM birth_names LIMIT 100",
            "status": utils.QueryStatus.PENDING,
        }
        (
            serialized_data,
            selected_columns,
            all_columns,
            expanded_columns,
        ) = sql_lab._serialize_and_expand_data(results, db_engine_spec,
                                               use_new_deserialization)
        payload = {
            "query_id": 1,
            "status": utils.QueryStatus.SUCCESS,
            "state": utils.QueryStatus.SUCCESS,
            "data": serialized_data,
            "columns": all_columns,
            "selected_columns": selected_columns,
            "expanded_columns": expanded_columns,
            "query": query,
        }

        serialized_payload = sql_lab._serialize_payload(
            payload, use_new_deserialization)
        self.assertIsInstance(serialized_payload, bytes)

        with mock.patch.object(
                db_engine_spec, "expand_data",
                wraps=db_engine_spec.expand_data) as expand_data:
            query_mock = mock.Mock()
            query_mock.database.db_engine_spec.expand_data = expand_data

            deserialized_payload = superset.views.utils._deserialize_results_payload(
                serialized_payload, query_mock, use_new_deserialization)
            df = results.to_pandas_df()
            payload["data"] = dataframe.df_to_records(df)

            self.assertDictEqual(deserialized_payload, payload)
            expand_data.assert_called_once()

    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        {"FOO": lambda x: 1},
        clear=True,
    )
    def test_feature_flag_serialization(self):
        """
        Functions in feature flags don't break bootstrap data serialization.
        """
        self.login()

        encoded = json.dumps(
            {
                "FOO": lambda x: 1,
                "super": "set"
            },
            default=utils.pessimistic_json_iso_dttm_ser,
        )
        html_string = (html.escape(encoded,
                                   quote=False).replace("'", "&#39;").replace(
                                       '"', "&#34;"))

        urls = [
            "/superset/sqllab",
            "/superset/welcome",
            "/superset/dashboard/1/",
            "/superset/profile/admin/",
            "/superset/explore/table/1",
        ]
        for url in urls:
            data = self.get_resp(url)
            self.assertTrue(html_string in data)

    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        {"SQLLAB_BACKEND_PERSISTENCE": True},
        clear=True,
    )
    def test_sqllab_backend_persistence_payload(self):
        username = "******"
        self.login(username)
        user_id = security_manager.find_user(username).id

        # create a tab
        data = {
            "queryEditor":
            json.dumps({
                "title": "Untitled Query 1",
                "dbId": 1,
                "schema": None,
                "autorun": False,
                "sql": "SELECT ...",
                "queryLimit": 1000,
            })
        }
        resp = self.get_json_resp("/tabstateview/", data=data)
        tab_state_id = resp["id"]

        # run a query in the created tab
        self.run_sql(
            "SELECT name FROM birth_names",
            "client_id_1",
            user_name=username,
            raise_on_error=True,
            sql_editor_id=tab_state_id,
        )
        # run an orphan query (no tab)
        self.run_sql(
            "SELECT name FROM birth_names",
            "client_id_2",
            user_name=username,
            raise_on_error=True,
        )

        # we should have only 1 query returned, since the second one is not
        # associated with any tabs
        payload = views.Superset._get_sqllab_tabs(user_id=user_id)
        self.assertEqual(len(payload["queries"]), 1)

    def test_virtual_table_explore_visibility(self):
        # test that default visibility is set to True
        database = utils.get_example_database()
        self.assertEqual(database.allows_virtual_table_explore, True)

        # test that visibility is disabled when extra is set to False
        extra = database.get_extra()
        extra["allows_virtual_table_explore"] = False
        database.extra = json.dumps(extra)
        self.assertEqual(database.allows_virtual_table_explore, False)

        # test that visibility is enabled when extra is set to True
        extra = database.get_extra()
        extra["allows_virtual_table_explore"] = True
        database.extra = json.dumps(extra)
        self.assertEqual(database.allows_virtual_table_explore, True)

        # test that visibility is not broken with bad values
        extra = database.get_extra()
        extra["allows_virtual_table_explore"] = "trash value"
        database.extra = json.dumps(extra)
        self.assertEqual(database.allows_virtual_table_explore, True)

    def test_explore_database_id(self):
        database = utils.get_example_database()
        explore_database = utils.get_example_database()

        # test that explore_database_id is the regular database
        # id if none is set in the extra
        self.assertEqual(database.explore_database_id, database.id)

        # test that explore_database_id is correct if the extra is set
        extra = database.get_extra()
        extra["explore_database_id"] = explore_database.id
        database.extra = json.dumps(extra)
        self.assertEqual(database.explore_database_id, explore_database.id)

    def test_get_column_names_from_metric(self):
        simple_metric = {
            "expressionType": utils.AdhocMetricExpressionType.SIMPLE.value,
            "column": {
                "column_name": "my_col"
            },
            "aggregate": "SUM",
            "label": "My Simple Label",
        }
        assert utils.get_column_name_from_metric(simple_metric) == "my_col"

        sql_metric = {
            "expressionType": utils.AdhocMetricExpressionType.SQL.value,
            "sqlExpression": "SUM(my_label)",
            "label": "My SQL Label",
        }
        assert utils.get_column_name_from_metric(sql_metric) is None
        assert utils.get_column_names_from_metrics([simple_metric,
                                                    sql_metric]) == ["my_col"]
Example #18
 def test_feature_flags(self):
     self.assertEquals(is_feature_enabled("foo"), "bar")
     self.assertEquals(is_feature_enabled("super"), "set")
    def expand_data(
            cls, columns: List[dict],
            data: List[dict]) -> Tuple[List[dict], List[dict], List[dict]]:
        """
        We do not immediately display rows and arrays clearly in the data grid. This
        method separates out nested fields and data values to help clearly display
        structural columns.

        Example: ColumnA is a row(nested_obj varchar) and ColumnB is an array(int)
        Original data set = [
            {'ColumnA': ['a1'], 'ColumnB': [1, 2]},
            {'ColumnA': ['a2'], 'ColumnB': [3, 4]},
        ]
        Expanded data set = [
            {'ColumnA': ['a1'], 'ColumnA.nested_obj': 'a1', 'ColumnB': 1},
            {'ColumnA': '',     'ColumnA.nested_obj': '',   'ColumnB': 2},
            {'ColumnA': ['a2'], 'ColumnA.nested_obj': 'a2', 'ColumnB': 3},
            {'ColumnA': '',     'ColumnA.nested_obj': '',   'ColumnB': 4},
        ]
        :param columns: columns selected in the query
        :param data: original data set
        :return: list of all columns(selected columns and their nested fields),
                 expanded data set, listed of nested fields
        """
        if not is_feature_enabled("PRESTO_EXPAND_DATA"):
            return columns, data, []

        all_columns: List[dict] = []
        # Get the list of all columns (selected fields and their nested fields)
        for column in columns:
            if column["type"].startswith("ARRAY") or column["type"].startswith(
                    "ROW"):
                cls._parse_structural_column(column["name"],
                                             column["type"].lower(),
                                             all_columns)
            else:
                all_columns.append(column)

        # Build graphs where the root node is a row or array and its children are that
        # column's nested fields
        row_column_hierarchy, array_column_hierarchy, expanded_columns = cls._create_row_and_array_hierarchy(
            columns)

        # Pull out a row's nested fields and their values into separate columns
        ordered_row_columns = row_column_hierarchy.keys()
        for datum in data:
            for row_column in ordered_row_columns:
                cls._expand_row_data(datum, row_column, row_column_hierarchy)

        while array_column_hierarchy:
            array_columns = list(array_column_hierarchy.keys())
            # Determine what columns are ready to be processed.
            array_columns_to_process, unprocessed_array_columns = cls._split_array_columns_by_process_state(
                array_columns, array_column_hierarchy, data[0])
            all_array_data = cls._process_array_data(data, all_columns,
                                                     array_column_hierarchy)
            # Consolidate the original data set and the expanded array data
            cls._consolidate_array_data_into_data(data, all_array_data)
            # Remove processed array columns from the graph
            cls._remove_processed_array_columns(unprocessed_array_columns,
                                                array_column_hierarchy)

        return all_columns, data, expanded_columns
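The expansion above leans on Superset-internal helpers, but the core idea for a single array column can be shown in isolation: each array element becomes its own row, and sibling columns are blanked after the first element, matching the docstring's expanded data set. A minimal sketch (expand_array_column is illustrative, not Superset code):

from typing import Any, Dict, List

def expand_array_column(data: List[Dict[str, Any]],
                        array_col: str) -> List[Dict[str, Any]]:
    expanded: List[Dict[str, Any]] = []
    for datum in data:
        values = datum.get(array_col) or [None]
        for i, value in enumerate(values):
            # First element keeps sibling columns; later ones blank them.
            row = dict(datum) if i == 0 else {key: "" for key in datum}
            row[array_col] = value
            expanded.append(row)
    return expanded

# expand_array_column([{"A": ["a1"], "B": [1, 2]}], "B")
# -> [{"A": ["a1"], "B": 1}, {"A": "", "B": 2}]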
Example #20
    def is_actor_owner(self) -> bool:
        if g.user is None or g.user.is_anonymous or not g.user.is_authenticated:
            return False
        return g.user.id in set(map(lambda user: user.id, self.owners))


def id_or_slug_filter(id_or_slug: str) -> BinaryExpression:
    if id_or_slug.isdigit():
        return Dashboard.id == int(id_or_slug)
    return Dashboard.slug == id_or_slug
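
id_or_slug_filter lets a single route parameter address a dashboard either by primary key or by slug. A hedged usage sketch with a SQLAlchemy session:

# "5" resolves via Dashboard.id == 5; "sales-overview" via Dashboard.slug.
dashboard = (db.session.query(Dashboard)
             .filter(id_or_slug_filter("sales-overview"))
             .one_or_none())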


OnDashboardChange = Callable[[Mapper, Connection, Dashboard], Any]

# events for updating tags
if is_feature_enabled("TAGGING_SYSTEM"):
    sqla.event.listen(Dashboard, "after_insert", DashboardUpdater.after_insert)
    sqla.event.listen(Dashboard, "after_update", DashboardUpdater.after_update)
    sqla.event.listen(Dashboard, "after_delete", DashboardUpdater.after_delete)

if is_feature_enabled("THUMBNAILS_SQLA_LISTENERS"):
    update_thumbnail: OnDashboardChange = lambda _, __, dash: dash.update_thumbnail()
    sqla.event.listen(Dashboard, "after_insert", update_thumbnail)
    sqla.event.listen(Dashboard, "after_update", update_thumbnail)

if is_feature_enabled("DASHBOARD_CACHE"):

    def clear_dashboard_cache(
        _mapper: Mapper,
        _connection: Connection,
Example #21
    def log(self, pk: int) -> FlaskResponse:  # pylint: disable=unused-argument
        if not (is_feature_enabled("ENABLE_REACT_CRUD_VIEWS")
                and is_feature_enabled("ALERT_REPORTS")):
            return super().list()

        return super().render_app_template()
Example #22
 def test_existing_feature_flags(self):
     self.assertTrue(is_feature_enabled('FOO'))
import simplejson as json
from flask import g, request
from flask_appbuilder.security.sqla.models import User

import superset.models.core as models
from superset import app, db, is_feature_enabled
from superset.connectors.connector_registry import ConnectorRegistry
from superset.exceptions import SupersetException
from superset.legacy import update_time_range
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.typing import FormData
from superset.utils.core import QueryStatus, TimeRangeEndpoint
from superset.viz import BaseViz

if is_feature_enabled("SIP_38_VIZ_REARCHITECTURE"):
    from superset import viz_sip38 as viz
else:
    from superset import viz  # type: ignore

FORM_DATA_KEY_BLACKLIST: List[str] = []
if not app.config["ENABLE_JAVASCRIPT_CONTROLS"]:
    FORM_DATA_KEY_BLACKLIST = [
        "js_tooltip", "js_onclick_href", "js_data_mutator"
    ]
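
# Sketch (not Superset code): the blacklist above would typically be applied
# by stripping those keys from user-supplied form data before rendering.
def sanitize_form_data(form_data: FormData) -> FormData:
    return {key: value for key, value in form_data.items()
            if key not in FORM_DATA_KEY_BLACKLIST}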


def bootstrap_user_data(user: User,
                        include_perms: bool = False) -> Dict[str, Any]:
    if user.is_anonymous:
        return {}
Example #24
 def test_feature_flags(self):
     self.assertEqual(is_feature_enabled('foo'), 'bar')
     self.assertEqual(is_feature_enabled('super'), 'set')
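
Examples #18 and #24 pass because the feature-flag lookup returns whatever value the flag holds rather than a coerced boolean. A minimal standalone sketch of that behaviour (the dict and helper here are illustrative, not Superset's feature_flag_manager):

from typing import Any, Dict

_FEATURE_FLAGS: Dict[str, Any] = {"foo": "bar", "super": "set"}

def lookup_flag(name: str) -> Any:
    # Truthiness is left to the caller; unknown flags fall back to False.
    return _FEATURE_FLAGS.get(name, False)

# lookup_flag("foo") == "bar"; lookup_flag("missing") is False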
class TestThumbnails(SupersetTestCase):

    mock_image = b"bytes mock image"

    def test_dashboard_thumbnail_disabled(self):
        """
            Thumbnails: Dashboard thumbnail disabled
        """
        if is_feature_enabled("THUMBNAILS"):
            return
        dashboard = db.session.query(Dashboard).all()[0]
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    def test_chart_thumbnail_disabled(self):
        """
            Thumbnails: Chart thumbnail disabled
        """
        if is_feature_enabled("THUMBNAILS"):
            return
        chart = db.session.query(Slice).all()[0]
        self.login(username="******")
        uri = f"api/v1/chart/{chart}/thumbnail/{chart.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_async_dashboard_screenshot(self):
        """
            Thumbnails: Simple get async dashboard screenshot
        """
        dashboard = db.session.query(Dashboard).all()[0]
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
        with patch("superset.tasks.thumbnails.cache_dashboard_thumbnail.delay"
                   ) as mock_task:
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 202)
            mock_task.assert_called_with(dashboard.id, force=True)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_async_dashboard_notfound(self):
        """
            Thumbnails: Simple get async dashboard not found
        """
        max_id = db.session.query(func.max(Dashboard.id)).scalar()
        self.login(username="******")
        uri = f"api/v1/dashboard/{max_id + 1}/thumbnail/1234/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_async_dashboard_not_allowed(self):
        """
            Thumbnails: Simple get async dashboard not allowed
        """
        dashboard = db.session.query(Dashboard).all()[0]
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_async_chart_screenshot(self):
        """
            Thumbnails: Simple get async chart screenshot
        """
        chart = db.session.query(Slice).all()[0]
        self.login(username="******")
        uri = f"api/v1/chart/{chart.id}/thumbnail/{chart.digest}/"
        with patch("superset.tasks.thumbnails.cache_chart_thumbnail.delay"
                   ) as mock_task:
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 202)
            mock_task.assert_called_with(chart.id, force=True)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_async_chart_notfound(self):
        """
            Thumbnails: Simple get async chart not found
        """
        max_id = db.session.query(func.max(Slice.id)).scalar()
        self.login(username="******")
        uri = f"api/v1/chart/{max_id + 1}/thumbnail/1234/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_cached_chart_wrong_digest(self):
        """
            Thumbnails: Simple get chart with wrong digest
        """
        chart = db.session.query(Slice).all()[0]
        # Cache a test "image"
        screenshot = ChartScreenshot(model_id=chart.id)
        thumbnail_cache.set(screenshot.cache_key, self.mock_image)
        self.login(username="******")
        uri = f"api/v1/chart/{chart.id}/thumbnail/1234/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 302)
        self.assertRedirects(
            rv, f"api/v1/chart/{chart.id}/thumbnail/{chart.digest}/")

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_cached_dashboard_screenshot(self):
        """
            Thumbnails: Simple get cached dashboard screenshot
        """
        dashboard = db.session.query(Dashboard).all()[0]
        # Cache a test "image"
        screenshot = DashboardScreenshot(model_id=dashboard.id)
        thumbnail_cache.set(screenshot.cache_key, self.mock_image)
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv.data, self.mock_image)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_cached_chart_screenshot(self):
        """
            Thumbnails: Simple get cached chart screenshot
        """
        chart = db.session.query(Slice).all()[0]
        # Cache a test "image"
        screenshot = ChartScreenshot(model_id=chart.id)
        thumbnail_cache.set(screenshot.cache_key, self.mock_image)
        self.login(username="******")
        uri = f"api/v1/chart/{chart.id}/thumbnail/{chart.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv.data, self.mock_image)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_cached_dashboard_wrong_digest(self):
        """
            Thumbnails: Simple get dashboard with wrong digest
        """
        dashboard = db.session.query(Dashboard).all()[0]
        # Cache a test "image"
        screenshot = DashboardScreenshot(model_id=dashboard.id)
        thumbnail_cache.set(screenshot.cache_key, self.mock_image)
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/1234/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 302)
        self.assertRedirects(
            rv,
            f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/")
Example #26
    def export(self, **kwargs: Any) -> Response:
        """Export dashboards
        ---
        get:
          description: >-
            Exports multiple Dashboards and downloads them as YAML files.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_export_ids_schema'
          responses:
            200:
              description: Dashboard export
              content:
                text/plain:
                  schema:
                    type: string
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        requested_ids = kwargs["rison"]

        if is_feature_enabled("VERSIONED_EXPORT"):
            timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
            root = f"dashboard_export_{timestamp}"
            filename = f"{root}.zip"

            buf = BytesIO()
            with ZipFile(buf, "w") as bundle:
                try:
                    for file_name, file_content in ExportDashboardsCommand(
                            requested_ids).run():
                        with bundle.open(f"{root}/{file_name}", "w") as fp:
                            fp.write(file_content.encode())
                except DashboardNotFoundError:
                    return self.response_404()
            buf.seek(0)

            return send_file(
                buf,
                mimetype="application/zip",
                as_attachment=True,
                attachment_filename=filename,
            )

        query = self.datamodel.session.query(Dashboard).filter(
            Dashboard.id.in_(requested_ids))
        query = self._base_filters.apply_all(query)
        ids = [item.id for item in query.all()]
        if not ids:
            return self.response_404()
        export = Dashboard.export_dashboards(ids)
        resp = make_response(export, 200)
        resp.headers["Content-Disposition"] = generate_download_headers(
            "json")["Content-Disposition"]
        return resp
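For context, here is a hypothetical client-side call against this endpoint; the base URL, port, and pre-authenticated session are assumptions, and the ZIP response shape assumes VERSIONED_EXPORT is enabled:

# Hypothetical usage; base URL and authentication handling are assumptions.
import requests

session = requests.Session()  # assumed to already carry valid credentials
resp = session.get(
    "http://localhost:8088/api/v1/dashboard/export/",
    params={"q": "!(1,2)"},  # rison encoding of the id list [1, 2]
)
resp.raise_for_status()
with open("dashboard_export.zip", "wb") as fh:
    fh.write(resp.content)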
Example #27
class Dashboard(  # pylint: disable=too-many-instance-attributes
        Model, AuditMixinNullable, ImportExportMixin):
    """The dashboard object!"""

    __tablename__ = "dashboards"
    id = Column(Integer, primary_key=True)
    dashboard_title = Column(String(500))
    position_json = Column(utils.MediumText())
    description = Column(Text)
    css = Column(Text)
    json_metadata = Column(Text)
    slug = Column(String(255), unique=True)
    slices = relationship(Slice,
                          secondary=dashboard_slices,
                          backref="dashboards")
    owners = relationship(security_manager.user_model,
                          secondary=dashboard_user)
    published = Column(Boolean, default=False)

    export_fields = [
        "dashboard_title",
        "position_json",
        "json_metadata",
        "description",
        "css",
        "slug",
    ]

    def __repr__(self) -> str:
        return f"Dashboard<{self.id or self.slug}>"

    @property
    def table_names(self) -> str:
        # pylint: disable=no-member
        return ", ".join(str(s.datasource.full_name) for s in self.slices)

    @property
    def url(self) -> str:
        url = f"/superset/dashboard/{self.slug or self.id}/"
        if self.json_metadata:
            # add default_filters to the preselect_filters of dashboard
            json_metadata = json.loads(self.json_metadata)
            default_filters = json_metadata.get("default_filters")
            # make sure default_filters is not empty and is valid
            if default_filters and default_filters != "{}":
                try:
                    if json.loads(default_filters):
                        filters = parse.quote(default_filters.encode("utf8"))
                        return "/superset/dashboard/{}/?preselect_filters={}".format(
                            self.slug or self.id, filters)
                except (TypeError, JSONDecodeError) as exc:
                    logger.error(
                        "Unable to parse json for url: %r. Returning default url.",
                        exc,
                        exc_info=True,
                    )
                    return url
        return url

    @property
    def datasources(self) -> Set[BaseDatasource]:
        return {slc.datasource for slc in self.slices}

    @property
    def charts(self) -> List[BaseDatasource]:
        return [slc.chart for slc in self.slices]

    @property
    def sqla_metadata(self) -> None:
        # pylint: disable=no-member
        meta = MetaData(bind=self.get_sqla_engine())
        meta.reflect()

    @renders("dashboard_title")
    def dashboard_link(self) -> Markup:
        title = escape(self.dashboard_title or "<empty>")
        return Markup(f'<a href="{self.url}">{title}</a>')

    @property
    def digest(self) -> str:
        """
        Returns an MD5 hex digest that uniquely identifies this dashboard
        """
        unique_string = f"{self.position_json}.{self.css}.{self.json_metadata}"
        return utils.md5_hex(unique_string)
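    # Note: utils.md5_hex presumably wraps hashlib.md5(...).hexdigest(); any
    # change to position_json, css, or json_metadata therefore produces a new
    # digest, and with it a new thumbnail_url below that busts browser caches.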

    @property
    def thumbnail_url(self) -> str:
        """
        Returns a thumbnail URL that embeds the hex digest, so browsers do not
        serve a stale cached image after the dashboard has changed
        """
        return f"/api/v1/dashboard/{self.id}/thumbnail/{self.digest}/"

    @property
    def changed_by_name(self) -> str:
        if not self.changed_by:
            return ""
        return str(self.changed_by)

    @property
    def changed_by_url(self) -> str:
        if not self.changed_by:
            return ""
        return f"/superset/profile/{self.changed_by.username}"

    @property
    def data(self) -> Dict[str, Any]:
        positions = self.position_json
        if positions:
            positions = json.loads(positions)
        return {
            "id": self.id,
            "metadata": self.params_dict,
            "css": self.css,
            "dashboard_title": self.dashboard_title,
            "published": self.published,
            "slug": self.slug,
            "slices": [slc.data for slc in self.slices],
            "position_json": positions,
            "last_modified_time": self.changed_on.replace(microsecond=0).timestamp(),
        }

    @cache.memoize(
        # manage cache version manually
        make_name=lambda fname: f"{fname}-v2.1",
        timeout=config["DASHBOARD_CACHE_TIMEOUT"],
        unless=lambda: not is_feature_enabled("DASHBOARD_CACHE"),
    )
    def full_data(self) -> Dict[str, Any]:
        """Bootstrap data for rendering the dashboard page."""
        slices = self.slices
        datasource_slices = utils.indexed(slices, "datasource")
        return {
            # dashboard metadata
            "dashboard": self.data,
            # slices metadata
            "slices": [slc.data for slc in slices],
            # datasource metadata
            "datasources": {
                # Filter out unneeded fields from the datasource payload
                datasource.uid: datasource.data_for_slices(slices)
                for datasource, slices in datasource_slices.items()
            },
        }

    @property  # type: ignore
    def params(self) -> str:  # type: ignore
        return self.json_metadata

    @params.setter
    def params(self, value: str) -> None:
        self.json_metadata = value

    @property
    def position(self) -> Dict[str, Any]:
        if self.position_json:
            return json.loads(self.position_json)
        return {}

    def update_thumbnail(self) -> None:
        url = get_url_path("Superset.dashboard", dashboard_id_or_slug=self.id)
        cache_dashboard_thumbnail.delay(url, self.digest, force=True)

    @debounce(0.1)
    def clear_cache(self) -> None:
        cache.delete_memoized(Dashboard.full_data, self)

    @classmethod
    @debounce(0.1)
    def clear_cache_for_slice(cls, slice_id: int) -> None:
        filter_query = select(
            [dashboard_slices.c.dashboard_id],
            distinct=True).where(dashboard_slices.c.slice_id == slice_id)
        for (dashboard_id, ) in db.engine.execute(filter_query):
            cls(id=dashboard_id).clear_cache()

    @classmethod
    @debounce(0.1)
    def clear_cache_for_datasource(cls, datasource_id: int) -> None:
        filter_query = select(
            [dashboard_slices.c.dashboard_id],
            distinct=True,
        ).select_from(
            join(
                dashboard_slices,
                Slice,
                (Slice.id == dashboard_slices.c.slice_id)
                & (Slice.datasource_id == datasource_id),
            ))
        for (dashboard_id, ) in db.engine.execute(filter_query):
            cls(id=dashboard_id).clear_cache()

    @classmethod
    def export_dashboards(  # pylint: disable=too-many-locals
            cls, dashboard_ids: List[int]) -> str:
        copied_dashboards = []
        datasource_ids = set()
        for dashboard_id in dashboard_ids:
            # make sure that dashboard_id is an integer
            dashboard_id = int(dashboard_id)
            dashboard = (db.session.query(Dashboard).options(
                subqueryload(
                    Dashboard.slices)).filter_by(id=dashboard_id).first())
            # remove ids and relations (like owners, created by, slices, ...)
            copied_dashboard = dashboard.copy()
            for slc in dashboard.slices:
                datasource_ids.add((slc.datasource_id, slc.datasource_type))
                copied_slc = slc.copy()
                # save original id into json
                # we need it to update dashboard's json metadata on import
                copied_slc.id = slc.id
                # add extra params for the import
                copied_slc.alter_params(
                    remote_id=slc.id,
                    datasource_name=slc.datasource.datasource_name,
                    schema=slc.datasource.schema,
                    database_name=slc.datasource.database.name,
                )
                # set slices without creating ORM relations
                slices = copied_dashboard.__dict__.setdefault("slices", [])
                slices.append(copied_slc)
            copied_dashboard.alter_params(remote_id=dashboard_id)
            copied_dashboards.append(copied_dashboard)

        eager_datasources = []
        for datasource_id, datasource_type in datasource_ids:
            eager_datasource = ConnectorRegistry.get_eager_datasource(
                db.session, datasource_type, datasource_id)
            copied_datasource = eager_datasource.copy()
            copied_datasource.alter_params(
                remote_id=eager_datasource.id,
                database_name=eager_datasource.database.name,
            )
            datasource_class = copied_datasource.__class__
            for field_name in datasource_class.export_children:
                field_val = getattr(eager_datasource, field_name).copy()
                # set children without creating ORM relations
                copied_datasource.__dict__[field_name] = field_val
            eager_datasources.append(copied_datasource)

        return json.dumps(
            {
                "dashboards": copied_dashboards,
                "datasources": eager_datasources
            },
            cls=utils.DashboardEncoder,
            indent=4,
        )
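The clear-cache helpers above are wrapped in @debounce(0.1) so that bursts of ORM events collapse into a single cache flush. A minimal sketch of such a decorator, assuming a simple per-arguments time window (the actual helper in Superset's utils may differ):

# Minimal sketch of a time-window debounce; the real implementation may differ.
import time
from functools import wraps
from typing import Any, Callable, Dict, Tuple


def debounce(duration: float = 0.1) -> Callable[..., Any]:
    """Skip repeated calls with identical arguments inside a short window."""

    def decorate(func: Callable[..., Any]) -> Callable[..., Any]:
        last_calls: Dict[Tuple[Any, ...], float] = {}

        @wraps(func)
        def wrapped(*args: Any, **kwargs: Any) -> Any:
            now = time.time()
            key = (func.__qualname__, args, tuple(sorted(kwargs.items())))
            if now - last_calls.get(key, 0.0) < duration:
                return None  # drop the duplicate call
            last_calls[key] = now
            return func(*args, **kwargs)

        return wrapped

    return decorate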
Example #28
    def list(self) -> FlaskResponse:
        if not is_feature_enabled("ENABLE_REACT_CRUD_VIEWS"):
            return super().list()

        return super().render_app_template()
Example #29
def set_related_perm(_mapper: Mapper, _connection: Connection,
                     target: Slice) -> None:
    src_class = target.cls_model
    id_ = target.datasource_id
    if id_:
        ds = db.session.query(src_class).filter_by(id=int(id_)).first()
        if ds:
            target.perm = ds.perm
            target.schema_perm = ds.schema_perm


def event_after_chart_changed(_mapper: Mapper, _connection: Connection,
                              target: Slice) -> None:
    url = get_url_path("Superset.slice", slice_id=target.id, standalone="true")
    cache_chart_thumbnail.delay(url, target.digest, force=True)


sqla.event.listen(Slice, "before_insert", set_related_perm)
sqla.event.listen(Slice, "before_update", set_related_perm)

# events for updating tags
if is_feature_enabled("TAGGING_SYSTEM"):
    sqla.event.listen(Slice, "after_insert", ChartUpdater.after_insert)
    sqla.event.listen(Slice, "after_update", ChartUpdater.after_update)
    sqla.event.listen(Slice, "after_delete", ChartUpdater.after_delete)

# events for updating thumbnails
if is_feature_enabled("THUMBNAILS_SQLA_LISTENERS"):
    sqla.event.listen(Slice, "after_insert", event_after_chart_changed)
    sqla.event.listen(Slice, "after_update", event_after_chart_changed)
Example #30
            return slc_to_override.id
        session.add(slc_to_import)
        logging.info("Final slice: %s", str(slc_to_import.to_json()))
        session.flush()
        return slc_to_import.id

    @property
    def url(self) -> str:
        return f"/superset/explore/?form_data=%7B%22slice_id%22%3A%20{self.id}%7D"


def set_related_perm(mapper, connection, target):
    # pylint: disable=unused-argument
    src_class = target.cls_model
    id_ = target.datasource_id
    if id_:
        ds = db.session.query(src_class).filter_by(id=int(id_)).first()
        if ds:
            target.perm = ds.perm
            target.schema_perm = ds.schema_perm


sqla.event.listen(Slice, "before_insert", set_related_perm)
sqla.event.listen(Slice, "before_update", set_related_perm)

# events for updating tags
if is_feature_enabled("TAGGING_SYSTEM"):
    sqla.event.listen(Slice, "after_insert", ChartUpdater.after_insert)
    sqla.event.listen(Slice, "after_update", ChartUpdater.after_update)
    sqla.event.listen(Slice, "after_delete", ChartUpdater.after_delete)
Example #31
class TestThumbnails(SupersetTestCase):

    mock_image = b"bytes mock image"

    @with_feature_flags(THUMBNAILS=False)
    def test_dashboard_thumbnail_disabled(self):
        """
        Thumbnails: Dashboard thumbnail disabled
        """
        dashboard = db.session.query(Dashboard).all()[0]
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @with_feature_flags(THUMBNAILS=False)
    def test_chart_thumbnail_disabled(self):
        """
        Thumbnails: Chart thumbnail disabled
        """
        chart = db.session.query(Slice).all()[0]
        self.login(username="******")
        uri = f"api/v1/chart/{chart}/thumbnail/{chart.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @with_feature_flags(THUMBNAILS=True)
    def test_get_async_dashboard_screenshot(self):
        """
        Thumbnails: Simple get async dashboard screenshot
        """
        dashboard = db.session.query(Dashboard).all()[0]
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
        with patch("superset.tasks.thumbnails.cache_dashboard_thumbnail.delay"
                   ) as mock_task:
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 202)

            expected_uri = f"{get_url_host()}superset/dashboard/{dashboard.id}/"
            expected_digest = dashboard.digest
            expected_kwargs = {"force": True}
            mock_task.assert_called_with(expected_uri, expected_digest,
                                         **expected_kwargs)

    @with_feature_flags(THUMBNAILS=True)
    def test_get_async_dashboard_notfound(self):
        """
        Thumbnails: Simple get async dashboard not found
        """
        max_id = db.session.query(func.max(Dashboard.id)).scalar()
        self.login(username="******")
        uri = f"api/v1/dashboard/{max_id + 1}/thumbnail/1234/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @skipUnless((is_feature_enabled("THUMBNAILS")), "Thumbnails feature")
    def test_get_async_dashboard_not_allowed(self):
        """
        Thumbnails: Simple get async dashboard not allowed
        """
        dashboard = db.session.query(Dashboard).all()[0]
        self.login(username="******")
        uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @with_feature_flags(THUMBNAILS=True)
    def test_get_async_chart_screenshot(self):
        """
        Thumbnails: Simple get async chart screenshot
        """
        chart = db.session.query(Slice).all()[0]
        self.login(username="******")
        uri = f"api/v1/chart/{chart.id}/thumbnail/{chart.digest}/"
        with patch("superset.tasks.thumbnails.cache_chart_thumbnail.delay"
                   ) as mock_task:
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 202)
            expected_uri = f"{get_url_host()}superset/slice/{chart.id}/?standalone=true"
            expected_digest = chart.digest
            expected_kwargs = {"force": True}
            mock_task.assert_called_with(expected_uri, expected_digest,
                                         **expected_kwargs)

    @with_feature_flags(THUMBNAILS=True)
    def test_get_async_chart_notfound(self):
        """
        Thumbnails: Simple get async chart not found
        """
        max_id = db.session.query(func.max(Slice.id)).scalar()
        self.login(username="******")
        uri = f"api/v1/chart/{max_id + 1}/thumbnail/1234/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)

    @with_feature_flags(THUMBNAILS=True)
    def test_get_cached_chart_wrong_digest(self):
        """
        Thumbnails: Simple get chart with wrong digest
        """
        chart = db.session.query(Slice).all()[0]
        with patch.object(ChartScreenshot,
                          "get_from_cache",
                          return_value=BytesIO(self.mock_image)):
            self.login(username="******")
            uri = f"api/v1/chart/{chart.id}/thumbnail/1234/"
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 302)
            self.assertRedirects(
                rv, f"api/v1/chart/{chart.id}/thumbnail/{chart.digest}/")

    @with_feature_flags(THUMBNAILS=True)
    def test_get_cached_dashboard_screenshot(self):
        """
        Thumbnails: Simple get cached dashboard screenshot
        """
        dashboard = db.session.query(Dashboard).all()[0]
        with patch.object(DashboardScreenshot,
                          "get_from_cache",
                          return_value=BytesIO(self.mock_image)):
            self.login(username="******")
            uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 200)
            self.assertEqual(rv.data, self.mock_image)

    @with_feature_flags(THUMBNAILS=True)
    def test_get_cached_chart_screenshot(self):
        """
        Thumbnails: Simple get cached chart screenshot
        """
        chart = db.session.query(Slice).all()[0]
        with patch.object(ChartScreenshot,
                          "get_from_cache",
                          return_value=BytesIO(self.mock_image)):
            self.login(username="******")
            uri = f"api/v1/chart/{chart.id}/thumbnail/{chart.digest}/"
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 200)
            self.assertEqual(rv.data, self.mock_image)

    @with_feature_flags(THUMBNAILS=True)
    def test_get_cached_dashboard_wrong_digest(self):
        """
        Thumbnails: Simple get dashboard with wrong digest
        """
        dashboard = db.session.query(Dashboard).all()[0]
        with patch.object(DashboardScreenshot,
                          "get_from_cache",
                          return_value=BytesIO(self.mock_image)):
            self.login(username="******")
            uri = f"api/v1/dashboard/{dashboard.id}/thumbnail/1234/"
            rv = self.client.get(uri)
            self.assertEqual(rv.status_code, 302)
            self.assertRedirects(
                rv,
                f"api/v1/dashboard/{dashboard.id}/thumbnail/{dashboard.digest}/"
            )
Example #32
    @classmethod
    def expand_data(  # pylint: disable=too-many-locals
        cls,
        columns: List[Dict[Any, Any]],
        data: List[Dict[Any, Any]],
    ) -> Tuple[List[Dict[Any, Any]], List[Dict[Any, Any]], List[Dict[Any, Any]]]:
        """
        Rows and arrays do not display clearly in the data grid. This method
        separates out nested fields and data values so that structural columns
        render legibly.

        Example: ColumnA is a row(nested_obj varchar) and ColumnB is an array(int)
        Original data set = [
            {'ColumnA': ['a1'], 'ColumnB': [1, 2]},
            {'ColumnA': ['a2'], 'ColumnB': [3, 4]},
        ]
        Expanded data set = [
            {'ColumnA': ['a1'], 'ColumnA.nested_obj': 'a1', 'ColumnB': 1},
            {'ColumnA': '',     'ColumnA.nested_obj': '',   'ColumnB': 2},
            {'ColumnA': ['a2'], 'ColumnA.nested_obj': 'a2', 'ColumnB': 3},
            {'ColumnA': '',     'ColumnA.nested_obj': '',   'ColumnB': 4},
        ]
        :param columns: columns selected in the query
        :param data: original data set
        :return: list of all columns (selected columns and their nested fields),
                 expanded data set, list of nested fields
        """
        if not is_feature_enabled("PRESTO_EXPAND_DATA"):
            return columns, data, []

        # process each column, unnesting ARRAY types and
        # expanding ROW types into new columns
        to_process = deque((column, 0) for column in columns)
        all_columns: List[Dict[str, Any]] = []
        expanded_columns = []
        current_array_level = None
        while to_process:
            column, level = to_process.popleft()
            if column["name"] not in [
                    column["name"] for column in all_columns
            ]:
                all_columns.append(column)

            # When unnesting arrays we need to keep track of how many extra rows
            # were added for each original row. This is necessary when we expand
            # multiple arrays, so that the arrays after the first reuse the rows
            # added by the first. Every time we change a level in the nested
            # arrays we reinitialize this counter.
            if level != current_array_level:
                unnested_rows: Dict[int, int] = defaultdict(int)
                current_array_level = level

            name = column["name"]
            values: Optional[Union[str, List[Any]]]

            if column["type"].startswith("ARRAY("):
                # keep processing array children; we append to the right so that
                # multiple nested arrays are processed breadth-first
                to_process.append((get_children(column)[0], level + 1))

                # unnest array objects data into new rows
                i = 0
                while i < len(data):
                    row = data[i]
                    values = row.get(name)
                    if isinstance(values, str):
                        row[name] = values = destringify(values)
                    if values:
                        # how many extra rows do we need to unnest the data?
                        extra_rows = len(values) - 1

                        # how many rows were already added for this row?
                        current_unnested_rows = unnested_rows[i]

                        # add any necessary rows
                        missing = extra_rows - current_unnested_rows
                        for _ in range(missing):
                            data.insert(i + current_unnested_rows + 1, {})
                            unnested_rows[i] += 1

                        # unnest array into rows
                        for j, value in enumerate(values):
                            data[i + j][name] = value

                        # skip newly unnested rows
                        i += unnested_rows[i]

                    i += 1

            if column["type"].startswith("ROW("):
                # expand columns; we append them to the left so they are added
                # immediately after the parent
                expanded = get_children(column)
                to_process.extendleft(
                    (column, level) for column in expanded[::-1])
                expanded_columns.extend(expanded)

                # expand row objects into new columns
                for row in data:
                    values = row.get(name) or []
                    if isinstance(values, str):
                        row[name] = values = cast(List[Any],
                                                  destringify(values))
                    for value, col in zip(values, expanded):
                        row[col["name"]] = value

        data = [{k["name"]: row.get(k["name"], "")
                 for k in all_columns} for row in data]

        return all_columns, data, expanded_columns
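A hypothetical invocation mirroring the docstring example; the owning class name PrestoEngineSpec is an assumption, and PRESTO_EXPAND_DATA must be enabled for any expansion to happen:

# Hypothetical usage; PrestoEngineSpec as the owning class is an assumption.
columns = [
    {"name": "ColumnA", "type": "ROW(nested_obj VARCHAR)"},
    {"name": "ColumnB", "type": "ARRAY(BIGINT)"},
]
data = [
    {"ColumnA": ["a1"], "ColumnB": [1, 2]},
    {"ColumnA": ["a2"], "ColumnB": [3, 4]},
]
all_cols, expanded_rows, nested_cols = PrestoEngineSpec.expand_data(columns, data)
# all_cols now also lists "ColumnA.nested_obj"; expanded_rows holds one row per
# unnested array element, exactly as illustrated in the docstring above.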
Example #33
    def test_existing_feature_flags(self):
        self.assertTrue(is_feature_enabled('FOO'))
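The @with_feature_flags decorator used throughout these tests forces specific flags on or off for the duration of a single test. A minimal sketch of such a helper, assuming flag lookups can be patched at superset.is_feature_enabled (the real test utility may work differently):

# Minimal sketch; the patch target "superset.is_feature_enabled" is an
# assumption about where tests would intercept flag lookups.
from functools import wraps
from typing import Any, Callable
from unittest.mock import patch


def with_feature_flags(**flags: bool) -> Callable[..., Any]:
    def decorate(test_func: Callable[..., Any]) -> Callable[..., Any]:
        @wraps(test_func)
        def wrapped(*args: Any, **kwargs: Any) -> Any:
            def fake_is_enabled(feature: str) -> bool:
                return flags.get(feature, False)

            with patch("superset.is_feature_enabled", fake_is_enabled):
                return test_func(*args, **kwargs)

        return wrapped

    return decorate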