Code Example #1
class DatabaseMixin:
    list_title = _("Databases")
    show_title = _("Show Database")
    add_title = _("Add Database")
    edit_title = _("Edit Database")

    list_columns = [
        "database_name",
        "backend",
        "allow_run_async",
        "allow_dml",
        "allow_csv_upload",
        "expose_in_sqllab",
        "creator",
        "modified",
    ]
    order_columns = [
        "database_name",
        "allow_run_async",
        "allow_dml",
        "modified",
        "allow_csv_upload",
        "expose_in_sqllab",
    ]
    add_columns = [
        "database_name",
        "sqlalchemy_uri",
        "cache_timeout",
        "expose_in_sqllab",
        "allow_run_async",
        "allow_csv_upload",
        "allow_ctas",
        "allow_dml",
        "force_ctas_schema",
        "impersonate_user",
        "allow_multi_schema_metadata_fetch",
        "extra",
    ]
    search_exclude_columns = (
        "password",
        "tables",
        "created_by",
        "changed_by",
        "queries",
        "saved_queries",
    )
    edit_columns = add_columns
    show_columns = [
        "tables",
        "cache_timeout",
        "extra",
        "database_name",
        "sqlalchemy_uri",
        "perm",
        "created_by",
        "created_on",
        "changed_by",
        "changed_on",
    ]
    base_order = ("changed_on", "desc")
    description_columns = {
        "sqlalchemy_uri":
        utils.markdown(
            "Refer to the "
            "[SqlAlchemy docs]"
            "(https://docs.sqlalchemy.org/en/rel_1_2/core/engines.html#"
            "database-urls) "
            "for more information on how to structure your URI.",
            True,
        ),
        "expose_in_sqllab":
        _("Expose this DB in SQL Lab"),
        "allow_run_async":
        _("Operate the database in asynchronous mode, meaning  "
          "that the queries are executed on remote workers as opposed "
          "to on the web server itself. "
          "This assumes that you have a Celery worker setup as well "
          "as a results backend. Refer to the installation docs "
          "for more information."),
        "allow_ctas":
        _("Allow CREATE TABLE AS option in SQL Lab"),
        "allow_dml":
        _("Allow users to run non-SELECT statements "
          "(UPDATE, DELETE, CREATE, ...) "
          "in SQL Lab"),
        "force_ctas_schema":
        _("When allowing CREATE TABLE AS option in SQL Lab, "
          "this option forces the table to be created in this schema"),
        "extra":
        utils.markdown(
            "JSON string containing extra configuration elements.<br/>"
            "1. The ``engine_params`` object gets unpacked into the "
            "[sqlalchemy.create_engine]"
            "(https://docs.sqlalchemy.org/en/latest/core/engines.html#"
            "sqlalchemy.create_engine) call, while the ``metadata_params`` "
            "gets unpacked into the [sqlalchemy.MetaData]"
            "(https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
            "#sqlalchemy.schema.MetaData) call.<br/>"
            "2. The ``metadata_cache_timeout`` is a cache timeout setting "
            "in seconds for metadata fetch of this database. Specify it as "
            '**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
            '"table_cache_timeout": 600}**. '
            "If unset, cache will not be enabled for the functionality. "
            "A timeout of 0 indicates that the cache never expires.<br/>"
            "3. The ``schemas_allowed_for_csv_upload`` is a comma separated list "
            "of schemas that CSVs are allowed to upload to. "
            'Specify it as **"schemas_allowed_for_csv_upload": '
            '["public", "csv_upload"]**. '
            "If database flavor does not support schema or any schema is allowed "
            "to be accessed, just leave the list empty"
            "4. the ``version`` field is a string specifying the this db's version. "
            "This should be used with Presto DBs so that the syntax is correct",
            True,
        ),
        "impersonate_user":
        _("If Presto, all the queries in SQL Lab are going to be executed as the "
          "currently logged on user who must have permission to run them.<br/>"
          "If Hive and hive.server2.enable.doAs is enabled, will run the queries as "
          "service account, but impersonate the currently logged on user "
          "via hive.server2.proxy.user property."),
        "allow_multi_schema_metadata_fetch":
        _("Allow SQL Lab to fetch a list of all tables and all views across "
          "all database schemas. For large data warehouse with thousands of "
          "tables, this can be expensive and put strain on the system."),
        "cache_timeout":
        _("Duration (in seconds) of the caching timeout for charts of this database. "
          "A timeout of 0 indicates that the cache never expires. "
          "Note this defaults to the global timeout if undefined."),
        "allow_csv_upload":
        _("If selected, please set the schemas allowed for csv upload in Extra."
          ),
    }
    base_filters = [["id", DatabaseFilter, lambda: []]]
    label_columns = {
        "expose_in_sqllab": _("Expose in SQL Lab"),
        "allow_ctas": _("Allow CREATE TABLE AS"),
        "allow_dml": _("Allow DML"),
        "force_ctas_schema": _("CTAS Schema"),
        "database_name": _("Database"),
        "creator": _("Creator"),
        "changed_on_": _("Last Changed"),
        "sqlalchemy_uri": _("SQLAlchemy URI"),
        "cache_timeout": _("Chart Cache Timeout"),
        "extra": _("Extra"),
        "allow_run_async": _("Asynchronous Query Execution"),
        "impersonate_user": _("Impersonate the logged on user"),
        "allow_csv_upload": _("Allow Csv Upload"),
        "modified": _("Modified"),
        "allow_multi_schema_metadata_fetch":
        _("Allow Multi Schema Metadata Fetch"),
        "backend": _("Backend"),
    }

    def _pre_add_update(self, db):
        self.check_extra(db)
        db.set_sqlalchemy_uri(db.sqlalchemy_uri)
        security_manager.add_permission_view_menu("database_access", db.perm)
        # when adding or updating a database, force a refresh of the schema list
        for schema in db.get_all_schema_names():
            security_manager.add_permission_view_menu(
                "schema_access", security_manager.get_schema_perm(db, schema))

    def pre_add(self, db):
        self._pre_add_update(db)

    def pre_update(self, db):
        self._pre_add_update(db)

    def pre_delete(self, obj):
        if obj.tables:
            raise SupersetException(
                Markup("Cannot delete a database that has tables attached. "
                       "Here's the list of associated tables: " +
                       ", ".join("{}".format(o) for o in obj.tables)))

    def check_extra(self, db):
        # this will check whether json.loads(extra) can succeed
        try:
            extra = db.get_extra()
        except Exception as e:
            raise Exception("Extra field cannot be decoded by JSON. {}".format(
                str(e)))

        # this will check whether 'metadata_params' is configured correctly
        metadata_signature = inspect.signature(MetaData)
        for key in extra.get("metadata_params", {}):
            if key not in metadata_signature.parameters:
                raise Exception("The metadata_params in Extra field "
                                "is not configured correctly. The key "
                                "{} is invalid.".format(key))
Code Example #2
class PandasMetricInlineView(CompactCRUDMixin, SupersetModelView,
                             DeleteMixin):  # noqa
    datamodel = SQLAInterface(PandasMetric)

    list_title = _('List Metrics')
    show_title = _('Show Metric')
    add_title = _('Add Metric')
    edit_title = _('Edit Metric')

    list_columns = ['metric_name', 'verbose_name', 'metric_type']
    edit_columns = [
        'metric_name', 'description', 'verbose_name', 'metric_type', 'source',
        'expression', 'datasource', 'd3format', 'is_restricted', 'warning_text'
    ]
    description_columns = {
        'source':
        markdown(
            'a comma-separated list of column(s) used to calculate '
            ' the metric. Example: `claim_amount`', True),
        'expression':
        markdown(
            'a valid Pandas expression as supported by the underlying '
            'backend. Example: `count()`', True),
        'is_restricted':
        _('Whether the access to this metric is restricted '
          'to certain roles. Only roles with the permission '
          "'metric access on XXX (the name of this metric)' "
          'are allowed to access this metric'),
        'd3format':
        markdown(
            'd3 formatting string as defined [here]'
            '(https://github.com/d3/d3-format/blob/master/README.md#format). '
            'For instance, this default formatting applies in the Table '
            'visualization and allows for different metrics to use different '
            'formats',
            True,
        ),
    }
    add_columns = edit_columns
    page_size = 500
    label_columns = {
        'metric_name': _('Metric'),
        'description': _('Description'),
        'verbose_name': _('Verbose Name'),
        'metric_type': _('Type'),
        'source': _('Pandas Source Columns'),
        'expression': _('Pandas Expression'),
        'datasource': _('Datasource'),
        'd3format': _('D3 Format'),
        'is_restricted': _('Is Restricted'),
        'warning_text': _('Warning Message'),
    }

    def post_add(self, metric):
        if metric.is_restricted:
            security.merge_perm(security_manager, 'metric_access',
                                metric.get_perm())

    def post_update(self, metric):
        if metric.is_restricted:
            security.merge_perm(security_manager, 'metric_access',
                                metric.get_perm())
Code Example #3
File: views.py Project: dodopizza/superset
class TableModelView(  # pylint: disable=too-many-ancestors
        DatasourceModelView, DeleteMixin, YamlExportMixin):
    datamodel = SQLAInterface(models.SqlaTable)
    class_permission_name = "Dataset"
    method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
    include_route_methods = RouteMethod.CRUD_SET

    list_title = _("Tables")
    show_title = _("Show Table")
    add_title = _("Import a table definition")
    edit_title = _("Edit Table")

    list_columns = ["link", "database_name", "changed_by_", "modified"]
    order_columns = ["modified"]
    add_columns = ["database", "schema", "table_name"]
    edit_columns = [
        "table_name",
        "sql",
        "filter_select_enabled",
        "fetch_values_predicate",
        "database",
        "schema",
        "description",
        "owners",
        "main_dttm_col",
        "default_endpoint",
        "offset",
        "cache_timeout",
        "is_sqllab_view",
        "template_params",
        "extra",
    ]
    base_filters = [["id", DatasourceFilter, lambda: []]]
    show_columns = edit_columns + ["perm", "slices"]
    related_views = [
        TableColumnInlineView,
        SqlMetricInlineView,
    ]
    base_order = ("changed_on", "desc")
    search_columns = ("database", "schema", "table_name", "owners",
                      "is_sqllab_view")
    description_columns = {
        "slices":
        _("The list of charts associated with this table. By "
          "altering this datasource, you may change how these associated "
          "charts behave. "
          "Also note that charts need to point to a datasource, so "
          "this form will fail at saving if removing charts from a "
          "datasource. If you want to change the datasource for a chart, "
          "overwrite the chart from the 'explore view'"),
        "offset":
        _("Timezone offset (in hours) for this datasource"),
        "table_name":
        _("Name of the table that exists in the source database"),
        "schema":
        _("Schema, as used only in some databases like Postgres, Redshift "
          "and DB2"),
        "description":
        Markup(
            'Supports <a href="https://daringfireball.net/projects/markdown/">'
            "markdown</a>"),
        "sql":
        _("This fields acts a Superset view, meaning that Superset will "
          "run a query against this string as a subquery."),
        "fetch_values_predicate":
        _("Predicate applied when fetching distinct value to "
          "populate the filter control component. Supports "
          "jinja template syntax. Applies only when "
          "`Enable Filter Select` is on."),
        "default_endpoint":
        _("Redirects to this endpoint when clicking on the table "
          "from the table list"),
        "filter_select_enabled":
        _("Whether to populate the filter's dropdown in the explore "
          "view's filter section with a list of distinct values fetched "
          "from the backend on the fly"),
        "is_sqllab_view":
        _("Whether the table was generated by the 'Visualize' flow "
          "in SQL Lab"),
        "template_params":
        _("A set of parameters that become available in the query using "
          "Jinja templating syntax"),
        "cache_timeout":
        _("Duration (in seconds) of the caching timeout for this table. "
          "A timeout of 0 indicates that the cache never expires. "
          "Note this defaults to the database timeout if undefined."),
        "extra":
        utils.markdown(
            "Extra data to specify table metadata. Currently supports "
            'metadata of the format: `{ "certification": { "certified_by": '
            '"Data Platform Team", "details": "This table is the source of truth." '
            '}, "warning_markdown": "This is a warning." }`.',
            True,
        ),
    }
    label_columns = {
        "slices": _("Associated Charts"),
        "link": _("Table"),
        "changed_by_": _("Changed By"),
        "database": _("Database"),
        "database_name": _("Database"),
        "changed_on_": _("Last Changed"),
        "filter_select_enabled": _("Enable Filter Select"),
        "schema": _("Schema"),
        "default_endpoint": _("Default Endpoint"),
        "offset": _("Offset"),
        "cache_timeout": _("Cache Timeout"),
        "table_name": _("Table Name"),
        "fetch_values_predicate": _("Fetch Values Predicate"),
        "owners": _("Owners"),
        "main_dttm_col": _("Main Datetime Column"),
        "description": _("Description"),
        "is_sqllab_view": _("SQL Lab View"),
        "template_params": _("Template parameters"),
        "extra": _("Extra"),
        "modified": _("Modified"),
    }
    edit_form_extra_fields = {
        "database":
        QuerySelectField(
            "Database",
            query_factory=lambda: db.session.query(models.Database),
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    def post_add(  # pylint: disable=arguments-differ
        self,
        item: "TableModelView",
        flash_message: bool = True,
        fetch_metadata: bool = True,
    ) -> None:
        if fetch_metadata:
            item.fetch_metadata()
        create_table_permissions(item)
        if flash_message:
            flash(
                _("The table was created. "
                  "As part of this two-phase configuration "
                  "process, you should now click the edit button by "
                  "the new table to configure it."),
                "info",
            )

    def post_update(self, item: "TableModelView") -> None:
        self.post_add(item, flash_message=False, fetch_metadata=False)

    def _delete(self, pk: int) -> None:
        DeleteMixin._delete(self, pk)

    @expose("/edit/<pk>", methods=["GET", "POST"])
    @has_access
    def edit(self, pk: str) -> FlaskResponse:
        """Simple hack to redirect to explore view after saving"""
        resp = super().edit(pk)
        if isinstance(resp, str):
            return resp
        return redirect("/superset/explore/table/{}/".format(pk))

    @expose("/list/")
    @has_access
    def list(self) -> FlaskResponse:
        return super().render_app_template()
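
The `extra` description in this view documents per-table metadata carrying certification details and a warning. Below is a minimal sketch of that shape, assuming only what the description above states (the values are illustrative):

import json

table_extra = json.dumps({
    "certification": {
        "certified_by": "Data Platform Team",
        "details": "This table is the source of truth.",
    },
    "warning_markdown": "This is a warning.",
})

parsed = json.loads(table_extra)
print(parsed["certification"]["certified_by"])  # -> Data Platform Team
print(parsed.get("warning_markdown"))           # -> This is a warning.
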
Code Example #4
File: views.py Project: wandagroup/cm
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    datamodel = SQLAInterface(models.DruidColumn)

    list_title = _('List Druid Column')
    show_title = _('Show Druid Column')
    add_title = _('Add Druid Column')
    edit_title = _('Edit Druid Column')

    list_widget = ListWidgetWithCheckboxes

    edit_columns = [
        'column_name', 'verbose_name', 'description', 'dimension_spec_json', 'datasource',
        'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
    add_columns = edit_columns
    list_columns = [
        'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
        'sum', 'min', 'max']
    can_delete = False
    page_size = 500
    label_columns = {
        'column_name': _('Column'),
        'type': _('Type'),
        'datasource': _('Datasource'),
        'groupby': _('Groupable'),
        'filterable': _('Filterable'),
        'count_distinct': _('Count Distinct'),
        'sum': _('Sum'),
        'min': _('Min'),
        'max': _('Max'),
        'verbose_name': _('Verbose Name'),
        'description': _('Description'),
    }
    description_columns = {
        'filterable': _(
            'Whether this column is exposed in the `Filters` section '
            'of the explore view.'),
        'dimension_spec_json': utils.markdown(
            'this field can be used to specify  '
            'a `dimensionSpec` as documented [here]'
            '(http://druid.io/docs/latest/querying/dimensionspecs.html). '
            'Make sure to input valid JSON and that the '
            '`outputName` matches the `column_name` defined '
            'above.',
            True),
    }

    def pre_update(self, col):
        # If a dimension spec JSON is given, ensure that it is
        # valid JSON and that `outputName` is specified
        if col.dimension_spec_json:
            try:
                dimension_spec = json.loads(col.dimension_spec_json)
            except ValueError as e:
                raise ValueError('Invalid Dimension Spec JSON: ' + str(e))
            if not isinstance(dimension_spec, dict):
                raise ValueError('Dimension Spec must be a JSON object')
            if 'outputName' not in dimension_spec:
                raise ValueError('Dimension Spec does not contain `outputName`')
            if 'dimension' not in dimension_spec:
                raise ValueError('Dimension Spec is missing `dimension`')
            # `outputName` should be the same as the `column_name`
            if dimension_spec['outputName'] != col.column_name:
                raise ValueError(
                    '`outputName` [{}] unequal to `column_name` [{}]'
                    .format(dimension_spec['outputName'], col.column_name))

    def post_update(self, col):
        col.refresh_metrics()

    def post_add(self, col):
        self.post_update(col)
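
For reference, here is a standalone sketch (not Superset code) of a `dimension_spec_json` value that would pass the pre_update() checks above: valid JSON, a dict, containing `dimension`, and with `outputName` equal to the column name. The Druid "default" dimension spec type and the column name are illustrative.

import json

column_name = "country_iso"
dimension_spec_json = json.dumps({
    "type": "default",
    "dimension": "country_iso",
    "outputName": "country_iso",
})

dimension_spec = json.loads(dimension_spec_json)        # must be valid JSON
assert isinstance(dimension_spec, dict)                 # must be a JSON object
assert "dimension" in dimension_spec                    # required key
assert dimension_spec.get("outputName") == column_name  # must match column_name
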
Code Example #5
File: core.py Project: yxchang/incubator-superset
def description_markeddown(self):
    return utils.markdown(self.description)
Code Example #6
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    datamodel = SQLAInterface(models.DruidMetric)

    list_title = _('Metrics')
    show_title = _('Show Druid Metric')
    add_title = _('Add Druid Metric')
    edit_title = _('Edit Druid Metric')

    list_columns = ['metric_name', 'verbose_name', 'metric_type']
    edit_columns = [
        'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
        'datasource', 'd3format', 'is_restricted', 'warning_text'
    ]
    add_columns = edit_columns
    page_size = 500
    validators_columns = {
        'json': [validate_json],
    }
    description_columns = {
        'metric_type':
        utils.markdown(
            'use `postagg` as the metric type if you are defining a '
            '[Druid Post Aggregation]'
            '(http://druid.io/docs/latest/querying/post-aggregations.html)',
            True),
        'is_restricted':
        _('Whether access to this metric is restricted '
          'to certain roles. Only roles with the permission '
          "'metric access on XXX (the name of this metric)' "
          'are allowed to access this metric'),
    }
    label_columns = {
        'metric_name': _('Metric'),
        'description': _('Description'),
        'verbose_name': _('Verbose Name'),
        'metric_type': _('Type'),
        'json': _('JSON'),
        'datasource': _('Druid Datasource'),
        'warning_text': _('Warning Message'),
        'is_restricted': _('Is Restricted'),
    }

    add_form_extra_fields = {
        'datasource':
        QuerySelectField(
            'Datasource',
            query_factory=lambda: db.session().query(models.DruidDatasource),
            allow_blank=True,
            widget=Select2Widget(extra_classes='readonly'),
        ),
    }

    edit_form_extra_fields = add_form_extra_fields

    def post_add(self, metric):
        if metric.is_restricted:
            security_manager.add_permission_view_menu('metric_access',
                                                      metric.get_perm())

    def post_update(self, metric):
        if metric.is_restricted:
            security_manager.add_permission_view_menu('metric_access',
                                                      metric.get_perm())
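
The `metric_type` description above says to use `postagg` when the `json` field holds a Druid post-aggregation. Below is a hedged sketch of what such a JSON value could look like, following the arithmetic post-aggregator shape from the linked Druid docs (the names and fields are made up); the view's validate_json validator only requires that it parses:

import json

postagg_json = json.dumps({
    "type": "arithmetic",
    "name": "avg_price",
    "fn": "/",
    "fields": [
        {"type": "fieldAccess", "name": "total_price", "fieldName": "total_price"},
        {"type": "fieldAccess", "name": "row_count", "fieldName": "row_count"},
    ],
})

json.loads(postagg_json)  # the form only checks that this parses as JSON
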
Code Example #7
class TableModelView(  # pylint: disable=too-many-ancestors
        DatasourceModelView, DeleteMixin, YamlExportMixin):
    datamodel = SQLAInterface(models.SqlaTable)
    include_route_methods = RouteMethod.CRUD_SET

    list_title = _("Tables")
    show_title = _("Show Table")
    add_title = _("Import a table definition")
    edit_title = _("Edit Table")

    list_columns = ["link", "database_name", "changed_by_", "modified"]
    order_columns = ["modified"]
    add_columns = ["database", "schema", "table_name"]
    edit_columns = [
        "table_name",
        "sql",
        "filter_select_enabled",
        "fetch_values_predicate",
        "database",
        "schema",
        "description",
        "owners",
        "main_dttm_col",
        "default_endpoint",
        "offset",
        "cache_timeout",
        "is_sqllab_view",
        "template_params",
        "extra",
    ]
    base_filters = [["id", DatasourceFilter, lambda: []]]
    show_columns = edit_columns + ["perm", "slices"]
    related_views = [
        TableColumnInlineView,
        SqlMetricInlineView,
    ]
    base_order = ("changed_on", "desc")
    search_columns = ("database", "schema", "table_name", "owners",
                      "is_sqllab_view")
    description_columns = {
        "slices":
        _("The list of charts associated with this table. By "
          "altering this datasource, you may change how these associated "
          "charts behave. "
          "Also note that charts need to point to a datasource, so "
          "this form will fail at saving if removing charts from a "
          "datasource. If you want to change the datasource for a chart, "
          "overwrite the chart from the 'explore view'"),
        "offset":
        _("Timezone offset (in hours) for this datasource"),
        "table_name":
        _("Name of the table that exists in the source database"),
        "schema":
        _("Schema, as used only in some databases like Postgres, Redshift "
          "and DB2"),
        "description":
        Markup(
            'Supports <a href="https://daringfireball.net/projects/markdown/">'
            "markdown</a>"),
        "sql":
        _("This fields acts a Superset view, meaning that Superset will "
          "run a query against this string as a subquery."),
        "fetch_values_predicate":
        _("Predicate applied when fetching distinct value to "
          "populate the filter control component. Supports "
          "jinja template syntax. Applies only when "
          "`Enable Filter Select` is on."),
        "default_endpoint":
        _("Redirects to this endpoint when clicking on the table "
          "from the table list"),
        "filter_select_enabled":
        _("Whether to populate the filter's dropdown in the explore "
          "view's filter section with a list of distinct values fetched "
          "from the backend on the fly"),
        "is_sqllab_view":
        _("Whether the table was generated by the 'Visualize' flow "
          "in SQL Lab"),
        "template_params":
        _("A set of parameters that become available in the query using "
          "Jinja templating syntax"),
        "cache_timeout":
        _("Duration (in seconds) of the caching timeout for this table. "
          "A timeout of 0 indicates that the cache never expires. "
          "Note this defaults to the database timeout if undefined."),
        "extra":
        utils.markdown(
            "Extra data to specify table metadata. Currently supports "
            'certification data of the format: `{ "certification": { "certified_by": '
            '"Data Platform Team", "details": "This table is the source of truth." '
            "} }`.",
            True,
        ),
    }
    label_columns = {
        "slices": _("Associated Charts"),
        "link": _("Table"),
        "changed_by_": _("Changed By"),
        "database": _("Database"),
        "database_name": _("Database"),
        "changed_on_": _("Last Changed"),
        "filter_select_enabled": _("Enable Filter Select"),
        "schema": _("Schema"),
        "default_endpoint": _("Default Endpoint"),
        "offset": _("Offset"),
        "cache_timeout": _("Cache Timeout"),
        "table_name": _("Table Name"),
        "fetch_values_predicate": _("Fetch Values Predicate"),
        "owners": _("Owners"),
        "main_dttm_col": _("Main Datetime Column"),
        "description": _("Description"),
        "is_sqllab_view": _("SQL Lab View"),
        "template_params": _("Template parameters"),
        "extra": _("Extra"),
        "modified": _("Modified"),
    }
    edit_form_extra_fields = {
        "database":
        QuerySelectField(
            "Database",
            query_factory=lambda: db.session.query(models.Database),
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    def pre_add(self, item: "TableModelView") -> None:
        validate_sqlatable(item)

    def post_add(  # pylint: disable=arguments-differ
            self,
            item: "TableModelView",
            flash_message: bool = True) -> None:
        item.fetch_metadata()
        create_table_permissions(item)
        if flash_message:
            flash(
                _("The table was created. "
                  "As part of this two-phase configuration "
                  "process, you should now click the edit button by "
                  "the new table to configure it."),
                "info",
            )

    def post_update(self, item: "TableModelView") -> None:
        self.post_add(item, flash_message=False)

    def _delete(self, pk: int) -> None:
        DeleteMixin._delete(self, pk)

    @expose("/edit/<pk>", methods=["GET", "POST"])
    @has_access
    def edit(self, pk: int) -> FlaskResponse:
        """Simple hack to redirect to explore view after saving"""
        resp = super(TableModelView, self).edit(pk)
        if isinstance(resp, str):
            return resp
        return redirect("/superset/explore/table/{}/".format(pk))

    @action("refresh", __("Refresh Metadata"), __("Refresh column metadata"),
            "fa-refresh")
    def refresh(  # pylint: disable=no-self-use, too-many-branches
        self, tables: Union["TableModelView",
                            List["TableModelView"]]) -> FlaskResponse:
        if not isinstance(tables, list):
            tables = [tables]

        @dataclass
        class RefreshResults:
            successes: List[TableModelView] = field(default_factory=list)
            failures: List[TableModelView] = field(default_factory=list)
            added: Dict[str, List[str]] = field(default_factory=dict)
            removed: Dict[str, List[str]] = field(default_factory=dict)
            modified: Dict[str, List[str]] = field(default_factory=dict)

        results = RefreshResults()

        for table_ in tables:
            try:
                metadata_results = table_.fetch_metadata()
                if metadata_results.added:
                    results.added[table_.table_name] = metadata_results.added
                if metadata_results.removed:
                    results.removed[
                        table_.table_name] = metadata_results.removed
                if metadata_results.modified:
                    results.modified[
                        table_.table_name] = metadata_results.modified
                results.successes.append(table_)
            except Exception:  # pylint: disable=broad-except
                results.failures.append(table_)

        if len(results.successes) > 0:
            success_msg = _(
                "Metadata refreshed for the following table(s): %(tables)s",
                tables=", ".join([t.table_name for t in results.successes]),
            )
            flash(success_msg, "info")
        if results.added:
            added_tables = []
            for table, cols in results.added.items():
                added_tables.append(f"{table} ({', '.join(cols)})")
            flash(
                _(
                    "The following tables added new columns: %(tables)s",
                    tables=", ".join(added_tables),
                ),
                "info",
            )
        if results.removed:
            removed_tables = []
            for table, cols in results.removed.items():
                removed_tables.append(f"{table} ({', '.join(cols)})")
            flash(
                _(
                    "The following tables removed columns: %(tables)s",
                    tables=", ".join(removed_tables),
                ),
                "info",
            )
        if results.modified:
            modified_tables = []
            for table, cols in results.modified.items():
                modified_tables.append(f"{table} ({', '.join(cols)})")
            flash(
                _(
                    "The following tables update column metadata: %(tables)s",
                    tables=", ".join(modified_tables),
                ),
                "info",
            )
        if len(results.failures) > 0:
            failure_msg = _(
                "Unable to refresh metadata for the following table(s): %(tables)s",
                tables=", ".join([t.table_name for t in results.failures]),
            )
            flash(failure_msg, "danger")

        return redirect("/tablemodelview/list/")

    @expose("/list/")
    @has_access
    def list(self) -> FlaskResponse:
        if not is_feature_enabled("ENABLE_REACT_CRUD_VIEWS"):
            return super().list()

        return super().render_app_template()
Code Example #8
class DruidColumnInlineView(CompactCRUDMixin, EnsureEnabledMixin,
                            SupersetModelView):
    datamodel = SQLAInterface(models.DruidColumn)
    include_route_methods = RouteMethod.RELATED_VIEW_SET

    list_title = _("Columns")
    show_title = _("Show Druid Column")
    add_title = _("Add Druid Column")
    edit_title = _("Edit Druid Column")

    list_widget = ListWidgetWithCheckboxes

    edit_columns = [
        "column_name",
        "verbose_name",
        "description",
        "dimension_spec_json",
        "datasource",
        "groupby",
        "filterable",
    ]
    add_columns = edit_columns
    list_columns = [
        "column_name", "verbose_name", "type", "groupby", "filterable"
    ]
    can_delete = False
    page_size = 500
    label_columns = {
        "column_name": _("Column"),
        "type": _("Type"),
        "datasource": _("Datasource"),
        "groupby": _("Groupable"),
        "filterable": _("Filterable"),
    }
    description_columns = {
        "filterable":
        _("Whether this column is exposed in the `Filters` section "
          "of the explore view."),
        "dimension_spec_json":
        utils.markdown(
            "this field can be used to specify  "
            "a `dimensionSpec` as documented [here]"
            "(http://druid.io/docs/latest/querying/dimensionspecs.html). "
            "Make sure to input valid JSON and that the "
            "`outputName` matches the `column_name` defined "
            "above.",
            True,
        ),
    }

    add_form_extra_fields = {
        "datasource":
        QuerySelectField(
            "Datasource",
            query_factory=lambda: db.session.query(models.DruidDatasource),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields

    def pre_update(self, item: "DruidColumnInlineView") -> None:
        # If a dimension spec JSON is given, ensure that it is
        # valid JSON and that `outputName` is specified
        if item.dimension_spec_json:
            try:
                dimension_spec = json.loads(item.dimension_spec_json)
            except ValueError as ex:
                raise ValueError("Invalid Dimension Spec JSON: " + str(ex))
            if not isinstance(dimension_spec, dict):
                raise ValueError("Dimension Spec must be a JSON object")
            if "outputName" not in dimension_spec:
                raise ValueError(
                    "Dimension Spec does not contain `outputName`")
            if "dimension" not in dimension_spec:
                raise ValueError("Dimension Spec is missing `dimension`")
            # `outputName` should be the same as the `column_name`
            if dimension_spec["outputName"] != item.column_name:
                raise ValueError(
                    "`outputName` [{}] unequal to `column_name` [{}]".format(
                        dimension_spec["outputName"], item.column_name))

    def post_update(self, item: "DruidColumnInlineView") -> None:
        item.refresh_metrics()

    def post_add(self, item: "DruidColumnInlineView") -> None:
        self.post_update(item)
Code Example #9
class AlertModelView(SupersetModelView):  # pylint: disable=too-many-ancestors
    datamodel = SQLAInterface(Alert)
    route_base = "/alert"
    include_route_methods = RouteMethod.CRUD_SET
    _extra_data: Dict[str, Union[bool, Optional[str]]] = {
        "test_alert": False,
        "test_email_recipients": None,
    }

    list_columns = (
        "label",
        "database",
        "crontab",
        "last_eval_dttm",
        "last_state",
        "active",
    )
    add_columns = (
        "label",
        "active",
        "crontab",
        "database",
        "sql",
        # TODO: implement different types of alerts
        # "alert_type",
        "owners",
        "recipients",
        "slack_channel",
        "slice",
        # TODO: implement dashboard screenshots with alerts
        # "dashboard",
        "log_retention",
        "grace_period",
        "test_alert",
        "test_email_recipients",
        "test_slack_channel",
    )
    label_columns = {
        "sql": "SQL",
        "log_retention": _("Log Retentions (days)"),
    }
    description_columns = {
        "sql":
        _("A SQL statement that defines whether the alert should get "
          "triggered or not. If the statement return no row, the alert "
          "is not triggered. If the statement returns one or many rows, "
          "the cells will be evaluated to see if they are 'truthy' "
          "if any cell is truthy, the alert will fire. Truthy values "
          "are non zero, non null, non empty strings."),
        "crontab":
        markdown(
            "A CRON-like expression. "
            "[Crontab Guru](https://crontab.guru/) is "
            "a helpful resource that can help you craft a CRON expression.",
            True,
        ),
        "recipients":
        _("A semicolon ';' delimited list of email addresses"),
        "log_retention":
        _("How long to keep the logs around for this alert"),
        "grace_period":
        _("Once an alert is triggered, how long, in seconds, before "
          "Superset nags you again."),
    }

    add_form_extra_fields = {
        "test_alert":
        BooleanField(
            "Send Test Alert",
            default=False,
            description=
            "If enabled, a test alert will be sent on the creation / update"
            " of an active alert. All alerts after will be sent only if the SQL "
            "statement defined above returns True.",
        ),
        "test_email_recipients":
        StringField(
            "Test Email Recipients",
            default=None,
            description="List of recipients to send test email to. "
            "If empty, an email will be sent to the original recipients.",
        ),
        "test_slack_channel":
        StringField(
            "Test Slack Channel",
            default=None,
            description="A slack channel to send a test message to. "
            "If empty, an alert will be sent to the original channel.",
        ),
    }
    edit_form_extra_fields = add_form_extra_fields
    edit_columns = add_columns
    related_views = [AlertLogModelView]

    def process_form(self, form: Form, is_created: bool) -> None:
        email_recipients = None
        if form.test_email_recipients.data:
            email_recipients = get_email_address_str(
                form.test_email_recipients.data)

        test_slack_channel = (form.test_slack_channel.data.strip()
                              if form.test_slack_channel.data else None)

        self._extra_data["test_alert"] = form.test_alert.data
        self._extra_data["test_email_recipients"] = email_recipients
        self._extra_data["test_slack_channel"] = test_slack_channel

    def pre_add(self, item: "AlertModelView") -> None:
        item.recipients = get_email_address_str(item.recipients)

        if not croniter.is_valid(item.crontab):
            raise SupersetException("Invalid crontab format")

    def post_add(self, item: "AlertModelView") -> None:
        if self._extra_data["test_alert"]:
            recipients = self._extra_data[
                "test_email_recipients"] or item.recipients
            slack_channel = self._extra_data[
                "test_slack_channel"] or item.slack_channel
            args = (ScheduleType.alert, item.id)
            kwargs = dict(recipients=recipients, slack_channel=slack_channel)
            schedule_alert_query.apply_async(args=args, kwargs=kwargs)

    def post_update(self, item: "AlertModelView") -> None:
        self.post_add(item)
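
pre_add() above rejects alerts whose `crontab` does not validate with croniter. A quick standalone check of that rule, assuming the croniter package is installed (the expressions are just examples):

from croniter import croniter

for expr in ("0 * * * *", "*/5 9-17 * * 1-5", "not a crontab"):
    # croniter.is_valid() returns a bool rather than raising, which is what pre_add relies on
    print(expr, "->", "valid" if croniter.is_valid(expr) else "invalid")
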
Code Example #10
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):
    datamodel = SQLAInterface(models.SqlMetric)
    include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET

    list_title = _("Metrics")
    show_title = _("Show Metric")
    add_title = _("Add Metric")
    edit_title = _("Edit Metric")

    list_columns = ["metric_name", "verbose_name", "metric_type"]
    edit_columns = [
        "metric_name",
        "description",
        "verbose_name",
        "metric_type",
        "expression",
        "table",
        "d3format",
        "warning_text",
    ]
    description_columns = {
        "expression":
        utils.markdown(
            "a valid, *aggregating* SQL expression as supported by the "
            "underlying backend. Example: `count(DISTINCT userid)`",
            True,
        ),
        "d3format":
        utils.markdown(
            "d3 formatting string as defined [here]"
            "(https://github.com/d3/d3-format/blob/master/README.md#format). "
            "For instance, this default formatting applies in the Table "
            "visualization and allow for different metric to use different "
            "formats",
            True,
        ),
    }
    add_columns = edit_columns
    page_size = 500
    label_columns = {
        "metric_name": _("Metric"),
        "description": _("Description"),
        "verbose_name": _("Verbose Name"),
        "metric_type": _("Type"),
        "expression": _("SQL Expression"),
        "table": _("Table"),
        "d3format": _("D3 Format"),
        "warning_text": _("Warning Message"),
    }

    add_form_extra_fields = {
        "table":
        QuerySelectField(
            "Table",
            query_factory=lambda: db.session().query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields
Code Example #11
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    datamodel = SQLAInterface(models.TableColumn)

    list_title = _("Columns")
    show_title = _("Show Column")
    add_title = _("Add Column")
    edit_title = _("Edit Column")

    can_delete = False
    list_widget = ListWidgetWithCheckboxes
    edit_columns = [
        "column_name",
        "verbose_name",
        "description",
        "type",
        "groupby",
        "filterable",
        "table",
        "expression",
        "is_dttm",
        "python_date_format",
        "database_expression",
    ]
    add_columns = edit_columns
    list_columns = [
        "column_name",
        "verbose_name",
        "type",
        "groupby",
        "filterable",
        "is_dttm",
    ]
    page_size = 500
    description_columns = {
        "is_dttm":
        _("Whether to make this column available as a "
          "[Time Granularity] option, column has to be DATETIME or "
          "DATETIME-like"),
        "filterable":
        _("Whether this column is exposed in the `Filters` section "
          "of the explore view."),
        "type":
        _("The data type that was inferred by the database. "
          "It may be necessary to input a type manually for "
          "expression-defined columns in some cases. In most case "
          "users should not need to alter this."),
        "expression":
        utils.markdown(
            "a valid, *non-aggregating* SQL expression as supported by the "
            "underlying backend. Example: `substr(name, 1, 1)`",
            True,
        ),
        "python_date_format":
        utils.markdown(
            Markup(
                "The pattern of timestamp format, use "
                '<a href="https://docs.python.org/2/library/'
                'datetime.html#strftime-strptime-behavior">'
                "python datetime string pattern</a> "
                "expression. If time is stored in epoch "
                "format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` "
                "below empty if timestamp is stored in "
                "String or Integer(epoch) type"),
            True,
        ),
        "database_expression":
        utils.markdown(
            "The database expression to cast internal datetime "
            "constants to database date/timestamp type according to the DBAPI. "
            "The expression should follow the pattern of "
            "%Y-%m-%d %H:%M:%S, based on different DBAPI. "
            "The string should be a python string formatter \n"
            "`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle "
            "Superset uses default expression based on DB URI if this "
            "field is blank.",
            True,
        ),
    }
    label_columns = {
        "column_name": _("Column"),
        "verbose_name": _("Verbose Name"),
        "description": _("Description"),
        "groupby": _("Groupable"),
        "filterable": _("Filterable"),
        "table": _("Table"),
        "expression": _("Expression"),
        "is_dttm": _("Is temporal"),
        "python_date_format": _("Datetime Format"),
        "database_expression": _("Database Expression"),
        "type": _("Type"),
    }

    add_form_extra_fields = {
        "table":
        QuerySelectField(
            "Table",
            query_factory=lambda: db.session().query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields
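
The `database_expression` description above calls for a Python format string that wraps a `%Y-%m-%d %H:%M:%S` constant in the database's own date-cast syntax. A small standalone sketch of that idea, reusing the Oracle TO_DATE example from the description (the timestamp is arbitrary):

from datetime import datetime

database_expression = "TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')"
dttm = datetime(2021, 3, 14, 15, 9, 26)
print(database_expression.format(dttm.strftime("%Y-%m-%d %H:%M:%S")))
# -> TO_DATE('2021-03-14 15:09:26', 'YYYY-MM-DD HH24:MI:SS')
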
Code Example #12
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    datamodel = SQLAInterface(models.SqlMetric)

    list_title = _("Metrics")
    show_title = _("Show Metric")
    add_title = _("Add Metric")
    edit_title = _("Edit Metric")

    list_columns = ["metric_name", "verbose_name", "metric_type"]
    edit_columns = [
        "metric_name",
        "description",
        "verbose_name",
        "metric_type",
        "expression",
        "table",
        "d3format",
        "is_restricted",
        "warning_text",
    ]
    description_columns = {
        "expression":
        utils.markdown(
            "a valid, *aggregating* SQL expression as supported by the "
            "underlying backend. Example: `count(DISTINCT userid)`",
            True,
        ),
        "is_restricted":
        _("Whether access to this metric is restricted "
          "to certain roles. Only roles with the permission "
          "'metric access on XXX (the name of this metric)' "
          "are allowed to access this metric"),
        "d3format":
        utils.markdown(
            "d3 formatting string as defined [here]"
            "(https://github.com/d3/d3-format/blob/master/README.md#format). "
            "For instance, this default formatting applies in the Table "
            "visualization and allow for different metric to use different "
            "formats",
            True,
        ),
    }
    add_columns = edit_columns
    page_size = 500
    label_columns = {
        "metric_name": _("Metric"),
        "description": _("Description"),
        "verbose_name": _("Verbose Name"),
        "metric_type": _("Type"),
        "expression": _("SQL Expression"),
        "table": _("Table"),
        "d3format": _("D3 Format"),
        "is_restricted": _("Is Restricted"),
        "warning_text": _("Warning Message"),
    }

    add_form_extra_fields = {
        "table":
        QuerySelectField(
            "Table",
            query_factory=lambda: db.session().query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields

    def post_add(self, metric):
        if metric.is_restricted:
            security_manager.add_permission_view_menu("metric_access",
                                                      metric.get_perm())

    def post_update(self, metric):
        if metric.is_restricted:
            security_manager.add_permission_view_menu("metric_access",
                                                      metric.get_perm())
Code Example #13
class ValidatorInlineView(  # pylint: disable=too-many-ancestors
    CompactCRUDMixin, SupersetModelView
):
    datamodel = SQLAInterface(Validator)
    include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET
    list_title = _("Validators")
    show_title = _("Show Validator")
    add_title = _("Add Validator")
    edit_title = _("Edit Validator")

    edit_columns = [
        "alert",
        "validator_type",
        "config",
    ]

    add_columns = edit_columns

    list_columns = [
        "validator_type",
        "alert.label",
    ]

    label_columns = {
        "validator_type": _("Validator Type"),
        "alert": _("Alert"),
    }

    description_columns = {
        "validator_type": utils.markdown(
            "Determines when to trigger alert based off value from SQLObserver query. "
            "Alerts will be triggered with these validator types:"
            "<ul><li>Not Null - When the return value is Not NULL, Empty, or 0</li>"
            "<li>Operator - When `sql_return_value comparison_operator threshold`"
            " is True e.g. `50 <= 75`<br>Supports the comparison operators <, <=, "
            ">, >=, ==, and !=</li></ul>",
            True,
        ),
        "config": utils.markdown(
            "JSON string containing values the validator will compare against. "
            "Each validator needs the following values:"
            "<ul><li>Not Null - Nothing. You can leave the config as it is.</li>"
            '<li>Operator<ul><li>`"op": "operator"` with an operator from ["<", '
            '"<=", ">", ">=", "==", "!="] e.g. `"op": ">="`</li>'
            '<li>`"threshold": threshold_value` e.g. `"threshold": 50`'
            '</li></ul>Example config:<br>{<br> "op":">=",<br>"threshold": 60<br>}'
            "</li></ul>",
            True,
        ),
    }

    def pre_add(self, item: "ValidatorInlineView") -> None:
        if item.alert.validators and item.alert.validators[0].id != item.id:
            raise SupersetException(
                "Error: Alerts currently only support 1 validator per alert."
            )

        item.validator_type = item.validator_type.lower()
        check_validator(item.validator_type, item.config)

    def pre_update(self, item: "ValidatorInlineView") -> None:
        item.validator_type = item.validator_type.lower()
        check_validator(item.validator_type, item.config)
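
The `config` description above documents the operator validator's JSON shape. The helper below is a hypothetical stand-in (not Superset's check_validator or its operator validator) that just shows how a config like `{"op": ">=", "threshold": 60}` would be applied to a value returned by the alert's SQL query:

import json
import operator

OPS = {"<": operator.lt, "<=": operator.le, ">": operator.gt,
       ">=": operator.ge, "==": operator.eq, "!=": operator.ne}

def operator_validator_triggers(sql_return_value, config_json):
    """Return True when `sql_return_value <op> threshold` holds for the given config."""
    config = json.loads(config_json)
    return OPS[config["op"]](sql_return_value, config["threshold"])

print(operator_validator_triggers(75, '{"op": ">=", "threshold": 60}'))  # True
print(operator_validator_triggers(50, '{"op": ">=", "threshold": 60}'))  # False
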
Code Example #14
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    datamodel = SQLAInterface(models.TableColumn)

    list_title = _('Columns')
    show_title = _('Show Column')
    add_title = _('Add Column')
    edit_title = _('Edit Column')

    can_delete = False
    list_widget = ListWidgetWithCheckboxes
    edit_columns = [
        'column_name', 'verbose_name', 'description',
        'type', 'groupby', 'filterable',
        'table', 'expression',
        'is_dttm', 'python_date_format', 'database_expression']
    add_columns = edit_columns
    list_columns = [
        'column_name', 'verbose_name', 'type', 'groupby', 'filterable',
        'is_dttm']
    page_size = 500
    description_columns = {
        'is_dttm': _(
            'Whether to make this column available as a '
            '[Time Granularity] option, column has to be DATETIME or '
            'DATETIME-like'),
        'filterable': _(
            'Whether this column is exposed in the `Filters` section '
            'of the explore view.'),
        'type': _(
            'The data type that was inferred by the database. '
            'It may be necessary to input a type manually for '
            'expression-defined columns in some cases. In most cases '
            'users should not need to alter this.'),
        'expression': utils.markdown(
            'a valid, *non-aggregating* SQL expression as supported by the '
            'underlying backend. Example: `substr(name, 1, 1)`', True),
        'python_date_format': utils.markdown(Markup(
            'The pattern of timestamp format, use '
            '<a href="https://docs.python.org/2/library/'
            'datetime.html#strftime-strptime-behavior">'
            'python datetime string pattern</a> '
            'expression. If time is stored in epoch '
            'format, put `epoch_s` or `epoch_ms`. Leave `Database Expression` '
            'below empty if timestamp is stored in '
            'String or Integer(epoch) type'), True),
        'database_expression': utils.markdown(
            'The database expression to cast internal datetime '
            'constants to database date/timestamp type according to the DBAPI. '
            'The expression should follow the pattern of '
            '%Y-%m-%d %H:%M:%S, based on different DBAPI. '
            'The string should be a python string formatter \n'
            "`Ex: TO_DATE('{}', 'YYYY-MM-DD HH24:MI:SS')` for Oracle "
            'Superset uses default expression based on DB URI if this '
            'field is blank.', True),
    }
    label_columns = {
        'column_name': _('Column'),
        'verbose_name': _('Verbose Name'),
        'description': _('Description'),
        'groupby': _('Groupable'),
        'filterable': _('Filterable'),
        'table': _('Table'),
        'expression': _('Expression'),
        'is_dttm': _('Is temporal'),
        'python_date_format': _('Datetime Format'),
        'database_expression': _('Database Expression'),
        'type': _('Type'),
    }
Code Example #15
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):
    datamodel = SQLAInterface(models.TableColumn)
    # TODO TODO, review need for this on related_views
    class_permission_name = "Dataset"
    method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
    include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET

    list_title = _("Columns")
    show_title = _("Show Column")
    add_title = _("Add Column")
    edit_title = _("Edit Column")

    can_delete = False
    list_widget = ListWidgetWithCheckboxes
    edit_columns = [
        "column_name",
        "verbose_name",
        "description",
        "type",
        "groupby",
        "filterable",
        "table",
        "expression",
        "is_dttm",
        "python_date_format",
        "extra",
    ]
    add_columns = edit_columns
    list_columns = [
        "column_name",
        "verbose_name",
        "type",
        "groupby",
        "filterable",
        "is_dttm",
    ]
    page_size = 500
    description_columns = {
        "is_dttm":
        _("Whether to make this column available as a "
          "[Time Granularity] option, column has to be DATETIME or "
          "DATETIME-like"),
        "filterable":
        _("Whether this column is exposed in the `Filters` section "
          "of the explore view."),
        "type":
        _("The data type that was inferred by the database. "
          "It may be necessary to input a type manually for "
          "expression-defined columns in some cases. In most case "
          "users should not need to alter this."),
        "expression":
        utils.markdown(
            "a valid, *non-aggregating* SQL expression as supported by the "
            "underlying backend. Example: `substr(name, 1, 1)`",
            True,
        ),
        "python_date_format":
        utils.markdown(
            Markup(
                "The pattern of timestamp format. For strings use "
                '<a href="https://docs.python.org/2/library/'
                'datetime.html#strftime-strptime-behavior">'
                "python datetime string pattern</a> expression which needs to "
                'adhere to the <a href="https://en.wikipedia.org/wiki/ISO_8601">'
                "ISO 8601</a> standard to ensure that the lexicographical ordering "
                "coincides with the chronological ordering. If the timestamp "
                "format does not adhere to the ISO 8601 standard you will need to "
                "define an expression and type for transforming the string into a "
                "date or timestamp. Note currently time zones are not supported. "
                "If time is stored in epoch format, put `epoch_s` or `epoch_ms`."
                "If no pattern is specified we fall back to using the optional "
                "defaults on a per database/column name level via the extra parameter."
                ""),
            True,
        ),
        "extra":
        utils.markdown(
            "Extra data to specify column metadata. Currently supports "
            'certification data of the format: `{ "certification": { "certified_by": '
            '"Taylor Swift", "details": "This column is the source of truth." '
            "} }`. This should be modified from the edit datasource model in "
            "Explore to ensure correct formatting.",
            True,
        ),
    }
    label_columns = {
        "column_name": _("Column"),
        "verbose_name": _("Verbose Name"),
        "description": _("Description"),
        "groupby": _("Groupable"),
        "filterable": _("Filterable"),
        "table": _("Table"),
        "expression": _("Expression"),
        "is_dttm": _("Is temporal"),
        "python_date_format": _("Datetime Format"),
        "type": _("Type"),
    }
    validators_columns = {
        "python_date_format": [
            # Restrict viable values to epoch_s, epoch_ms, or a strftime format
            # which adhere's to the ISO 8601 format (without time zone).
            Regexp(
                re.compile(
                    r"""
                    ^(
                        epoch_s|epoch_ms|
                        (?P<date>%Y(-%m(-%d)?)?)([\sT](?P<time>%H(:%M(:%S(\.%f)?)?)?))?
                    )$
                    """,
                    re.VERBOSE,
                ),
                message=_("Invalid date/timestamp format"),
            )
        ]
    }

    add_form_extra_fields = {
        "table":
        QuerySelectField(
            "Table",
            query_factory=lambda: db.session.query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields

    def pre_add(self, item: "models.TableColumn") -> None:
        logger.warning(
            "This endpoint is deprecated and will be removed in version 2.0.0")
        if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
            check_ownership(item.table)

    def pre_update(self, item: "models.TableColumn") -> None:
        logger.warning(
            "This endpoint is deprecated and will be removed in version 2.0.0")
        if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
            check_ownership(item.table)

    def pre_delete(self, item: "models.TableColumn") -> None:
        logger.warning(
            "This endpoint is deprecated and will be removed in version 2.0.0")
        if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
            check_ownership(item.table)
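
Note: the `validators_columns` entry above only accepts `epoch_s`, `epoch_ms`, or a strftime pattern consistent with ISO 8601. The following is a minimal standalone sketch reusing the same regex; the test values are illustrative, not from Superset.

import re

# Same pattern as in validators_columns above: epoch_s, epoch_ms, or an
# ISO 8601-style strftime pattern without a time zone.
PYTHON_DATE_FORMAT = re.compile(
    r"""
    ^(
        epoch_s|epoch_ms|
        (?P<date>%Y(-%m(-%d)?)?)([\sT](?P<time>%H(:%M(:%S(\.%f)?)?)?))?
    )$
    """,
    re.VERBOSE,
)

# Accepted (illustrative values):
for fmt in ("epoch_s", "epoch_ms", "%Y-%m-%d", "%Y-%m-%dT%H:%M:%S.%f"):
    assert PYTHON_DATE_FORMAT.match(fmt)

# Rejected: a day-first pattern and a pattern carrying a time zone.
for fmt in ("%d/%m/%Y", "%Y-%m-%d %H:%M:%S %z"):
    assert PYTHON_DATE_FORMAT.match(fmt) is None
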
Code Example #16
0
File: views.py Project: tong900801/beta_v1.0
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):
    datamodel = SQLAInterface(models.TableColumn)

    list_title = _("Columns")
    show_title = _("Show Column")
    add_title = _("Add Column")
    edit_title = _("Edit Column")

    can_delete = False
    list_widget = ListWidgetWithCheckboxes
    edit_columns = [
        "column_name",
        "verbose_name",
        "description",
        "type",
        "groupby",
        "filterable",
        "table",
        "expression",
        "is_dttm",
        "python_date_format",
    ]
    add_columns = edit_columns
    list_columns = [
        "column_name",
        "verbose_name",
        "type",
        "groupby",
        "filterable",
        "is_dttm",
    ]
    page_size = 500
    description_columns = {
        "is_dttm": _(
            "Whether to make this column available as a "
            "[Time Granularity] option, column has to be DATETIME or "
            "DATETIME-like"
        ),
        "filterable": _(
            "Whether this column is exposed in the `Filters` section "
            "of the explore view."
        ),
        "type": _(
            "The data type that was inferred by the database. "
            "It may be necessary to input a type manually for "
            "expression-defined columns in some cases. In most case "
            "users should not need to alter this."
        ),
        "expression": utils.markdown(
            "a valid, *non-aggregating* SQL expression as supported by the "
            "underlying backend. Example: `substr(name, 1, 1)`",
            True,
        ),
        "python_date_format": utils.markdown(
            Markup(
                "The pattern of timestamp format. For strings use "
                '<a href="https://docs.python.org/2/library/'
                'datetime.html#strftime-strptime-behavior">'
                "python datetime string pattern</a> expression which needs to "
                'adhere to the <a href="https://en.wikipedia.org/wiki/ISO_8601">'
                "ISO 8601</a> standard to ensure that the lexicographical ordering "
                "coincides with the chronological ordering. If the timestamp "
                "format does not adhere to the ISO 8601 standard you will need to "
                "define an expression and type for transforming the string into a "
                "date or timestamp. Note currently time zones are not supported. "
                "If time is stored in epoch format, put `epoch_s` or `epoch_ms`."
                "If no pattern is specified we fall back to using the optional "
                "defaults on a per database/column name level via the extra parameter."
                ""
            ),
            True,
        ),
    }
    label_columns = {
        "column_name": _("Column"),
        "verbose_name": _("Verbose Name"),
        "description": _("Description"),
        "groupby": _("Groupable"),
        "filterable": _("Filterable"),
        "table": _("Table"),
        "expression": _("Expression"),
        "is_dttm": _("Is temporal"),
        "python_date_format": _("Datetime Format"),
        "type": _("Type"),
    }
    validators_columns = {
        "python_date_format": [
            # Restrict viable values to epoch_s, epoch_ms, or a strftime format
            # which adheres to the ISO 8601 format (without time zone).
            Regexp(
                re.compile(
                    r"""
                    ^(
                        epoch_s|epoch_ms|
                        (?P<date>%Y(-%m(-%d)?)?)([\sT](?P<time>%H(:%M(:%S(\.%f)?)?)?))?
                    )$
                    """,
                    re.VERBOSE,
                ),
                message=_("Invalid date/timestamp format"),
            )
        ]
    }

    add_form_extra_fields = {
        "table": QuerySelectField(
            "Table",
            query_factory=lambda: db.session().query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields
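
As the `python_date_format` help text above explains, an ISO 8601 pattern keeps string (lexicographical) order aligned with chronological order. A quick standard-library illustration; the dates are made up.

from datetime import datetime

earlier = datetime(2021, 3, 9, 8, 0, 0)
later = datetime(2021, 11, 2, 7, 30, 0)

# ISO 8601-style pattern: lexicographical order matches chronological order.
iso = "%Y-%m-%d %H:%M:%S"
assert earlier.strftime(iso) < later.strftime(iso)

# Day-first pattern: string order can disagree with time order, which is why
# such columns need a transforming expression and type instead.
dmy = "%d/%m/%Y"
assert not (earlier.strftime(dmy) < later.strftime(dmy))
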
Code Example #17
0
    "Allow SQL Lab to fetch a list of all tables and all views across "
    "all database schemas. For large data warehouse with thousands of "
    "tables, this can be expensive and put strain on the system.")  # pylint: disable=invalid-name
impersonate_user_description = (
    "If Presto, all the queries in SQL Lab are going to be executed as the "
    "currently logged on user who must have permission to run them.<br/>"
    "If Hive and hive.server2.enable.doAs is enabled, will run the queries as "
    "service account, but impersonate the currently logged on user "
    "via hive.server2.proxy.user property.")
force_ctas_schema_description = (
    "When allowing CREATE TABLE AS option in SQL Lab, "
    "this option forces the table to be created in this schema")
encrypted_extra_description = markdown(
    "JSON string containing additional connection configuration.<br/>"
    "This is used to provide connection information for systems like "
    "Hive, Presto, and BigQuery, which do not conform to the username:password "
    "syntax normally used by SQLAlchemy.",
    True,
)
extra_description = markdown(
    "JSON string containing extra configuration elements.<br/>"
    "1. The ``engine_params`` object gets unpacked into the "
    "[sqlalchemy.create_engine]"
    "(https://docs.sqlalchemy.org/en/latest/core/engines.html#"
    "sqlalchemy.create_engine) call, while the ``metadata_params`` "
    "gets unpacked into the [sqlalchemy.MetaData]"
    "(https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html"
    "#sqlalchemy.schema.MetaData) call.<br/>"
    "2. The ``metadata_cache_timeout`` is a cache timeout setting "
    "in seconds for metadata fetch of this database. Specify it as "
    '**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
Code Example #18
0
class AlertModelView(SupersetModelView):  # pylint: disable=too-many-ancestors
    datamodel = SQLAInterface(Alert)
    route_base = "/alert"
    include_route_methods = RouteMethod.CRUD_SET | {"log"}

    list_columns = (
        "label",
        "database",
        "sql",
        "pretty_config",
        "crontab",
        "last_eval_dttm",
        "last_state",
        "active",
        "owners",
    )
    show_columns = (
        "label",
        "database",
        "sql",
        "validator_type",
        "validator_config",
        "active",
        "crontab",
        "owners",
        "slice",
        "recipients",
        "slack_channel",
        "log_retention",
        "grace_period",
        "last_eval_dttm",
        "last_state",
    )
    order_columns = ["label", "last_eval_dttm", "last_state", "active"]
    add_columns = (
        "label",
        "database",
        "sql",
        "validator_type",
        "validator_config",
        "active",
        "crontab",
        # TODO: implement different types of alerts
        # "alert_type",
        "owners",
        "recipients",
        "slack_channel",
        "slice",
        # TODO: implement dashboard screenshots with alerts
        # "dashboard",
        "log_retention",
        "grace_period",
    )
    label_columns = {
        "log_retention": _("Log Retentions (days)"),
    }
    description_columns = {
        "crontab":
        markdown(
            "A CRON-like expression. "
            "[Crontab Guru](https://crontab.guru/) is "
            "a helpful resource that can help you craft a CRON expression.",
            True,
        ),
        "recipients":
        _("A semicolon ';' delimited list of email addresses"),
        "log_retention":
        _("How long to keep the logs around for this alert"),
        "grace_period":
        _("Once an alert is triggered, how long, in seconds, before "
          "Superset nags you again."),
        "sql":
        _("A SQL statement that defines whether the alert should get triggered or "
          "not. The query is expected to return either NULL or a number value."
          ),
        "validator_type":
        utils.markdown(
            "Determines when to trigger alert based off value from alert query. "
            "Alerts will be triggered with these validator types:"
            "<ul><li>Not Null - When the return value is Not NULL, Empty, or 0</li>"
            "<li>Operator - When `sql_return_value comparison_operator threshold`"
            " is True e.g. `50 <= 75`<br>Supports the comparison operators <, <=, "
            ">, >=, ==, and !=</li></ul>",
            True,
        ),
        "validator_config":
        utils.markdown(
            "JSON string containing values the validator will compare against. "
            "Each validator needs the following values:"
            "<ul><li>Not Null - Nothing. You can leave the config as it is.</li>"
            '<li>Operator<ul><li>`"op": "operator"` with an operator from ["<", '
            '"<=", ">", ">=", "==", "!="] e.g. `"op": ">="`</li>'
            '<li>`"threshold": threshold_value` e.g. `"threshold": 50`'
            '</li></ul>Example config:<br>{<br> "op":">=",<br>"threshold": 60<br>}'
            "</li></ul>",
            True,
        ),
    }

    edit_columns = add_columns
    related_views = [
        AlertObservationModelView,
        AlertLogModelView,
    ]

    @expose("/list/")
    @has_access
    def list(self) -> FlaskResponse:
        if not (is_feature_enabled("ENABLE_REACT_CRUD_VIEWS")
                and is_feature_enabled("ALERT_REPORTS")):
            return super().list()

        return super().render_app_template()

    @expose("/<pk>/log/", methods=["GET"])
    @has_access
    def log(self, pk: int) -> FlaskResponse:  # pylint: disable=unused-argument
        if not (is_feature_enabled("ENABLE_REACT_CRUD_VIEWS")
                and is_feature_enabled("ALERT_REPORTS")):
            return super().list()

        return super().render_app_template()

    def pre_add(self, item: "AlertModelView") -> None:
        item.recipients = get_email_address_str(item.recipients)

        if not croniter.is_valid(item.crontab):
            raise SupersetException("Invalid crontab format")

        item.validator_type = item.validator_type.lower()
        check_validator(item.validator_type, item.validator_config)

    def pre_update(self, item: "AlertModelView") -> None:
        item.validator_type = item.validator_type.lower()
        check_validator(item.validator_type, item.validator_config)

    def post_update(self, item: "AlertModelView") -> None:
        self.post_add(item)
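
The `validator_type` description above says the `operator` validator fires when `sql_return_value comparison_operator threshold` is true. A minimal sketch of that rule, not Superset's actual `check_validator` implementation; the function name is illustrative.

import json
import operator

# Comparison operators listed in the validator_type description above.
OPERATORS = {
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
    "==": operator.eq,
    "!=": operator.ne,
}

def operator_validator_triggers(sql_return_value, validator_config):
    """Return True when `sql_return_value comparison_operator threshold` holds."""
    config = json.loads(validator_config)
    return OPERATORS[config["op"]](sql_return_value, config["threshold"])

# Example config from the description: {"op": ">=", "threshold": 60}
assert operator_validator_triggers(75, '{"op": ">=", "threshold": 60}')
assert not operator_validator_triggers(50, '{"op": ">=", "threshold": 60}')
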
Code Example #19
0
File: views.py Project: dodopizza/superset
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):
    datamodel = SQLAInterface(models.SqlMetric)
    class_permission_name = "Dataset"
    method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
    include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET

    list_title = _("Metrics")
    show_title = _("Show Metric")
    add_title = _("Add Metric")
    edit_title = _("Edit Metric")

    list_columns = ["metric_name", "verbose_name", "metric_type"]
    edit_columns = [
        "metric_name",
        "description",
        "verbose_name",
        "metric_type",
        "expression",
        "table",
        "d3format",
        "extra",
        "warning_text",
    ]
    description_columns = {
        "expression":
        utils.markdown(
            "a valid, *aggregating* SQL expression as supported by the "
            "underlying backend. Example: `count(DISTINCT userid)`",
            True,
        ),
        "d3format":
        utils.markdown(
            "d3 formatting string as defined [here]"
            "(https://github.com/d3/d3-format/blob/master/README.md#format). "
            "For instance, this default formatting applies in the Table "
            "visualization and allow for different metric to use different "
            "formats",
            True,
        ),
        "extra":
        utils.markdown(
            "Extra data to specify metric metadata. Currently supports "
            'metadata of the format: `{ "certification": { "certified_by": '
            '"Data Platform Team", "details": "This metric is the source of truth." '
            '}, "warning_markdown": "This is a warning." }`. This should be modified '
            "from the edit datasource model in Explore to ensure correct formatting.",
            True,
        ),
    }
    add_columns = edit_columns
    page_size = 500
    label_columns = {
        "metric_name": _("Metric"),
        "description": _("Description"),
        "verbose_name": _("Verbose Name"),
        "metric_type": _("Type"),
        "expression": _("SQL Expression"),
        "table": _("Table"),
        "d3format": _("D3 Format"),
        "extra": _("Extra"),
        "warning_text": _("Warning Message"),
    }

    add_form_extra_fields = {
        "table":
        QuerySelectField(
            "Table",
            query_factory=lambda: db.session.query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields
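
The `extra` help text above spells out the expected metric metadata JSON. A short round-trip sketch using only the standard library; the values are taken from the help text itself.

import json

# Shape described in the `extra` help text above.
extra = json.dumps(
    {
        "certification": {
            "certified_by": "Data Platform Team",
            "details": "This metric is the source of truth.",
        },
        "warning_markdown": "This is a warning.",
    }
)

parsed = json.loads(extra)
assert parsed["certification"]["certified_by"] == "Data Platform Team"
assert "warning_markdown" in parsed
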
Code Example #20
0
File: core.py Project: neuroradiology/caravel
    def description_markeddown(self):
        return utils.markdown(self.description)
Code Example #21
0
File: views.py Project: MK0820/LaiZu
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    datamodel = SQLAInterface(models.DruidMetric)

    list_title = _("Metrics")
    show_title = _("Show Druid Metric")
    add_title = _("Add Druid Metric")
    edit_title = _("Edit Druid Metric")

    list_columns = ["metric_name", "verbose_name", "metric_type"]
    edit_columns = [
        "metric_name",
        "description",
        "verbose_name",
        "metric_type",
        "json",
        "datasource",
        "d3format",
        "is_restricted",
        "warning_text",
    ]
    add_columns = edit_columns
    page_size = 500
    validators_columns = {"json": [validate_json]}
    description_columns = {
        "metric_type": utils.markdown(
            "use `postagg` as the metric type if you are defining a "
            "[Druid Post Aggregation]"
            "(http://druid.io/docs/latest/querying/post-aggregations.html)",
            True,
        ),
        "is_restricted": _(
            "Whether access to this metric is restricted "
            "to certain roles. Only roles with the permission "
            "'metric access on XXX (the name of this metric)' "
            "are allowed to access this metric"
        ),
    }
    label_columns = {
        "metric_name": _("Metric"),
        "description": _("Description"),
        "verbose_name": _("Verbose Name"),
        "metric_type": _("Type"),
        "json": _("JSON"),
        "datasource": _("Druid Datasource"),
        "warning_text": _("Warning Message"),
        "is_restricted": _("Is Restricted"),
    }

    add_form_extra_fields = {
        "datasource": QuerySelectField(
            "Datasource",
            query_factory=lambda: db.session().query(models.DruidDatasource),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    edit_form_extra_fields = add_form_extra_fields

    def post_add(self, metric):
        if metric.is_restricted:
            security_manager.add_permission_view_menu(
                "metric_access", metric.get_perm()
            )

    def post_update(self, metric):
        if metric.is_restricted:
            security_manager.add_permission_view_menu(
                "metric_access", metric.get_perm()
            )
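
The `metric_type` description above says to use `postagg` when the `json` field defines a Druid post-aggregation. A hedged sketch of what such a definition could look like; the field layout follows Druid's post-aggregation docs, and the metric names are made up.

import json

# Illustrative arithmetic post-aggregation: average order value computed
# from two existing aggregations.
metric_type = "postagg"
metric_json = json.dumps(
    {
        "type": "arithmetic",
        "name": "avg_order_value",
        "fn": "/",
        "fields": [
            {"type": "fieldAccess", "fieldName": "total_revenue"},
            {"type": "fieldAccess", "fieldName": "order_count"},
        ],
    }
)

assert json.loads(metric_json)["type"] == "arithmetic"
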