class AlcoholicBeveragesView(ModelView):
    """CRUD view over the alcoholic-beverages model.

    Exposes the standard FAB list/show/add/edit/delete pages for
    ``AlcoholicBeveragesMD`` records.
    """

    datamodel = SQLAInterface(AlcoholicBeveragesMD)

    # Columns rendered on the list page.
    list_columns = ['id', 'title', 'content', 'Price']
class ChartRestApi(BaseSupersetModelRestApi):
    """REST API for charts (``Slice`` models).

    Provides CRUD endpoints, bulk delete, the ``/data`` query endpoint,
    datasource listing, and — when the ``THUMBNAILS`` feature flag is
    enabled — screenshot/thumbnail endpoints backed by Celery tasks.
    """

    datamodel = SQLAInterface(Slice)

    resource_name = "chart"
    allow_browser_login = True

    include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
        RouteMethod.EXPORT,
        RouteMethod.RELATED,
        "bulk_delete",  # not using RouteMethod since locally defined
        "data",
        "viz_types",
        "datasources",
    }
    class_permission_name = "SliceModelView"

    show_columns = [
        "slice_name",
        "description",
        "owners.id",
        "owners.username",
        "owners.first_name",
        "owners.last_name",
        "dashboards.id",
        "dashboards.dashboard_title",
        "viz_type",
        "params",
        "cache_timeout",
    ]
    show_select_columns = show_columns + ["table.id"]
    list_columns = [
        "id",
        "slice_name",
        "url",
        "description",
        "changed_by_name",
        "changed_by_url",
        "changed_by.first_name",
        "changed_by.last_name",
        "changed_on_utc",
        "changed_on_delta_humanized",
        "datasource_id",
        "datasource_type",
        "datasource_name_text",
        "datasource_url",
        "table.default_endpoint",
        "table.table_name",
        "viz_type",
        "params",
        "cache_timeout",
    ]
    list_select_columns = list_columns + ["changed_on", "changed_by_fk"]
    order_columns = [
        "slice_name",
        "viz_type",
        "datasource_name",
        "changed_by.first_name",
        "changed_on_delta_humanized",
    ]
    search_columns = (
        "slice_name",
        "description",
        "viz_type",
        "datasource_name",
        "datasource_id",
        "datasource_type",
        "owners",
    )
    base_order = ("changed_on", "desc")
    base_filters = [["id", ChartFilter, lambda: []]]
    search_filters = {"slice_name": [ChartNameOrDescriptionFilter]}

    # Will just affect _info endpoint
    edit_columns = ["slice_name"]
    add_columns = edit_columns

    add_model_schema = ChartPostSchema()
    edit_model_schema = ChartPutSchema()

    # Override the name set for this collection of endpoints
    openapi_spec_tag = "Charts"
    # Add extra schemas to the OpenAPI components schema section
    openapi_spec_component_schemas = CHART_SCHEMAS
    apispec_parameter_schemas = {
        "screenshot_query_schema": screenshot_query_schema,
        "get_delete_ids_schema": get_delete_ids_schema,
    }
    # Overrides GET methods OpenApi descriptions
    openapi_spec_methods = openapi_spec_methods_override

    order_rel_fields = {
        "slices": ("slice_name", "asc"),
        "owners": ("first_name", "asc"),
    }
    related_field_filters = {
        "owners": RelatedFieldFilter("first_name", FilterRelatedOwners)
    }
    allowed_rel_fields = {"owners"}

    def __init__(self) -> None:
        # Thumbnail/screenshot routes are only registered when the
        # THUMBNAILS feature flag is enabled.
        if is_feature_enabled("THUMBNAILS"):
            self.include_route_methods = self.include_route_methods | {
                "thumbnail",
                "screenshot",
                "cache_screenshot",
            }
        super().__init__()

    @expose("/", methods=["POST"])
    @protect()
    @safe
    @statsd_metrics
    def post(self) -> Response:
        """Creates a new Chart
        ---
        post:
          description: >-
            Create a new Chart.
          requestBody:
            description: Chart schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
          responses:
            201:
              description: Chart added
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            # This validates custom Schema with custom validations
            item = self.add_model_schema.load(request.json)
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = CreateChartCommand(g.user, item).run()
            return self.response(201, id=new_model.id, result=item)
        except ChartInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ChartCreateFailedError as ex:
            logger.error(
                "Error creating model %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/<pk>", methods=["PUT"])
    @protect()
    @safe
    @statsd_metrics
    def put(  # pylint: disable=too-many-return-statements, arguments-differ
        self, pk: int
    ) -> Response:
        """Changes a Chart
        ---
        put:
          description: >-
            Changes a Chart.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          requestBody:
            description: Chart schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
          responses:
            200:
              description: Chart changed
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            # This validates custom Schema with custom validations
            item = self.edit_model_schema.load(request.json)
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            changed_model = UpdateChartCommand(g.user, pk, item).run()
            return self.response(200, id=changed_model.id, result=item)
        except ChartNotFoundError:
            return self.response_404()
        except ChartForbiddenError:
            return self.response_403()
        except ChartInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ChartUpdateFailedError as ex:
            logger.error(
                "Error updating model %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/<pk>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    def delete(self, pk: int) -> Response:  # pylint: disable=arguments-differ
        """Deletes a Chart
        ---
        delete:
          description: >-
            Deletes a Chart.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          responses:
            200:
              description: Chart delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            DeleteChartCommand(g.user, pk).run()
            return self.response(200, message="OK")
        except ChartNotFoundError:
            return self.response_404()
        except ChartForbiddenError:
            return self.response_403()
        except ChartDeleteFailedError as ex:
            logger.error(
                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_delete_ids_schema)
    def bulk_delete(
        self, **kwargs: Any
    ) -> Response:  # pylint: disable=arguments-differ
        """Delete bulk Charts
        ---
        delete:
          description: >-
            Deletes multiple Charts in a bulk operation.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_delete_ids_schema'
          responses:
            200:
              description: Charts bulk delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        item_ids = kwargs["rison"]
        try:
            BulkDeleteChartCommand(g.user, item_ids).run()
            return self.response(
                200,
                message=ngettext(
                    "Deleted %(num)d chart", "Deleted %(num)d charts", num=len(item_ids)
                ),
            )
        except ChartNotFoundError:
            return self.response_404()
        except ChartForbiddenError:
            return self.response_403()
        except ChartBulkDeleteFailedError as ex:
            return self.response_422(message=str(ex))

    @expose("/data", methods=["POST"])
    @event_logger.log_this
    @protect()
    @safe
    @statsd_metrics
    def data(self) -> Response:  # pylint: disable=too-many-return-statements
        """
        Takes a query context constructed in the client and returns payload
        data response for the given query.
        ---
        post:
          description: >-
            Takes a query context constructed in the client and returns payload data
            response for the given query.
          requestBody:
            description: >-
              A query context consists of a datasource from which to fetch data
              and one or many query objects.
            required: true
            content:
              application/json:
                schema:
                  $ref: "#/components/schemas/ChartDataQueryContextSchema"
          responses:
            200:
              description: Query result
              content:
                application/json:
                  schema:
                    $ref: "#/components/schemas/ChartDataResponseSchema"
            400:
              $ref: '#/components/responses/400'
            500:
              $ref: '#/components/responses/500'
        """
        if request.is_json:
            json_body = request.json
        elif request.form.get("form_data"):
            # CSV export submits regular form data
            json_body = json.loads(request.form["form_data"])
        else:
            return self.response_400(message="Request is not JSON")
        try:
            query_context = ChartDataQueryContextSchema().load(json_body)
        except KeyError:
            return self.response_400(message="Request is incorrect")
        except ValidationError as error:
            return self.response_400(
                message=_("Request is incorrect: %(error)s", error=error.messages)
            )
        try:
            query_context.raise_for_access()
        except SupersetSecurityException:
            return self.response_401()
        payload = query_context.get_payload()
        for query in payload:
            if query.get("error"):
                return self.response_400(message=f"Error: {query['error']}")
        result_format = query_context.result_format
        if result_format == ChartDataResultFormat.CSV:
            # return the first result
            result = payload[0]["data"]
            return CsvResponse(
                result,
                status=200,
                headers=generate_download_headers("csv"),
                mimetype="application/csv",
            )
        if result_format == ChartDataResultFormat.JSON:
            response_data = simplejson.dumps(
                {"result": payload}, default=json_int_dttm_ser, ignore_nan=True
            )
            resp = make_response(response_data, 200)
            resp.headers["Content-Type"] = "application/json; charset=utf-8"
            return resp
        return self.response_400(message=f"Unsupported result_format: {result_format}")

    @expose("/<pk>/cache_screenshot/", methods=["GET"])
    @protect()
    @rison(screenshot_query_schema)
    @safe
    @statsd_metrics
    def cache_screenshot(self, pk: int, **kwargs: Dict[str, bool]) -> WerkzeugResponse:
        """
        ---
        get:
          description: Compute and cache a screenshot.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/screenshot_query_schema'
          responses:
            200:
              description: Chart async result
              content:
                application/json:
                  schema:
                    $ref: "#/components/schemas/ChartCacheScreenshotResponseSchema"
            302:
              description: Redirects to the current digest
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        rison_dict = kwargs["rison"]
        window_size = rison_dict.get("window_size") or (800, 600)
        # Don't shrink the image if thumb_size is not specified
        thumb_size = rison_dict.get("thumb_size") or window_size

        chart = self.datamodel.get(pk, self._base_filters)
        if not chart:
            return self.response_404()

        chart_url = get_url_path("Superset.slice", slice_id=chart.id, standalone="true")
        screenshot_obj = ChartScreenshot(chart_url, chart.digest)
        cache_key = screenshot_obj.cache_key(window_size, thumb_size)
        image_url = get_url_path(
            "ChartRestApi.screenshot", pk=chart.id, digest=cache_key
        )

        def trigger_celery() -> WerkzeugResponse:
            logger.info("Triggering screenshot ASYNC")
            # NOTE: named task_kwargs so it does not shadow the enclosing
            # method's **kwargs.
            task_kwargs = {
                "url": chart_url,
                "digest": chart.digest,
                "force": True,
                "window_size": window_size,
                "thumb_size": thumb_size,
            }
            cache_chart_thumbnail.delay(**task_kwargs)
            return self.response(
                202,
                cache_key=cache_key,
                chart_url=chart_url,
                image_url=image_url,
            )

        return trigger_celery()

    @expose("/<pk>/screenshot/<digest>/", methods=["GET"])
    @protect()
    @rison(screenshot_query_schema)
    @safe
    @statsd_metrics
    def screenshot(
        self, pk: int, digest: str, **kwargs: Dict[str, bool]
    ) -> WerkzeugResponse:
        """Get Chart screenshot
        ---
        get:
          description: Get a computed screenshot from cache.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          - in: path
            schema:
              type: string
            name: digest
          responses:
            200:
              description: Chart thumbnail image
              content:
                image/*:
                  schema:
                    type: string
                    format: binary
            302:
              description: Redirects to the current digest
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        # FIX: accept **kwargs — the @rison decorator injects kwargs["rison"],
        # which previously raised TypeError on every request.
        chart = self.datamodel.get(pk, self._base_filters)

        # Making sure the chart still exists
        if not chart:
            return self.response_404()

        # TODO make sure the user has access to the chart

        # fetch the chart screenshot using the current user and cache if set
        img = ChartScreenshot.get_from_cache_key(thumbnail_cache, digest)
        if img:
            return Response(
                FileWrapper(img), mimetype="image/png", direct_passthrough=True
            )
        # TODO: return an empty image
        return self.response_404()

    @expose("/<pk>/thumbnail/<digest>/", methods=["GET"])
    @protect()
    @rison(thumbnail_query_schema)
    @safe
    @statsd_metrics
    def thumbnail(
        self, pk: int, digest: str, **kwargs: Dict[str, bool]
    ) -> WerkzeugResponse:
        """Get Chart thumbnail
        ---
        get:
          description: Compute or get already computed chart thumbnail from cache.
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          - in: path
            schema:
              type: string
            name: digest
          responses:
            200:
              description: Chart thumbnail image
              content:
                image/*:
                  schema:
                    type: string
                    format: binary
            302:
              description: Redirects to the current digest
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        chart = self.datamodel.get(pk, self._base_filters)
        if not chart:
            return self.response_404()

        url = get_url_path("Superset.slice", slice_id=chart.id, standalone="true")
        if kwargs["rison"].get("force", False):
            logger.info(
                "Triggering thumbnail compute (chart id: %s) ASYNC", str(chart.id)
            )
            cache_chart_thumbnail.delay(url, chart.digest, force=True)
            return self.response(202, message="OK Async")

        # fetch the chart screenshot using the current user and cache if set
        screenshot = ChartScreenshot(url, chart.digest).get_from_cache(
            cache=thumbnail_cache
        )
        # If not screenshot then send request to compute thumb to celery
        if not screenshot:
            logger.info(
                "Triggering thumbnail compute (chart id: %s) ASYNC", str(chart.id)
            )
            cache_chart_thumbnail.delay(url, chart.digest, force=True)
            return self.response(202, message="OK Async")
        # If digests
        if chart.digest != digest:
            return redirect(
                url_for(
                    f"{self.__class__.__name__}.thumbnail", pk=pk, digest=chart.digest
                )
            )
        return Response(
            FileWrapper(screenshot), mimetype="image/png", direct_passthrough=True
        )

    @expose("/datasources", methods=["GET"])
    @protect()
    @safe
    def datasources(self) -> Response:
        """Get available datasources
        ---
        get:
          description: Get available datasources.
          responses:
            200:
              description: Query result
              content:
                application/json:
                  schema:
                    $ref: "#/components/schemas/ChartGetDatasourceResponseSchema"
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        datasources = ChartDAO.fetch_all_datasources()
        if not datasources:
            return self.response(200, count=0, result=[])

        result = [
            {
                "label": str(ds),
                "value": {"datasource_id": ds.id, "datasource_type": ds.type},
            }
            for ds in datasources
        ]
        return self.response(200, count=len(result), result=result)
class GroupMasterView(MasterDetailView):
    """Master/detail view: contact groups with their contacts shown inline."""

    datamodel = SQLAInterface(ContactGroup)
    # Detail views rendered beneath the selected group.
    related_views = [ContactGeneralView]
class TableModelView(  # pylint: disable=too-many-ancestors
    DatasourceModelView, DeleteMixin, YamlExportMixin
):
    """CRUD view for SQLA table datasources.

    Wraps ``models.SqlaTable`` with list/add/edit/show pages, YAML export,
    and a post-save redirect into the explore view.
    """

    datamodel = SQLAInterface(models.SqlaTable)
    class_permission_name = "Dataset"
    method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
    include_route_methods = RouteMethod.CRUD_SET

    list_title = _("Tables")
    show_title = _("Show Table")
    add_title = _("Import a table definition")
    edit_title = _("Edit Table")

    list_columns = ["link", "database_name", "changed_by_", "modified"]
    order_columns = ["modified"]
    add_columns = ["database", "schema", "table_name"]
    edit_columns = [
        "table_name",
        "sql",
        "filter_select_enabled",
        "fetch_values_predicate",
        "database",
        "schema",
        "description",
        "owners",
        "main_dttm_col",
        "default_endpoint",
        "offset",
        "cache_timeout",
        "is_sqllab_view",
        "template_params",
        "extra",
    ]
    base_filters = [["id", DatasourceFilter, lambda: []]]
    show_columns = edit_columns + ["perm", "slices"]
    related_views = [
        TableColumnInlineView,
        SqlMetricInlineView,
    ]
    base_order = ("changed_on", "desc")
    search_columns = ("database", "schema", "table_name", "owners", "is_sqllab_view")

    # Help text rendered beneath each form field.
    description_columns = {
        "slices": _(
            "The list of charts associated with this table. By "
            "altering this datasource, you may change how these associated "
            "charts behave. "
            "Also note that charts need to point to a datasource, so "
            "this form will fail at saving if removing charts from a "
            "datasource. If you want to change the datasource for a chart, "
            "overwrite the chart from the 'explore view'"
        ),
        "offset": _("Timezone offset (in hours) for this datasource"),
        "table_name": _("Name of the table that exists in the source database"),
        "schema": _(
            "Schema, as used only in some databases like Postgres, Redshift "
            "and DB2"
        ),
        "description": Markup(
            'Supports <a href="https://daringfireball.net/projects/markdown/">'
            "markdown</a>"
        ),
        "sql": _(
            "This fields acts a Superset view, meaning that Superset will "
            "run a query against this string as a subquery."
        ),
        "fetch_values_predicate": _(
            "Predicate applied when fetching distinct value to "
            "populate the filter control component. Supports "
            "jinja template syntax. Applies only when "
            "`Enable Filter Select` is on."
        ),
        "default_endpoint": _(
            "Redirects to this endpoint when clicking on the table "
            "from the table list"
        ),
        "filter_select_enabled": _(
            "Whether to populate the filter's dropdown in the explore "
            "view's filter section with a list of distinct values fetched "
            "from the backend on the fly"
        ),
        "is_sqllab_view": _(
            "Whether the table was generated by the 'Visualize' flow "
            "in SQL Lab"
        ),
        "template_params": _(
            "A set of parameters that become available in the query using "
            "Jinja templating syntax"
        ),
        "cache_timeout": _(
            "Duration (in seconds) of the caching timeout for this table. "
            "A timeout of 0 indicates that the cache never expires. "
            "Note this defaults to the database timeout if undefined."
        ),
        "extra": utils.markdown(
            "Extra data to specify table metadata. Currently supports "
            'metadata of the format: `{ "certification": { "certified_by": '
            '"Data Platform Team", "details": "This table is the source of truth." '
            '}, "warning_markdown": "This is a warning." }`.',
            True,
        ),
    }
    label_columns = {
        "slices": _("Associated Charts"),
        "link": _("Table"),
        "changed_by_": _("Changed By"),
        "database": _("Database"),
        "database_name": _("Database"),
        "changed_on_": _("Last Changed"),
        "filter_select_enabled": _("Enable Filter Select"),
        "schema": _("Schema"),
        "default_endpoint": _("Default Endpoint"),
        "offset": _("Offset"),
        "cache_timeout": _("Cache Timeout"),
        "table_name": _("Table Name"),
        "fetch_values_predicate": _("Fetch Values Predicate"),
        "owners": _("Owners"),
        "main_dttm_col": _("Main Datetime Column"),
        "description": _("Description"),
        "is_sqllab_view": _("SQL Lab View"),
        "template_params": _("Template parameters"),
        "extra": _("Extra"),
        "modified": _("Modified"),
    }
    edit_form_extra_fields = {
        # Database is shown read-only on edit: moving a table between
        # databases is not supported through this form.
        "database": QuerySelectField(
            "Database",
            query_factory=lambda: db.session.query(models.Database),
            widget=Select2Widget(extra_classes="readonly"),
        )
    }

    def post_add(  # pylint: disable=arguments-differ
        self,
        item: "TableModelView",
        flash_message: bool = True,
        fetch_metadata: bool = True,
    ) -> None:
        """Fetch column metadata, create permissions, and optionally flash
        the two-phase-configuration hint after a table is added."""
        if fetch_metadata:
            item.fetch_metadata()
        create_table_permissions(item)
        if flash_message:
            flash(
                _(
                    "The table was created. "
                    "As part of this two-phase configuration "
                    "process, you should now click the edit button by "
                    "the new table to configure it."
                ),
                "info",
            )

    def post_update(self, item: "TableModelView") -> None:
        """Reuse the add hook without flashing or re-fetching metadata."""
        self.post_add(item, flash_message=False, fetch_metadata=False)

    def _delete(self, pk: int) -> None:
        DeleteMixin._delete(self, pk)

    @expose("/edit/<pk>", methods=["GET", "POST"])
    @has_access
    def edit(self, pk: str) -> FlaskResponse:
        """Simple hack to redirect to explore view after saving"""
        resp = super().edit(pk)
        if isinstance(resp, str):
            return resp
        return redirect("/superset/explore/table/{}/".format(pk))

    @expose("/list/")
    @has_access
    def list(self) -> FlaskResponse:
        return super().render_app_template()
class SavedQueryRestApi(BaseSupersetModelRestApi):
    """REST API for SQL Lab saved queries: CRUD, bulk delete, and
    YAML-bundle export."""

    datamodel = SQLAInterface(SavedQuery)

    include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
        RouteMethod.EXPORT,
        RouteMethod.RELATED,
        RouteMethod.DISTINCT,
        "bulk_delete",  # not using RouteMethod since locally defined
    }
    class_permission_name = "SavedQueryView"
    resource_name = "saved_query"
    allow_browser_login = True

    base_filters = [["id", SavedQueryFilter, lambda: []]]

    show_columns = [
        "created_by.first_name",
        "created_by.id",
        "created_by.last_name",
        "database.database_name",
        "database.id",
        "description",
        "id",
        "label",
        "schema",
        "sql",
        "sql_tables",
    ]
    list_columns = [
        "changed_on_delta_humanized",
        "created_on",
        "created_by.first_name",
        "created_by.id",
        "created_by.last_name",
        "database.database_name",
        "database.id",
        "db_id",
        "description",
        "id",
        "label",
        "schema",
        "sql",
        "sql_tables",
        "rows",
        "last_run_delta_humanized",
    ]
    add_columns = ["db_id", "description", "label", "schema", "sql"]
    edit_columns = add_columns
    order_columns = [
        "schema",
        "label",
        "description",
        "sql",
        "rows",
        "created_by.first_name",
        "database.database_name",
        "created_on",
        "changed_on_delta_humanized",
        "last_run_delta_humanized",
    ]

    search_columns = ["id", "database", "label", "schema", "created_by"]
    search_filters = {
        "id": [SavedQueryFavoriteFilter],
        "label": [SavedQueryAllTextFilter],
    }

    apispec_parameter_schemas = {
        "get_delete_ids_schema": get_delete_ids_schema,
        "get_export_ids_schema": get_export_ids_schema,
    }
    openapi_spec_tag = "Queries"
    openapi_spec_methods = openapi_spec_methods_override

    related_field_filters = {
        "database": "database_name",
    }
    filter_rel_fields = {"database": [["id", DatabaseFilter, lambda: []]]}
    allowed_rel_fields = {"database"}
    allowed_distinct_fields = {"schema"}

    def pre_add(self, item: SavedQuery) -> None:
        """Stamp the current user as owner before insert."""
        item.user = g.user

    def pre_update(self, item: SavedQuery) -> None:
        """Keep ownership consistent on update."""
        self.pre_add(item)

    @expose("/", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_delete_ids_schema)
    def bulk_delete(self, **kwargs: Any) -> Response:
        """Delete bulk Saved Queries
        ---
        delete:
          description: >-
            Deletes multiple saved queries in a bulk operation.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_delete_ids_schema'
          responses:
            200:
              description: Saved queries bulk delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        item_ids = kwargs["rison"]
        try:
            BulkDeleteSavedQueryCommand(g.user, item_ids).run()
            return self.response(
                200,
                message=ngettext(
                    "Deleted %(num)d saved query",
                    "Deleted %(num)d saved queries",
                    num=len(item_ids),
                ),
            )
        except SavedQueryNotFoundError:
            return self.response_404()
        except SavedQueryBulkDeleteFailedError as ex:
            return self.response_422(message=str(ex))

    @expose("/export/", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_export_ids_schema)
    def export(self, **kwargs: Any) -> Response:
        """Export saved queries
        ---
        get:
          description: >-
            Exports multiple saved queries and downloads them as YAML files
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_export_ids_schema'
          responses:
            200:
              description: A zip file with saved query(ies) and database(s) as YAML
              content:
                application/zip:
                  schema:
                    type: string
                    format: binary
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        requested_ids = kwargs["rison"]

        # Bundle name is timestamped so repeated exports never collide.
        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
        root = f"saved_query_export_{timestamp}"
        filename = f"{root}.zip"

        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            try:
                for file_name, file_content in ExportSavedQueriesCommand(
                    requested_ids
                ).run():
                    with bundle.open(f"{root}/{file_name}", "w") as fp:
                        fp.write(file_content.encode())
            except SavedQueryNotFoundError:
                return self.response_404()
        buf.seek(0)

        return send_file(
            buf,
            mimetype="application/zip",
            as_attachment=True,
            attachment_filename=filename,
        )
class Notebook_ModelView_Base():
    """Shared configuration/behavior for the Notebook admin views.

    Manages the full lifecycle of a user notebook (Jupyter or Theia/VSCode):
    form construction, validation hooks, and deployment of the backing
    Kubernetes pod, service and Istio VirtualService.
    """

    datamodel = SQLAInterface(Notebook)
    label_title = 'notebook'
    check_redirect_list_url = '/notebook_modelview/list/'
    crd_name = 'notebook'
    help_url = conf.get('HELP_URL', {}).get(datamodel.obj.__tablename__, '') if datamodel else ''
    # NOTE(review): duplicate assignment — datamodel is already set above.
    datamodel = SQLAInterface(Notebook)
    conv = GeneralModelConverter(datamodel)

    # Default permission set for this view.
    base_permissions = [
        'can_add', 'can_delete', 'can_edit', 'can_list', 'can_show'
    ]
    base_order = ('changed_on', 'desc')
    # Row-level permission filter.
    base_filters = [["id", Notebook_Filter, lambda: []]]
    order_columns = ['id']
    search_columns = ['created_by']
    add_columns = [
        'project', 'name', 'describe', 'images', 'working_dir',
        'volume_mount', 'resource_memory', 'resource_cpu'
    ]
    list_columns = [
        'project', 'ide_type', 'name_url', 'describe', 'resource', 'status',
        'renew', 'reset'
    ]
    add_form_query_rel_fields = {
        "project": [["name", Project_Join_Filter, 'org']]
    }
    edit_form_query_rel_fields = add_form_query_rel_fields

    def set_column(self, notebook=None):
        """(Re)build the add/edit form fields.

        When ``notebook`` is given (edit mode) the identifying fields are
        rendered read-only; otherwise (add mode) defaults are derived from
        the current user.
        """
        self.add_form_extra_fields['name'] = StringField(
            _(self.datamodel.obj.lab('name')),
            default="%s-" % g.user.username + uuid.uuid4().hex[:4],
            description='英文名(字母、数字、-组成),最长50个字符',
            widget=MyBS3TextFieldWidget(readonly=True if notebook else False),
            # Must not start or end with '-'.
            validators=[
                DataRequired(),
                Regexp("^[a-z][a-z0-9\-]*[a-z0-9]$"),
                Length(1, 54)
            ]
        )
        self.add_form_extra_fields['describe'] = StringField(
            _(self.datamodel.obj.lab('describe')),
            default='%s的个人notebook' % g.user.username,
            description='中文描述',
            widget=BS3TextFieldWidget(),
            validators=[DataRequired()])
        self.add_form_extra_fields['project'] = QuerySelectField(
            _(self.datamodel.obj.lab('project')),
            default='',
            description=_(r'部署项目组'),
            query_factory=filter_join_org_project,
            widget=MySelect2Widget(
                extra_classes="readonly" if notebook else None, new_web=False),
        )
        self.add_form_extra_fields['images'] = SelectField(
            _(self.datamodel.obj.lab('images')),
            description=_(r'notebook基础环境镜像,如果显示不准确,请删除新建notebook'),
            widget=MySelect2Widget(
                extra_classes="readonly" if notebook else None, new_web=False),
            choices=conf.get('NOTEBOOK_IMAGES', []),
        )
        self.add_form_extra_fields['node_selector'] = StringField(
            _(self.datamodel.obj.lab('node_selector')),
            default='cpu=true,notebook=true',
            description="部署task所在的机器",
            widget=BS3TextFieldWidget())
        self.add_form_extra_fields['image_pull_policy'] = SelectField(
            _(self.datamodel.obj.lab('image_pull_policy')),
            description="镜像拉取策略(Always为总是拉取远程镜像,IfNotPresent为若本地存在则使用本地镜像)",
            widget=Select2Widget(),
            choices=[['Always', 'Always'], ['IfNotPresent', 'IfNotPresent']])
        self.add_form_extra_fields['volume_mount'] = StringField(
            _(self.datamodel.obj.lab('volume_mount')),
            # Edit mode defaults to the project's mounts.
            default=notebook.project.volume_mount if notebook else '',
            description=
            '外部挂载,格式:$pvc_name1(pvc):/$container_path1,$pvc_name2(pvc):/$container_path2',
            widget=BS3TextFieldWidget())
        self.add_form_extra_fields['working_dir'] = StringField(
            _(self.datamodel.obj.lab('working_dir')),
            default='/mnt',
            description="工作目录,如果为空,则使用Dockerfile中定义的workingdir",
            widget=BS3TextFieldWidget())
        self.add_form_extra_fields['resource_memory'] = StringField(
            _(self.datamodel.obj.lab('resource_memory')),
            default=Notebook.resource_memory.default.arg,
            description='内存的资源使用限制,示例:1G,20G',
            widget=BS3TextFieldWidget(),
            validators=[DataRequired()])
        self.add_form_extra_fields['resource_cpu'] = StringField(
            _(self.datamodel.obj.lab('resource_cpu')),
            default=Notebook.resource_cpu.default.arg,
            description='cpu的资源使用限制(单位:核),示例:2',
            widget=BS3TextFieldWidget(),
            validators=[DataRequired()])
        self.add_form_extra_fields['resource_gpu'] = StringField(
            _(self.datamodel.obj.lab('resource_gpu')),
            default='0',
            description=
            'gpu的资源使用限gpu的资源使用限制(单位卡),示例:1,2,训练任务每个容器独占整卡。申请具体的卡型号,可以类似 1(V100),目前支持T4/V100/A100/VGPU',
            widget=BS3TextFieldWidget(),
            # choices=conf.get('GPU_CHOICES', [[]]),
            validators=[DataRequired()])

        columns = [
            'name', 'describe', 'images', 'resource_memory', 'resource_cpu',
            'resource_gpu'
        ]
        # On add there is no mount config; the project's mounts are used.
        self.add_columns = ['project'] + columns
        # On edit, admins may add special mounts for unusual situations.
        if g.user.is_admin():
            columns.append('volume_mount')
        self.edit_columns = ['project'] + columns
        self.edit_form_extra_fields = self.add_form_extra_fields

    def pre_add(self, item):
        """Normalize and validate a notebook before insert (also reused by update)."""
        item.name = item.name.replace("_", "-")[0:54].lower()
        item.resource_memory = core.check_resource_memory(
            item.resource_memory,
            self.src_item_json.get('resource_memory', None))
        item.resource_cpu = core.check_resource_cpu(
            item.resource_cpu, self.src_item_json.get('resource_cpu', None))
        # IDE type is inferred from the image name.
        if 'theia' in item.images or 'vscode' in item.images:
            item.ide_type = 'theia'
        else:
            item.ide_type = 'jupyter'
        # New records inherit the project's volume mounts.
        if not item.id:
            item.volume_mount = item.project.volume_mount

    def pre_update(self, item):
        """Apply the same normalization as on add."""
        self.pre_add(item)

    def post_add(self, item):
        """After insert, deploy the notebook immediately (best-effort)."""
        flash('自动reset 一分钟后生效', 'warning')
        try:
            self.reset_notebook(item)
        except Exception as e:
            print(e)
            flash('reset后查看运行运行状态', 'warning')

    def post_update(self, item):
        """After update, restore auditing FKs so ownership is not reassigned."""
        flash('reset以后配置方可生效', 'warning')
        if self.src_item_json:
            item.changed_by_fk = int(self.src_item_json.get('changed_by_fk'))
        if self.src_item_json:
            item.created_by_fk = int(self.src_item_json.get('created_by_fk'))
        db.session.commit()

    def post_list(self, items):
        """Warn on every list view that notebooks are periodically reclaimed."""
        flash('注意:notebook会定时清理,如要运行长期任务请在pipeline中创建任务流进行', category='warning')
        return items

    # Forms are rebuilt per request so per-user defaults stay fresh.
    pre_update_get = set_column
    pre_add_get = set_column

    def reset_notebook(self, notebook):
        """Touch the record's timestamp and redeploy the notebook."""
        notebook.changed_on = datetime.datetime.now()
        db.session.commit()
        self.reset_theia(notebook)

    def reset_theia(self, notebook):
        """Deploy the pod, service and Istio VirtualService for a notebook.

        Tears down any pre-existing VirtualService first, then recreates all
        Kubernetes resources from the notebook's current configuration.
        Returns the created VirtualService CRD object.
        """
        from myapp.utils.py.py_k8s import K8s
        k8s_client = K8s(notebook.cluster.get('KUBECONFIG', ''))
        namespace = conf.get('NOTEBOOK_NAMESPACE')
        port = 3000
        command = None
        workingDir = None
        volume_mount = notebook.volume_mount
        # Always give the container a shared-memory mount.
        if '/dev/shm' not in volume_mount:
            volume_mount += ',10G(memory):/dev/shm'
        rewrite_url = '/'
        # Run global and per-user init scripts in the background before the IDE.
        pre_command = '(nohup sh /init.sh > /notebook_init.log 2>&1 &) ; (nohup sh /mnt/%s/init.sh > /init.log 2>&1 &) ; ' % notebook.created_by.username
        if notebook.ide_type == 'jupyter':
            rewrite_url = '/notebook/jupyter/%s/' % notebook.name
            workingDir = '/mnt/%s' % notebook.created_by.username
            command = [
                "sh", "-c", "%s jupyter lab --notebook-dir=/ --ip=0.0.0.0 "
                "--no-browser --allow-root --port=%s "
                "--NotebookApp.token='' --NotebookApp.password='' "
                "--NotebookApp.allow_origin='*' "
                "--NotebookApp.base_url=%s" % (pre_command, port, rewrite_url)
            ]
        elif notebook.ide_type == 'theia':
            command = [
                "bash", '-c',
                '%s node /home/theia/src-gen/backend/main.js /home/project --hostname=0.0.0.0 --port=%s'
                % (pre_command, port)
            ]
            workingDir = '/home/theia'
        print(command)
        print(workingDir)

        # Merge the cluster-wide pull secrets with the user's own registry secrets.
        image_secrets = conf.get('HUBSECRET', [])
        user_hubsecrets = db.session.query(Repository.hubsecret).filter(
            Repository.created_by_fk == notebook.created_by.id).all()
        if user_hubsecrets:
            for hubsecret in user_hubsecrets:
                if hubsecret[0] not in image_secrets:
                    image_secrets.append(hubsecret[0])

        k8s_client.create_debug_pod(
            namespace=namespace,
            name=notebook.name,
            labels={
                "app": notebook.name,
                'user': notebook.created_by.username
            },
            command=command,
            args=None,
            volume_mount=volume_mount,
            working_dir=workingDir,
            node_selector=notebook.get_node_selector(),
            resource_memory="0G~" + notebook.resource_memory,
            resource_cpu="0~" + notebook.resource_cpu,
            resource_gpu=notebook.resource_gpu,
            image_pull_policy=conf.get('IMAGE_PULL_POLICY', 'Always'),
            image_pull_secrets=image_secrets,
            image=notebook.images,
            hostAliases=conf.get('HOSTALIASES', ''),
            env={
                "NO_AUTH": "true",
                "USERNAME": notebook.created_by.username,
                "NODE_OPTIONS": "--max-old-space-size=%s" %
                str(int(notebook.resource_memory.replace("G", '')) * 1024)
            },
            privileged=None,
            accounts=conf.get('JUPYTER_ACCOUNTS'),
            username=notebook.created_by.username)
        k8s_client.create_service(namespace=namespace,
                                  name=notebook.name,
                                  username=notebook.created_by.username,
                                  ports=[
                                      port,
                                  ])

        # Recreate the VirtualService: delete any stale one first.
        crd_info = conf.get('CRD_INFO', {}).get('virtualservice', {})
        crd_name = "notebook-jupyter-%s" % notebook.name.replace('_', '-')
        vs_obj = k8s_client.get_one_crd(group=crd_info['group'],
                                        version=crd_info['version'],
                                        plural=crd_info['plural'],
                                        namespace=namespace,
                                        name=crd_name)
        if vs_obj:
            k8s_client.delete_crd(group=crd_info['group'],
                                  version=crd_info['version'],
                                  plural=crd_info['plural'],
                                  namespace=namespace,
                                  name=crd_name)
            time.sleep(1)
        host = notebook.project.cluster.get('JUPYTER_DOMAIN', request.host)
        if not host:
            host = request.host
        if ':' in host:
            # Strip any port captured from the request host.
            host = host[:host.rindex(':')]
        crd_json = {
            "apiVersion": "networking.istio.io/v1alpha3",
            "kind": "VirtualService",
            "metadata": {
                "name": crd_name,
                "namespace": namespace
            },
            "spec": {
                "gateways": ["kubeflow/kubeflow-gateway"],
                # Wildcard host when the domain is a bare IP.
                "hosts": ["*" if core.checkip(host) else host],
                "http": [{
                    "match": [{
                        "uri": {
                            "prefix":
                            "/notebook/%s/%s/" % (namespace, notebook.name)
                        }
                    }],
                    "rewrite": {
                        "uri": rewrite_url
                    },
                    "route": [{
                        "destination": {
                            "host":
                            "%s.%s.svc.cluster.local" %
                            (notebook.name, namespace),
                            "port": {
                                "number": port
                            }
                        }
                    }],
                    "timeout": "300s"
                }]
            }
        }
        crd = k8s_client.create_crd(group=crd_info['group'],
                                    version=crd_info['version'],
                                    plural=crd_info['plural'],
                                    namespace=namespace,
                                    body=crd_json)

        # Optionally expose the notebook on an external IP.
        SERVICE_EXTERNAL_IP = conf.get('SERVICE_EXTERNAL_IP', None)
        if not SERVICE_EXTERNAL_IP and notebook.project.expand:
            SERVICE_EXTERNAL_IP = json.loads(notebook.project.expand).get(
                'SERVICE_EXTERNAL_IP', SERVICE_EXTERNAL_IP)
            if type(SERVICE_EXTERNAL_IP) == str:
                SERVICE_EXTERNAL_IP = [SERVICE_EXTERNAL_IP]
        if SERVICE_EXTERNAL_IP:
            service_ports = [[10000 + 10 * notebook.id + index, port]
                             for index, port in enumerate([port])]
            service_external_name = (notebook.name +
                                     "-external").lower()[:60].strip('-')
            k8s_client.create_service(namespace=namespace,
                                      name=service_external_name,
                                      username=notebook.created_by.username,
                                      ports=service_ports,
                                      selector={
                                          "app": notebook.name,
                                          'user':
                                          notebook.created_by.username
                                      },
                                      externalIPs=SERVICE_EXTERNAL_IP)

        return crd

    @expose('/reset/<notebook_id>', methods=['GET', 'POST'])
    def reset(self, notebook_id):
        """Redeploy a notebook's k8s resources, then redirect back."""
        notebook = db.session.query(Notebook).filter_by(id=notebook_id).first()
        try:
            notebook_crd = self.reset_notebook(notebook)
            flash(
                '已重置,Running状态后可进入。注意:notebook会定时清理,如要运行长期任务请在pipeline中创建任务流进行。',
                'warning')
        except Exception as e:
            flash('重置失败,稍后重试。%s' % str(e), 'warning')

        self.update_redirect()
        return redirect(self.get_redirect())

    @expose('/renew/<notebook_id>', methods=['GET', 'POST'])
    def renew(self, notebook_id):
        """Refresh a notebook's timestamp so the periodic reaper skips it."""
        notebook = db.session.query(Notebook).filter_by(id=notebook_id).first()
        notebook.changed_on = datetime.datetime.now()
        db.session.commit()
        self.update_redirect()
        return redirect(self.get_redirect())

    def base_muldelete(self, items):
        """Best-effort bulk teardown of the k8s resources behind each item."""
        if not items:
            abort(404)
        for item in items:
            try:
                k8s_client = py_k8s.K8s(item.cluster.get('KUBECONFIG', ''))
                k8s_client.delete_pods(namespace=item.namespace,
                                       pod_name=item.name)
                k8s_client.delete_service(namespace=item.namespace,
                                          name=item.name)
                k8s_client.delete_service(
                    namespace=item.namespace,
                    name=(item.name + "-external").lower()[:60].strip('-'))
                crd_info = conf.get("CRD_INFO", {}).get('virtualservice', {})
                if crd_info:
                    k8s_client.delete_crd(group=crd_info['group'],
                                          version=crd_info['version'],
                                          plural=crd_info['plural'],
                                          namespace=item.namespace,
                                          name="notebook-jupyter-%s" %
                                          item.name.replace('_', '-'))
            except Exception as e:
                flash(str(e), "warning")

    def pre_delete(self, item):
        """Tear down the k8s resources before the DB row is removed."""
        self.base_muldelete([item])

    @event_logger.log_this
    @expose("/list/")
    @has_access
    def list(self):
        """List view; an empty created_by filter defaults to the current user."""
        args = request.args.to_dict()
        if '_flt_0_created_by' in args and args['_flt_0_created_by'] == '':
            print(request.url)
            print(request.path)
            return redirect(
                request.url.replace('_flt_0_created_by=',
                                    '_flt_0_created_by=%s' % g.user.id))

        widgets = self._list()
        res = self.render_template(self.list_template,
                                   title=self.list_title,
                                   widgets=widgets)
        return res

    @action("stop_all", __("Stop"), __("Stop all Really?"), "fa-trash", single=False)
    def stop_all(self, items):
        """Bulk action: tear down the selected notebooks' k8s resources."""
        self.base_muldelete(items)
        self.update_redirect()
        return redirect(self.get_redirect())
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView):
    """Inline CRUD view for SQL metrics attached to a dataset.

    Rendered compactly inside the dataset (table) editor; permissions are
    shared with the Dataset model.
    """

    datamodel = SQLAInterface(models.SqlMetric)
    # Permission checks are delegated to the Dataset permission set.
    class_permission_name = "Dataset"
    method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
    include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET

    list_title = _("Metrics")
    show_title = _("Show Metric")
    add_title = _("Add Metric")
    edit_title = _("Edit Metric")

    list_columns = ["metric_name", "verbose_name", "metric_type"]
    edit_columns = [
        "metric_name",
        "description",
        "verbose_name",
        "metric_type",
        "expression",
        "table",
        "d3format",
        "extra",
        "warning_text",
    ]
    # Help text shown under each form field (rendered as markdown).
    description_columns = {
        "expression": utils.markdown(
            "a valid, *aggregating* SQL expression as supported by the "
            "underlying backend. Example: `count(DISTINCT userid)`",
            True,
        ),
        "d3format": utils.markdown(
            "d3 formatting string as defined [here]"
            "(https://github.com/d3/d3-format/blob/master/README.md#format). "
            "For instance, this default formatting applies in the Table "
            "visualization and allow for different metric to use different "
            "formats",
            True,
        ),
        "extra": utils.markdown(
            "Extra data to specify metric metadata. Currently supports "
            'metadata of the format: `{ "certification": { "certified_by": '
            '"Data Platform Team", "details": "This metric is the source of truth." '
            '}, "warning_markdown": "This is a warning." }`. This should be modified '
            "from the edit datasource model in Explore to ensure correct formatting.",
            True,
        ),
    }
    add_columns = edit_columns
    page_size = 500
    label_columns = {
        "metric_name": _("Metric"),
        "description": _("Description"),
        "verbose_name": _("Verbose Name"),
        "metric_type": _("Type"),
        "expression": _("SQL Expression"),
        "table": _("Table"),
        "d3format": _("D3 Format"),
        "extra": _("Extra"),
        "warning_text": _("Warning Message"),
    }

    # The parent table is shown read-only: a metric cannot be moved between tables.
    add_form_extra_fields = {
        "table": QuerySelectField(
            "Table",
            query_factory=lambda: db.session.query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }
    edit_form_extra_fields = add_form_extra_fields
class DepartmentView(ModelView):
    """Admin CRUD view for Department records.

    Employees belonging to each department are displayed via the related
    EmployeeView.
    """

    related_views = [EmployeeView]
    datamodel = SQLAInterface(Department)
class BenefitView(ModelView):
    """Admin CRUD view for Benefit records.

    Only the ``name`` field is exposed on every screen (list, show, add, edit).
    """

    datamodel = SQLAInterface(Benefit)

    list_columns = ['name']
    show_columns = ['name']
    add_columns = ['name']
    edit_columns = ['name']
class InsightsCategoryView(ModelView):
    """Admin list view over News records for insight-category browsing.

    NOTE(review): despite the name, this view is backed by the ``News``
    model and lists ``salary``/``NewsCategory`` — presumably reusing the
    News table for insights; verify the intended model and column names.
    """

    datamodel = SQLAInterface(News)

    list_columns = ['salary', 'NewsCategory']
class FunctionView(ModelView):
    """Admin CRUD view for Function (job function) records.

    Employees holding each function are displayed via the related EmployeeView.
    """

    related_views = [EmployeeView]
    datamodel = SQLAInterface(Function)
class EmployeeHistoryView(ModelView):
    """Admin view listing an employee's department history over time."""

    datamodel = SQLAInterface(EmployeeHistory)

    list_columns = ['department', 'begin_date', 'end_date']
class CareerInsightsView(ModelView):
    """Admin list view over News records for career-insight browsing.

    NOTE(review): backed by the ``News`` model; the ``jobs_titleCat_salary``
    column name looks auto-generated — confirm it matches the model schema.
    """

    datamodel = SQLAInterface(News)

    list_columns = [
        'jobs_title',
        'salary',
        'content',
        'date',
        'jobs_titleCat_salary',
    ]
class DatasetRestApi(BaseSupersetModelRestApi):
    """REST API for datasets (SqlaTable): CRUD, export/import, refresh and
    related-object lookups."""

    datamodel = SQLAInterface(SqlaTable)
    base_filters = [["id", DatasourceFilter, lambda: []]]

    resource_name = "dataset"
    allow_browser_login = True
    # Permissions are shared with the legacy TableModelView.
    class_permission_name = "TableModelView"
    include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
        RouteMethod.EXPORT,
        RouteMethod.IMPORT,
        RouteMethod.RELATED,
        RouteMethod.DISTINCT,
        "bulk_delete",
        "refresh",
        "related_objects",
    }
    list_columns = [
        "id",
        "database.id",
        "database.database_name",
        "changed_by_name",
        "changed_by_url",
        "changed_by.first_name",
        "changed_by.username",
        "changed_on_utc",
        "changed_on_delta_humanized",
        "default_endpoint",
        "explore_url",
        "extra",
        "kind",
        "owners.id",
        "owners.username",
        "owners.first_name",
        "owners.last_name",
        "schema",
        "sql",
        "table_name",
    ]
    list_select_columns = list_columns + ["changed_on", "changed_by_fk"]
    order_columns = [
        "table_name",
        "schema",
        "changed_by.first_name",
        "changed_on_delta_humanized",
        "database.database_name",
    ]
    show_columns = [
        "id",
        "database.database_name",
        "database.id",
        "table_name",
        "sql",
        "filter_select_enabled",
        "fetch_values_predicate",
        "schema",
        "description",
        "main_dttm_col",
        "offset",
        "default_endpoint",
        "cache_timeout",
        "is_sqllab_view",
        "template_params",
        "owners.id",
        "owners.username",
        "owners.first_name",
        "owners.last_name",
        "columns",
        "metrics",
        "datasource_type",
        "url",
        "extra",
    ]
    add_model_schema = DatasetPostSchema()
    edit_model_schema = DatasetPutSchema()
    add_columns = ["database", "schema", "table_name", "owners"]
    edit_columns = [
        "table_name",
        "sql",
        "filter_select_enabled",
        "fetch_values_predicate",
        "schema",
        "description",
        "main_dttm_col",
        "offset",
        "default_endpoint",
        "cache_timeout",
        "is_sqllab_view",
        "template_params",
        "owners",
        "columns",
        "metrics",
        "extra",
    ]
    openapi_spec_tag = "Datasets"
    related_field_filters = {
        "owners": RelatedFieldFilter("first_name", FilterRelatedOwners),
        "database": "database_name",
    }
    search_filters = {"sql": [DatasetIsNullOrEmptyFilter]}
    filter_rel_fields = {"database": [["id", DatabaseFilter, lambda: []]]}
    allowed_rel_fields = {"database", "owners"}
    allowed_distinct_fields = {"schema"}
    apispec_parameter_schemas = {
        "get_export_ids_schema": get_export_ids_schema,
    }
    openapi_spec_component_schemas = (DatasetRelatedObjectsResponse,)

    @expose("/", methods=["POST"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(log_to_statsd=False)
    def post(self) -> Response:
        """Creates a new Dataset
        ---
        post:
          description: >-
            Create a new Dataset
          requestBody:
            description: Dataset schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
          responses:
            201:
              description: Dataset added
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            item = self.add_model_schema.load(request.json)
        # This validates custom Schema with custom validations
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = CreateDatasetCommand(g.user, item).run()
            return self.response(201, id=new_model.id, result=item)
        except DatasetInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except DatasetCreateFailedError as ex:
            logger.error(
                "Error creating model %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/<pk>", methods=["PUT"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(log_to_statsd=False)
    def put(self, pk: int) -> Response:
        """Changes a Dataset
        ---
        put:
          description: >-
            Changes a Dataset
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          - in: query
            schema:
              type: boolean
            name: override_columns
          requestBody:
            description: Dataset schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
          responses:
            200:
              description: Dataset changed
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        # override_columns is a query-string flag; defaults to False.
        override_columns = (
            bool(strtobool(request.args["override_columns"]))
            if "override_columns" in request.args
            else False
        )
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            item = self.edit_model_schema.load(request.json)
        # This validates custom Schema with custom validations
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            changed_model = UpdateDatasetCommand(
                g.user, pk, item, override_columns
            ).run()
            response = self.response(200, id=changed_model.id, result=item)
        except DatasetNotFoundError:
            response = self.response_404()
        except DatasetForbiddenError:
            response = self.response_403()
        except DatasetInvalidError as ex:
            response = self.response_422(message=ex.normalized_messages())
        except DatasetUpdateFailedError as ex:
            logger.error(
                "Error updating model %s: %s", self.__class__.__name__, str(ex)
            )
            response = self.response_422(message=str(ex))
        return response

    @expose("/<pk>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(log_to_statsd=False)
    def delete(self, pk: int) -> Response:
        """Deletes a Dataset
        ---
        delete:
          description: >-
            Deletes a Dataset
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          responses:
            200:
              description: Dataset delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            DeleteDatasetCommand(g.user, pk).run()
            return self.response(200, message="OK")
        except DatasetNotFoundError:
            return self.response_404()
        except DatasetForbiddenError:
            return self.response_403()
        except DatasetDeleteFailedError as ex:
            logger.error(
                "Error deleting model %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/export/", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_export_ids_schema)
    @event_logger.log_this_with_context(log_to_statsd=False)
    def export(self, **kwargs: Any) -> Response:
        """Export datasets
        ---
        get:
          description: >-
            Exports multiple datasets and downloads them as YAML files
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_export_ids_schema'
          responses:
            200:
              description: Dataset export
              content:
                text/plain:
                  schema:
                    type: string
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        requested_ids = kwargs["rison"]

        # New-style export: a zip bundle of YAML files per dataset/database.
        if is_feature_enabled("VERSIONED_EXPORT"):
            timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
            root = f"dataset_export_{timestamp}"
            filename = f"{root}.zip"

            buf = BytesIO()
            with ZipFile(buf, "w") as bundle:
                try:
                    for file_name, file_content in ExportDatasetsCommand(
                        requested_ids
                    ).run():
                        with bundle.open(f"{root}/{file_name}", "w") as fp:
                            fp.write(file_content.encode())
                except DatasetNotFoundError:
                    return self.response_404()
            buf.seek(0)

            return send_file(
                buf,
                mimetype="application/zip",
                as_attachment=True,
                attachment_filename=filename,
            )

        # Legacy export: a single YAML document; 404 unless every requested
        # id resolves to a dataset visible through the base filters.
        query = self.datamodel.session.query(SqlaTable).filter(
            SqlaTable.id.in_(requested_ids)
        )
        query = self._base_filters.apply_all(query)
        items = query.all()
        ids = [item.id for item in items]
        if len(ids) != len(requested_ids):
            return self.response_404()

        data = [t.export_to_dict() for t in items]
        return Response(
            yaml.safe_dump(data),
            headers=generate_download_headers("yaml"),
            mimetype="application/text",
        )

    @expose("/<pk>/refresh", methods=["PUT"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(log_to_statsd=False)
    def refresh(self, pk: int) -> Response:
        """Refresh a Dataset
        ---
        put:
          description: >-
            Refreshes and updates columns of a dataset
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
          responses:
            200:
              description: Dataset refresh
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            RefreshDatasetCommand(g.user, pk).run()
            return self.response(200, message="OK")
        except DatasetNotFoundError:
            return self.response_404()
        except DatasetForbiddenError:
            return self.response_403()
        except DatasetRefreshFailedError as ex:
            logger.error(
                "Error refreshing dataset %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/<pk>/related_objects", methods=["GET"])
    @protect()
    @safe
    @statsd_metrics
    @event_logger.log_this_with_context(log_to_statsd=False)
    def related_objects(self, pk: int) -> Response:
        """Get charts and dashboards count associated to a dataset
        ---
        get:
          description:
            Get charts and dashboards count associated to a dataset
          parameters:
          - in: path
            name: pk
            schema:
              type: integer
          responses:
            200:
              description: Query result
              content:
                application/json:
                  schema:
                    $ref: "#/components/schemas/DatasetRelatedObjectsResponse"
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        dataset = DatasetDAO.find_by_id(pk)
        if not dataset:
            return self.response_404()
        data = DatasetDAO.get_related_objects(pk)
        charts = [
            {
                "id": chart.id,
                "slice_name": chart.slice_name,
                "viz_type": chart.viz_type,
            }
            for chart in data["charts"]
        ]
        dashboards = [
            {
                "id": dashboard.id,
                "json_metadata": dashboard.json_metadata,
                "slug": dashboard.slug,
                "title": dashboard.dashboard_title,
            }
            for dashboard in data["dashboards"]
        ]
        return self.response(
            200,
            charts={"count": len(charts), "result": charts},
            dashboards={"count": len(dashboards), "result": dashboards},
        )

    @expose("/", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_delete_ids_schema)
    @event_logger.log_this_with_context(log_to_statsd=False)
    def bulk_delete(self, **kwargs: Any) -> Response:
        """Delete bulk Datasets
        ---
        delete:
          description: >-
            Deletes multiple Datasets in a bulk operation.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_delete_ids_schema'
          responses:
            200:
              description: Dataset bulk delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        item_ids = kwargs["rison"]
        try:
            BulkDeleteDatasetCommand(g.user, item_ids).run()
            return self.response(
                200,
                message=ngettext(
                    "Deleted %(num)d dataset",
                    "Deleted %(num)d datasets",
                    num=len(item_ids),
                ),
            )
        except DatasetNotFoundError:
            return self.response_404()
        except DatasetForbiddenError:
            return self.response_403()
        except DatasetBulkDeleteFailedError as ex:
            return self.response_422(message=str(ex))

    @expose("/import/", methods=["POST"])
    @protect()
    @safe
    @statsd_metrics
    def import_(self) -> Response:
        """Import dataset(s) with associated databases
        ---
        post:
          requestBody:
            content:
              application/zip:
                schema:
                  type: string
                  format: binary
          responses:
            200:
              description: Dataset import result
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        upload = request.files.get("formData")
        if not upload:
            return self.response_400()
        # Read the uploaded zip bundle into {relative_path: yaml_text}.
        with ZipFile(upload) as bundle:
            contents = {
                remove_root(file_name): bundle.read(file_name).decode()
                for file_name in bundle.namelist()
            }

        command = ImportDatasetsCommand(contents)
        try:
            command.run()
            return self.response(200, message="OK")
        except CommandInvalidError as exc:
            logger.warning("Import dataset failed")
            return self.response_422(message=exc.normalized_messages())
        except DatasetImportError as exc:
            logger.exception("Import dataset failed")
            return self.response_500(message=str(exc))
class Notebook_ModelView(Notebook_ModelView_Base, MyappModelView, DeleteMixin):
    """Web (HTML) admin view for notebooks; behavior comes from the base mixin."""

    datamodel = SQLAInterface(Notebook)
class MenuItemView(ModelView):
    """Admin CRUD view for menu items, including their parent category id."""

    datamodel = SQLAInterface(MenuItem)

    list_columns = ['id', 'name', 'link', 'menu_category_id']
class Notebook_ModelView_Api(Notebook_ModelView_Base, MyappModelRestApi):
    """REST API counterpart of the notebook view, mounted under its own route."""

    route_base = '/notebook_modelview/api'
    datamodel = SQLAInterface(Notebook)
class NewsView(ModelView):
    """Admin CRUD view for News records, including their category foreign key."""

    datamodel = SQLAInterface(News)

    list_columns = ['id', 'title', 'content', 'date', 'newsCat_id']
class QueryRestApi(BaseSupersetModelRestApi):
    """Read-only REST API for SQL Lab query history.

    Exposes GET/GET_LIST/RELATED only; rows are restricted per-user via
    QueryFilter.

    Fix: ``list_columns`` previously contained ``"rows"`` twice; the
    duplicate entry was redundant and has been removed.
    """

    datamodel = SQLAInterface(Query)

    resource_name = "query"
    allow_browser_login = True
    # Read-only resource: no create/update/delete routes are registered.
    include_route_methods = {RouteMethod.GET, RouteMethod.GET_LIST, RouteMethod.RELATED}

    class_permission_name = "QueryView"
    list_columns = [
        "changed_on",
        "database.database_name",
        "rows",
        "schema",
        "sql",
        "sql_tables",
        "status",
        "tab_name",
        "user.first_name",
        "user.id",
        "user.last_name",
        "user.username",
        "start_time",
        "end_time",
        "tmp_table_name",
        "tracking_url",
    ]
    show_columns = [
        "changed_on",
        "client_id",
        "database.id",
        "end_result_backend_time",
        "end_time",
        "error_message",
        "executed_sql",
        "limit",
        "progress",
        "results_key",
        "rows",
        "schema",
        "select_as_cta",
        "select_as_cta_used",
        "select_sql",
        "sql",
        "sql_editor_id",
        "start_running_time",
        "start_time",
        "status",
        "tab_name",
        "tmp_schema_name",
        "tmp_table_name",
        "tracking_url",
    ]
    # Restrict visible rows to the current user's queries.
    base_filters = [["id", QueryFilter, lambda: []]]
    base_order = ("changed_on", "desc")

    openapi_spec_tag = "Queries"
    openapi_spec_methods = openapi_spec_methods_override

    order_columns = [
        "changed_on",
        "database.database_name",
        "rows",
        "schema",
        "sql",
        "tab_name",
        "user.first_name",
    ]

    related_field_filters = {
        "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners),
    }
    search_columns = ["changed_on", "database", "sql", "status", "user"]

    filter_rel_fields = {"database": [["id", DatabaseFilter, lambda: []]]}
    allowed_rel_fields = {"database", "user"}
class NewsCategoryView(ModelView):
    """Admin CRUD view for news categories."""

    datamodel = SQLAInterface(NewsCategory)

    list_columns = ['id', 'name']
class RowLevelSecurityFiltersModelView(SupersetModelView, DeleteMixin):
    """Admin CRUD view over row-level-security (RLS) filters."""

    datamodel = SQLAInterface(models.RowLevelSecurityFilter)
    list_widget = cast(SupersetListWidget, RowLevelSecurityListWidget)

    list_title = _("Row level security filter")
    show_title = _("Show Row level security filter")
    add_title = _("Add Row level security filter")
    edit_title = _("Edit Row level security filter")

    list_columns = [
        "filter_type",
        "tables",
        "roles",
        "group_key",
        "clause",
        "creator",
        "modified",
    ]
    order_columns = ["filter_type", "group_key", "clause", "modified"]
    edit_columns = ["filter_type", "tables", "roles", "group_key", "clause"]
    # Show/search/add forms all reuse the edit column set.
    show_columns = edit_columns
    search_columns = ("filter_type", "tables", "roles", "group_key", "clause")
    add_columns = edit_columns
    base_order = ("changed_on", "desc")

    # Long-form help text rendered next to each form field.
    description_columns = {
        "filter_type": _(
            "Regular filters add where clauses to queries if a user belongs to a "
            "role referenced in the filter. Base filters apply filters to all queries "
            "except the roles defined in the filter, and can be used to define what "
            "users can see if no RLS filters within a filter group apply to them."
        ),
        "tables": _("These are the tables this filter will be applied to."),
        "roles": _(
            "For regular filters, these are the roles this filter will be "
            "applied to. For base filters, these are the roles that the "
            "filter DOES NOT apply to, e.g. Admin if admin should see all "
            "data."
        ),
        "group_key": _(
            "Filters with the same group key will be ORed together within the group, "
            "while different filter groups will be ANDed together. Undefined group "
            "keys are treated as unique groups, i.e. are not grouped together. "
            "For example, if a table has three filters, of which two are for "
            "departments Finance and Marketing (group key = 'department'), and one "
            "refers to the region Europe (group key = 'region'), the filter clause "
            "would apply the filter (department = 'Finance' OR department = "
            "'Marketing') AND (region = 'Europe')."
        ),
        "clause": _(
            "This is the condition that will be added to the WHERE clause. "
            "For example, to only return rows for a particular client, "
            "you might define a regular filter with the clause `client_id = 9`. To "
            "display no rows unless a user belongs to a RLS filter role, a base "
            "filter can be created with the clause `1 = 0` (always false)."
        ),
    }

    label_columns = {
        "tables": _("Tables"),
        "roles": _("Roles"),
        "clause": _("Clause"),
        "creator": _("Creator"),
        "modified": _("Modified"),
    }

    # Optionally constrain the related-field queries via deployment config.
    if app.config["RLS_FORM_QUERY_REL_FIELDS"]:
        add_form_query_rel_fields = app.config["RLS_FORM_QUERY_REL_FIELDS"]
        edit_form_query_rel_fields = add_form_query_rel_fields
class Abouts_UsView(ModelView):
    """List/CRUD view for the About-Us section.

    NOTE(review): ``datamodel`` is bound to ``News`` even though this view is
    named Abouts_UsView and lists an ``aboutusCat_scope`` column — this looks
    like a copy-paste; confirm the intended model.
    """

    datamodel = SQLAInterface(News)
    list_columns = ['scope', 'title', 'content', 'date', 'aboutusCat_scope']
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView):
    """Inline CRUD view for dataset (SqlaTable) columns."""

    datamodel = SQLAInterface(models.TableColumn)
    # TODO TODO, review need for this on related_views
    class_permission_name = "Dataset"
    method_permission_name = MODEL_VIEW_RW_METHOD_PERMISSION_MAP
    include_route_methods = RouteMethod.RELATED_VIEW_SET | RouteMethod.API_SET

    list_title = _("Columns")
    show_title = _("Show Column")
    add_title = _("Add Column")
    edit_title = _("Edit Column")

    can_delete = False
    list_widget = ListWidgetWithCheckboxes
    edit_columns = [
        "column_name",
        "verbose_name",
        "description",
        "type",
        "advanced_data_type",
        "groupby",
        "filterable",
        "table",
        "expression",
        "is_dttm",
        "python_date_format",
        "extra",
    ]
    add_columns = edit_columns
    list_columns = [
        "column_name",
        "verbose_name",
        "type",
        "advanced_data_type",
        "groupby",
        "filterable",
        "is_dttm",
    ]
    page_size = 500
    description_columns = {
        "is_dttm": _(
            "Whether to make this column available as a "
            "[Time Granularity] option, column has to be DATETIME or "
            "DATETIME-like"
        ),
        "filterable": _(
            "Whether this column is exposed in the `Filters` section "
            "of the explore view."
        ),
        "type": _(
            "The data type that was inferred by the database. "
            "It may be necessary to input a type manually for "
            "expression-defined columns in some cases. In most case "
            "users should not need to alter this."
        ),
        "expression": utils.markdown(
            "a valid, *non-aggregating* SQL expression as supported by the "
            "underlying backend. Example: `substr(name, 1, 1)`",
            True,
        ),
        "python_date_format": utils.markdown(
            Markup(
                "The pattern of timestamp format. For strings use "
                '<a href="https://docs.python.org/2/library/'
                'datetime.html#strftime-strptime-behavior">'
                "python datetime string pattern</a> expression which needs to "
                'adhere to the <a href="https://en.wikipedia.org/wiki/ISO_8601">'
                "ISO 8601</a> standard to ensure that the lexicographical ordering "
                "coincides with the chronological ordering. If the timestamp "
                "format does not adhere to the ISO 8601 standard you will need to "
                "define an expression and type for transforming the string into a "
                "date or timestamp. Note currently time zones are not supported. "
                "If time is stored in epoch format, put `epoch_s` or `epoch_ms`."
                "If no pattern is specified we fall back to using the optional "
                "defaults on a per database/column name level via the extra parameter."
                ""
            ),
            True,
        ),
        # Fix: the JSON example previously read `{ "certification":
        # "certified_by": ... } }` — the inner object's opening brace was
        # missing, making the documented format invalid JSON.
        "extra": utils.markdown(
            "Extra data to specify column metadata. Currently supports "
            'certification data of the format: `{ "certification": { "certified_by": '
            '"Taylor Swift", "details": "This column is the source of truth." '
            "} }`. This should be modified from the edit datasource model in "
            "Explore to ensure correct formatting.",
            True,
        ),
    }
    label_columns = {
        "column_name": _("Column"),
        "verbose_name": _("Verbose Name"),
        "description": _("Description"),
        "groupby": _("Groupable"),
        "filterable": _("Filterable"),
        "table": _("Table"),
        "expression": _("Expression"),
        "is_dttm": _("Is temporal"),
        "python_date_format": _("Datetime Format"),
        "type": _("Type"),
        "advanced_data_type": _("Business Data Type"),
    }
    validators_columns = {
        "python_date_format": [
            # Restrict viable values to epoch_s, epoch_ms, or a strftime format
            # which adhere's to the ISO 8601 format (without time zone).
            Regexp(
                re.compile(
                    r"""
                    ^(
                        epoch_s|epoch_ms|
                        (?P<date>%Y(-%m(-%d)?)?)([\sT](?P<time>%H(:%M(:%S(\.%f)?)?)?))?
                    )$
                    """,
                    re.VERBOSE,
                ),
                message=_("Invalid date/timestamp format"),
            )
        ]
    }

    add_form_extra_fields = {
        "table": QuerySelectField(
            "Table",
            query_factory=lambda: db.session.query(models.SqlaTable),
            allow_blank=True,
            widget=Select2Widget(extra_classes="readonly"),
        )
    }
    edit_form_extra_fields = add_form_extra_fields
class AboutJobsdbView(ModelView):
    """List/CRUD view for the About-Jobsdb section.

    NOTE(review): ``datamodel`` is bound to ``NewsCategory`` although the view
    is named AboutJobsdbView and lists a ``scope`` column — verify this is the
    intended model.
    """

    datamodel = SQLAInterface(NewsCategory)
    list_columns = ['scope', 'name']
def find_by_ids(model_ids: List[int]) -> List[Dashboard]:
    """Return the dashboards with the given ids that pass DashboardFilter.

    Ids the current user is not allowed to see (per DashboardFilter) are
    silently dropped from the result.
    """
    dashboards = db.session.query(Dashboard).filter(Dashboard.id.in_(model_ids))
    interface = SQLAInterface(Dashboard, db.session)
    visible = DashboardFilter("id", interface).apply(dashboards, None)
    return visible.all()
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin, YamlExportMixin):  # noqa
    """CRUD view for Druid datasources, with column/metric inline views."""

    datamodel = SQLAInterface(models.DruidDatasource)

    list_title = _('List Druid Datasource')
    show_title = _('Show Druid Datasource')
    add_title = _('Add Druid Datasource')
    edit_title = _('Edit Druid Datasource')

    list_columns = [
        'datasource_link', 'cluster', 'changed_by_', 'modified']
    order_columns = ['datasource_link', 'modified']
    related_views = [DruidColumnInlineView, DruidMetricInlineView]
    edit_columns = [
        'datasource_name', 'cluster', 'description', 'owner',
        'is_hidden',
        'filter_select_enabled', 'fetch_values_from',
        'default_endpoint', 'offset', 'cache_timeout']
    search_columns = (
        'datasource_name', 'cluster', 'description', 'owner',
    )
    add_columns = edit_columns
    show_columns = add_columns + ['perm', 'slices']
    page_size = 500
    base_order = ('datasource_name', 'asc')
    description_columns = {
        'slices': _(
            'The list of charts associated with this table. By '
            'altering this datasource, you may change how these associated '
            'charts behave. '
            'Also note that charts need to point to a datasource, so '
            'this form will fail at saving if removing charts from a '
            'datasource. If you want to change the datasource for a chart, '
            "overwrite the chart from the 'explore view'"),
        'offset': _('Timezone offset (in hours) for this datasource'),
        'description': Markup(
            'Supports <a href="'
            'https://daringfireball.net/projects/markdown/">markdown</a>'),
        'fetch_values_from': _(
            'Time expression to use as a predicate when retrieving '
            'distinct values to populate the filter component. '
            'Only applies when `Enable Filter Select` is on. If '
            'you enter `7 days ago`, the distinct list of values in '
            'the filter will be populated based on the distinct value over '
            'the past week'),
        'filter_select_enabled': _(
            "Whether to populate the filter's dropdown in the explore "
            "view's filter section with a list of distinct values fetched "
            'from the backend on the fly'),
        'default_endpoint': _(
            'Redirects to this endpoint when clicking on the datasource '
            'from the datasource list'),
        'cache_timeout': _(
            'Duration (in seconds) of the caching timeout for this datasource. '
            'A timeout of 0 indicates that the cache never expires. '
            'Note this defaults to the cluster timeout if undefined.'),
    }
    base_filters = [['id', DatasourceFilter, lambda: []]]
    label_columns = {
        'slices': _('Associated Charts'),
        'datasource_link': _('Data Source'),
        'cluster': _('Cluster'),
        'description': _('Description'),
        'owner': _('Owner'),
        'is_hidden': _('Is Hidden'),
        'filter_select_enabled': _('Enable Filter Select'),
        'default_endpoint': _('Default Endpoint'),
        'offset': _('Time Offset'),
        'cache_timeout': _('Cache Timeout'),
        'datasource_name': _('Datasource Name'),
        'fetch_values_from': _('Fetch Values From'),
        'changed_by_': _('Changed By'),
        'modified': _('Modified'),
    }

    def pre_add(self, datasource):
        """Reject the add if a datasource with the same name already exists
        on the same cluster."""
        with db.session.no_autoflush:
            query = (
                db.session.query(models.DruidDatasource)
                .filter(
                    models.DruidDatasource.datasource_name
                    == datasource.datasource_name,
                    # BUG FIX: this previously compared the `cluster_name`
                    # column to `datasource.cluster.id` (a name against a
                    # numeric id), so the duplicate check could never match.
                    # Compare name-to-name instead.
                    # NOTE(review): assumes the related cluster object exposes
                    # `cluster_name` — confirm against the DruidCluster model.
                    models.DruidDatasource.cluster_name
                    == datasource.cluster.cluster_name,
                )
            )
            if db.session.query(query.exists()).scalar():
                raise Exception(get_datasource_exist_error_msg(
                    datasource.full_name))

    def post_add(self, datasource):
        """Refresh metrics and grant access permissions after creation."""
        datasource.refresh_metrics()
        security_manager.merge_perm(
            'datasource_access',
            datasource.get_perm(),
        )
        if datasource.schema:
            security_manager.merge_perm(
                'schema_access',
                datasource.schema_perm,
            )

    def post_update(self, datasource):
        # Updates need the same metric/permission sync as adds.
        self.post_add(datasource)

    def _delete(self, pk):
        DeleteMixin._delete(self, pk)
class ContactGeneralView(AdminLteModelView):
    """Contact CRUD view; show/add/edit forms share one fieldset layout."""

    datamodel = SQLAInterface(Contact)
    label_columns = {"contact_group": "Contacts Group"}
    list_columns = ["name", "personal_phone", "contact_group"]
    base_order = ("name", "asc")

    # The three forms use an identical layout, so define it once and alias
    # (previously the same literal was written out three times).
    show_fieldsets = [
        ("Summary", {"fields": ["name", "gender", "contact_group"]}),
        (
            "Personal Info",
            {
                "fields": [
                    "address",
                    "birthday",
                    "personal_phone",
                    "personal_celphone",
                ],
                "expanded": False,
            },
        ),
    ]
    add_fieldsets = show_fieldsets
    edit_fieldsets = show_fieldsets
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView):  # noqa
    """Inline CRUD view for Druid datasource columns."""

    datamodel = SQLAInterface(models.DruidColumn)

    list_title = _('List Druid Column')
    show_title = _('Show Druid Column')
    add_title = _('Add Druid Column')
    edit_title = _('Edit Druid Column')

    list_widget = ListWidgetWithCheckboxes

    edit_columns = [
        'column_name', 'verbose_name', 'description', 'dimension_spec_json',
        'datasource',
        'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
    add_columns = edit_columns
    list_columns = [
        'column_name', 'verbose_name', 'type', 'groupby', 'filterable',
        'count_distinct', 'sum', 'min', 'max']
    can_delete = False
    page_size = 500
    label_columns = {
        'column_name': _('Column'),
        'type': _('Type'),
        'datasource': _('Datasource'),
        'groupby': _('Groupable'),
        'filterable': _('Filterable'),
        'count_distinct': _('Count Distinct'),
        'sum': _('Sum'),
        'min': _('Min'),
        'max': _('Max'),
        'verbose_name': _('Verbose Name'),
        'description': _('Description'),
    }
    description_columns = {
        'filterable': _(
            'Whether this column is exposed in the `Filters` section '
            'of the explore view.'),
        'dimension_spec_json': utils.markdown(
            'this field can be used to specify '
            'a `dimensionSpec` as documented [here]'
            '(http://druid.io/docs/latest/querying/dimensionspecs.html). '
            'Make sure to input valid JSON and that the '
            '`outputName` matches the `column_name` defined '
            'above.',
            True),
    }

    def pre_update(self, col):
        """Validate ``dimension_spec_json`` before the column is saved.

        Raises ValueError if the value is not a JSON object, lacks
        ``outputName``/``dimension``, or its ``outputName`` differs from the
        column name. Empty/None specs are accepted unchanged.
        """
        # If a dimension spec JSON is given, ensure that it is
        # valid JSON and that `outputName` is specified
        if not col.dimension_spec_json:
            return
        try:
            dimension_spec = json.loads(col.dimension_spec_json)
        except ValueError as e:
            # Chain the decode error (PEP 3134) so the original parse
            # position is preserved in the traceback.
            raise ValueError('Invalid Dimension Spec JSON: ' + str(e)) from e
        if not isinstance(dimension_spec, dict):
            raise ValueError('Dimension Spec must be a JSON object')
        if 'outputName' not in dimension_spec:
            raise ValueError('Dimension Spec does not contain `outputName`')
        if 'dimension' not in dimension_spec:
            raise ValueError('Dimension Spec is missing `dimension`')
        # `outputName` should be the same as the `column_name`
        if dimension_spec['outputName'] != col.column_name:
            raise ValueError(
                '`outputName` [{}] unequal to `column_name` [{}]'
                .format(dimension_spec['outputName'], col.column_name))

    def post_update(self, col):
        # Derived metrics (sum/min/max/...) depend on column flags.
        col.refresh_metrics()

    def post_add(self, col):
        self.post_update(col)
class GroupGeneralView(ModelView):
    """Contact-group view that embeds the per-group contacts inline."""

    datamodel = SQLAInterface(ContactGroup)
    related_views = [ContactGeneralView]
class HealthDrinkView(ModelView):
    """List view for health-drink products."""

    datamodel = SQLAInterface(HealthDrinkMD)
    list_columns = ['id', 'title', 'content', 'Price']