def save(self) -> FlaskResponse:
    """Persist edits to a datasource submitted by the legacy datasource editor.

    Reads a JSON-serialized datasource from the ``data`` form field, updates
    the matching ORM datasource (database, owners, columns/metrics) and
    returns the refreshed datasource payload as JSON.

    :returns: a Flask JSON response with the updated datasource data
    :raises DatasetForbiddenError: when ownership checks are enabled and the
        current user does not own the datasource
    """
    data = request.form.get("data")
    if not isinstance(data, str):
        # A missing form field is a client error, not a server fault,
        # so answer 400 rather than 500.
        return json_error_response(_("Request missing data field."), status=400)

    datasource_dict = json.loads(data)
    datasource_id = datasource_dict.get("id")
    datasource_type = datasource_dict.get("type")
    database_id = datasource_dict["database"].get("id")
    orm_datasource = ConnectorRegistry.get_datasource(
        datasource_type, datasource_id, db.session
    )
    orm_datasource.database_id = database_id

    if "owners" in datasource_dict and orm_datasource.owner_class is not None:
        # Check ownership (feature-flagged for the legacy API).
        if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
            try:
                check_ownership(orm_datasource)
            except SupersetSecurityException as ex:
                # Chain the security exception so the cause is preserved.
                raise DatasetForbiddenError() from ex
        # Resolve the submitted owner ids into ORM user objects.
        datasource_dict["owners"] = (
            db.session.query(orm_datasource.owner_class)
            .filter(
                orm_datasource.owner_class.id.in_(datasource_dict["owners"])
            )
            .all()
        )

    # Reject duplicate column names up front instead of failing later
    # on a database constraint.
    duplicates = [
        name
        for name, count in Counter(
            [col["column_name"] for col in datasource_dict["columns"]]
        ).items()
        if count > 1
    ]
    if duplicates:
        return json_error_response(
            _(
                "Duplicate column name(s): %(columns)s",
                columns=",".join(duplicates),
            ),
            status=409,
        )

    orm_datasource.update_from_object(datasource_dict)
    data = orm_datasource.data
    db.session.commit()
    return self.json_response(data)
def validate(self) -> None:
    """Validate a bulk chart delete: charts exist, have no attached
    alerts/reports, and are owned by the current user.

    :raises ChartNotFoundError: when any requested id has no chart
    :raises ChartBulkDeleteFailedReportsExistError: when report schedules
        still reference one of the charts
    :raises ChartForbiddenError: when the user does not own a chart
    """
    # Validate/populate model exists
    self._models = ChartDAO.find_by_ids(self._model_ids)
    if not self._models or len(self._models) != len(self._model_ids):
        raise ChartNotFoundError()
    # Check there are no associated ReportSchedules
    reports = ReportScheduleDAO.find_by_chart_ids(self._model_ids)
    if reports:
        report_names = [report.name for report in reports]
        raise ChartBulkDeleteFailedReportsExistError(
            # Pass the names as a gettext parameter: interpolating with
            # "%" *before* the _() lookup would make the translated
            # message template never match the catalog entry.
            _(
                "There are associated alerts or reports: %(report_names)s",
                report_names=",".join(report_names),
            )
        )
    # Check ownership
    for model in self._models:
        try:
            check_ownership(model)
        except SupersetSecurityException as ex:
            raise ChartForbiddenError() from ex
def save(self) -> FlaskResponse:
    """Update a datasource from the JSON payload in the ``data`` form field.

    Resolves the target ORM datasource, enforces ownership, normalizes the
    owners list, rejects duplicate column names, then commits the update and
    returns the sanitized datasource payload.
    """
    raw_payload = request.form.get("data")
    if not isinstance(raw_payload, str):
        return json_error_response(_("Request missing data field."), status=500)

    payload = json.loads(raw_payload)
    target = ConnectorRegistry.get_datasource(
        payload.get("type"), payload.get("id"), db.session
    )
    target.database_id = payload["database"].get("id")

    if "owners" in payload and target.owner_class is not None:
        # Only owners may edit the datasource.
        try:
            check_ownership(target)
        except SupersetSecurityException as ex:
            raise DatasetForbiddenError() from ex
        current_user = security_manager.get_user_by_id(g.user.id)
        payload["owners"] = populate_owners(
            current_user, payload["owners"], default_to_user=False
        )

    # Detect column names submitted more than once.
    name_counts = Counter([col["column_name"] for col in payload["columns"]])
    duplicates = [name for name, count in name_counts.items() if count > 1]
    if duplicates:
        return json_error_response(
            _(
                "Duplicate column name(s): %(columns)s",
                columns=",".join(duplicates),
            ),
            status=409,
        )

    target.update_from_object(payload)
    result = target.data
    db.session.commit()
    return self.json_response(sanitize_datasource_data(result))
def validate(self) -> None:
    """Validate a dataset update: existence, ownership, name uniqueness,
    immutable database, owners, columns, and metrics.

    :raises DatasetNotFoundError: when the dataset id does not exist
    :raises DatasetForbiddenError: when the user does not own the dataset
    :raises DatasetInvalidError: aggregating all field-level validation errors
    """
    exceptions: List[ValidationError] = []
    owner_ids: Optional[List[int]] = self._properties.get("owners")
    # Validate/populate model exists
    self._model = DatasetDAO.find_by_id(self._model_id)
    if not self._model:
        raise DatasetNotFoundError()
    # Check ownership
    try:
        check_ownership(self._model)
    except SupersetSecurityException as ex:
        # Keep the original security error as the cause.
        raise DatasetForbiddenError() from ex

    database_id = self._properties.get("database", None)
    table_name = self._properties.get("table_name", None)
    # Validate uniqueness
    if not DatasetDAO.validate_update_uniqueness(
        self._model.database_id, self._model_id, table_name
    ):
        exceptions.append(DatasetExistsValidationError(table_name))
    # Validate/Populate database not allowed to change.
    # BUG FIX: compare against the model's database_id, not the model
    # object itself — the original comparison was always True whenever a
    # database id was supplied.
    if database_id and database_id != self._model.database_id:
        exceptions.append(DatabaseChangeValidationError())
    # Validate/Populate owner
    try:
        owners = populate_owners(self._actor, owner_ids)
        self._properties["owners"] = owners
    except ValidationError as ex:
        exceptions.append(ex)
    # Validate columns
    columns = self._properties.get("columns")
    if columns:
        self._validate_columns(columns, exceptions)
    # Validate metrics
    metrics = self._properties.get("metrics")
    if metrics:
        self._validate_metrics(metrics, exceptions)
    if exceptions:
        exception = DatasetInvalidError()
        exception.add_list(exceptions)
        raise exception
def save(self) -> FlaskResponse:
    """Update a datasource from the ``data`` form field, running an eager
    health check when the datasource supports one, and return its payload.
    """
    raw_payload = request.form.get("data")
    if not isinstance(raw_payload, str):
        return json_error_response("Request missing data field.", status=500)

    payload = json.loads(raw_payload)
    target = ConnectorRegistry.get_datasource(
        payload.get("type"), payload.get("id"), db.session
    )
    target.database_id = payload["database"].get("id")

    if "owners" in payload and target.owner_class is not None:
        # Only owners may edit; answer 403 without raising.
        try:
            check_ownership(target)
        except SupersetSecurityException:
            return json_error_response(
                f"{DatasetForbiddenError.message}", DatasetForbiddenError.status
            )
        # Resolve submitted owner ids into ORM user objects.
        payload["owners"] = (
            db.session.query(target.owner_class)
            .filter(target.owner_class.id.in_(payload["owners"]))
            .all()
        )

    # Reject column names that appear more than once.
    name_counts = Counter([col["column_name"] for col in payload["columns"]])
    duplicates = [name for name, count in name_counts.items() if count > 1]
    if duplicates:
        return json_error_response(
            f"Duplicate column name(s): {','.join(duplicates)}", status=409
        )

    target.update_from_object(payload)
    if hasattr(target, "health_check"):
        # Refresh the health status now; the commit below persists it.
        target.health_check(force=True, commit=False)
    result = target.data
    db.session.commit()
    return self.json_response(result)
def pre_delete(self, item: "SliceModelView") -> None:
    """Refuse deletion unless the current user owns the slice."""
    check_ownership(item)
def pre_update(self, item: "SliceModelView") -> None:
    """Validate the slice's params JSON and enforce ownership before saving."""
    utils.validate_json(item.params)
    check_ownership(item)
def pre_delete(self, item):
    """Refuse deletion unless the current user owns the item."""
    check_ownership(item)
def pre_update(self, item):
    """Validate the item's params JSON and enforce ownership before saving."""
    utils.validate_json(item.params)
    check_ownership(item)
def pre_update(self, item: "DashboardModelView") -> None:
    """Enforce ownership, then apply the same checks as on insert."""
    check_ownership(item)
    self.pre_add(item)
def validate(self) -> None:
    """Validate a report schedule update: existence, name uniqueness,
    alert database, chart/dashboard relations, ownership, and owners list.

    :raises ReportScheduleNotFoundError: when the schedule id does not exist
    :raises ReportScheduleForbiddenError: when the user lacks ownership
    :raises ReportScheduleInvalidError: aggregating field-level errors
    """
    exceptions: List[ValidationError] = []
    owner_ids: Optional[List[int]] = self._properties.get("owners")
    report_type = self._properties.get("type", ReportScheduleType.ALERT)
    name = self._properties.get("name", "")
    self._model = ReportScheduleDAO.find_by_id(self._model_id)
    # Does the report exist?
    if not self._model:
        raise ReportScheduleNotFoundError()

    # Change the state to not triggered when the user deactivates
    # a report that is currently in a working state. This prevents
    # an alert/report from being kept in a working state if activated back.
    if (
        self._model.last_state == ReportState.WORKING
        and "active" in self._properties
        and not self._properties["active"]
    ):
        self._properties["last_state"] = ReportState.NOOP

    # Fall back to the stored type when none was submitted.
    if not report_type:
        report_type = self._model.type
    # Validate name/type uniqueness (excluding this schedule itself).
    if not ReportScheduleDAO.validate_update_uniqueness(
        name, report_type, report_schedule_id=self._model_id
    ):
        exceptions.append(ReportScheduleNameUniquenessValidationError())
    if report_type == ReportScheduleType.ALERT:
        database_id = self._properties.get("database")
        # If database_id was sent let's validate it exists
        if database_id:
            database = DatabaseDAO.find_by_id(database_id)
            if not database:
                exceptions.append(DatabaseNotFoundValidationError())
            self._properties["database"] = database
    # Validate chart or dashboard relations
    self.validate_chart_dashboard(exceptions, update=True)
    if "validator_config_json" in self._properties:
        self._properties["validator_config_json"] = json.dumps(
            self._properties["validator_config_json"]
        )
    # Check ownership
    try:
        check_ownership(self._model)
    except SupersetSecurityException as ex:
        # Chain the security exception so the cause is preserved.
        raise ReportScheduleForbiddenError() from ex
    # Validate/Populate owner; default to the current owners when omitted.
    if owner_ids is None:
        owner_ids = [owner.id for owner in self._model.owners]
    try:
        owners = populate_owners(self._actor, owner_ids)
        self._properties["owners"] = owners
    except ValidationError as ex:
        exceptions.append(ex)
    if exceptions:
        exception = ReportScheduleInvalidError()
        exception.add_list(exceptions)
        raise exception
def bulk_delete(self, **kwargs):  # pylint: disable=arguments-differ
    """Delete bulk Dashboards
    ---
    delete:
      parameters:
      - in: query
        name: q
        content:
          application/json:
            schema:
              type: array
              items:
                type: integer
      responses:
        200:
          description: Dashboard bulk delete
          content:
            application/json:
              schema:
                type: object
                properties:
                  message:
                    type: string
        401:
          $ref: '#/components/responses/401'
        403:
          $ref: '#/components/responses/403'
        404:
          $ref: '#/components/responses/404'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    item_ids = kwargs["rison"]
    query = self.datamodel.session.query(Dashboard).filter(
        Dashboard.id.in_(item_ids)
    )
    items = self._base_filters.apply_all(query).all()
    if not items:
        return self.response_404()
    # Check user ownership over the items
    for item in items:
        try:
            check_ownership(item)
        except SupersetSecurityException:
            # Lazy %-style args so formatting only happens when emitted.
            logger.warning(
                "Dashboard %s was not deleted, "
                "because the user (%s) does not own it",
                item,
                g.user,
            )
            return self.response(403, message=_("No dashboards deleted"))
        except SQLAlchemyError as ex:
            logger.error("Error checking dashboard ownership %s", ex)
            return self.response_422(message=str(ex))
    # Bulk delete, first detach related data (slices, owners).
    for item in items:
        try:
            item.slices = []
            item.owners = []
            self.datamodel.session.merge(item)
        except SQLAlchemyError as ex:
            logger.error(
                "Error bulk deleting related data on dashboards %s", ex
            )
            self.datamodel.session.rollback()
            return self.response_422(message=str(ex))
    # Bulk delete itself
    try:
        self.datamodel.session.query(Dashboard).filter(
            Dashboard.id.in_(item_ids)
        ).delete(synchronize_session="fetch")
    except SQLAlchemyError as ex:
        logger.error("Error bulk deleting dashboards %s", ex)
        self.datamodel.session.rollback()
        return self.response_422(message=str(ex))
    self.datamodel.session.commit()
    return self.response(
        200,
        message=ngettext(
            # Plain (non-f) strings: ngettext needs the literal template
            # so translation extraction and %(num)d substitution work.
            "Deleted %(num)d dashboard",
            "Deleted %(num)d dashboards",
            num=len(items),
        ),
    )
def pre_update(self, item: "TableModelView") -> None:
    """Refuse the update unless the current user owns the table."""
    check_ownership(item)
def pre_delete(self, item: "models.SqlMetric") -> None:
    """Refuse metric deletion unless the user owns the parent table."""
    check_ownership(item.table)
def pre_update(self, item: "TableModelView") -> None:
    """Warn about deprecation, then optionally enforce table ownership."""
    logger.warning(
        "This endpoint is deprecated and will be removed in version 2.0.0"
    )
    # Ownership enforcement is feature-flagged for the legacy API.
    if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
        check_ownership(item)
def pre_delete(self, item: "models.SqlMetric") -> None:
    """Warn about deprecation, then optionally enforce ownership of the
    metric's parent table."""
    logger.warning(
        "This endpoint is deprecated and will be removed in version 2.0.0"
    )
    # Ownership enforcement is feature-flagged for the legacy API.
    if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
        check_ownership(item.table)