def populate_owners(
    user: User,
    owner_ids: Optional[List[int]],
    default_to_user: bool,
) -> List[User]:
    """
    Resolve a list of owner ids into ``User`` objects for command use.

    :param user: the user issuing the request
    :param owner_ids: owner ids to resolve, possibly ``None`` or empty
    :param default_to_user: return ``[user]`` when no ids are supplied
    :raises OwnersNotFoundValidationError: if any id cannot be resolved
    :returns: the final list of owners
    """
    ids = owner_ids or []

    # No explicit owners requested: optionally fall back to the current user.
    if not ids and default_to_user:
        return [user]

    resolved: List[User] = []
    is_admin = any(role.name.lower() == "admin" for role in user.roles)
    # Keep non-admins in the owner list so they can't accidentally remove
    # themselves as owner.
    if not is_admin and user.id not in ids:
        resolved.append(user)

    for owner_id in ids:
        fetched = security_manager.get_user_by_id(owner_id)
        if not fetched:
            raise OwnersNotFoundValidationError()
        resolved.append(fetched)
    return resolved
def load_chart_data_into_cache(
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
) -> None:
    """
    Run a chart-data command and cache the result for async retrieval,
    recording the outcome (result URL or errors) on ``job_metadata``.

    :param job_metadata: async job record; its ``user_id`` key selects the
        user to impersonate while running the query
    :param form_data: chart form data used to build the query context
    :raises SoftTimeLimitExceeded: re-raised after logging on task timeout
    :raises Exception: any command failure is re-raised after the job is
        marked as errored
    """
    # pylint: disable=import-outside-toplevel
    from superset.charts.data.commands.get_data_command import ChartDataCommand

    # Impersonate the requesting user; fall back to the anonymous user when
    # the id cannot be resolved.
    user = (
        security_manager.get_user_by_id(job_metadata.get("user_id"))
        or security_manager.get_anonymous_user()
    )
    with override_user(user, force=False):
        try:
            set_form_data(form_data)
            query_context = _create_query_context_from_form(form_data)
            command = ChartDataCommand(query_context)
            # cache=True so the client can later fetch the payload by key.
            result = command.run(cache=True)
            cache_key = result["cache_key"]
            result_url = f"/api/v1/chart/data/{cache_key}"
            async_query_manager.update_job(
                job_metadata,
                async_query_manager.STATUS_DONE,
                result_url=result_url,
            )
        except SoftTimeLimitExceeded as ex:
            logger.warning("A timeout occurred while loading chart data, error: %s", ex)
            raise ex
        except Exception as ex:
            # TODO: QueryContext should support SIP-40 style errors
            error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
            errors = [{"message": error}]
            async_query_manager.update_job(
                job_metadata, async_query_manager.STATUS_ERROR, errors=errors
            )
            raise ex
def ensure_user_is_set(user_id: Optional[int]) -> None:
    """Ensure ``g.user`` is populated.

    Leaves an already-set ``g.user`` untouched; otherwise resolves
    *user_id* via the security manager, falling back to the anonymous
    user when *user_id* is ``None``.
    """
    if hasattr(g, "user") and g.user is not None:
        return
    # pylint: disable=assigning-non-slot
    if user_id is not None:
        g.user = security_manager.get_user_by_id(user_id)
    else:
        g.user = security_manager.get_anonymous_user()
def save(self) -> FlaskResponse:
    """
    Persist datasource changes posted in the ``data`` form field.

    Parses the JSON payload, updates the ORM datasource's database id,
    resolves owners (ownership-checked path when the
    ``OLD_API_CHECK_DATASET_OWNERSHIP`` config flag is on, legacy id
    lookup otherwise), rejects duplicate column names, and commits.

    :returns: the updated datasource as a JSON response; a 500 JSON error
        when the ``data`` field is missing, a 409 on duplicate columns
    :raises DatasetForbiddenError: when the ownership check fails
    """
    data = request.form.get("data")
    if not isinstance(data, str):
        return json_error_response(_("Request missing data field."), status=500)

    datasource_dict = json.loads(data)
    datasource_id = datasource_dict.get("id")
    datasource_type = datasource_dict.get("type")
    database_id = datasource_dict["database"].get("id")
    orm_datasource = ConnectorRegistry.get_datasource(
        datasource_type, datasource_id, db.session
    )
    orm_datasource.database_id = database_id

    if "owners" in datasource_dict and orm_datasource.owner_class is not None:
        # Check ownership
        if app.config["OLD_API_CHECK_DATASET_OWNERSHIP"]:
            # mimic the behavior of the new dataset command that
            # checks ownership and ensures that non-admins aren't locked out
            # of the object
            try:
                check_ownership(orm_datasource)
            except SupersetSecurityException as ex:
                raise DatasetForbiddenError() from ex

            user = security_manager.get_user_by_id(g.user.id)
            datasource_dict["owners"] = populate_owners(
                user, datasource_dict["owners"], default_to_user=False
            )
        else:
            # legacy behavior: trust the posted ids and load them directly
            datasource_dict["owners"] = (
                db.session.query(orm_datasource.owner_class)
                .filter(
                    orm_datasource.owner_class.id.in_(
                        datasource_dict["owners"] or []
                    )
                )
                .all()
            )

    # Reject payloads that name the same column more than once.
    duplicates = [
        name
        for name, count in Counter(
            [col["column_name"] for col in datasource_dict["columns"]]
        ).items()
        if count > 1
    ]
    if duplicates:
        return json_error_response(
            _(
                "Duplicate column name(s): %(columns)s",
                columns=",".join(duplicates),
            ),
            status=409,
        )
    orm_datasource.update_from_object(datasource_dict)
    data = orm_datasource.data
    db.session.commit()
    return self.json_response(data)
def save(self) -> FlaskResponse:
    """
    Persist datasource changes posted in the ``data`` form field.

    Parses the JSON payload, updates the ORM datasource's database id,
    enforces an ownership check before resolving owners, rejects duplicate
    column names, commits, and returns the sanitized datasource payload.

    :returns: the updated (sanitized) datasource as a JSON response; a 500
        JSON error when the ``data`` field is missing, a 409 on duplicate
        columns
    :raises DatasetForbiddenError: when the ownership check fails
    """
    data = request.form.get("data")
    if not isinstance(data, str):
        return json_error_response(_("Request missing data field."), status=500)

    datasource_dict = json.loads(data)
    datasource_id = datasource_dict.get("id")
    datasource_type = datasource_dict.get("type")
    database_id = datasource_dict["database"].get("id")
    orm_datasource = ConnectorRegistry.get_datasource(
        datasource_type, datasource_id, db.session
    )
    orm_datasource.database_id = database_id

    if "owners" in datasource_dict and orm_datasource.owner_class is not None:
        # Check ownership
        try:
            check_ownership(orm_datasource)
        except SupersetSecurityException as ex:
            raise DatasetForbiddenError() from ex

        user = security_manager.get_user_by_id(g.user.id)
        datasource_dict["owners"] = populate_owners(
            user, datasource_dict["owners"], default_to_user=False
        )

    # Reject payloads that name the same column more than once.
    duplicates = [
        name
        for name, count in Counter(
            [col["column_name"] for col in datasource_dict["columns"]]
        ).items()
        if count > 1
    ]
    if duplicates:
        return json_error_response(
            _(
                "Duplicate column name(s): %(columns)s",
                columns=",".join(duplicates),
            ),
            status=409,
        )
    orm_datasource.update_from_object(datasource_dict)
    data = orm_datasource.data
    db.session.commit()
    return self.json_response(sanitize_datasource_data(data))
def populate_owners(user: User, owners_ids: Optional[List[int]] = None) -> List[User]:
    """
    Helper function for commands; resolves owner ids to ``User`` objects.

    :param user: the current user
    :param owners_ids: a list of owner ids; when falsy the current user
        becomes the sole owner
    :raises OwnersNotFoundValidationError: if any owner id cannot be resolved
    :returns: the final list of owners
    """
    # No explicit owners given: default to the current user.
    if not owners_ids:
        return [user]

    owners: List[User] = []
    # Keep the current user in the owner list so they don't accidentally
    # remove their own access.
    if user.id not in owners_ids:
        owners.append(user)
    for owner_id in owners_ids:
        owner = security_manager.get_user_by_id(owner_id)
        if not owner:
            raise OwnersNotFoundValidationError()
        owners.append(owner)
    return owners
def test_ensure_user_is_set(self):
    """Exercise ``ensure_user_is_set`` for every initial state of
    ``g.user``: unset with an id, unset with ``None``, and already set
    (with and without an id). Restores the original ``g.user`` at the end."""
    # Save the pre-test state of g.user so it can be restored afterwards.
    g_user_is_set = hasattr(g, "user")
    original_g_user = g.user if g_user_is_set else None

    if g_user_is_set:
        del g.user

    # g.user unset + explicit id: resolves and assigns user 1.
    self.assertFalse(hasattr(g, "user"))
    ensure_user_is_set(1)
    self.assertTrue(hasattr(g, "user"))
    self.assertFalse(g.user.is_anonymous)
    self.assertEqual("1", g.user.get_id())

    del g.user
    # g.user unset + no id: assigns the anonymous user.
    self.assertFalse(hasattr(g, "user"))
    ensure_user_is_set(None)
    self.assertTrue(hasattr(g, "user"))
    self.assertTrue(g.user.is_anonymous)
    self.assertEqual(None, g.user.get_id())

    del g.user
    # g.user already set: neither an id nor None may overwrite it.
    g.user = security_manager.get_user_by_id(2)
    self.assertEqual("2", g.user.get_id())

    ensure_user_is_set(1)
    self.assertTrue(hasattr(g, "user"))
    self.assertFalse(g.user.is_anonymous)
    self.assertEqual("2", g.user.get_id())

    ensure_user_is_set(None)
    self.assertTrue(hasattr(g, "user"))
    self.assertFalse(g.user.is_anonymous)
    self.assertEqual("2", g.user.get_id())

    # Restore the pre-test state.
    if g_user_is_set:
        g.user = original_g_user
    else:
        del g.user
def ensure_user_is_set(user_id: Optional[int]) -> None:
    """Assign ``g.user`` from *user_id* when no user is currently set.

    Does nothing when ``g.user`` already holds a value, or when *user_id*
    is ``None``.
    """
    if user_id is None:
        return
    if hasattr(g, "user") and g.user is not None:
        return
    g.user = security_manager.get_user_by_id(user_id)
def ensure_user_is_set(user_id: Optional[int]) -> None:
    """Populate ``g.user`` if it is missing: by id when one is given,
    otherwise with the anonymous user. A pre-existing ``g.user`` is kept."""
    already_set = hasattr(g, "user") and g.user is not None
    if already_set:
        return
    if user_id is None:
        g.user = security_manager.get_anonymous_user()
    else:
        g.user = security_manager.get_user_by_id(user_id)
def load_explore_json_into_cache(  # pylint: disable=too-many-locals
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
    response_type: Optional[str] = None,
    force: bool = False,
) -> None:
    """
    Run an explore_json query, cache the request payload for async retrieval,
    and record the outcome (result URL or errors) on ``job_metadata``.

    :param job_metadata: async job record; its ``user_id`` key selects the
        user to impersonate while running the query
    :param form_data: explore form data driving the viz query
    :param response_type: response type echoed into the cached value
    :param force: passed through to ``get_viz`` to bypass caches
    :raises SoftTimeLimitExceeded: re-raised after logging on task timeout
    :raises Exception: any failure is re-raised after the job is marked
        as errored
    """
    cache_key_prefix = "ejr-"  # ejr: explore_json request

    # Impersonate the requesting user; fall back to the anonymous user when
    # the id cannot be resolved.
    user = (
        security_manager.get_user_by_id(job_metadata.get("user_id"))
        or security_manager.get_anonymous_user()
    )
    with override_user(user, force=False):
        try:
            set_form_data(form_data)
            datasource_id, datasource_type = get_datasource_info(None, None, form_data)

            # Perform a deep copy here so that below we can cache the original
            # value of the form_data object. This is necessary since the viz
            # objects modify the form_data object. If the modified version were
            # to be cached here, it will lead to a cache miss when clients
            # attempt to retrieve the value of the completed async query.
            original_form_data = copy.deepcopy(form_data)

            viz_obj = get_viz(
                datasource_type=cast(str, datasource_type),
                datasource_id=datasource_id,
                form_data=form_data,
                force=force,
            )
            # run query & cache results
            payload = viz_obj.get_payload()
            if viz_obj.has_error(payload):
                raise SupersetVizException(errors=payload["errors"])

            # Cache the original form_data value for async retrieval
            cache_value = {
                "form_data": original_form_data,
                "response_type": response_type,
            }
            cache_key = generate_cache_key(cache_value, cache_key_prefix)
            set_and_log_cache(cache_manager.cache, cache_key, cache_value)
            result_url = f"/superset/explore_json/data/{cache_key}"
            async_query_manager.update_job(
                job_metadata,
                async_query_manager.STATUS_DONE,
                result_url=result_url,
            )
        except SoftTimeLimitExceeded as ex:
            logger.warning("A timeout occurred while loading explore json, error: %s", ex)
            raise ex
        except Exception as ex:
            # Viz exceptions already carry structured errors; anything else is
            # flattened to its message string.
            if isinstance(ex, SupersetVizException):
                errors = ex.errors  # pylint: disable=no-member
            else:
                error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
                errors = [error]
            async_query_manager.update_job(
                job_metadata, async_query_manager.STATUS_ERROR, errors=errors
            )
            raise ex