def load_explore_json_into_cache(  # pylint: disable=too-many-locals
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
    response_type: Optional[str] = None,
    force: bool = False,
) -> None:
    cache_key_prefix = "ejr-"  # ejr: explore_json request
    try:
        ensure_user_is_set(job_metadata.get("user_id"))
        datasource_id, datasource_type = get_datasource_info(None, None, form_data)

        # Perform a deep copy here so that below we can cache the original
        # value of the form_data object. This is necessary since the viz
        # objects modify the form_data object. If the modified version were
        # to be cached here, it would lead to a cache miss when clients
        # attempt to retrieve the value of the completed async query.
        original_form_data = copy.deepcopy(form_data)

        viz_obj = get_viz(
            datasource_type=cast(str, datasource_type),
            datasource_id=datasource_id,
            form_data=form_data,
            force=force,
        )
        # run query & cache results
        payload = viz_obj.get_payload()
        if viz_obj.has_error(payload):
            raise SupersetVizException(errors=payload["errors"])

        # Cache the original form_data value for async retrieval
        cache_value = {
            "form_data": original_form_data,
            "response_type": response_type,
        }
        cache_key = generate_cache_key(cache_value, cache_key_prefix)
        set_and_log_cache(cache_manager.cache, cache_key, cache_value)

        result_url = f"/superset/explore_json/data/{cache_key}"
        async_query_manager.update_job(
            job_metadata,
            async_query_manager.STATUS_DONE,
            result_url=result_url,
        )
    except SoftTimeLimitExceeded as ex:
        logger.warning("A timeout occurred while loading explore json, error: %s", ex)
        raise ex
    except Exception as exc:
        if isinstance(exc, SupersetVizException):
            errors = exc.errors  # pylint: disable=no-member
        else:
            error = (
                exc.message  # type: ignore # pylint: disable=no-member
                if hasattr(exc, "message")
                else str(exc)
            )
            errors = [error]

        async_query_manager.update_job(
            job_metadata, async_query_manager.STATUS_ERROR, errors=errors
        )
        raise exc
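# NOTE: the deep copy above is load-bearing. get_viz() mutates form_data in
# place, and a cache key derived from the mutated dict would no longer match
# one derived from the payload the client originally submitted. A minimal,
# self-contained sketch of that failure mode follows; make_key is a
# hypothetical stand-in for a deterministic key helper, not Superset's
# generate_cache_key:
def _demo_form_data_cache_miss() -> None:
    import copy
    import hashlib
    import json

    def make_key(value: Dict[str, Any]) -> str:
        # Deterministic key: hash of the sorted JSON dump.
        return hashlib.md5(json.dumps(value, sort_keys=True).encode()).hexdigest()

    form_data = {"viz_type": "table", "metrics": ["count"]}
    original = copy.deepcopy(form_data)

    # Simulate what the viz object does: mutate form_data while building the query.
    form_data["row_limit"] = 10000

    # Only a key from the pristine copy matches what the client can recompute.
    assert make_key(original) == make_key({"viz_type": "table", "metrics": ["count"]})
    assert make_key(form_data) != make_key(original)  # mutated dict -> cache miss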
def load_explore_json_into_cache(
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
    response_type: Optional[str] = None,
    force: bool = False,
) -> None:
    with app.app_context():  # type: ignore
        cache_key_prefix = "ejr-"  # ejr: explore_json request
        try:
            ensure_user_is_set(job_metadata.get("user_id"))
            datasource_id, datasource_type = get_datasource_info(None, None, form_data)

            viz_obj = get_viz(
                datasource_type=cast(str, datasource_type),
                datasource_id=datasource_id,
                form_data=form_data,
                force=force,
            )
            # run query & cache results
            payload = viz_obj.get_payload()
            if viz_obj.has_error(payload):
                raise SupersetVizException(errors=payload["errors"])

            # cache form_data for async retrieval
            cache_value = {"form_data": form_data, "response_type": response_type}
            cache_key = generate_cache_key(cache_value, cache_key_prefix)
            set_and_log_cache(cache_manager.cache, cache_key, cache_value)

            result_url = f"/superset/explore_json/data/{cache_key}"
            async_query_manager.update_job(
                job_metadata,
                async_query_manager.STATUS_DONE,
                result_url=result_url,
            )
        except Exception as exc:
            if isinstance(exc, SupersetVizException):
                errors = exc.errors  # pylint: disable=no-member
            else:
                error = (
                    exc.message  # type: ignore # pylint: disable=no-member
                    if hasattr(exc, "message")
                    else str(exc)
                )
                errors = [error]

            async_query_manager.update_job(
                job_metadata, async_query_manager.STATUS_ERROR, errors=errors
            )
            raise exc
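# The app.app_context() wrapper in this variant matters because the function
# runs in a worker process outside any request: app.config, the scoped db
# session, and other context-bound globals are only usable once an
# application context has been pushed. A minimal Flask sketch of the same
# pattern (the demo_app below is a stand-in, not Superset's app):
def _demo_app_context() -> None:
    from flask import Flask, current_app

    demo_app = Flask(__name__)
    demo_app.config["MY_SETTING"] = "value"

    # Outside a request, current_app is unbound unless a context is pushed.
    with demo_app.app_context():
        assert current_app.config["MY_SETTING"] == "value"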
def run(self) -> Optional[Dict[str, Any]]:
    initial_form_data = {}

    if self._permalink_key is not None:
        command = GetExplorePermalinkCommand(self._permalink_key)
        permalink_value = command.run()
        if not permalink_value:
            raise ExplorePermalinkGetFailedError()
        state = permalink_value["state"]
        initial_form_data = state["formData"]
        url_params = state.get("urlParams")
        if url_params:
            initial_form_data["url_params"] = dict(url_params)
    elif self._form_data_key:
        parameters = FormDataCommandParameters(key=self._form_data_key)
        value = GetFormDataCommand(parameters).run()
        initial_form_data = json.loads(value) if value else {}

    message = None

    if not initial_form_data:
        if self._slice_id:
            initial_form_data["slice_id"] = self._slice_id
            if self._form_data_key:
                message = _(
                    "Form data not found in cache, reverting to chart metadata."
                )
        elif self._dataset_id:
            initial_form_data["datasource"] = f"{self._dataset_id}__{self._dataset_type}"
            if self._form_data_key:
                message = _(
                    "Form data not found in cache, reverting to dataset metadata."
                )

    form_data, slc = get_form_data(
        use_slice_data=True, initial_form_data=initial_form_data
    )

    try:
        self._dataset_id, self._dataset_type = get_datasource_info(
            self._dataset_id, self._dataset_type, form_data
        )
    except SupersetException:
        self._dataset_id = None
        # fall back to the table type for an unknown datasource
        self._dataset_type = SqlaTable.type

    dataset: Optional[BaseDatasource] = None
    if self._dataset_id is not None:
        try:
            dataset = DatasourceDAO.get_datasource(
                db.session, cast(str, self._dataset_type), self._dataset_id
            )
        except DatasetNotFoundError:
            pass

    dataset_name = dataset.name if dataset else _("[Missing Dataset]")

    if dataset:
        if app.config["ENABLE_ACCESS_REQUEST"] and (
            not security_manager.can_access_datasource(dataset)
        ):
            message = __(security_manager.get_datasource_access_error_msg(dataset))
            raise DatasetAccessDeniedError(
                message=message,
                dataset_type=self._dataset_type,
                dataset_id=self._dataset_id,
            )

    viz_type = form_data.get("viz_type")
    if not viz_type and dataset and dataset.default_endpoint:
        raise WrongEndpointError(redirect=dataset.default_endpoint)

    form_data["datasource"] = (
        str(self._dataset_id) + "__" + cast(str, self._dataset_type)
    )

    # On explore, merge legacy and extra filters into the form data
    utils.convert_legacy_filters_into_adhoc(form_data)
    utils.merge_extra_filters(form_data)

    dummy_dataset_data: Dict[str, Any] = {
        "type": self._dataset_type,
        "name": dataset_name,
        "columns": [],
        "metrics": [],
        "database": {"id": 0, "backend": ""},
    }
    try:
        dataset_data = dataset.data if dataset else dummy_dataset_data
    except (SupersetException, SQLAlchemyError):
        dataset_data = dummy_dataset_data

    return {
        "dataset": sanitize_datasource_data(dataset_data),
        "form_data": form_data,
        "slice": slc.data if slc else None,
        "message": message,
    }
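# For context: run() packs the dataset id and type into the single
# "<id>__<type>" string carried by form_data["datasource"] (e.g. "42__table").
# split_datasource_string below is a hypothetical helper sketching how such a
# value unpacks; the real parsing lives in get_datasource_info.
from typing import Tuple


def split_datasource_string(datasource: str) -> Tuple[int, str]:
    """Unpack an "<id>__<type>" datasource string, e.g. "42__table"."""
    id_part, _sep, type_part = datasource.partition("__")
    return int(id_part), type_part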