def load_chart_data_into_cache(
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
) -> None:
    # pylint: disable=import-outside-toplevel
    from superset.charts.commands.data import ChartDataCommand

    try:
        ensure_user_is_set(job_metadata.get("user_id"))
        set_form_data(form_data)
        command = ChartDataCommand()
        command.set_query_context(form_data)
        result = command.run(cache=True)
        cache_key = result["cache_key"]
        result_url = f"/api/v1/chart/data/{cache_key}"
        async_query_manager.update_job(
            job_metadata,
            async_query_manager.STATUS_DONE,
            result_url=result_url,
        )
    except SoftTimeLimitExceeded as ex:
        logger.warning("A timeout occurred while loading chart data, error: %s", ex)
        raise ex
    except Exception as ex:
        # TODO: QueryContext should support SIP-40 style errors
        error = ex.message if hasattr(ex, "message") else str(ex)  # type: ignore # pylint: disable=no-member
        errors = [{"message": error}]
        async_query_manager.update_job(
            job_metadata, async_query_manager.STATUS_ERROR, errors=errors
        )
        raise ex
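
# --- Registration sketch (illustrative) --------------------------------------
# The SoftTimeLimitExceeded handler above only fires if Celery runs the task
# with a soft time limit. A minimal, self-contained sketch of that pattern;
# the task name, broker URL, and 60s limit are illustrative values, not
# Superset's actual configuration.
from celery import Celery
from celery.exceptions import SoftTimeLimitExceeded

demo_app = Celery("demo", broker="memory://")


@demo_app.task(name="demo.load_chart_data", soft_time_limit=60)
def demo_load_chart_data() -> None:
    try:
        pass  # the long-running query would run here
    except SoftTimeLimitExceeded:
        # Celery raises this inside the task body when the soft limit elapses;
        # log and re-raise, mirroring the handler above.
        raise
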
def load_chart_data_into_cache(
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
) -> None:
    from superset.charts.commands.data import (
        ChartDataCommand,
    )  # load here due to circular imports

    with app.app_context():  # type: ignore
        try:
            ensure_user_is_set(job_metadata.get("user_id"))
            command = ChartDataCommand()
            command.set_query_context(form_data)
            result = command.run(cache=True)
            cache_key = result["cache_key"]
            result_url = f"/api/v1/chart/data/{cache_key}"
            async_query_manager.update_job(
                job_metadata,
                async_query_manager.STATUS_DONE,
                result_url=result_url,
            )
        except Exception as exc:
            # TODO: QueryContext should support SIP-40 style errors
            error = exc.message if hasattr(exc, "message") else str(exc)  # type: ignore # pylint: disable=no-member
            errors = [{"message": error}]
            async_query_manager.update_job(
                job_metadata, async_query_manager.STATUS_ERROR, errors=errors
            )
            raise exc

    return None
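
# --- Usage sketch (illustrative) ----------------------------------------------
# How a web-tier caller might hand work to the task above: create a job
# record, enqueue the Celery task, and return the metadata so the client can
# poll or subscribe for updates. init_job() and its exact arguments are
# assumptions about async_query_manager's API, not a confirmed signature, and
# .delay() requires load_chart_data_into_cache to be registered as a Celery
# task (see the registration sketch above).
def _enqueue_chart_data_job_sketch(
    channel_id: str, user_id: Optional[str], form_data: Dict[str, Any]
) -> Dict[str, Any]:
    # Create the job record that update_job() later transitions to
    # STATUS_DONE (with result_url) or STATUS_ERROR (with errors).
    job_metadata = async_query_manager.init_job(channel_id, user_id)  # assumed API
    load_chart_data_into_cache.delay(job_metadata, form_data)
    return job_metadata
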
def load_explore_json_into_cache(  # pylint: disable=too-many-locals
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
    response_type: Optional[str] = None,
    force: bool = False,
) -> None:
    cache_key_prefix = "ejr-"  # ejr: explore_json request
    try:
        ensure_user_is_set(job_metadata.get("user_id"))
        datasource_id, datasource_type = get_datasource_info(None, None, form_data)

        # Perform a deep copy here so that below we can cache the original
        # value of the form_data object. This is necessary since the viz
        # objects modify the form_data object. If the modified version were
        # to be cached here, it will lead to a cache miss when clients
        # attempt to retrieve the value of the completed async query.
        original_form_data = copy.deepcopy(form_data)

        viz_obj = get_viz(
            datasource_type=cast(str, datasource_type),
            datasource_id=datasource_id,
            form_data=form_data,
            force=force,
        )
        # run query & cache results
        payload = viz_obj.get_payload()
        if viz_obj.has_error(payload):
            raise SupersetVizException(errors=payload["errors"])

        # Cache the original form_data value for async retrieval
        cache_value = {
            "form_data": original_form_data,
            "response_type": response_type,
        }
        cache_key = generate_cache_key(cache_value, cache_key_prefix)
        set_and_log_cache(cache_manager.cache, cache_key, cache_value)

        result_url = f"/superset/explore_json/data/{cache_key}"
        async_query_manager.update_job(
            job_metadata,
            async_query_manager.STATUS_DONE,
            result_url=result_url,
        )
    except SoftTimeLimitExceeded as ex:
        logger.warning("A timeout occurred while loading explore json, error: %s", ex)
        raise ex
    except Exception as exc:
        if isinstance(exc, SupersetVizException):
            errors = exc.errors  # pylint: disable=no-member
        else:
            error = (
                exc.message if hasattr(exc, "message") else str(exc)  # type: ignore # pylint: disable=no-member
            )
            errors = [error]

        async_query_manager.update_job(
            job_metadata, async_query_manager.STATUS_ERROR, errors=errors
        )
        raise exc
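
# --- Why the deep copy above matters (illustrative) ---------------------------
# cache_key is a hash of cache_value, and the client recomputes that hash from
# the form_data it originally submitted. If the worker cached the viz-mutated
# form_data instead, the two hashes would differ and the async result lookup
# would miss. A stdlib-only demonstration; the hashing scheme here stands in
# for generate_cache_key() and is not Superset's actual implementation.
def _demo_mutation_changes_key() -> None:
    import copy
    import hashlib
    import json

    def toy_cache_key(values: Dict[str, Any]) -> str:
        # Deterministic digest of a dict, keyed on its serialized contents.
        return hashlib.md5(json.dumps(values, sort_keys=True).encode()).hexdigest()

    submitted = {"viz_type": "table", "metrics": ["count"]}
    original = copy.deepcopy(submitted)
    submitted["metrics"].append("sum__num")  # a viz object mutating its input

    # The digests now differ: caching under the mutated dict's key would make
    # the original submission's key a cache miss.
    assert toy_cache_key(original) != toy_cache_key(submitted)
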
def load_explore_json_into_cache(
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
    response_type: Optional[str] = None,
    force: bool = False,
) -> None:
    with app.app_context():  # type: ignore
        cache_key_prefix = "ejr-"  # ejr: explore_json request
        try:
            ensure_user_is_set(job_metadata.get("user_id"))
            datasource_id, datasource_type = get_datasource_info(None, None, form_data)

            viz_obj = get_viz(
                datasource_type=cast(str, datasource_type),
                datasource_id=datasource_id,
                form_data=form_data,
                force=force,
            )
            # run query & cache results
            payload = viz_obj.get_payload()
            if viz_obj.has_error(payload):
                raise SupersetVizException(errors=payload["errors"])

            # cache form_data for async retrieval
            cache_value = {"form_data": form_data, "response_type": response_type}
            cache_key = generate_cache_key(cache_value, cache_key_prefix)
            set_and_log_cache(cache_manager.cache, cache_key, cache_value)

            result_url = f"/superset/explore_json/data/{cache_key}"
            async_query_manager.update_job(
                job_metadata,
                async_query_manager.STATUS_DONE,
                result_url=result_url,
            )
        except Exception as exc:
            if isinstance(exc, SupersetVizException):
                errors = exc.errors  # pylint: disable=no-member
            else:
                error = (
                    exc.message if hasattr(exc, "message") else str(exc)  # type: ignore # pylint: disable=no-member
                )
                errors = [error]

            async_query_manager.update_job(
                job_metadata, async_query_manager.STATUS_ERROR, errors=errors
            )
            raise exc

    return None
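
# --- Retrieval sketch (illustrative) -------------------------------------------
# The worker caches only the (form_data, response_type) pair; the query
# results themselves live in the viz layer's own cache, warmed by the
# get_payload() call above. A view handling GET
# /superset/explore_json/data/<cache_key> would therefore look roughly like
# this. The endpoint path matches result_url above, but the body is a hedged
# reconstruction, not Superset's actual view code.
def _explore_json_data_sketch(cache_key: str) -> Dict[str, Any]:
    cached = cache_manager.cache.get(cache_key)
    if cached is None:
        raise KeyError(f"Cached data not found for key: {cache_key}")

    form_data = cached["form_data"]
    datasource_id, datasource_type = get_datasource_info(None, None, form_data)
    viz_obj = get_viz(
        datasource_type=cast(str, datasource_type),
        datasource_id=datasource_id,
        form_data=form_data,
        force=False,
    )
    # Re-running the viz here should hit the results cache populated by the
    # worker, so this returns quickly with the already-computed payload.
    return viz_obj.get_payload()
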