def wait_query(query_execution_id: str, boto3_session: Optional[boto3.Session] = None) -> Dict[str, Any]:
    """Block until an Athena query execution reaches a final state.

    Polls ``get_query_execution`` every ``_QUERY_WAIT_POLLING_DELAY`` seconds
    until the query leaves the running states, then validates the outcome.

    Parameters
    ----------
    query_execution_id : str
        Athena query execution ID.
    boto3_session : boto3.Session(), optional
        Boto3 Session. The default boto3 session will be used if boto3_session receive None.

    Returns
    -------
    Dict[str, Any]
        Dictionary with the get_query_execution response.

    Raises
    ------
    exceptions.QueryFailed
        If the query finished in the FAILED state.
    exceptions.QueryCancelled
        If the query finished in the CANCELLED state.

    Examples
    --------
    >>> import awswrangler as wr
    >>> res = wr.athena.wait_query(query_execution_id='query-execution-id')
    """
    session: boto3.Session = _utils.ensure_session(session=boto3_session)
    response: Dict[str, Any] = get_query_execution(query_execution_id=query_execution_id, boto3_session=session)
    status: Dict[str, Any] = response["Status"]
    while status["State"] not in _QUERY_FINAL_STATES:
        time.sleep(_QUERY_WAIT_POLLING_DELAY)
        response = get_query_execution(query_execution_id=query_execution_id, boto3_session=session)
        status = response["Status"]
        _logger.debug("state: %s", status["State"])
        _logger.debug("StateChangeReason: %s", status.get("StateChangeReason"))
    final_state: str = status["State"]
    if final_state == "FAILED":
        raise exceptions.QueryFailed(status.get("StateChangeReason"))
    if final_state == "CANCELLED":
        raise exceptions.QueryCancelled(status.get("StateChangeReason"))
    return response
def wait_query(query_id: str, boto3_session: Optional[boto3.Session] = None) -> Dict[str, Any]:
    """Block until a Lake Formation query reaches a final state.

    Polls ``get_query_state`` every ``_QUERY_WAIT_POLLING_DELAY`` seconds
    until the query leaves the running states.

    Parameters
    ----------
    query_id : str
        Lake Formation query execution ID.
    boto3_session : boto3.Session(), optional
        Boto3 Session. The default boto3 session will be used if boto3_session received None.

    Returns
    -------
    Dict[str, Any]
        Dictionary with the get_query_state response.

    Raises
    ------
    exceptions.QueryFailed
        If the query finished in the ERROR state.

    Examples
    --------
    >>> import awswrangler as wr
    >>> res = wr.lakeformation.wait_query(query_id='query-id')
    """
    session: boto3.Session = _utils.ensure_session(session=boto3_session)
    lf_client: boto3.client = _utils.client(service_name="lakeformation", session=session)
    response: Dict[str, Any] = lf_client.get_query_state(QueryId=query_id)
    state: str = response["State"]
    while state not in _QUERY_FINAL_STATES:
        time.sleep(_QUERY_WAIT_POLLING_DELAY)
        response = lf_client.get_query_state(QueryId=query_id)
        state = response["State"]
        _logger.debug("state: %s", state)
    if state == "ERROR":
        raise exceptions.QueryFailed(response.get("Error"))
    return response
def _to_decimal(value: Any) -> Optional[Decimal]:
    """Convert an Athena decimal cell to ``Decimal``, mapping null-like markers to ``None``."""
    text: str = str(value)
    # Athena/pandas render missing decimals as "", "none", " " or "<NA>".
    if text in ("", "none", " ", "<NA>"):
        return None
    return Decimal(text)


def _get_query_metadata(  # pylint: disable=too-many-statements
    query_execution_id: str,
    boto3_session: boto3.Session,
    categories: Optional[List[str]] = None,
    query_execution_payload: Optional[Dict[str, Any]] = None,
    metadata_cache_manager: Optional[_LocalMetadataCacheManager] = None,
) -> _QueryMetadata:
    """Get query metadata.

    Waits for the query (unless a final-state payload is supplied), then maps Athena
    column types to pandas parsing instructions (dtypes, timestamp/date columns,
    converters, binary columns) and extracts the output/manifest locations.

    Parameters
    ----------
    query_execution_id : str
        Athena query execution ID.
    boto3_session : boto3.Session
        Boto3 Session used for the Athena API calls.
    categories : List[str], optional
        Column names to be read as pandas ``category`` dtype.
    query_execution_payload : Dict[str, Any], optional
        An already-fetched get_query_execution response; used instead of polling
        when it is in a final state.
    metadata_cache_manager : _LocalMetadataCacheManager, optional
        Cache updated with the execution payload when provided.

    Returns
    -------
    _QueryMetadata
        Parsed metadata for reading the query results.

    Raises
    ------
    exceptions.QueryFailed
        If the (supplied or awaited) execution did not succeed.
    exceptions.UnsupportedType
        For array/row columns, unsupported with ctas_approach=False.
    """
    if (query_execution_payload is not None) and (query_execution_payload["Status"]["State"] in _QUERY_FINAL_STATES):
        if query_execution_payload["Status"]["State"] != "SUCCEEDED":
            reason: str = query_execution_payload["Status"]["StateChangeReason"]
            raise exceptions.QueryFailed(f"Query error: {reason}")
        _query_execution_payload: Dict[str, Any] = query_execution_payload
    else:
        _query_execution_payload = wait_query(query_execution_id=query_execution_id, boto3_session=boto3_session)
    cols_types: Dict[str, str] = get_query_columns_types(
        query_execution_id=query_execution_id, boto3_session=boto3_session
    )
    _logger.debug("cols_types: %s", cols_types)
    dtype: Dict[str, str] = {}
    parse_timestamps: List[str] = []
    parse_dates: List[str] = []
    converters: Dict[str, Any] = {}
    binaries: List[str] = []
    col_name: str
    col_type: str
    for col_name, col_type in cols_types.items():
        if col_type == "array":
            raise exceptions.UnsupportedType(
                "List data type is not support with ctas_approach=False. "
                "Please use ctas_approach=True for List columns."
            )
        if col_type == "row":
            raise exceptions.UnsupportedType(
                "Struct data type is not support with ctas_approach=False. "
                "Please use ctas_approach=True for Struct columns."
            )
        pandas_type: str = _data_types.athena2pandas(dtype=col_type)
        if (categories is not None) and (col_name in categories):
            dtype[col_name] = "category"
        elif pandas_type in ["datetime64", "date"]:
            parse_timestamps.append(col_name)
            if pandas_type == "date":
                parse_dates.append(col_name)
        elif pandas_type == "bytes":
            dtype[col_name] = "string"
            binaries.append(col_name)
        elif pandas_type == "decimal":
            converters[col_name] = _to_decimal
        else:
            dtype[col_name] = pandas_type
    output_location: Optional[str] = None
    if "ResultConfiguration" in _query_execution_payload:
        output_location = _query_execution_payload["ResultConfiguration"].get("OutputLocation")
    athena_statistics: Dict[str, Union[int, str]] = _query_execution_payload.get("Statistics", {})
    # Bug fix: str(athena_statistics.get(...)) turned a missing key into the
    # literal string "None" (truthy!); keep the declared Optional[str] semantics.
    _manifest_raw = athena_statistics.get("DataManifestLocation")
    manifest_location: Optional[str] = str(_manifest_raw) if _manifest_raw is not None else None
    if metadata_cache_manager is not None and query_execution_id not in metadata_cache_manager:
        metadata_cache_manager.update_cache(items=[_query_execution_payload])
    query_metadata: _QueryMetadata = _QueryMetadata(
        execution_id=query_execution_id,
        dtype=dtype,
        parse_timestamps=parse_timestamps,
        parse_dates=parse_dates,
        converters=converters,
        binaries=binaries,
        output_location=output_location,
        manifest_location=manifest_location,
        raw_payload=_query_execution_payload,
    )
    _logger.debug("query_metadata:\n%s", query_metadata)
    return query_metadata