async def featured_result_query(self, request: ONSRequest) -> SearchResult:
    """
    Executes the ONS featured result query using the given SearchEngine class
    :param request: The inbound request supplying the search term and request id.
    :return: The featured result query search result.
    :raises ServerError: If unable to connect to the Elasticsearch cluster.
    :raises InvalidUsage: If the requested page size exceeds the maximum allowed.
    """
    # Initialise the search engine
    engine: AbstractSearchEngine = self.get_search_engine_instance()

    # Perform the query
    search_term = request.get_search_term()
    try:
        engine: AbstractSearchEngine = engine.featured_result_query(search_term)

        logger.debug(request.request_id,
                     "Executing featured result query",
                     extra={"query": engine.to_dict()})
        response: ONSResponse = await engine.execute()
    except ConnectionError as e:
        message = "Unable to connect to Elasticsearch cluster to perform featured result query request"
        logger.error(request.request_id, message, exc_info=e)
        raise ServerError(message)
    except RequestSizeExceededException as e:
        # Log and raise a 400 BAD_REQUEST
        message = "Requested page size exceeds max allowed: '{0}'".format(e)
        logger.error(request.request_id, message, exc_info=e)
        raise InvalidUsage(message)

    # Return annotation added for consistency with the other query methods,
    # which all declare '-> SearchResult'
    search_result: SearchResult = response.to_featured_result_query_search_result()
    return search_result
async def departments_query(self, request: ONSRequest) -> SearchResult:
    """
    Executes the ONS departments query using the given SearchEngine class
    :param request: The inbound request supplying the search term and paginator params.
    :return: The departments query search result.
    :raises ServerError: If unable to connect to the Elasticsearch cluster.
    :raises InvalidUsage: If the requested page size exceeds the maximum allowed.
    """
    # Initialise the search engine
    engine: AbstractSearchEngine = self.get_search_engine_instance()

    # Perform the query
    search_term = request.get_search_term()
    page = request.get_current_page()
    page_size = request.get_page_size()
    try:
        engine: AbstractSearchEngine = engine.departments_query(
            search_term, page, page_size)

        logger.debug(request.request_id,
                     "Executing departments query",
                     extra={"query": engine.to_dict()})
        response: ONSResponse = await engine.execute()
    except ConnectionError as e:
        message = "Unable to connect to Elasticsearch cluster to perform departments query request"
        logger.error(request.request_id, message, exc_info=e)
        raise ServerError(message)
    except RequestSizeExceededException as e:
        # Log and raise a 400 BAD_REQUEST. This handler was missing here even
        # though this query paginates with page/page_size like the other query
        # methods, which all translate this exception into a 400 rather than
        # letting it bubble up as an unhandled 500.
        message = "Requested page size exceeds max allowed: '{0}'".format(e)
        logger.error(request.request_id, message, exc_info=e)
        raise InvalidUsage(message)

    search_result: SearchResult = response.to_departments_query_search_result(
        page, page_size)
    return search_result
async def content_query(self, request: ONSRequest, list_type: ListType) -> SearchResult:
    """
    Executes the ONS content query using the given SearchEngine class
    :param request: The inbound request supplying search term, paginator and sort params.
    :param list_type: The list type whose type filters constrain the query.
    :return: The content query search result.
    """
    # Initialise the search engine
    search_engine: AbstractSearchEngine = self.get_search_engine_instance()

    # Gather query parameters from the inbound request
    term = request.get_search_term()
    current_page = request.get_current_page()
    size = request.get_page_size()
    sort_field: SortField = request.get_sort_by()
    filters: List[TypeFilter] = request.get_type_filters(list_type)

    # Flatten each type filter's content types into a single filter-function list
    filter_funcs: List[AvailableContentTypes] = [
        content_type
        for type_filter in filters
        for content_type in type_filter.get_content_types()
    ]

    try:
        search_engine = search_engine.content_query(term,
                                                    current_page,
                                                    size,
                                                    sort_by=sort_field,
                                                    filter_functions=filter_funcs,
                                                    type_filters=filters)

        logger.debug(request.request_id,
                     "Executing content query",
                     extra={"query": search_engine.to_dict()})
        response: ONSResponse = await search_engine.execute()
    except ConnectionError as e:
        message = "Unable to connect to Elasticsearch cluster to perform content query request"
        logger.error(request.request_id, message, exc_info=e)
        raise ServerError(message)
    except RequestSizeExceededException as e:
        # Log and raise a 400 BAD_REQUEST
        message = "Requested page size exceeds max allowed: '{0}'".format(e)
        logger.error(request.request_id, message, exc_info=e)
        raise InvalidUsage(message)

    return response.to_content_query_search_result(current_page, size, sort_field)
def timeout(request: ONSRequest, exception: sanic.exceptions.SanicException):
    """
    Error handler invoked when a request times out.

    Logs the timeout — with request context when the request object still
    exists — and returns a JSON body carrying the exception's status code.
    :param request: The timed-out request, or None if the server has already discarded it.
    :param exception: The timeout exception raised by Sanic.
    :return: A JSON response with a timeout message and the exception's status code.
    """
    if request is not None:
        # Cover any future API changes which could see the request preserved during timeout (also log out
        # request context)
        logger.debug(request.request_id,
                     "RequestTimeout from error_handler.",
                     exc_info=exception)
    else:
        # Request has timed out and as such was awaited by the server (no longer exists)
        sanic_logger.debug("RequestTimeout from error_handler for null request.",
                           exc_info=exception)

    # NOTE(review): called positionally as (body, status); presumably this binds to
    # sanic.response.json rather than a request-aware helper — confirm the import.
    return json({"message": "RequestTimeout from error_handler."}, exception.status_code)
def json(request: ONSRequest, body, status=200, headers: dict = None, dumps=json_dumps, **kwargs):
    """
    Returns response object with body in json format.
    :param request: The inbound request object.
    :param body: Response data to be serialized.
    :param status: Response code.
    :param headers: Custom Headers.
    :param dumps: Serializer used to encode the body.
    :param kwargs: Remaining arguments that are passed to the json encoder.
    """
    # Guarantee a headers dict which echoes the request id back to the client.
    # A caller-supplied dict is mutated in place, matching the original contract.
    response_headers = {} if headers is None else headers
    if ONSRequest.request_id_header not in response_headers:
        response_headers[ONSRequest.request_id_header] = request.request_id

    logger.debug(request.request_id,
                 "returning response [status={status}]".format(status=status),
                 extra={
                     "response": {
                         "body": body,
                         "status": status,
                         "headers": response_headers
                     }
                 })

    return HTTPResponse(dumps(body, **kwargs),
                        headers=response_headers,
                        status=status,
                        content_type="application/json")
async def proxy(self, request: ONSRequest) -> SearchResult:
    """
    Proxy an Elasticsearch query over HTTP
    :param request: Request whose body carries the raw Elasticsearch query JSON
        (under "query") and optional type filters (under "filter").
    :return: The search result for the proxied query.
    :raises InvalidUsage: On an oversized page request or an unknown type filter.
    :raises ServerError: If unable to connect to the Elasticsearch cluster.
    """
    # Initialise the search engine
    engine: AbstractSearchEngine = self.get_search_engine_instance()

    # Parse the request body for a valid Elasticsearch query
    body: dict = request.get_elasticsearch_query()

    # Parse query and filters
    query: dict = loads(body.get("query"))
    type_filters_raw = body.get("filter")

    # Update the search engine with the query JSON
    engine.update_from_dict(query)

    # Extract paginator params
    page = request.get_current_page()
    page_size = request.get_page_size()
    sort_by = request.get_sort_by()

    try:
        engine: AbstractSearchEngine = engine.paginate(page, page_size)
    except RequestSizeExceededException as e:
        # Log and raise a 400 BAD_REQUEST
        message = "Requested page size exceeds max allowed: '{0}'".format(e)
        logger.error(request.request_id, message, exc_info=e)
        raise InvalidUsage(message)

    # Add any type filters (a single filter may arrive as a bare string)
    if type_filters_raw is not None:
        if not isinstance(type_filters_raw, list):
            type_filters_raw = [type_filters_raw]
        try:
            type_filters = AvailableTypeFilters.from_string_list(type_filters_raw)
            engine: AbstractSearchEngine = engine.type_filter(type_filters)
        except UnknownTypeFilter as e:
            message = "Received unknown type filter: '{0}'".format(
                e.unknown_type_filter)
            logger.error(request.request_id, message, exc_info=e)
            raise InvalidUsage(message)

    # Execute
    try:
        logger.debug(request.request_id,
                     "Executing proxy query",
                     extra={"query": engine.to_dict()})
        response: ONSResponse = await engine.execute()
    except ConnectionError as e:
        message = "Unable to connect to Elasticsearch cluster to perform proxy query request"
        # Fix: the exception was passed as a bare third positional argument,
        # unlike every other handler here which uses exc_info=e; the positional
        # form does not feed the logger's exception-info machinery.
        logger.error(request.request_id, message, exc_info=e)
        raise ServerError(message)

    search_result: SearchResult = response.to_content_query_search_result(
        page, page_size, sort_by)
    return search_result