def get_info(request: Request):
    """Build the `/info` response for this (non-index) OPTIMADE implementation."""
    from optimade.models import BaseInfoResource, BaseInfoAttributes

    # Derive the unversioned base URL from the incoming request URL.
    parsed = urllib.parse.urlparse(str(request.url))
    base_url = get_base_url(parsed)

    schema_props = BaseInfoResource.schema()["properties"]
    entry_endpoints = list(ENTRY_INFO_SCHEMAS.keys())

    attributes = BaseInfoAttributes(
        api_version=__api_version__,
        available_api_versions=[
            {
                "url": f"{base_url}/v{__api_version__.split('.')[0]}",
                "version": __api_version__,
            }
        ],
        formats=["json"],
        available_endpoints=["info", "links"] + entry_endpoints,
        entry_types_by_format={"json": entry_endpoints},
        is_index=False,
    )

    return InfoResponse(
        meta=meta_values(str(request.url), 1, 1, more_data_available=False),
        data=BaseInfoResource(
            id=schema_props["id"]["const"],
            type=schema_props["type"]["const"],
            attributes=attributes,
        ),
    )
def check_url(url: StarletteURL):
    """Check URL path for versioned part.

    Parameters:
        url: A complete urllib-parsed raw URL.

    Raises:
        VersionNotSupported: If the URL represents an OPTIMADE versioned base URL
            and the version part is not supported by the implementation.

    """
    base_url = get_base_url(url)
    optimade_path = f"{url.scheme}://{url.netloc}{url.path}"[len(base_url):]

    # Only URLs that carry a version prefix need validating.
    if not re.match(r"^/v[0-9]+", optimade_path):
        return

    version_is_supported = any(
        optimade_path.startswith(f"{prefix}/")
        for prefix in BASE_URL_PREFIXES.values()
    )
    if not version_is_supported:
        # Extract the offending version prefix for the error message.
        found_versions = re.findall(r"(/v[0-9]+(\.[0-9]+){0,2})", optimade_path)
        raise VersionNotSupported(
            detail=(
                f"The parsed versioned base URL {found_versions[0][0]!r} from "
                f"{url} is not supported by this implementation. "
                f"Supported versioned base URLs are: {', '.join(BASE_URL_PREFIXES.values())}"
            )
        )
async def check_url(url: "URL"):
    """Check URL path for versioned part.

    Parameters:
        url: A complete `urllib`-parsed raw URL.

    Raises:
        VersionNotSupported: If the URL represents an OPTIMADE versioned base URL
            and the version part is not supported by the implementation.

    """
    base_url = get_base_url(url)
    optimade_path = f"{url.scheme}://{url.netloc}{url.path}"[len(base_url):]

    # Look for a `/vMAJOR(.MINOR(.PATCH))` segment directly after the gateway id.
    match = re.match(
        r"^/gateways/[^/\s]+(?P<version>/v[0-9]+(\.[0-9]+){0,2}).*", optimade_path
    )
    if match is None:
        return
    if match.group("version") in BASE_URL_PREFIXES.values():
        return

    raise VersionNotSupported(
        detail=(
            f"The parsed versioned base URL {match.group('version')!r} from "
            f"{url} is not supported by this implementation. "
            "Supported versioned base URLs are: "
            f"{', '.join(BASE_URL_PREFIXES.values())}"
        )
    )
def get_info(request: Request):
    """Build the `/info` response for this index meta-database."""
    # Derive the unversioned base URL from the incoming request URL.
    parsed = urllib.parse.urlparse(str(request.url))
    base_url = get_base_url(parsed)

    props = IndexInfoResource.schema()["properties"]

    attributes = IndexInfoAttributes(
        api_version=f"{__api_version__}",
        available_api_versions=[
            {
                "url": f"{base_url}/v{__api_version__.split('.')[0]}/",
                "version": f"{__api_version__}",
            }
        ],
        formats=["json"],
        available_endpoints=["info", "links"],
        entry_types_by_format={"json": []},
        is_index=True,
    )

    # Point consumers at the configured default child database.
    default_relationship = IndexRelationship(
        data={
            "type": RelatedLinksResource.schema()["properties"]["type"]["const"],
            "id": CONFIG.default_db,
        }
    )

    return IndexInfoResponse(
        meta=meta_values(str(request.url), 1, 1, more_data_available=False),
        data=IndexInfoResource(
            id=props["id"]["const"],
            type=props["type"]["const"],
            attributes=attributes,
            relationships={"default": default_relationship},
        ),
    )
async def dispatch(self, request: Request, call_next):
    """Handle the `api_hint` query parameter for an incoming request.

    Three outcomes are possible:

    1. `api_hint` is present on a *versioned* base URL: the parameter is
       ignored (per the OPTIMADE specification) and a `QueryParamNotUsed`
       warning is emitted.
    2. `api_hint` is present on an unversioned base URL and maps to a
       supported version: respond with a redirect to the equivalent
       versioned URL, with `api_hint` stripped from the query string.
    3. Otherwise: pass the request through unchanged.
    """
    parsed_query = urllib.parse.parse_qs(request.url.query, keep_blank_values=True)

    if "api_hint" in parsed_query:
        if self.is_versioned_base_url(str(request.url)):
            # Versioned base URL: the hint must not be acted upon — warn only.
            warnings.warn(
                QueryParamNotUsed(
                    detail=(
                        "`api_hint` provided with value{:s} '{:s}' for a versioned base URL. "
                        "In accordance with the specification, this will not be handled by "
                        "the implementation.".format(
                            "s" if len(parsed_query["api_hint"]) > 1 else "",
                            "', '".join(parsed_query["api_hint"]),
                        )
                    )
                )
            )
        else:
            # Imported locally — presumably to avoid a circular import at
            # module load time. TODO confirm.
            from optimade.server.routers.utils import get_base_url

            version_path = self.handle_api_hint(parsed_query["api_hint"])

            if version_path:
                # Splice the version prefix in between the base URL and the
                # remainder of the original URL (path + query).
                base_url = get_base_url(request.url)
                new_request = (
                    f"{base_url}{version_path}{str(request.url)[len(base_url):]}"
                )
                url = urllib.parse.urlsplit(new_request)
                # Re-parse the query and drop `api_hint` before redirecting.
                parsed_query = urllib.parse.parse_qsl(
                    url.query, keep_blank_values=True
                )
                parsed_query = "&".join(
                    [
                        f"{key}={value}"
                        for key, value in parsed_query
                        if key != "api_hint"
                    ]
                )

                return RedirectResponse(
                    request.url.replace(path=url.path, query=parsed_query),
                    headers=request.headers,
                )
                # This is the non-URL changing solution:
                #
                #    scope = request.scope
                #    scope["path"] = path
                #    request = Request(scope=scope, receive=request.receive, send=request._send)

    response = await call_next(request)
    return response
def is_versioned_base_url(url: str) -> bool:
    """Determine whether a request is for a versioned base URL.

    First, simply check whether a `/vMAJOR(.MINOR.PATCH)` part exists in the URL.
    If not, return `False`, else, remove the unversioned base URL from the URL
    and check again. Return `bool` of the final result.

    Parameters:
        url: The full URL to check.

    Returns:
        Whether or not the full URL represents an OPTIMADE versioned base URL.

    """
    # A boolean presence test only needs re.search — re.findall would build
    # the full list of matches just to discard it. Compile once, use twice.
    version_pattern = re.compile(r"/v[0-9]+(\.[0-9]+){0,2}")
    if not version_pattern.search(url):
        return False

    # The version part must appear *after* the unversioned base URL, otherwise
    # e.g. a `/v1/` occurring inside the base URL itself would be a false positive.
    base_url = get_base_url(url)
    return version_pattern.search(url[len(base_url):]) is not None
async def get_entries(
    collection: AsyncMongoCollection,
    response_cls: "EntryResponseMany",
    request: "Request",
    params: "EntryListingQueryParams",
) -> "EntryResponseMany":
    """Generalized `/{entries}` endpoint getter"""
    (
        results,
        data_returned,
        more_data_available,
        fields,
        include_fields,
    ) = await collection.afind(params=params)

    if not more_data_available:
        links = ToplevelLinks(next=None)
    else:
        # Deduce the `next` link from the current request
        query_params = urllib.parse.parse_qs(request.url.query)
        current_offset = int(query_params.get("page_offset", [0])[0])  # type: ignore[arg-type]
        query_params["page_offset"] = [current_offset + len(results)]  # type: ignore[list-item]
        encoded_query = urllib.parse.urlencode(query_params, doseq=True)
        base_url = get_base_url(request.url)
        links = ToplevelLinks(next=f"{base_url}{request.url.path}?{encoded_query}")

    if fields or include_fields:
        results = handle_response_fields(results, fields, include_fields)

    return response_cls(
        links=links,
        data=results,
        meta=meta_values(
            url=request.url,
            data_returned=data_returned,
            data_available=await collection.acount(),
            more_data_available=more_data_available,
        ),
    )
def handle_pagination(
    request: Request, more_data_available: bool, nresults: int
) -> dict:
    """Handle pagination for request with number of results equal nresults"""
    from optimade.server.routers.utils import get_base_url

    pagination = {}

    # "prev"
    parse_result = urllib.parse.urlparse(str(request.url))
    base_url = get_base_url(parse_result)
    query = urllib.parse.parse_qs(parse_result.query)
    # NOTE: from here on `query["page_offset"]` holds a plain int (the
    # candidate *prev* offset = current offset - page limit), while every
    # other entry keeps parse_qs's list-of-strings shape.
    query["page_offset"] = int(query.get("page_offset", ["0"])[0]) - int(
        query.get("page_limit", [CONFIG.page_limit])[0]
    )
    urlencoded_prev = None
    if query["page_offset"] > 0:
        # A full previous page exists.
        urlencoded_prev = urllib.parse.urlencode(query, doseq=True)
    elif query["page_offset"] == 0 or abs(query["page_offset"]) < int(
        query.get("page_limit", [CONFIG.page_limit])[0]
    ):
        # The previous page is the first page: link to the URL without any
        # page_offset at all.
        prev_query = query.copy()
        prev_query.pop("page_offset")
        urlencoded_prev = urllib.parse.urlencode(prev_query, doseq=True)
    if urlencoded_prev:
        # NOTE: if prev_query ended up empty, urlencoded_prev is "" (falsy)
        # and no "prev" link is emitted.
        pagination["prev"] = f"{base_url}{parse_result.path}"
        pagination["prev"] += f"?{urlencoded_prev}"

    # "next"
    if more_data_available:
        # `query["page_offset"]` is still the prev offset (current - limit),
        # so the arithmetic works out to: current offset + nresults.
        query["page_offset"] = (
            int(query.get("page_offset", 0))
            + nresults
            + int(query.get("page_limit", [CONFIG.page_limit])[0])
        )
        urlencoded_next = urllib.parse.urlencode(query, doseq=True)
        pagination["next"] = f"{base_url}{parse_result.path}"
        if urlencoded_next:
            pagination["next"] += f"?{urlencoded_next}"
    else:
        pagination["next"] = None

    return pagination