import base64
import json
from xml.dom.minidom import parseString
from xml.parsers.expat import ExpatError

# ParseError, SearchResult, parse_json, parse, and parse_opensearch_xml_entry
# are assumed to be defined elsewhere in this package.


def parse_token_response_dict(data, response=None):
    text = data.get("token")
    if not text:
        if "token" not in data:
            raise ParseError(
                "token dict does not contain 'token' key",
                response=response,
            )
        raise ParseError(
            "token dict does not contain value for 'token' key",
            response=response,
        )
    return text
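
# A quick sketch of the three cases above (inputs fabricated for
# illustration): {} raises "does not contain 'token' key", {"token": ""}
# raises "does not contain value for 'token' key", and {"token": "abc"}
# returns "abc".
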
def _check_for_errors(data, response):
    """Check data for keys known to indicate errors from this endpoint."""
    # TODO: what regularity is there across endpoints in these?
    error_code = data.get("errorCode")
    error_message = data.get("errorMessage")
    if error_code or error_message:
        raise ParseError(
            "JSON response body contained error {0}: {1}".format(
                error_code, error_message),
            response=response)
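
# For example (a fabricated body, purely illustrative): a response of
# {"errorCode": 123, "errorMessage": "token expired"} would raise
# ParseError("JSON response body contained error 123: token expired").
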
def parse_opensearch(self, response):
    if not response:
        raise ParseError("falsy response", response=response)
    if not response.body:
        raise ParseError("empty response body", response=response)
    if response.status != 200:
        raise ParseError("error response", response=response)
    # .get() so a missing header lands in the "no content-type" branch
    # below instead of raising KeyError here.
    content_types = response.headers.get(b"content-type")
    content_type = content_types[0] if content_types else None
    if not content_type:
        raise ParseError(
            "no content-type in response headers",
            response=response,
        )
    elif not content_type.startswith(b"application/atom+xml"):
        raise ParseError(
            "unexpected content-type {0!r}".format(content_type),
            response=response)
    return parse_opensearch_xml(response.body)
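
# Note: the startswith() check means parameterized content types such as
# b"application/atom+xml; charset=utf-8" are accepted as well.
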
def parse_token_blob(blob, response=None):
    parts = blob.split(b".", 2)
    if len(parts) < 2:
        raise ParseError(
            "cannot parse token blob into parts",
            response=response,
        )
    middle = parts[1]
    # Restore the base64 padding the token format strips: pad out to the
    # next multiple of 4, not by len(middle) % 4 characters.
    padding = b"=" * (-len(middle) % 4)
    # The payload segment is base64url-encoded ("-" and "_" instead of
    # "+" and "/"), so use the urlsafe decoder.
    payload = base64.urlsafe_b64decode(middle + padding)
    # Reminder: JSON is required to be utf-8; we want to fail if it's not.
    try:
        decoded = payload.decode("utf-8")
        data = json.loads(decoded)
    except (UnicodeDecodeError, ValueError, TypeError) as error:
        raise ParseError(
            "Could not parse decoded token blob as JSON",
            response=response,
            error=error,
        )
    return data
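
# A minimal usage sketch (the blob below is fabricated, not a real
# credential): the payload segment is base64url-encoded JSON, so building
# one by hand and round-tripping it shows what parse_token_blob() returns.
#
#     >>> body = base64.urlsafe_b64encode(
#     ...     json.dumps({"sub": "demo"}).encode("utf-8")).rstrip(b"=")
#     >>> parse_token_blob(b"header." + body + b".signature")
#     {'sub': 'demo'}
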
def parse_metadata(self, response):
    data = parse_json(response, required_keys=[])
    if not isinstance(data, list):
        raise ParseError(
            "unexpected type for JSON response body",
            response=response,
        )
    table = {}
    for record in data:
        basemap = parse.dict_to_basemap(record)
        table[basemap.title.lower()] = basemap
    return table
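
# The table is keyed by lowercased title so lookups are case-insensitive;
# e.g. (names fabricated) parse_metadata(response)["imagery"] would find a
# basemap titled "Imagery".
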
def encode_token_text(text, response=None):
    # Why hardcode this encoding?
    # JSON is required to use utf-8. While the Python json package uses
    # text (str) to represent this data, it should always encode as utf-8.
    # Moreover, the token should be base64, and utf-8 is more than enough
    # there. If it isn't, then we actually want a traceback, not another
    # encoding.
    try:
        blob = text.encode("utf-8")
    except UnicodeEncodeError as error:
        raise ParseError(
            "token text cannot be encoded as utf-8",
            error=error,
            response=response,
        )
    return blob
def parse_opensearch_xml(data):
    # parseString() is what raises ExpatError on malformed XML, so wrap it
    # here to raise a predictable exception type wrapping the xml exception.
    try:
        doc = parseString(data)
    except ExpatError as error:
        raise ParseError(
            "error parsing opensearch XML",
            error=error,
        )
    entries = doc.getElementsByTagName("entry")
    # I like comprehensions, but this will give better tracebacks.
    results = []
    for entry in entries:
        entry_data = parse_opensearch_xml_entry(entry)
        result = SearchResult(**entry_data)
        results.append(result)
    return results
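
# Input shape sketch (a fabricated, minimal Atom document; the fields each
# <entry> must carry depend on parse_opensearch_xml_entry, which is defined
# elsewhere):
#
#     <feed xmlns="http://www.w3.org/2005/Atom">
#       <entry>
#         <title>First result</title>
#       </entry>
#     </feed>
#
# Each <entry> element becomes one SearchResult in the returned list.
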
def parse_token_response(response):
    data = parse_json(response, ["token"])
    if not data:
        raise ParseError(
            "token response body JSON represents an empty object",
            response=response,
        )
    return data