def deserialize_from_text(cls, response, content_type=None):
    # type: (Type[ContentDecodePolicyType], PipelineResponse, Optional[str]) -> Any
    """Decode the body of *response* according to *content_type*.

    Streams are accepted but are read fully into memory for now. When no
    content type is supplied, the payload is returned unchanged (not bytes,
    not stream).

    :param response: The HTTP response.
    :type response: ~azure.core.pipeline.transport.HttpResponse
    :param str content_type: The content type.
    """
    raw = response.text()  # type: ignore
    if not raw:
        return None
    if hasattr(raw, 'read'):
        # File-like payload: slurp it so the decoders below see bytes/str.
        raw = cast(IO, raw).read()

    # Normalize to text; utf-8-sig transparently strips a BOM when present.
    if isinstance(raw, bytes):
        text = raw.decode(encoding='utf-8-sig')
    else:
        text = cast(str, raw)

    if content_type is None:
        return raw

    if content_type in cls.JSON_MIMETYPES:
        try:
            return json.loads(text)
        except ValueError as err:
            raise DecodeError(message="JSON is invalid: {}".format(err), response=response, error=err)

    if "xml" in (content_type or []):
        try:
            return ET.fromstring(text)
        except ET.ParseError:
            # Some servers mislabel JSON bodies with an XML content type.
            # Give JSON one attempt before surfacing the original XML parse
            # failure.  The nested function keeps Py2.7 from clobbering the
            # exception context.
            def _try_json(payload):
                try:
                    return True, json.loads(payload)
                except ValueError:
                    return False, None  # Don't care about this one

            ok, parsed = _try_json(raw)
            if ok:
                return parsed
            _LOGGER.critical("Wasn't XML not JSON, failing")
            raise_with_traceback(DecodeError, message="XML is invalid", response=response)

    raise DecodeError(
        "Cannot deserialize content-type: {}".format(content_type))
def mock_outputs(pipeline_response):
    """Deserialize a polling response body into a SimpleResource.

    Top-level and nested ``properties`` keys are converted from CamelCase to
    snake_case (via ``TestArmPolling.convert``), the ``properties`` dict is
    flattened into the body, and a :class:`SimpleResource` is built from the
    result.

    :param pipeline_response: Pipeline response wrapping an HTTP response
        whose body is JSON.
    :raises DecodeError: If the body is not valid JSON or has no properties.
    :returns: The deserialized resource.
    """
    response = pipeline_response.http_response
    try:
        body = json.loads(response.text())
    except ValueError:
        raise DecodeError("Impossible to deserialize")
    # CamelCase -> snake_case on the top-level keys.
    body = {
        TestArmPolling.convert.sub(r'\1_\2', k).lower(): v
        for k, v in body.items()
    }
    properties = body.setdefault('properties', {})
    if 'name' in body:
        properties['name'] = body['name']
    if not properties:
        raise DecodeError("Impossible to deserialize")
    # CamelCase -> snake_case on the nested properties, then flatten them
    # into the body.
    properties = {
        TestArmPolling.convert.sub(r'\1_\2', k).lower(): v
        for k, v in properties.items()
    }
    del body['properties']
    body.update(properties)
    # Fix: the original built SimpleResource(**body) twice (inside the
    # branch and again after it); construct it exactly once.
    return SimpleResource(**body)
def __next__(self):
    """Return the next chunk from the wrapped ``requests`` iterator.

    Translates requests-level streaming errors into azure-core exception
    types and closes the underlying response whenever the stream terminates
    (normally or with an error).  ``StreamConsumedError`` is propagated
    untouched; any unexpected exception is logged and re-raised.
    """
    internal_response = self.response.internal_response
    try:
        chunk = next(self.iter_content_func)
        if not chunk:
            # Empty chunk means end of stream; raise here so the
            # StopIteration handler below closes the response first.
            raise StopIteration()
        return chunk
    except StopIteration:
        internal_response.close()
        raise StopIteration()
    except requests.exceptions.StreamConsumedError:
        # Part of the caller-visible contract; propagate as-is.
        raise
    except requests.exceptions.ContentDecodingError as err:
        raise DecodeError(err, error=err)
    except requests.exceptions.ChunkedEncodingError as err:
        msg = err.__str__()
        if 'IncompleteRead' in msg:
            # Connection dropped mid-body: surface as IncompleteReadError.
            _LOGGER.warning("Incomplete download: %s", err)
            internal_response.close()
            raise IncompleteReadError(err, error=err)
        _LOGGER.warning("Unable to stream download: %s", err)
        internal_response.close()
        raise HttpResponseError(err, error=err)
    except Exception as err:
        # Unknown failure: log, close, and let the original exception flow.
        _LOGGER.warning("Unable to stream download: %s", err)
        internal_response.close()
        raise
def decode(self, content, response):
    """Base64-decode *content* and return the raw bytes.

    :param str content: Base64 text to decode.
    :param response: Response attached to the raised error on failure.
    :raises DecodeError: If *content* is not valid base64.
    :returns: The decoded bytes.
    """
    encoded = content.encode('utf-8')
    try:
        decoded = b64decode(encoded)
    except (ValueError, TypeError) as error:
        # ValueError on Python 3, TypeError on Python 2.
        raise DecodeError(message="Message content is not valid base 64.", response=response, error=error)
    return decoded
def _as_json(response): # type: (azure.core.pipeline.transport.HttpResponse) -> None """Assuming this is not empty, return the content as JSON. Result/exceptions is not determined if you call this method without testing _is_empty. :raises: DecodeError if response body contains invalid json data. """ # Assume ClientResponse has "body", and otherwise it's a requests.Response content = response.text() if hasattr(response, "body") else response.text try: return json.loads(content) except ValueError: raise DecodeError( "Error occurred in deserializing the response body.")
def _is_empty(response): # type: (azure.core.pipeline.transport.HttpResponse) -> None """Check if response body contains meaningful content. :rtype: bool :raises: DecodeError if response body contains invalid json data. """ # Assume ClientResponse has "body", and otherwise it's a requests.Response content = response.text() if hasattr(response, "body") else response.text if not content: return True try: return not json.loads(content) except ValueError: raise DecodeError( "Error occurred in deserializing the response body.")
def _read_raw_stream(response, chunk_size=1): # Special case for urllib3. if hasattr(response.raw, 'stream'): try: for chunk in response.raw.stream(chunk_size, decode_content=False): yield chunk except ProtocolError as e: raise ServiceResponseError(e, error=e) except CoreDecodeError as e: raise DecodeError(e, error=e) except ReadTimeoutError as e: raise ServiceRequestError(e, error=e) else: # Standard file-like object. while True: chunk = response.raw.read(chunk_size) if not chunk: break yield chunk # following behavior from requests iter_content, we set content consumed to True # https://github.com/psf/requests/blob/master/requests/models.py#L774 response._content_consumed = True # pylint: disable=protected-access
def deserialize_from_text(
    cls,  # type: Type[ContentDecodePolicyType]
    data,  # type: Optional[Union[AnyStr, IO]]
    mime_type=None,  # Optional[str]
    response=None  # Optional[Union[HttpResponse, AsyncHttpResponse]]
):
    """Decode *data* according to *mime_type*.

    A stream is accepted as well, but it is loaded into memory at once.
    With no mime type, the payload is returned untouched (not bytes, not
    stream).

    :param data: The payload: str, bytes, or a readable stream.
    :param str mime_type: The mime type. As mime type, charset is not expected.
    :param response: If passed, exceptions raised here are annotated with it.
    :raises ~azure.core.exceptions.DecodeError: If deserialization fails.
    :returns: A dict or XML tree, depending of the mime_type.
    """
    if not data:
        return None
    if hasattr(data, 'read'):
        # Stream payload: pull everything into memory first.
        data = cast(IO, data).read()

    if isinstance(data, bytes):
        # utf-8-sig transparently drops a BOM when present.
        text = data.decode(encoding='utf-8-sig')
    else:
        text = cast(str, data)

    if mime_type is None:
        return data

    if cls.JSON_REGEXP.match(mime_type):
        try:
            return json.loads(text)
        except ValueError as err:
            raise DecodeError(message="JSON is invalid: {}".format(err), response=response, error=err)

    if "xml" in (mime_type or []):
        try:
            try:
                if isinstance(data, unicode):  # type: ignore
                    # Python 2.7: ET.fromstring refuses unicode input, so
                    # re-encode the text form to utf-8 bytes first.
                    text = text.encode(encoding="utf-8")  # type: ignore
            except NameError:
                # Python 3: no 'unicode' builtin; nothing to do.
                pass
            return ET.fromstring(text)
        except ET.ParseError:
            # Some servers answer JSON under an XML content type.  Attempt
            # a JSON parse before surfacing the original XML failure; the
            # nested function keeps Python 2.7 from mangling the exception
            # context.
            def _try_json(payload):
                try:
                    return True, json.loads(payload)
                except ValueError:
                    return False, None  # Don't care about this one

            ok, parsed = _try_json(data)
            if ok:
                return parsed
            _LOGGER.critical("Wasn't XML not JSON, failing")
            raise_with_traceback(DecodeError, message="XML is invalid", response=response)

    raise DecodeError(
        "Cannot deserialize content-type: {}".format(mime_type))
def _Request(global_endpoint_manager, request_params, connection_policy, pipeline_client, request, **kwargs):
    """Makes one http request using the requests module.

    :param _GlobalEndpointManager global_endpoint_manager:
    :param dict request_params:
        contains the resourceType, operationType, endpointOverride,
        useWriteEndpoint, useAlternateWriteEndpoint information
    :param documents.ConnectionPolicy connection_policy:
    :param azure.core.PipelineClient pipeline_client:
        Pipeline client to process the request
    :param azure.core.HttpRequest request:
        The request object to send through the pipeline
    :return: tuple of (result, headers)
    :rtype: tuple of (dict, dict)
    """
    # pylint: disable=protected-access
    is_media = request.url.find("media") > -1
    is_media_stream = is_media and connection_policy.MediaReadMode == documents.MediaReadMode.Streamed

    # Policy timeouts are expressed in milliseconds; the pipeline wants seconds.
    connection_timeout = connection_policy.MediaRequestTimeout if is_media else connection_policy.RequestTimeout
    connection_timeout = kwargs.pop("connection_timeout", connection_timeout / 1000.0)

    # Every request tries to perform a refresh
    global_endpoint_manager.refresh_endpoint_list(None)

    if request_params.endpoint_override:
        base_url = request_params.endpoint_override
    else:
        base_url = global_endpoint_manager.resolve_service_endpoint(request_params)
    if base_url != pipeline_client._base_url:
        request.url = request.url.replace(pipeline_client._base_url, base_url)

    parse_result = urlparse(request.url)

    # The requests library now expects header values to be strings only starting 2.11,
    # and will raise an error on validation if they are not, so casting all header values to strings.
    request.headers.update({header: str(value) for header, value in request.headers.items()})

    # We are disabling the SSL verification for local emulator(localhost/127.0.0.1) or if the user
    # has explicitly specified to disable SSL verification.
    is_ssl_enabled = (
        parse_result.hostname != "localhost"
        and parse_result.hostname != "127.0.0.1"
        and not connection_policy.DisableSSLVerification
    )

    if connection_policy.SSLConfiguration or "connection_cert" in kwargs:
        ssl_config = connection_policy.SSLConfiguration
        # Fix: when "connection_cert" is passed in kwargs but no
        # SSLConfiguration is set, the old code dereferenced None and raised
        # AttributeError.  Fall back to None defaults, which the kwargs.pop
        # calls below override with the caller-supplied values.
        ca_certs = ssl_config.SSLCaCerts if ssl_config else None
        cert_files = (ssl_config.SSLCertFile, ssl_config.SSLKeyFile) if ssl_config else None
        response = pipeline_client._pipeline.run(
            request,
            stream=is_media_stream,
            connection_timeout=connection_timeout,
            connection_verify=kwargs.pop("connection_verify", ca_certs),
            connection_cert=kwargs.pop("connection_cert", cert_files),
            **kwargs
        )
    else:
        response = pipeline_client._pipeline.run(
            request,
            stream=is_media_stream,
            connection_timeout=connection_timeout,
            # If SSL is disabled, verify = false
            connection_verify=kwargs.pop("connection_verify", is_ssl_enabled),
            **kwargs
        )

    response = response.http_response
    headers = dict(response.headers)

    # In case of media stream response, return the response to the user and the user
    # will need to handle reading the response.
    if is_media_stream:
        return (response.stream_download(pipeline_client._pipeline), headers)

    data = response.body()
    if not six.PY2:
        # python 3 compatible: convert data from byte to unicode string
        data = data.decode("utf-8")

    # Map the well-known Cosmos error status codes to typed exceptions.
    if response.status_code == 404:
        raise errors.CosmosResourceNotFoundError(message=data, response=response)
    if response.status_code == 409:
        raise errors.CosmosResourceExistsError(message=data, response=response)
    if response.status_code == 412:
        raise errors.CosmosAccessConditionFailedError(message=data, response=response)
    if response.status_code >= 400:
        raise errors.CosmosHttpResponseError(message=data, response=response)

    result = None
    if is_media:
        # Media payloads are returned verbatim, not parsed.
        result = data
    else:
        if data:
            try:
                result = json.loads(data)
            except Exception as e:
                raise DecodeError(
                    message="Failed to decode JSON data: {}".format(e),
                    response=response,
                    error=e)

    return (result, headers)