Code example #1
def serialize_iso(attr):
    """Serialize Datetime object into ISO-8601 formatted string.

    :param Datetime attr: Object to be serialized.
    :rtype: str
    :raises: ValueError if the format is invalid.
    """
    if not attr:
        return None
    if isinstance(attr, str):
        attr = isodate.parse_datetime(attr)
    try:
        utc = attr.utctimetuple()
        if utc.tm_year > 9999 or utc.tm_year < 1:
            raise OverflowError("Hit max or min date")

        date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
            utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min,
            utc.tm_sec)
        return date + 'Z'
    except (ValueError, OverflowError) as err:
        msg = "Unable to serialize datetime object."
        raise_with_traceback(ValueError, msg, err)
    except AttributeError as err:
        msg = "ISO-8601 object must be valid Datetime object."
        raise_with_traceback(TypeError, msg, err)
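
A minimal usage sketch for the serializer above. It assumes serialize_iso is defined as shown and that the helpers it relies on (isodate, and a raise_with_traceback such as msrest.exceptions.raise_with_traceback) are importable.

import datetime

import isodate                                        # parser used by the snippet above
from msrest.exceptions import raise_with_traceback    # assumed source of the helper

# A naive datetime's fields pass through unchanged and get a 'Z' suffix.
print(serialize_iso(datetime.datetime(2024, 1, 2, 3, 4, 5)))   # 2024-01-02T03:04:05Z

# ISO-8601 strings are parsed first; timezone-aware values are converted to UTC.
print(serialize_iso("2024-01-02T03:04:05+02:00"))              # 2024-01-02T01:04:05Z

# Anything without utctimetuple() hits the AttributeError branch and is
# re-raised as a TypeError with the original traceback attached.
try:
    serialize_iso(object())
except TypeError:
    pass
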
Code example #2
    def deserialize_from_text(cls, response, content_type=None):
        # type: (Type[ContentDecodePolicyType], PipelineResponse, Optional[str]) -> Any
        """Decode response data according to content-type.
        Accept a stream of data as well, but will be load at once in memory for now.
        If no content-type, will return the string version (not bytes, not stream)
        :param response: The HTTP response.
        :type response: ~azure.core.pipeline.transport.HttpResponse
        :param str content_type: The content type.
        """
        data = response.text()  # type: ignore
        if not data:
            return None

        if hasattr(data, 'read'):
            # Assume a stream
            data = cast(IO, data).read()

        if isinstance(data, bytes):
            data_as_str = data.decode(encoding='utf-8-sig')
        else:
            # Explain to mypy the correct type.
            data_as_str = cast(str, data)

        if content_type is None:
            return data

        if content_type in cls.JSON_MIMETYPES:
            try:
                return json.loads(data_as_str)
            except ValueError as err:
                raise DecodeError(message="JSON is invalid: {}".format(err),
                                  response=response,
                                  error=err)
        elif "xml" in (content_type or []):
            try:
                return ET.fromstring(data_as_str)
            except ET.ParseError:
                # The server might have an issue and have returned JSON with a
                # content-type of XML. So try a JSON load, and if that is also
                # broken, let the initial XML exception propagate.
                def _json_attemp(data):
                    try:
                        return True, json.loads(data)
                    except ValueError:
                        return False, None  # Don't care about this one

                success, json_result = _json_attemp(data)
                if success:
                    return json_result
                # If we get here, it's neither JSON nor XML, so log it and
                # re-raise the last context in this block (the XML exception).
                # The function hack is needed because Python 2.7 otherwise
                # messes up the exception context.
                _LOGGER.critical("Wasn't XML nor JSON, failing")
                raise_with_traceback(DecodeError,
                                     message="XML is invalid",
                                     response=response)
        raise DecodeError(
            "Cannot deserialize content-type: {}".format(content_type))
Code example #3
    def fetch(self, path):
        """Fetch and return the contents of a JSON file at a given filesystem path.

        :param str path: Path to JSON file (relative to the base_filepath of the Fetcher)

        :raises: ResourceNotFoundError if the JSON file cannot be found

        :returns: JSON data at the path
        :rtype: JSON object
        """
        _LOGGER.debug("Fetching %s from local filesystem", path)
        abs_path = os.path.join(self.base_filepath, path)
        abs_path = os.path.normpath(abs_path)

        # Fetch
        try:
            _LOGGER.debug("File open on %s", abs_path)
            with io.open(abs_path, encoding="utf-8-sig") as f:
                file_str = f.read()
        except (OSError, IOError):
            # In Python 3 a FileNotFoundError (a subclass of OSError) is raised when a file doesn't exist.
            # In Python 2 an IOError is raised when a file doesn't exist.
            # Catching both OSError and IOError therefore covers both versions.
            # The semantics would ideally be tighter, but this is the price of supporting both.
            raise_with_traceback(ResourceNotFoundError,
                                 message="Could not open file")
        return json.loads(file_str)
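
For context, a self-contained sketch of the same error-translation pattern the method uses: a missing file is surfaced as azure.core's ResourceNotFoundError while the original traceback is preserved. The load_json_file helper below is illustrative only and is not part of the Fetcher.

import io
import json

from azure.core.exceptions import ResourceNotFoundError, raise_with_traceback

def load_json_file(abs_path):
    """Illustrative helper mirroring the try/except translation above."""
    try:
        with io.open(abs_path, encoding="utf-8-sig") as f:
            file_str = f.read()
    except (OSError, IOError):
        # Covers Python 2 (IOError) and Python 3 (FileNotFoundError/OSError).
        raise_with_traceback(ResourceNotFoundError, message="Could not open file")
    return json.loads(file_str)
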
Code example #4
    def update_queue(self, queue_description, **kwargs):
        # type: (QueueDescription, Any) -> QueueDescription
        """Update a queue.

        :param queue_description: The properties of this `QueueDescription` will be applied to the queue in
         ServiceBus. Only a portion of properties can be updated.
         Refer to https://docs.microsoft.com/en-us/rest/api/servicebus/update-queue.
        :type queue_description: ~azure.servicebus.management.QueueDescription
        :rtype: ~azure.servicebus.management.QueueDescription
        """

        if not isinstance(queue_description, QueueDescription):
            raise TypeError("queue_description must be of type QueueDescription")

        to_update = copy(queue_description._to_internal_entity())  # pylint:disable=protected-access

        for attr in QUEUE_DESCRIPTION_SERIALIZE_ATTRIBUTES:
            setattr(to_update, attr, getattr(queue_description, attr, None))
        to_update.default_message_time_to_live = avoid_timedelta_overflow(to_update.default_message_time_to_live)
        to_update.auto_delete_on_idle = avoid_timedelta_overflow(to_update.auto_delete_on_idle)

        create_entity_body = CreateQueueBody(
            content=CreateQueueBodyContent(
                queue_description=to_update,
            )
        )
        request_body = create_entity_body.serialize(is_xml=True)
        with _handle_response_error():
            try:
                et = cast(
                    ElementTree,
                    self._impl.queue.put(
                        queue_description.queue_name,  # type: ignore
                        request_body,
                        api_version=constants.API_VERSION,
                        if_match="*",
                        **kwargs
                    )
                )
            except ValidationError:
                # post-hoc try to give a somewhat-justifiable failure reason.
                raise_with_traceback(
                    ValueError,
                    message="queue_description must be a QueueDescription with valid fields, "
                            "including non-empty string queue name")
        result = QueueDescription._from_internal_entity(  # pylint:disable=protected-access
            _convert_xml_to_object(queue_description.queue_name, et)
        )
        result.queue_name = queue_description.queue_name
        return result
Code example #5
def _from_cncf_events(event):
    """This takes in a CNCF cloudevent and returns a dictionary.
    If cloud events library is not installed, the event is returned back.
    """
    try:
        from cloudevents.http import to_json
        return json.loads(to_json(event))
    except (AttributeError, ImportError):
        # means this is not a CNCF event
        return event
    except Exception as err:  # pylint: disable=broad-except
        msg = """Failed to serialize the event. Please ensure your
        CloudEvents is correctly formatted (https://pypi.org/project/cloudevents/)"""
        raise_with_traceback(ValueError, msg, err)
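
A minimal usage sketch, assuming the cloudevents package is installed and that the names the snippet uses (json, and a raise_with_traceback such as azure.core.exceptions.raise_with_traceback) are in scope.

import json

from azure.core.exceptions import raise_with_traceback   # assumed source of the helper
from cloudevents.http import CloudEvent

event = CloudEvent(
    {"type": "com.example.sample", "source": "https://example.com/source"},
    {"hello": "world"},
)

as_dict = _from_cncf_events(event)
print(as_dict["type"], as_dict["source"])   # com.example.sample https://example.com/source
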
Code example #6
    def _parse_result(self, response, config):  # pylint:disable=inconsistent-return-statements
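        """Parse a 200 response as JSON recognition units; delegate anything else to the error handler."""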
        status_code = response.status_code
        headers = response.headers
        content = response.body().decode("utf-8")

        if status_code == 200:
            content_json = json.loads(content)  # content was already decoded to str above
            if config.response_hook:
                config.response_hook(headers, content_json)
            try:
                return _parse_recognition_units(content_json)
            except Exception as err:  # pylint:disable=broad-except
                msg = "Cannot parse response from server."
                raise_with_traceback(ServiceResponseError, msg, err)
        else:
            self._error_handler(status_code, content)
Code example #7
    def create_queue(self, queue, **kwargs):
        # type: (Union[str, QueueDescription], Any) -> QueueDescription
        """Create a queue.

        :param queue: The queue name or a `QueueDescription` instance. When it's a str, it will be the name
         of the created queue. Other properties of the created queue will have default values decided by the
         ServiceBus. Use a `QueueDescription` if you want to set queue properties other than the queue name.
        :type queue: Union[str, QueueDescription]
        :rtype: ~azure.servicebus.management.QueueDescription
        """
        try:
            queue_name = queue.queue_name  # type: ignore
            to_create = queue._to_internal_entity()  # type: ignore  # pylint:disable=protected-access
        except AttributeError:
            queue_name = queue  # type: ignore
            to_create = InternalQueueDescription()  # Use an empty queue description.

        create_entity_body = CreateQueueBody(
            content=CreateQueueBodyContent(
                queue_description=to_create,  # type: ignore
            )
        )
        request_body = create_entity_body.serialize(is_xml=True)
        try:
            with _handle_response_error():
                et = cast(
                    ElementTree,
                    self._impl.queue.put(
                        queue_name,  # type: ignore
                        request_body, api_version=constants.API_VERSION, **kwargs)
                )
        except ValidationError:
            # post-hoc try to give a somewhat-justifiable failure reason.
            if isinstance(queue, (six.string_types, QueueDescription)):
                raise_with_traceback(
                    ValueError,
                    message="queue must be a non-empty str or a QueueDescription with non-empty str queue_name")
            raise_with_traceback(
                TypeError,
                message="queue must be a non-empty str or a QueueDescription with non-empty str queue_name")

        result = QueueDescription._from_internal_entity(  # pylint:disable=protected-access
                _convert_xml_to_object(queue_name, et)
        )
        result.queue_name = queue_name
        return result
Code example #8
    def deserialize_from_text(
        cls,  # type: Type[ContentDecodePolicyType]
        data,  # type: Optional[Union[AnyStr, IO]]
        mime_type=None,  # type: Optional[str]
        response=None  # type: Optional[Union[HttpResponse, AsyncHttpResponse]]
    ):
        """Decode response data according to content-type.

        Accept a stream of data as well, but will be load at once in memory for now.
        If no content-type, will return the string version (not bytes, not stream)

        :param response: The HTTP response.
        :type response: ~azure.core.pipeline.transport.HttpResponse
        :param str mime_type: The mime type. As mime type, charset is not expected.
        :param response: If passed, exception will be annotated with that response
        :raises ~azure.core.exceptions.DecodeError: If deserialization fails
        :returns: A dict or XML tree, depending of the mime_type
        """
        if not data:
            return None

        if hasattr(data, 'read'):
            # Assume a stream
            data = cast(IO, data).read()

        if isinstance(data, bytes):
            data_as_str = data.decode(encoding='utf-8-sig')
        else:
            # Explain to mypy the correct type.
            data_as_str = cast(str, data)

        if mime_type is None:
            return data

        if cls.JSON_REGEXP.match(mime_type):
            try:
                return json.loads(data_as_str)
            except ValueError as err:
                raise DecodeError(message="JSON is invalid: {}".format(err),
                                  response=response,
                                  error=err)
        elif "xml" in (mime_type or []):
            try:
                try:
                    if isinstance(data, unicode):  # type: ignore
                        # On Python 2.7, ET.fromstring will choke on a unicode
                        # string, so encode it to UTF-8 bytes first.
                        data_as_str = data_as_str.encode(
                            encoding="utf-8")  # type: ignore
                except NameError:
                    pass
                return ET.fromstring(data_as_str)
            except ET.ParseError:
                # The server might have an issue and have returned JSON with a
                # content-type of XML. So try a JSON load, and if that is also
                # broken, let the initial XML exception propagate.
                def _json_attemp(data):
                    try:
                        return True, json.loads(data)
                    except ValueError:
                        return False, None  # Don't care about this one

                success, json_result = _json_attemp(data)
                if success:
                    return json_result
                # If we get here, it's neither JSON nor XML, so log it and
                # re-raise the last context in this block (the XML exception).
                # The function hack is needed because Python 2.7 otherwise
                # messes up the exception context.
                _LOGGER.critical("Wasn't XML nor JSON, failing")
                raise_with_traceback(DecodeError,
                                     message="XML is invalid",
                                     response=response)
        raise DecodeError(
            "Cannot deserialize content-type: {}".format(mime_type))
Code example #9
    def attest_open_enclave(self, report, **kwargs):
        # type: (bytes, **Any) -> Tuple[AttestationResult, AttestationToken]
        """Attests the validity of an Open Enclave report.

        :param bytes report: An open_enclave report generated from an Intel(tm)
            SGX enclave
        :keyword bytes inittime_data: Data presented at the time that the SGX
            enclave was initialized.
        :keyword bytes inittime_json: Data presented at the time that the SGX
            enclave was initialized, JSON encoded.
        :keyword bytes runtime_data: Data presented at the time that the open_enclave
            report was created.
        :keyword bytes runtime_json: Data presented at the time that the open_enclave
            report was created. JSON Encoded.
        :keyword str draft_policy: "draft" or "experimental" policy to be used with
            this attestation request. If this parameter is provided, then this
            policy document will be used for the attestation request.
            This allows a caller to test various policy documents against actual data
            before applying the policy document via the set_policy API.
        :keyword bool validate_token: If True, validate the token, otherwise return the token unvalidated.
        :keyword validation_callback: Function callback to allow clients to perform custom validation of the token.
            If the token is invalid, the `validation_callback` function should throw
            an exception.
        :paramtype validation_callback: ~typing.Callable[[AttestationToken, AttestationSigner], None]
        :keyword bool validate_signature: If True, validate the signature of the token being validated.
        :keyword bool validate_expiration: If True, validate the expiration time of the token being validated.
        :keyword str issuer: Expected issuer, used if `validate_issuer` is true.
        :keyword float validation_slack: Slack time for validation - tolerance applied
            to help account for clock drift between the issuer and the current machine.
        :keyword bool validate_issuer: If True, validate that the issuer of the token matches the expected issuer.
        :keyword bool validate_not_before_time: If true, validate the "Not Before" time in the token.

        :return: :class:`AttestationResult` containing the claims in the returned attestation token.

        :rtype: Tuple[~azure.security.attestation.AttestationResult, ~azure.security.attestation.AttestationToken]

        .. admonition:: Example: Simple OpenEnclave attestation.

            .. literalinclude:: ../samples/sample_attest_enclave.py
                :start-after: [START attest_open_enclave_shared]
                :end-before: [END attest_open_enclave_shared]
                :language: python
                :dedent: 8
                :caption: Attesting an open_enclave report for an SGX enclave.

        .. admonition:: Example: Simple OpenEnclave attestation with draft attestation policy.

            .. literalinclude:: ../samples/sample_attest_enclave.py
                :start-after: [START attest_open_enclave_shared_draft]
                :end-before: [END attest_open_enclave_shared_draft]
                :language: python
                :dedent: 8
                :caption: Attesting using a draft attestation policy.

        .. note::
            Note that if the `draft_policy` parameter is provided, the resulting
            attestation token will be an unsecured attestation token.

        For additional request configuration options, please see `Python Request
        Options <https://aka.ms/azsdk/python/options>`_.

        """

        inittime_json = kwargs.pop("inittime_json", None)  # type: bytes
        inittime_data = kwargs.pop("inittime_data", None)  # type: bytes
        runtime_json = kwargs.pop("runtime_json", None)  # type: bytes
        runtime_data = kwargs.pop("runtime_data", None)  # type: bytes

        if inittime_json and inittime_data:
            raise ValueError("Cannot provide both inittime_json and inittime_data.")
        if runtime_json and runtime_data:
            raise ValueError("Cannot provide both runtime_data and runtime_json.")

        # If the input was JSON, make sure that it's valid JSON before sending it
        # to the service.
        if inittime_json:
            try:
                json.loads(inittime_json)
            except json.JSONDecodeError:
                raise_with_traceback(ValueError, "Content must be valid JSON.")

        if runtime_json:
            try:
                json.loads(runtime_json)
            except json.JSONDecodeError:
                raise_with_traceback(ValueError, "Content must be valid JSON.")

        # Now create the RuntimeData object to be sent to the service.
        runtime = None
        if runtime_data:
            runtime = RuntimeData(data=runtime_data, data_type=DataType.BINARY)

        if runtime_json:
            runtime = RuntimeData(data=runtime_json, data_type=DataType.JSON)

        # And the InitTimeData object to be sent to the service.
        inittime = None
        if inittime_data:
            inittime = InitTimeData(data=inittime_data, data_type=DataType.BINARY)
        if inittime_json:
            inittime = InitTimeData(data=inittime_json, data_type=DataType.JSON)

        request = AttestOpenEnclaveRequest(
            report=report,
            init_time_data=inittime,
            runtime_data=runtime,
            draft_policy_for_attestation=kwargs.pop("draft_policy", None),
        )

        # Merge our existing config options with the options for this API call.
        # Note that this must be done before calling into the implementation
        # layer because the implementation layer doesn't like keyword args that
        # it doesn't expect :(.
        options = merge_validation_args(self._config._kwargs, kwargs)

        result = self._client.attestation.attest_open_enclave(request, **kwargs)
        token = AttestationToken(
            token=result.token, body_type=GeneratedAttestationResult
        )

        if options.get("validate_token", True):
            token._validate_token(self._get_signers(**kwargs), **options)
        return (AttestationResult._from_generated(token._get_body()), token)