Example #1
0
    def create_model(self,
                     role=None,
                     predictor_cls=None,
                     serializer=None,
                     deserializer=None,
                     vpc_config_override=vpc_utils.VPC_CONFIG_DEFAULT,
                     **kwargs):
        """Create a model to deploy.

        The serializer and deserializer are only used to define a default
        Predictor. They are ignored if an explicit predictor class is passed in.
        Other arguments are passed through to the Model class.

        Args:
            role (str): The ``ExecutionRoleArn`` IAM Role ARN for the ``Model``,
                which is also used during transform jobs. If not specified, the
                role from the Estimator will be used.
            predictor_cls (Predictor): The predictor class to use when
                deploying the model.
            serializer (:class:`~sagemaker.serializers.BaseSerializer`): A
                serializer object, used to encode data for an inference endpoint
                (default: None, which falls back to a fresh
                :class:`~sagemaker.serializers.IdentitySerializer`).
            deserializer (:class:`~sagemaker.deserializers.BaseDeserializer`): A
                deserializer object, used to decode data from an inference
                endpoint (default: None, which falls back to a fresh
                :class:`~sagemaker.deserializers.BytesDeserializer`).
            vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on
                the model. Default: use subnets and security groups from this Estimator.
                * 'Subnets' (list[str]): List of subnet ids.
                * 'SecurityGroupIds' (list[str]): List of security group ids.
            **kwargs: Additional arguments for creating a :class:`~sagemaker.model.ModelPackage`.

        .. tip::

            You can find additional parameters for using this method at
            :class:`~sagemaker.model.ModelPackage` and
            :class:`~sagemaker.model.Model`.

        Returns:
            a Model ready for deployment.
        """
        removed_kwargs("content_type", kwargs)
        removed_kwargs("accept", kwargs)

        # Instantiate default codecs per call instead of sharing one instance
        # created at `def` time (mutable-default-argument pitfall: a shared
        # default would be visible to, and mutable by, every caller).
        if serializer is None:
            serializer = IdentitySerializer()
        if deserializer is None:
            deserializer = BytesDeserializer()

        if predictor_cls is None:

            def predict_wrapper(endpoint, session):
                # Closure over the (possibly defaulted) codecs above.
                return Predictor(endpoint, session, serializer, deserializer)

            predictor_cls = predict_wrapper

        role = role or self.role

        return sagemaker.ModelPackage(
            role,
            algorithm_arn=self.algorithm_arn,
            model_data=self.model_data,
            vpc_config=self.get_vpc_config(vpc_config_override),
            sagemaker_session=self.sagemaker_session,
            predictor_cls=predictor_cls,
            **kwargs)
Example #2
0
    def __init__(
            self,
            endpoint_name,
            sagemaker_session=None,
            serializer=None,
            deserializer=None,
    ):
        """Initialize a ``Predictor``.

        Behavior for serialization of input data and deserialization of
        result data can be configured through initializer arguments. If not
        specified, a sequence of bytes is expected and the API sends it in the
        request body without modifications. In response, the API returns the
        sequence of bytes from the prediction result without any modifications.

        Args:
            endpoint_name (str): Name of the Amazon SageMaker endpoint to which
                requests are sent.
            sagemaker_session (sagemaker.session.Session): A SageMaker Session
                object, used for SageMaker interactions (default: None). If not
                specified, one is created using the default AWS configuration
                chain.
            serializer (:class:`~sagemaker.serializers.BaseSerializer`): A
                serializer object, used to encode data for an inference endpoint
                (default: None, which falls back to a fresh
                :class:`~sagemaker.serializers.IdentitySerializer`).
            deserializer (:class:`~sagemaker.deserializers.BaseDeserializer`): A
                deserializer object, used to decode data from an inference
                endpoint (default: None, which falls back to a fresh
                :class:`~sagemaker.deserializers.BytesDeserializer`).
        """
        self.endpoint_name = endpoint_name
        self.sagemaker_session = sagemaker_session or Session()
        # Per-instance codec defaults; instantiating them here (rather than as
        # shared `def`-time default arguments) avoids the mutable-default
        # pitfall where one instance's codec state would leak into others.
        self.serializer = serializer if serializer is not None else IdentitySerializer()
        self.deserializer = deserializer if deserializer is not None else BytesDeserializer()
        self._endpoint_config_name = self._get_endpoint_config_name()
        self._model_names = self._get_model_names()
Example #3
0
def test_predict_jsonlines(tfs_predictor):
    """A jsonlines payload sent via a generic Predictor yields the expected predictions."""
    payload = "[1.0, 2.0, 5.0]\n[1.0, 2.0, 5.0]"
    want = {"predictions": [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}

    jsonlines_predictor = sagemaker.Predictor(
        tfs_predictor.endpoint_name,
        tfs_predictor.sagemaker_session,
        serializer=IdentitySerializer(content_type="application/jsonlines"),
        deserializer=JSONDeserializer(),
    )

    got = jsonlines_predictor.predict(payload)
    assert want == got
Example #4
0
def test_predictor_jsons(sagemaker_session):
    """Predicting with the jsons serializer invokes the endpoint with the right headers and body."""
    payload = "[1.0, 2.0, 3.0]\n[4.0, 5.0, 6.0]"
    predictor = TensorFlowPredictor(
        "endpoint",
        sagemaker_session,
        serializer=IdentitySerializer(content_type="application/jsons"),
    )

    mock_response(json.dumps(PREDICT_RESPONSE).encode("utf-8"), sagemaker_session)
    result = predictor.predict(payload)

    # The request must carry the custom content type and the raw payload untouched.
    assert_invoked(
        sagemaker_session,
        EndpointName="endpoint",
        ContentType="application/jsons",
        Accept=JSON_CONTENT_TYPE,
        Body=payload,
    )

    assert result == PREDICT_RESPONSE
def test_identity_serializer_with_custom_content_type():
    """IdentitySerializer passes bytes through unchanged and exposes the given content type."""
    serializer = IdentitySerializer(content_type="text/csv")
    payload = b"a,b\n1,2"
    assert serializer.serialize(payload) == payload
    assert serializer.CONTENT_TYPE == "text/csv"
def test_identity_serializer():
    """With default settings, IdentitySerializer returns its input bytes verbatim."""
    payload = b"{}"
    assert IdentitySerializer().serialize(payload) == payload