def test_numpy_deserializer_from_npy_object_array_with_allow_pickle_false():
    """An object-dtype .npy payload must be rejected when pickling is disabled."""
    deserializer = NumpyDeserializer(allow_pickle=False)
    payload = io.BytesIO()
    # Saving an array of dicts forces dtype=object, which np.load can only
    # reconstruct via pickle — exactly what allow_pickle=False forbids.
    np.save(payload, np.array([{"a": "", "b": ""}, {"c": "", "d": ""}]))
    payload.seek(0)
    with pytest.raises(ValueError):
        deserializer.deserialize(payload, "application/x-npy")
def __init__(
    self,
    endpoint_name,
    sagemaker_session=None,
    serializer=None,
    deserializer=None,
):
    """Initialize an ``SKLearnPredictor``.

    Args:
        endpoint_name (str): The name of the endpoint to perform inference
            on.
        sagemaker_session (sagemaker.session.Session): Session object which
            manages interactions with Amazon SageMaker APIs and any other
            AWS services needed. If not specified, the estimator creates one
            using the default AWS configuration chain.
        serializer (sagemaker.serializers.BaseSerializer): Optional. Default
            serializes input data to .npy format. Handles lists and numpy
            arrays.
        deserializer (sagemaker.deserializers.BaseDeserializer): Optional.
            Default parses the response from .npy format to numpy array.
    """
    # Defaults are created per call rather than in the signature: a default
    # instantiated at ``def`` time would be a single shared object across
    # every predictor ever constructed without an explicit override.
    super(SKLearnPredictor, self).__init__(
        endpoint_name,
        sagemaker_session,
        serializer=serializer if serializer is not None else NumpySerializer(),
        deserializer=deserializer if deserializer is not None else NumpyDeserializer(),
    )
def __init__(
    self,
    endpoint_name,
    sagemaker_session=None,
    serializer=None,
    deserializer=None,
):
    """Initialize an ``PyTorchPredictor``.

    Args:
        endpoint_name (str): The name of the endpoint to perform inference
            on.
        sagemaker_session (sagemaker.session.Session): Session object which
            manages interactions with Amazon SageMaker APIs and any other
            AWS services needed. If not specified, the estimator creates one
            using the default AWS configuration chain.
        serializer (sagemaker.serializers.BaseSerializer): Optional. Default
            serializes input data to .npy format. Handles lists and numpy
            arrays.
        deserializer (sagemaker.deserializers.BaseDeserializer): Optional.
            Default parses the response from .npy format to numpy array.
    """
    # Accept serializer/deserializer overrides for parity with the other
    # framework predictors (e.g. SKLearnPredictor); omitting them preserves
    # the original behavior of NumpySerializer/NumpyDeserializer defaults.
    # Fresh instances are built per call to avoid a shared def-time default.
    super(PyTorchPredictor, self).__init__(
        endpoint_name,
        sagemaker_session,
        serializer=serializer if serializer is not None else NumpySerializer(),
        deserializer=deserializer if deserializer is not None else NumpyDeserializer(),
    )
# NOTE(review): this chunk is the tail of a class (method fragment + properties)
# followed by module-level deprecated aliases; the enclosing class header and
# the `def` line owning the first two statements are outside this view.

        # Tail of a method: pull the variant list out of an already-fetched
        # endpoint config and return the model name behind each variant.
        production_variants = endpoint_config["ProductionVariants"]
        return [d["ModelName"] for d in production_variants]

    @property
    def content_type(self):
        """The MIME type of the data sent to the inference endpoint."""
        # Delegates to the configured serializer's class-level constant.
        return self.serializer.CONTENT_TYPE

    @property
    def accept(self):
        """The content type(s) that are expected from the inference endpoint."""
        # Delegates to the configured deserializer's class-level constant.
        return self.deserializer.ACCEPT

    @property
    def endpoint(self):
        """Deprecated attribute. Please use endpoint_name."""
        # Warn on every access so callers migrate to `endpoint_name`.
        renamed_warning("The endpoint attribute")
        return self.endpoint_name


# Module-level backward-compatibility aliases: each wraps a default
# (de)serializer instance so legacy names keep working while emitting
# deprecation warnings on use.
csv_serializer = deprecated_serialize(CSVSerializer(), "csv_serializer")
json_serializer = deprecated_serialize(JSONSerializer(), "json_serializer")
npy_serializer = deprecated_serialize(NumpySerializer(), "npy_serializer")
csv_deserializer = deprecated_deserialize(CSVDeserializer(), "csv_deserializer")
json_deserializer = deprecated_deserialize(JSONDeserializer(), "json_deserializer")
numpy_deserializer = deprecated_deserialize(NumpyDeserializer(), "numpy_deserializer")
# Old class name kept alive with a deprecation warning.
RealTimePredictor = deprecated_class(Predictor, "RealTimePredictor")
def numpy_deserializer():
    """Build a default-configured NumpyDeserializer fixture."""
    deserializer = NumpyDeserializer()
    return deserializer
def test_numpy_deserializer_from_json_alpha():
    """A JSON payload mixing strings and numbers round-trips under dtype U5."""
    deserializer = NumpyDeserializer(dtype="U5")
    payload = io.BytesIO(b'[["hello",2,3],\n[4,5,6]]')
    result = deserializer.deserialize(payload, "application/json")
    expected = np.array([["hello", 2, 3], [4, 5, 6]])
    assert np.array_equal(result, expected)
def test_numpy_deserializer_from_csv_alpha():
    """A CSV payload mixing strings and numbers round-trips under dtype U5."""
    deserializer = NumpyDeserializer(dtype="U5")
    payload = io.BytesIO(b"hello,2,3\n4,5,6")
    result = deserializer.deserialize(payload, "text/csv")
    expected = np.array([["hello", 2, 3], [4, 5, 6]])
    assert np.array_equal(result, expected)
def attach_predictor(self):
    """Bind a real-time predictor to this object's endpoint.

    Stores the predictor on ``self.predictor``, wired to the endpoint name
    and session already held on the instance, with numpy-based
    (de)serialization.
    """
    self.predictor = SagemakerPredictor(
        endpoint_name=self.endpoint_name,
        sagemaker_session=self.session,
        serializer=NumpySerializer(),
        deserializer=NumpyDeserializer(),
    )