def testIssueRequests_RaiseValueErrorOnUnrecognizedRequestType(self):
  """SendRequests rejects items that are not recognized request protos."""
  client = tensorflow_serving_client.TensorFlowServingClient(
      'localhost:1234', 'a_model_name')
  bogus_request = 'i am a request'  # A plain string, not a request proto.
  # Call and verify the rejection.
  with self.assertRaises(error_types.ValidationFailed):
    client.SendRequests([bogus_request])
def testIssueRequests_RaiseRpcErrorIfRpcFailed(self):
  """A gRPC failure during Classify surfaces as ValidationFailed."""
  client = tensorflow_serving_client.TensorFlowServingClient(
      'localhost:1234', 'a_model_name')
  # Make every Classify RPC fail.
  self.prediction_stub.Classify.side_effect = grpc.RpcError
  request = classification_pb2.ClassificationRequest()
  # Call and verify that the RPC error is reported as a validation failure.
  with self.assertRaises(error_types.ValidationFailed):
    client.SendRequests([request])
def testGetModelState_ReturnsNotReady_IfServerUnavailable(self):
  """An RpcError from GetModelStatus is reported as NOT_READY."""
  # Make the model-status RPC fail.
  self.model_stub.GetModelStatus.side_effect = grpc.RpcError
  client = tensorflow_serving_client.TensorFlowServingClient(
      'localhost:1234', 'a_model_name')
  # Call.
  status = client._GetServingStatus()
  # Verify.
  self.assertEqual(status, types.ModelServingStatus.NOT_READY)
def testGetModelState_ReturnsNotReady_IfEmptyState(self):
  """A status response with no model versions is reported as NOT_READY."""
  # Respond with an empty version-status list.
  empty_status = _make_response({'model_version_status': []})
  self.model_stub.GetModelStatus.return_value = empty_status
  client = tensorflow_serving_client.TensorFlowServingClient(
      'localhost:1234', 'a_model_name')
  # Call.
  status = client._GetServingStatus()
  # Verify.
  self.assertEqual(status, types.ModelServingStatus.NOT_READY)
def testIssueRequests_NoErrorIfSucceeded(self):
  """SendRequests routes each request proto to the matching stub method.

  Classification requests go to Classify and regression requests to
  Regress; a successful batch raises nothing.
  """
  # Prepare requests and client.
  r1 = classification_pb2.ClassificationRequest()
  r2 = classification_pb2.ClassificationRequest()
  r3 = regression_pb2.RegressionRequest()
  client = tensorflow_serving_client.TensorFlowServingClient(
      'localhost:1234', 'a_model_name')
  # Call.
  client.SendRequests([r1, r2, r3])
  # Check calls. assert_called_with only inspects the MOST RECENT call, so
  # asserting r1 after Classify was later called with r2 would fail;
  # assert_any_call checks each recorded call individually.
  self.prediction_stub.Classify.assert_any_call(r1)
  self.prediction_stub.Classify.assert_any_call(r2)
  self.prediction_stub.Regress.assert_called_with(r3)
def testGetModelState_ReturnsUnavailable_IfAnyStateEnded(self):
  """A single END version among AVAILABLE ones yields UNAVAILABLE."""
  # Two serving versions plus one that has ended.
  mixed_status = _make_response({
      'model_version_status': [
          {'state': 'AVAILABLE'},
          {'state': 'AVAILABLE'},
          {'state': 'END'},
      ]
  })
  self.model_stub.GetModelStatus.return_value = mixed_status
  client = tensorflow_serving_client.TensorFlowServingClient(
      'localhost:1234', 'a_model_name')
  # Call.
  status = client._GetServingStatus()
  # Verify.
  self.assertEqual(status, types.ModelServingStatus.UNAVAILABLE)
def MakeClient(self, endpoint: Text) -> base_client.BaseModelServerClient:
  """Builds the TensorFlow Serving client under test for `endpoint`."""
  client = tensorflow_serving_client.TensorFlowServingClient(
      endpoint=endpoint, model_name=self._model_name)
  return client