def testRegress(self):
    """Test PredictionService.Regress implementation.

    Starts a model server over the SavedModel bundle and verifies that a
    Regress RPC on the 'regress_x_to_y' signature returns exactly one
    regression with the expected value.
    """
    model_path = self._GetSavedModelBundlePath()

    # Ensure spawned server processes are torn down even if the test fails.
    atexit.register(self.TerminateProcs)
    model_server_address = self.RunServer(PickUnusedPort(), 'default',
                                          model_path)

    # print() call replaces the Python-2 print statement, matching the
    # print(...) style used elsewhere in this file.
    print('Sending Regress request...')
    # Prepare request: target the 'regress_x_to_y' signature with x = 2.0.
    request = regression_pb2.RegressionRequest()
    request.model_spec.name = 'default'
    request.model_spec.signature_name = 'regress_x_to_y'

    example = request.input.example_list.examples.add()
    example.features.feature['x'].float_list.value.extend([2.0])

    # Send request over an insecure gRPC channel.
    channel = grpc.insecure_channel(model_server_address)
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
    result = stub.Regress(request, RPC_TIMEOUT)  # 5 secs timeout
    # Verify response. assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(1, len(result.result.regressions))
    expected_output = 3.0
    self.assertEqual(expected_output, result.result.regressions[0].value)
    self._VerifyModelSpec(result.model_spec, request.model_spec.name,
                          request.model_spec.signature_name,
                          self._GetModelVersion(model_path))
    def testRegress(self):
        """Test PredictionService.Regress implementation.

        Starts a local model server over the SavedModel bundle and verifies
        that a Regress RPC on the 'regress_x_to_y' signature returns the
        expected single regression value.
        """
        model_path = self._GetSavedModelBundlePath()
        use_saved_model = True

        # Ensure the server process is cleaned up even on test failure.
        atexit.register(self.TerminateProcs)
        model_server_address = self.RunServer(PickUnusedPort(), 'default',
                                              model_path, use_saved_model)
        # Give the server time to come up before issuing the RPC.
        time.sleep(5)

        # print() call replaces the Python-2 print statement, matching the
        # print(...) style used elsewhere in this file.
        print('Sending Regress request...')
        # Prepare request: target the 'regress_x_to_y' signature with x = 2.0.
        request = regression_pb2.RegressionRequest()
        request.model_spec.name = 'default'
        request.model_spec.signature_name = 'regress_x_to_y'

        example = request.input.example_list.examples.add()
        example.features.feature['x'].float_list.value.extend([2.0])

        # Send request over the (legacy) beta gRPC API.
        host, port = model_server_address.split(':')
        channel = implementations.insecure_channel(host, int(port))
        stub = prediction_service_pb2.beta_create_PredictionService_stub(
            channel)
        result = stub.Regress(request, 5.0)  # 5 secs timeout
        # Verify response. assertEqual replaces the deprecated assertEquals.
        self.assertEqual(1, len(result.result.regressions))
        expected_output = 3.0
        self.assertEqual(expected_output, result.result.regressions[0].value)
Ejemplo n.º 3
0
    def _TestRegress(self, model_path):
        """Issue a Regress RPC against a fresh server and verify the result."""
        model_server_address = TensorflowModelServerTest.RunServer(
            'default', model_path)[1]

        print('Sending Regress request...')
        # Build a request for the 'regress_x_to_y' signature with x = 2.0.
        req = regression_pb2.RegressionRequest()
        req.model_spec.name = 'default'
        req.model_spec.signature_name = 'regress_x_to_y'
        ex = req.input.example_list.examples.add()
        ex.features.feature['x'].float_list.value.append(2.0)

        # Issue the RPC over an insecure channel.
        stub = prediction_service_pb2_grpc.PredictionServiceStub(
            grpc.insecure_channel(model_server_address))
        response = stub.Regress(req, RPC_TIMEOUT)  # 5 secs timeout

        # Exactly one regression with the expected value must come back,
        # and the echoed model spec must match what was requested.
        regressions = response.result.regressions
        self.assertEqual(1, len(regressions))
        self.assertEqual(3.0, regressions[0].value)
        self._VerifyModelSpec(response.model_spec, req.model_spec.name,
                              req.model_spec.signature_name,
                              self._GetModelVersion(model_path))
Ejemplo n.º 4
0
 def _BuildRegressionRequests(self, signature_name: str):
     """Yield one RegressionRequest per stored example for *signature_name*."""
     for ex in self.examples:
         req = regression_pb2.RegressionRequest()
         req.model_spec.name = self._model_name
         req.model_spec.signature_name = signature_name
         req.input.example_list.examples.append(ex)
         yield req
Ejemplo n.º 5
0
def call_servo(examples, serving_bundle):
    """Send an RPC request to the Servomatic prediction service.

    Args:
        examples: A list of tf.train.Examples that matches the model spec.
        serving_bundle: A `ServingBundle` object that contains the information
            to make the serving request.

    Returns:
        A ClassificationResponse or RegressionResponse proto.
    """
    # Resolve host/port from the bundle's inference address and open a stub
    # over the (legacy) beta gRPC API.
    url = urlparse('http://' + serving_bundle.inference_address)
    stub = prediction_service_pb2.beta_create_PredictionService_stub(
        implementations.insecure_channel(url.hostname, url.port))

    is_classification = serving_bundle.model_type == 'classification'
    if is_classification:
        request = classification_pb2.ClassificationRequest()
    else:
        request = regression_pb2.RegressionRequest()

    # Populate the model spec; version and signature are optional.
    request.model_spec.name = serving_bundle.model_name
    if serving_bundle.model_version is not None:
        request.model_spec.version.value = serving_bundle.model_version
    if serving_bundle.signature is not None:
        request.model_spec.signature_name = serving_bundle.signature
    request.input.example_list.examples.extend(examples)

    if is_classification:
        return stub.Classify(request, 30.0)  # 30 secs timeout
    return stub.Regress(request, 30.0)  # 30 secs timeout
    def parse_request(self, serialized_data):
        """Deserialize a request proto selected by ``self.prediction_type``.

        Args:
            serialized_data: Wire-format bytes of the request proto.

        Returns:
            The parsed request proto matching the configured prediction type.
        """
        # The request classes are zero-arg callables themselves, so mapping
        # to the class directly replaces the redundant lambda wrappers.
        request_fn_map = {
            PREDICT: predict_pb2.PredictRequest,
            INFERENCE: inference_pb2.MultiInferenceRequest,
            CLASSIFY: classification_pb2.ClassificationRequest,
            REGRESSION: regression_pb2.RegressionRequest,
        }

        request = request_fn_map[self.prediction_type]()
        request.ParseFromString(serialized_data)

        return request
Ejemplo n.º 7
0
    def testIssueRequests_NoErrorIfSucceeded(self):
        """IssueRequests dispatches each request to the matching stub method."""
        # Prepare requests and client.
        r1 = classification_pb2.ClassificationRequest()
        r2 = classification_pb2.ClassificationRequest()
        r3 = regression_pb2.RegressionRequest()
        client = TensorFlowServingClient('localhost:1234', 'a_model_name')

        # Call.
        client.IssueRequests([r1, r2, r3])

        # Check calls. assert_called_with only inspects the MOST RECENT call,
        # so asserting r1 after Classify was also called with r2 would fail;
        # assert_any_call verifies each call happened regardless of order.
        self.prediction_stub.Classify.assert_any_call(r1)
        self.prediction_stub.Classify.assert_any_call(r2)
        self.prediction_stub.Regress.assert_any_call(r3)
Ejemplo n.º 8
0
    def _ExampleToRegressionRequest(
            self,
            example: tf.train.Example) -> regression_pb2.RegressionRequest:
        """Convert a single Example to a RegressionRequest.

        Args:
            example: `Example` instance to convert.

        Returns:
            A `RegressionRequest` targeting the configured model and signature,
            carrying the given example.
        """
        req = regression_pb2.RegressionRequest()
        # Target the configured model/signature, then attach the example.
        req.model_spec.name = self._model_name
        req.model_spec.signature_name = self._signature_name
        req.input.example_list.examples.extend([example])
        return req
Ejemplo n.º 9
0
def call_servo(examples, serving_bundle):
  """Send an RPC request to the Servomatic prediction service.

  Args:
    examples: A list of examples that matches the model spec.
    serving_bundle: A `ServingBundle` object that contains the information to
      make the serving request.

  Returns:
    A ClassificationResponse or RegressionResponse proto.
  """
  # Open a stub over the (legacy) beta gRPC API at the bundle's address.
  url = urlparse('http://' + serving_bundle.inference_address)
  channel = implementations.insecure_channel(url.hostname, url.port)
  stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

  use_predict = serving_bundle.use_predict
  if use_predict:
    request = predict_pb2.PredictRequest()
  elif serving_bundle.model_type == 'classification':
    request = classification_pb2.ClassificationRequest()
  else:
    request = regression_pb2.RegressionRequest()

  # Populate the model spec; version and signature are optional.
  request.model_spec.name = serving_bundle.model_name
  if serving_bundle.model_version is not None:
    request.model_spec.version.value = serving_bundle.model_version
  if serving_bundle.signature is not None:
    request.model_spec.signature_name = serving_bundle.signature

  if use_predict:
    # tf.compat.v1 API used here to convert tf.example into proto. This
    # utility file is bundled in the witwidget pip package which has a dep
    # on TensorFlow.
    serialized = [ex.SerializeToString() for ex in examples]
    request.inputs[serving_bundle.predict_input_tensor].CopyFrom(
        tf.compat.v1.make_tensor_proto(
            values=serialized, dtype=types_pb2.DT_STRING))
    return common_utils.convert_predict_response(
        stub.Predict(request, 30.0), serving_bundle)  # 30 secs timeout

  request.input.example_list.examples.extend(examples)
  if serving_bundle.model_type == 'classification':
    return stub.Classify(request, 30.0)  # 30 secs timeout
  return stub.Regress(request, 30.0)  # 30 secs timeout
Ejemplo n.º 10
0
def main(_):
    """Send a hard-coded Regress request to the server named in FLAGS.server."""
    host, port = FLAGS.server.split(':')
    stub = prediction_service_pb2.beta_create_PredictionService_stub(
        implementations.insecure_channel(host, int(port)))

    # Pin the model name and version the request targets.
    request = regression_pb2.RegressionRequest()
    request.model_spec.name = 'lr_1'
    request.model_spec.version.value = 1503585217
    example = request.input.example_list.examples.add()

    # Populate each input feature with the same test value.
    for feature_name in ('AGE', 'B', 'CHAS', 'CRIM', 'DIS'):
        example.features.feature[feature_name].float_list.value.append(1.2)

    result = stub.Regress(request, 10.0)  # 10 secs timeout
    print(result)
Ejemplo n.º 11
0
# Build a single tf.train.Example whose 'inputs' feature carries the four
# float features the model expects.
print(' # Preparing data ')
features_data=[34.49726772511229, 12.65565114916675, 39.57766801952616, 4.0826206329529615]
model_input = tf.train.Example(
    features=tf.train.Features(
        feature={
            'inputs': tf.train.Feature(
                float_list=tf.train.FloatList(value=features_data)
            )
        }
    )
)

# Create a SageMaker predictor that (de)serializes TensorFlow Serving protos
# over the raw octet-stream content type.
# NOTE(review): 'SageMakerEndpoint' looks like a placeholder endpoint name —
# confirm it matches the deployed endpoint.
print(' # Preparing predictor ')
endpoint='SageMakerEndpoint'

predictor=RealTimePredictor(
    endpoint=endpoint,
    deserializer=tf_deserializer,
    serializer=tf_serializer,
    content_type='application/octet-stream')

# Build a RegressionRequest against the default serving signature of
# 'generic_model', carrying the single example built above.
print(' # Preparing request ')
request=regression_pb2.RegressionRequest()
request.model_spec.name='generic_model'
request.model_spec.signature_name=DEFAULT_SERVING_SIGNATURE_DEF_KEY
request.input.example_list.examples.extend([model_input])

# Invoke the endpoint and dump the raw regression response.
print(' # Invoking SageMaker API ')
result=predictor.predict(request)

print(result)
Ejemplo n.º 12
0
def call_servo(examples, serving_bundle):
    """Send an RPC request to the Servomatic prediction service.

    Args:
        examples: A list of examples that matches the model spec.
        serving_bundle: A `ServingBundle` object that contains the information
            to make the serving request.

    Returns:
        A ClassificationResponse or RegressionResponse proto (or a converted
        predict response when `serving_bundle.use_predict` is set).
    """
    # Open a stub over the (legacy) beta gRPC API at the bundle's address.
    parsed_url = urlparse('http://' + serving_bundle.inference_address)
    channel = implementations.insecure_channel(parsed_url.hostname,
                                               parsed_url.port)
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

    # Pick the request proto matching the serving mode.
    if serving_bundle.use_predict:
        request = predict_pb2.PredictRequest()
    elif serving_bundle.model_type == 'classification':
        request = classification_pb2.ClassificationRequest()
    else:
        request = regression_pb2.RegressionRequest()
    request.model_spec.name = serving_bundle.model_name
    if serving_bundle.model_version is not None:
        request.model_spec.version.value = serving_bundle.model_version
    if serving_bundle.signature is not None:
        request.model_spec.signature_name = serving_bundle.signature

    if serving_bundle.use_predict:
        # tf.compat.v1 API used here to convert tf.example into proto. This
        # utility file is bundled in the witwidget pip package which has a dep
        # on TensorFlow.
        # `predict_input_tensor` holds a comma-separated list of input tensor
        # names; one tensor of values is built per name.
        predict_input_tensor_list = serving_bundle.predict_input_tensor.split(
            ",")
        if len(examples) > 0:
            for key in predict_input_tensor_list:
                # Infer the dtype for this tensor from the first example's
                # feature proto. NOTE(review): this relies on "int64"/"float"
                # appearing in the stringified feature proto — fragile, but
                # left as-is; falls back to DT_STRING when neither matches.
                dtype = types_pb2.DT_STRING
                for value in examples[0].features.feature.items():
                    if key == str(value[0]):
                        if "int64" in str(value[1]):
                            dtype = types_pb2.DT_INT64
                        elif "float" in str(value[1]):
                            dtype = types_pb2.DT_FLOAT
                        break
                # Flatten this feature's values across all examples into one
                # tensor. When dtype stayed DT_STRING, `values` remains empty.
                values = []
                for ex in examples:
                    if dtype == types_pb2.DT_FLOAT:
                        values += ex.features.feature[key].float_list.value
                    elif dtype == types_pb2.DT_INT64:
                        values += ex.features.feature[key].int64_list.value
                request.inputs[key].CopyFrom(
                    tf.compat.v1.make_tensor_proto(values=values, dtype=dtype))
        else:
            # Best-effort diagnostic for the empty-input case; no tensors set.
            print("examples length is %d" % len(examples))
    else:
        request.input.example_list.examples.extend(examples)

    if serving_bundle.use_predict:
        return common_utils.convert_predict_response(
            stub.Predict(request, 30.0), serving_bundle)  # 30 secs timeout
    elif serving_bundle.model_type == 'classification':
        return stub.Classify(request, 30.0)  # 30 secs timeout
    else:
        return stub.Regress(request, 30.0)  # 30 secs timeout