Example #1
0
def test_retrying_rpc_exception():
    """The first Predict call raises RpcError; the client must retry and succeed."""

    state = {'first_call': True}

    def fake_predict(request, timeout):
        # The scored array must arrive under the 'images' input of the request.
        sent = tf.contrib.util.make_ndarray(request.inputs['images'])
        assert all(a == b for a, b in zip(sent[0], [1, 2]))

        # Fail exactly once to exercise the retry path.
        if state['first_call']:
            state['first_call'] = False
            raise grpc.RpcError()

        payload = np.asarray([[11, 22]])
        tensor = tf.contrib.util.make_tensor_proto(
            payload, types_pb2.DT_FLOAT, payload.shape)
        response = mock.MagicMock()
        response.outputs = {"output_alias": tensor}
        return response

    stub = mock.Mock()
    stub.Predict = mock.MagicMock(side_effect=fake_predict)

    client = PredictionClient("localhost", 50051)
    client._get_grpc_stub = lambda: stub

    scores = client.score_numpy_array(np.asarray([[1, 2]], dtype='f'))
    assert all(a == b for a, b in zip(scores[0], [11, 22]))
Example #2
0
class RemoteQuantizedResNet50(QuantizedResNet50):
    """Quantized ResNet-50 whose featurizer stage is evaluated by a remote service.

    Instead of running the featurizer graph locally, the featurizer node is
    replaced with a ``tf.py_func`` that forwards the input images to a deployed
    prediction service (created on demand via a ``DeploymentClient``).
    """

    def __init__(self,
                 subscription_id,
                 resource_group,
                 model_management_account,
                 model_base_path,
                 remote_service_name=None):
        """Create the model and the deployment client.

        Args:
            subscription_id: Subscription to deploy under.
            resource_group: Resource group for the deployment client.
            model_management_account: Model-management account name.
            model_base_path: Forwarded to the QuantizedResNet50 base class.
            remote_service_name: Optional explicit service name. When None, a
                deterministic name is derived from the model name and version.
        """
        super().__init__(model_base_path)
        self.__deployment_client = DeploymentClient(subscription_id,
                                                    resource_group,
                                                    model_management_account)
        # Default service name hashes "<model_name>-<version>" so repeated runs
        # with the same model/version map to the same service name.
        self.__service_name = remote_service_name if remote_service_name is not None else "featurizer-service-" + hashlib.md5(
            (self.model_name + "-" +
             self.version).encode("utf-8")).hexdigest()[:6]

    def _import_featurizer_graph_def(self, input_map):
        """Hook the featurizer to the remote service, deploying it if needed.

        Looks up the service by name; if absent, registers the model (a
        ServiceDefinition with a single BrainWaveStage saved to a temp dir)
        and creates the service. Returns a (input, output) tensor pair where
        the output is a py_func that calls the remote service.
        """
        service = self.__deployment_client.get_service_by_name(
            self.__service_name)
        if (service is None):
            # First use: register the model package and create the service.
            model_name = self.model_name + "-" + self.version + "-model"
            temp_dir = tempfile.mkdtemp()
            model_path = os.path.join(temp_dir, "model")
            service_def = ServiceDefinition()
            service_def.pipeline.append(BrainWaveStage(self))
            service_def.save(model_path)
            model_id = self.__deployment_client.register_model(
                model_name, model_path)
            service = self.__deployment_client.create_service(
                self.__service_name, model_id)

        # NOTE(review): ipAddress/port are attributes of the service object
        # returned by the deployment client — pylint can't see them (E1101).
        self.__client = PredictionClient(service.ipAddress, service.port)  #pylint: disable=E1101
        # py_func output is assumed float32 to match the remote featurizer.
        return input_map['InputImage'], tf.py_func(self._remote_service_call,
                                                   [input_map['InputImage']],
                                                   tf.float32)

    def _remote_service_call(self, data):
        """Score a numpy array against the deployed prediction service."""
        return self.__client.score_numpy_array(data)

    def cleanup_remote_service(self):
        """Delete the deployed service and its registered model, if present."""
        service = self.__deployment_client.get_service_by_name(
            self.__service_name)
        if service is not None:
            print("Deleting service", service.id)
            self.__deployment_client.delete_service(service.id)
            print("Deleted service", service.id)
            print("Deleting model", service.modelId)
            self.__deployment_client.delete_model(service.modelId)
            print("Deleted model", service.modelId)
Example #3
0
def test_retrying_rpc_exception():
    """First Predict attempt raises RpcError; the client must rebuild the
    channel (load twice, close once) and succeed on the retry."""

    state = {'first_call': True, 'loads': 0, 'closes': 0}

    def fake_unary_unary(method, request_serializer, response_deserializer):
        response = mock.MagicMock()
        if method == '/tensorflow.serving.PredictionService/Predict':
            if state['first_call']:
                state['first_call'] = False

                # Fail the very first RPC to trigger the retry path.
                def raise_rpc_error(req, timeout):
                    raise grpc.RpcError()

                return raise_rpc_error

            payload = np.asarray([[11, 22]])
            tensor = tf.contrib.util.make_tensor_proto(
                payload, types_pb2.DT_FLOAT, payload.shape)
            response.outputs = {"output_alias": tensor}
        return lambda req, timeout: response

    def fake_load_channel():
        state['loads'] += 1
        return channel_mock

    def fake_close_channel():
        state['closes'] += 1

    frozen_now = datetime.now()

    channel_mock = mock.Mock()
    channel_mock.unary_unary = mock.MagicMock(side_effect=fake_unary_unary)
    channel_mock.close = fake_close_channel

    client = PredictionClient("localhost",
                              50051,
                              channel_shutdown_timeout=timedelta(minutes=1))
    client._channel_func = fake_load_channel
    client._get_datetime_now = lambda: frozen_now

    scores = client.score_numpy_array(np.asarray([[1, 2]], dtype='f'))
    assert all(a == b for a, b in zip(scores[0], [11, 22]))

    # One load for the failed attempt plus one for the retry; the failed
    # channel is closed exactly once.
    assert state['loads'] == 2
    assert state['closes'] == 1
Example #4
0
def test_score_numpy_array():
    """score_numpy_array sends the array as the 'images' input and unpacks
    the 'output_alias' tensor from the Predict response."""

    def fake_predict(request, timeout):
        sent = tf.contrib.util.make_ndarray(request.inputs['images'])
        assert all(a == b for a, b in zip(sent[0], [1, 2, 3]))
        assert all(a == b for a, b in zip(sent[1], [4, 5, 6]))

        payload = np.asarray([[11, 22, 33], [44, 55, 66]])
        tensor = tf.contrib.util.make_tensor_proto(payload, types_pb2.DT_FLOAT,
                                                   payload.shape)
        response = mock.MagicMock()
        response.outputs = {"output_alias": tensor}
        return response

    stub = mock.Mock()
    stub.Predict = mock.MagicMock(side_effect=fake_predict)

    client = PredictionClient("localhost", 50051)
    client.stub = stub

    scores = client.score_numpy_array(
        np.asarray([[1, 2, 3], [4, 5, 6]], dtype='f'))
    assert all(a == b for a, b in zip(scores[0], [11, 22, 33]))
    assert all(a == b for a, b in zip(scores[1], [44, 55, 66]))