Example #1
def test_retrying_rpc_exception():

    first_call = [True]

    def predict_mock(request, timeout):
        inputs = tf.contrib.util.make_ndarray(request.inputs['images'])
        assert all([x == y for x, y in zip(inputs[0], [1, 2])])

        if first_call[0]:
            first_call[0] = False
            raise grpc.RpcError()

        return_data = np.asarray([[11, 22]])
        return_tensor = tf.contrib.util.make_tensor_proto(
            return_data, types_pb2.DT_FLOAT, return_data.shape)
        result = mock.MagicMock()
        result.outputs = {"output_alias": return_tensor}
        return result

    stub_mock = mock.Mock()
    stub_mock.Predict = mock.MagicMock(side_effect=predict_mock)

    client = PredictionClient("localhost", 50051)
    client._get_grpc_stub = lambda: stub_mock

    result = client.score_numpy_array(np.asarray([[1, 2]], dtype='f'))
    assert all([x == y for x, y in zip(result[0], [11, 22])])
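
This test encodes the client's expected behavior: a single grpc.RpcError is swallowed and the call is retried, succeeding on the second attempt. A minimal sketch of that pattern (a hypothetical helper, not the library's actual implementation):

import grpc

def predict_with_retry(get_stub, request, timeout=10.0, retries=1):
    # Re-fetch the stub on each attempt so a broken channel can be replaced;
    # re-raise the RpcError once the retry budget is exhausted.
    for attempt in range(retries + 1):
        try:
            return get_stub().Predict(request, timeout)
        except grpc.RpcError:
            if attempt == retries:
                raise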
Example #2
def test_remote_featurizer_create_package_and_service():
    test_config = get_test_config()

    deployment_client = DeploymentClient(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'],
        service_principal_params=get_service_principal())

    run_id = uuid.uuid4().hex[:5]
    model_name = "int-test-rf-model-" + run_id
    service_name = "int-test-rf-service-" + run_id

    service_def_path = "/tmp/modelrf"

    in_images = tf.placeholder(tf.string)
    image_tensors = preprocess_array(in_images)

    remote_service_name = ("int-test-featurizer-svc-" + str(uuid.uuid4()))[:30]

    model = RemoteQuantizedResNet50(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'],
        os.path.expanduser("~/models"),
        remote_service_name,
        service_principal_params=get_service_principal())
    model.import_graph_def(include_featurizer=True, input_tensor=image_tensors)

    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), model.classifier_input,
                        model.classifier_output))
    service_def.save(service_def_path)

    # create service
    model_id = deployment_client.register_model(model_name, service_def_path)
    service = deployment_client.create_service(service_name, model_id)

    prediction_client = PredictionClient(service.ipAddress, service.port)
    top_result = sorted(enumerate(
        prediction_client.score_image("/tmp/share1/shark.jpg")),
                        key=lambda x: x[1],
                        reverse=True)[:1]
    # 'tiger shark' is class 3
    assert top_result[0][0] == 3

    deployment_client.delete_service(service.id)
    deployment_client.delete_model(model_id)
Example #3
def worker(IP, path):
    # tper (iterations per worker) and meantime (shared result list) are
    # assumed to be module-level globals in the original script.
    count = 0
    tt = 0
    for i in range(tper):
        ts = time.time()
        count += 1
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(path)
        te = time.time()
        tt += te - ts
    mtime = tt / count
    meantime.append(mtime)
Example #4
def test_retrying_rpc_exception():

    first_call = [True]

    channel_mock_loaded = {'value': 0}
    channel_mock_closed = {'value': 0}

    def unary_unary(id, request_serializer, response_deserializer):
        result = mock.MagicMock()
        if id == '/tensorflow.serving.PredictionService/Predict':
            if first_call[0]:
                first_call[0] = False
                # generator-throw trick: return a callable that raises
                return lambda req, timeout: (_ for _ in
                                             ()).throw(grpc.RpcError())

            return_data = np.asarray([[11, 22]])
            return_tensor = tf.contrib.util.make_tensor_proto(
                return_data, types_pb2.DT_FLOAT, return_data.shape)
            result.outputs = {"output_alias": return_tensor}
        return lambda req, timeout: result

    def load_channel_mock():
        channel_mock_loaded['value'] += 1
        return channel_mock

    def close_channel_mock():
        channel_mock_closed['value'] += 1

    now = datetime.now()

    channel_mock = mock.Mock()
    channel_mock.unary_unary = mock.MagicMock(side_effect=unary_unary)
    channel_mock.close = close_channel_mock

    client = PredictionClient("localhost",
                              50051,
                              channel_shutdown_timeout=timedelta(minutes=1))
    client._channel_func = load_channel_mock
    client._get_datetime_now = lambda: now

    result = client.score_numpy_array(np.asarray([[1, 2]], dtype='f'))
    assert all([x == y for x, y in zip(result[0], [11, 22])])

    assert channel_mock_loaded['value'] == 2
    assert channel_mock_closed['value'] == 1
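
Compared with Example #1, this variant retries at the channel level: the failed first call closes the original channel (closed == 1) and the retry opens a fresh one (loaded == 2). A self-contained sketch of that recovery step (hypothetical helper names, inferred from the test's counters):

import grpc

def call_with_fresh_channel_on_error(open_channel, make_call):
    # Try once; on RpcError close the broken channel and retry on a new one.
    channel = open_channel()
    try:
        return make_call(channel)
    except grpc.RpcError:
        channel.close()
        return make_call(open_channel())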
Example #5
    def _import_featurizer_graph_def(self, input_map):
        service = self.__deployment_client.get_service_by_name(
            self.__service_name)
        if service is None:
            model_name = self.model_name + "-" + self.version + "-model"
            temp_dir = tempfile.mkdtemp()
            model_path = os.path.join(temp_dir, "model")
            service_def = ServiceDefinition()
            service_def.pipeline.append(BrainWaveStage(self))
            service_def.save(model_path)
            model_id = self.__deployment_client.register_model(
                model_name, model_path)
            service = self.__deployment_client.create_service(
                self.__service_name, model_id)

        self.__client = PredictionClient(service.ipAddress, service.port)  #pylint: disable=E1101
        return input_map['InputImage'], tf.py_func(self._remote_service_call,
                                                   [input_map['InputImage']],
                                                   tf.float32)
Example #6
def worker(IP, path, trainedDs):
    # meantime is assumed to be a module-level list shared across workers.
    count = 0
    tt = 0
    datadir = os.path.expanduser(path)
    pass_files = glob.glob(os.path.join(datadir, 'pass', '*.jpg'))
    fail_files = glob.glob(os.path.join(datadir, 'fail', '*.jpg'))
    pdb.set_trace()  # debugging breakpoint left in the original script
    for image in pass_files[:5]:
        ts = time.time()
        count += 1
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(image)
        results = enumerate(results)
        sorted_results = sorted(results, key=lambda x: x[1], reverse=True)
        for top in sorted_results[:5]:
            print(trainedDs[top[0]], 'confidence:', top[1])
        te = time.time()
        tt += te - ts
    mtime = tt / count
    meantime.append(mtime)
Example #7
def test_create_new_channel_after_timeout_expires():

    channel_mock_loaded = {'value': 0}

    def unary_unary(id, request_serializer, response_deserializer):
        result = mock.MagicMock()
        if id == '/tensorflow.serving.PredictionService/Predict':
            return_data = np.asarray([[1, 2, 3]])
            return_tensor = tf.contrib.util.make_tensor_proto(
                return_data, types_pb2.DT_FLOAT, return_data.shape)
            result.outputs = {"output_alias": return_tensor}
        return lambda req, timeout: result

    def load_channel_mock():
        channel_mock_loaded['value'] += 1
        return channel_mock

    now = datetime.now()

    channel_mock = mock.Mock()
    channel_mock.unary_unary = mock.MagicMock(side_effect=unary_unary)

    image_file_path = os.path.join(tempfile.mkdtemp(), "img.png")
    with open(image_file_path, "w") as image_file:
        image_file.write("abc")

    client = PredictionClient("localhost",
                              50051,
                              channel_shutdown_timeout=timedelta(minutes=1))
    client._channel_func = load_channel_mock
    client._get_datetime_now = lambda: now

    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 1

    now = now + timedelta(seconds=50)
    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 1

    now = now + timedelta(seconds=20)
    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 1

    now = now + timedelta(seconds=70)
    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 2
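
Read together, the assertions show an idle-timeout policy: the channel is reused as long as calls arrive within channel_shutdown_timeout of the previous one (50s, then 20s), and only recreated once a gap exceeds that window (70s). A rough sketch of such a policy (semantics inferred from the test, not the library's actual code):

from datetime import datetime, timedelta

class IdleChannelCache:
    # Reuse one gRPC channel; close and recreate it only when it has been
    # idle for longer than shutdown_timeout.
    def __init__(self, channel_func, shutdown_timeout=timedelta(minutes=1)):
        self._channel_func = channel_func
        self._shutdown_timeout = shutdown_timeout
        self._channel = None
        self._last_used = None

    def get(self, now=None):
        now = now or datetime.now()
        if (self._channel is not None
                and now - self._last_used > self._shutdown_timeout):
            self._channel.close()
            self._channel = None
        if self._channel is None:
            self._channel = self._channel_func()
        self._last_used = now
        return self._channel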
Example #8
def worker(IP, path):
  # cp is assumed to be a file-copy helper (e.g. shutil.copy) defined
  # elsewhere in the original script.
  datadir = os.path.expanduser(path)
  pass_files = glob.glob(os.path.join(datadir, 'pass', '*.jpg'))
  fail_files = glob.glob(os.path.join(datadir, 'fail', '*.jpg'))
  dest_pass = os.path.join(datadir, 'output', 'pass')
  dest_fail = os.path.join(datadir, 'output', 'fail')
  print("Pass files")
  for image in pass_files:
    client = PredictionClient(IP, 80, False, '')
    results = client.score_image(image)
    if results >= 0.5:
      print("PASS", image, results)
      cp(image, dest_pass)
    else:
      print("FAIL", image, results)
      cp(image, dest_fail)
  print("Fail files")
  for image in fail_files:
    client = PredictionClient(IP, 80, False, '')
    results = client.score_image(image)
    if results < 0.5:
      cp(image, dest_fail)
    else:
      print(image, results)
      cp(image, dest_pass)
Example #9
def test_score_numpy_array():

    def predict_mock(request, timeout):
        inputs = tf.contrib.util.make_ndarray(request.inputs['images'])
        assert all([x == y for x, y in zip(inputs[0], [1, 2, 3])])
        assert all([x == y for x, y in zip(inputs[1], [4, 5, 6])])

        return_data = np.asarray([[11, 22, 33], [44, 55, 66]])
        return_tensor = tf.contrib.util.make_tensor_proto(
            return_data, types_pb2.DT_FLOAT, return_data.shape)
        result = mock.MagicMock()
        result.outputs = {"output_alias": return_tensor}
        return result

    stub_mock = mock.Mock()
    stub_mock.Predict = mock.MagicMock(side_effect=predict_mock)

    client = PredictionClient("localhost", 50051)
    client.stub = stub_mock

    result = client.score_numpy_array(np.asarray([[1, 2, 3], [4, 5, 6]], dtype='f'))
    assert all([x == y for x, y in zip(result[0], [11, 22, 33])])
    assert all([x == y for x, y in zip(result[1], [44, 55, 66])])
Example #10
def test_score_image():
    def predict_mock(request, timeout):
        inputs = request.inputs['images'].string_val
        assert inputs[0].decode('utf-8') == "abc"
        return_data = np.asarray([[1, 2, 3]])
        return_tensor = tf.contrib.util.make_tensor_proto(
            return_data, types_pb2.DT_FLOAT, return_data.shape)
        result = mock.MagicMock()
        result.outputs = {"output_alias": return_tensor}
        return result

    stub_mock = mock.Mock()
    stub_mock.Predict = mock.MagicMock(side_effect=predict_mock)

    image_file_path = os.path.join(tempfile.mkdtemp(), "img.png")
    with open(image_file_path, "w") as image_file:
        image_file.write("abc")

    client = PredictionClient("localhost", 50051)
    client._get_grpc_stub = lambda: stub_mock

    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
Example #11
class RemoteQuantizedResNet50(QuantizedResNet50):
    def __init__(self,
                 subscription_id,
                 resource_group,
                 model_management_account,
                 model_base_path,
                 remote_service_name=None):
        super().__init__(model_base_path)
        self.__deployment_client = DeploymentClient(subscription_id,
                                                    resource_group,
                                                    model_management_account)
        if remote_service_name is not None:
            self.__service_name = remote_service_name
        else:
            name_hash = hashlib.md5(
                (self.model_name + "-" + self.version).encode("utf-8"))
            self.__service_name = ("featurizer-service-" +
                                   name_hash.hexdigest()[:6])

    def _import_featurizer_graph_def(self, input_map):
        service = self.__deployment_client.get_service_by_name(
            self.__service_name)
        if service is None:
            model_name = self.model_name + "-" + self.version + "-model"
            temp_dir = tempfile.mkdtemp()
            model_path = os.path.join(temp_dir, "model")
            service_def = ServiceDefinition()
            service_def.pipeline.append(BrainWaveStage(self))
            service_def.save(model_path)
            model_id = self.__deployment_client.register_model(
                model_name, model_path)
            service = self.__deployment_client.create_service(
                self.__service_name, model_id)

        self.__client = PredictionClient(service.ipAddress, service.port)  #pylint: disable=E1101
        return input_map['InputImage'], tf.py_func(self._remote_service_call,
                                                   [input_map['InputImage']],
                                                   tf.float32)

    def _remote_service_call(self, data):
        return self.__client.score_numpy_array(data)

    def cleanup_remote_service(self):
        service = self.__deployment_client.get_service_by_name(
            self.__service_name)
        if service is not None:
            print("Deleting service", service.id)
            self.__deployment_client.delete_service(service.id)
            print("Deleted service", service.id)
            print("Deleting model", service.modelId)
            self.__deployment_client.delete_model(service.modelId)
            print("Deleted model", service.modelId)
Example #12
def worker(IP, path):
  datadir = os.path.expanduser(path)
  pass_files = glob.glob(os.path.join(datadir, 'pass', '*.jpg'))
  fail_files = glob.glob(os.path.join(datadir, 'fail', '*.jpg'))
  print("Pass files")
  for image in pass_files:
    client = PredictionClient(IP, 80, False, '')
    results = client.score_image(image)
    print(image, results)
    if results < 0.5:
      pdb.set_trace()  # drop into the debugger on a misclassified image
      print(image, results)
  print("Fail files")
  for image in fail_files:
    client = PredictionClient(IP, 80, False, '')
    results = client.score_image(image)
    print(image, results)
    if results >= 0.5:
      pdb.set_trace()  # drop into the debugger on a misclassified image
      print(image, results)
Example #13
def test_create_update_and_delete_service():
    override_token_funcs()
    test_config = get_test_config()

    deployment_client = DeploymentClient(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'])
    cleanup_old_test_services(deployment_client)

    run_id = uuid.uuid4().hex[:5]
    model_name = "int-test-model-" + run_id
    service_name = "int-test-service-" + run_id

    service_def_path = "/tmp/model"

    in_images = tf.placeholder(tf.string)
    image_tensors = preprocess_array(in_images)

    model = LocalQuantizedResNet50(os.path.expanduser("~/models"))
    model.import_graph_def(include_featurizer=False)

    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), model.classifier_input,
                        model.classifier_output))
    service_def.save(service_def_path)

    # create service
    first_model_id = deployment_client.register_model(model_name,
                                                      service_def_path)
    service = deployment_client.create_service(service_name, first_model_id)

    service_list = deployment_client.list_services()
    assert any(x.name == service_name for x in service_list)

    prediction_client = PredictionClient(service.ipAddress, service.port)
    top_result = sorted(enumerate(
        prediction_client.score_image("/tmp/share1/shark.jpg")),
                        key=lambda x: x[1],
                        reverse=True)[:1]
    # 'tiger shark' is class 3
    assert top_result[0][0] == 3

    # update service, remove classifier
    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.save(service_def_path)

    second_model_id = deployment_client.register_model(model_name,
                                                       service_def_path)
    deployment_client.update_service(service.id, second_model_id)

    result = prediction_client.score_image("/tmp/share1/shark.jpg")
    assert all([x == y for x, y in zip(np.array(result).shape, [1, 1, 2048])])

    # wait for timeout of Azure LB
    time.sleep(4 * 60 + 10)

    result = prediction_client.score_image("/tmp/share1/shark.jpg")
    assert all([x == y for x, y in zip(np.array(result).shape, [1, 1, 2048])])

    deployment_client.delete_service(service.id)
    deployment_client.delete_model(first_model_id)
    deployment_client.delete_model(second_model_id)
Example #14
def test_create_client_raises_if_port_is_none():
    with pytest.raises(ValueError):
        PredictionClient("localhost", None)
Example #15
def test_create_client_raises_if_host_is_none():
    with pytest.raises(ValueError):
        PredictionClient(None, 50051)
Example #16
def test_create_client_with_auth():
    client = PredictionClient("localhost", 50051, True, "key1")
    assert client is not None
Example #17
def test_create_client():
    client = PredictionClient("localhost", 50051)
    assert client is not None
Example #18
import requests
from amlrealtimeai.client import PredictionClient
import argparse

parser = argparse.ArgumentParser(description='AML inferencing client')
parser.add_argument('IP', type=str, help='IP of the FPGA runtime node')
parser.add_argument('path',
                    type=str,
                    help='Path to the image to be inferenced')
args = parser.parse_args()
IP = args.IP
path = args.path

client = PredictionClient(IP, 80, False, '')
results = client.score_image(path)
#print(results)
trained_ds = requests.get(
    "https://raw.githubusercontent.com/Lasagne/Recipes/master/examples/resnet50/imagenet_classes.txt"
).text.splitlines()
#for line in trained_ds:
#  print(line)

# map results [class_id] => [confidence]
results = enumerate(results)
#for line in results:
#  print(line)

# sort results by confidence
sorted_results = sorted(results, key=lambda x: x[1], reverse=True)
#for line in sorted_results:
#  print(line)
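
The script ends without reporting the sorted predictions; a minimal continuation (mirroring the per-image printing in Example #6) would be:

# Print the top-5 classes with their confidences (assumed continuation;
# the original script computes sorted_results but never prints it).
for class_id, confidence in sorted_results[:5]:
    print(trained_ds[class_id], 'confidence:', confidence)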