Example #1
def worker(IP, path):
    # Score every image under <path>/pass and <path>/fail, then copy each
    # file into output/pass or output/fail according to the predicted score.
    datadir = os.path.expanduser(path)
    pass_files = glob.glob(os.path.join(datadir, 'pass', '*.jpg'))
    fail_files = glob.glob(os.path.join(datadir, 'fail', '*.jpg'))
    dest_pass = os.path.join(datadir, 'output', 'pass')
    dest_fail = os.path.join(datadir, 'output', 'fail')
    print("Pass files")
    for image in pass_files:
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(image)  # a single pass/fail score here
        if results >= 0.5:
            print("PASS", image, results)
            cp(image, dest_pass)
        else:
            print("FAIL", image, results)
            cp(image, dest_fail)
    print("Fail files")
    for image in fail_files:
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(image)
        if results < 0.5:
            cp(image, dest_fail)
        else:
            print(image, results)
            cp(image, dest_pass)
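This worker references several names it never defines: `os`, `glob`, `PredictionClient`, and a `cp` helper (the same preamble applies to the other worker examples below). A minimal set of imports that would make it runnable, assuming `cp` is a thin alias for `shutil.copy` since its real definition is not shown:

import os
import glob
from shutil import copy as cp  # assumption: cp is a plain file copy

from amlrealtimeai.client import PredictionClient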
Example #2
def test_create_new_channel_after_timeout_expires():
    # The client should reuse its gRPC channel while calls keep arriving
    # within channel_shutdown_timeout, and open a fresh one once the timeout
    # has elapsed since the last call.
    channel_mock_loaded = {'value': 0}

    def unary_unary(method, request_serializer, response_deserializer):
        result = mock.MagicMock()
        if method == '/tensorflow.serving.PredictionService/Predict':
            return_data = np.asarray([[1, 2, 3]])
            return_tensor = tf.contrib.util.make_tensor_proto(
                return_data, types_pb2.DT_FLOAT, return_data.shape)
            result.outputs = {"output_alias": return_tensor}
        return lambda req, timeout: result

    def load_channel_mock():
        # Counts how many times the client asks for a brand-new channel.
        channel_mock_loaded['value'] += 1
        return channel_mock

    now = datetime.now()

    channel_mock = mock.Mock()
    channel_mock.unary_unary = mock.MagicMock(side_effect=unary_unary)

    image_file_path = os.path.join(tempfile.mkdtemp(), "img.png")
    with open(image_file_path, "w") as image_file:
        image_file.write("abc")

    client = PredictionClient("localhost",
                              50051,
                              channel_shutdown_timeout=timedelta(minutes=1))
    # Inject the test hooks: a counting channel factory and a controllable clock.
    client._channel_func = load_channel_mock
    client._get_datetime_now = lambda: now

    # First call: exactly one channel is created.
    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 1

    # 50 seconds later: still inside the 1-minute timeout, channel reused.
    now = now + timedelta(seconds=50)
    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 1

    # Another 20 seconds: the timeout counts from the last call, so the
    # channel is still reused.
    now = now + timedelta(seconds=20)
    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 1

    # 70 seconds after the last call exceeds the timeout, so a second
    # channel is created.
    now = now + timedelta(seconds=70)
    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
    assert channel_mock_loaded['value'] == 2
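The test drives this behavior entirely through two injected hooks, `_channel_func` and `_get_datetime_now`. A rough sketch of the reuse-or-recreate logic those hooks imply; the class and attribute names below are assumptions for illustration, not amlrealtimeai internals:

from datetime import datetime, timedelta

class ChannelReuseSketch:  # hypothetical illustration, not library code
    def __init__(self, channel_func, timeout=timedelta(minutes=1)):
        self._channel_func = channel_func      # builds a new gRPC channel
        self._channel_shutdown_timeout = timeout
        self._channel = None
        self._last_call = None
        self._get_datetime_now = datetime.now  # overridable, as in the test

    def _get_channel(self):
        now = self._get_datetime_now()
        if (self._channel is None or
                now - self._last_call > self._channel_shutdown_timeout):
            self._channel = self._channel_func()  # counted by the test's mock
        self._last_call = now
        return self._channel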
Example #3
def worker(IP, path):
    # Score every labelled image and drop into the debugger whenever a
    # prediction disagrees with its directory label.
    datadir = os.path.expanduser(path)
    pass_files = glob.glob(os.path.join(datadir, 'pass', '*.jpg'))
    fail_files = glob.glob(os.path.join(datadir, 'fail', '*.jpg'))
    print("Pass files")
    for image in pass_files:
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(image)
        print(image, results)
        if results < 0.5:  # a "pass" image scored as fail: inspect it
            pdb.set_trace()
            print(image, results)
    print("Fail files")
    for image in fail_files:
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(image)
        print(image, results)
        if results >= 0.5:  # a "fail" image scored as pass: inspect it
            pdb.set_trace()
            print(image, results)
Example #4
def test_remote_featurizer_create_package_and_service():
    test_config = get_test_config()

    deployment_client = DeploymentClient(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'],
        service_principal_params=get_service_principal())

    run_id = uuid.uuid4().hex[:5]  # avoid shadowing the builtin id()
    model_name = "int-test-rf-model-" + run_id
    service_name = "int-test-rf-service-" + run_id

    service_def_path = "/tmp/modelrf"

    in_images = tf.placeholder(tf.string)
    image_tensors = preprocess_array(in_images)

    remote_service_name = ("int-test-featurizer-svc-" + str(uuid.uuid4()))[:30]

    model = RemoteQuantizedResNet50(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'],
        os.path.expanduser("~/models"),
        remote_service_name,
        service_principal_params=get_service_principal())
    model.import_graph_def(include_featurizer=True, input_tensor=image_tensors)

    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), model.classifier_input,
                        model.classifier_output))
    service_def.save(service_def_path)

    # create service
    model_id = deployment_client.register_model(model_name, service_def_path)
    service = deployment_client.create_service(service_name, model_id)

    prediction_client = PredictionClient(service.ipAddress, service.port)
    top_result = sorted(enumerate(
        prediction_client.score_image("/tmp/share1/shark.jpg")),
                        key=lambda x: x[1],
                        reverse=True)[:1]
    # 'tiger shark' is class 3
    assert top_result[0][0] == 3

    # clean up the service and the registered model
    deployment_client.delete_service(service.id)
    deployment_client.delete_model(model_id)
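Both deployment tests pick the top class by sorting the `enumerate`d scores and slicing off the first pair. An equivalent and arguably clearer formulation, shown only as an aside:

# pick the single highest-confidence (class_id, score) pair
scores = prediction_client.score_image("/tmp/share1/shark.jpg")
top_class, top_score = max(enumerate(scores), key=lambda x: x[1])
assert top_class == 3  # 'tiger shark'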
Example #5
def worker(IP, path):
    # Score the same image tper times and record the mean request latency.
    count = 0
    tt = 0
    #path = '/home/srpitcha/images/dog.png'
    for i in range(tper):
        ts = time.time()
        count += 1
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(path)
        te = time.time()
        tt += te - ts
    mtime = tt / count
    meantime.append(mtime)
Example #6
def worker(IP, path, trainedDs):
    # Score a handful of images, print the top-5 classes for each, and
    # record the mean request latency.
    count = 0
    tt = 0
    datadir = os.path.expanduser(path)
    cat_files = glob.glob(os.path.join(datadir, 'pass', '*.jpg'))
    dog_files = glob.glob(os.path.join(datadir, 'fail', '*.jpg'))
    pdb.set_trace()  # breakpoint left in for interactive inspection
    for image in cat_files[:5]:
        ts = time.time()
        count += 1
        client = PredictionClient(IP, 80, False, '')
        results = client.score_image(image)
        results = enumerate(results)  # map class_id => confidence
        sorted_results = sorted(results, key=lambda x: x[1], reverse=True)
        for top in sorted_results[:5]:
            print(trainedDs[top[0]], 'confidence:', top[1])
        te = time.time()
        tt += te - ts
    mtime = tt / count
    meantime.append(mtime)
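Both benchmark workers read two module-level names the snippets never define: `tper` (iterations per worker) and `meantime` (a shared list of mean latencies), and the name `worker` suggests a threaded harness. A minimal driver under those assumptions; the IP, path, and thread count are placeholders:

import threading

tper = 10        # assumption: requests issued per worker
meantime = []    # assumption: collects each worker's mean latency

IP, path = '10.0.0.1', '~/images'  # placeholders
threads = [threading.Thread(target=worker, args=(IP, path))
           for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print('mean latency per worker:', meantime)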
Example #7
def test_score_image():
    # The Predict stub should receive the raw file bytes, and the client
    # should unpack the returned tensor into a flat list of scores.
    def predict_mock(request, timeout):
        inputs = request.inputs['images'].string_val
        assert inputs[0].decode('utf-8') == "abc"
        return_data = np.asarray([[1, 2, 3]])
        return_tensor = tf.contrib.util.make_tensor_proto(
            return_data, types_pb2.DT_FLOAT, return_data.shape)
        result = mock.MagicMock()
        result.outputs = {"output_alias": return_tensor}
        return result

    stub_mock = mock.Mock()
    stub_mock.Predict = mock.MagicMock(side_effect=predict_mock)

    image_file_path = os.path.join(tempfile.mkdtemp(), "img.png")
    with open(image_file_path, "w") as image_file:
        image_file.write("abc")

    client = PredictionClient("localhost", 50051)
    client._get_grpc_stub = lambda: stub_mock  # inject the mocked Predict stub

    result = client.score_image(image_file_path)
    assert all([x == y for x, y in zip(result, [1, 2, 3])])
Example #8
def test_create_update_and_delete_service():
    override_token_funcs()
    test_config = get_test_config()

    deployment_client = DeploymentClient(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'])
    cleanup_old_test_services(deployment_client)

    run_id = uuid.uuid4().hex[:5]  # avoid shadowing the builtin id()
    model_name = "int-test-model-" + run_id
    service_name = "int-test-service-" + run_id

    service_def_path = "/tmp/model"

    in_images = tf.placeholder(tf.string)
    image_tensors = preprocess_array(in_images)

    model = LocalQuantizedResNet50(os.path.expanduser("~/models"))
    model.import_graph_def(include_featurizer=False)

    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), model.classifier_input,
                        model.classifier_output))
    service_def.save(service_def_path)

    # create service
    first_model_id = deployment_client.register_model(model_name,
                                                      service_def_path)
    service = deployment_client.create_service(service_name, first_model_id)

    service_list = deployment_client.list_services()
    assert any(x.name == service_name for x in service_list)

    prediction_client = PredictionClient(service.ipAddress, service.port)
    top_result = sorted(enumerate(
        prediction_client.score_image("/tmp/share1/shark.jpg")),
                        key=lambda x: x[1],
                        reverse=True)[:1]
    # 'tiger shark' is class 3
    assert top_result[0][0] == 3

    # update service, remove classifier
    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.save(service_def_path)

    second_model_id = deployment_client.register_model(model_name,
                                                       service_def_path)
    deployment_client.update_service(service.id, second_model_id)

    result = prediction_client.score_image("/tmp/share1/shark.jpg")
    assert all([x == y for x, y in zip(np.array(result).shape, [1, 1, 2048])])

    # wait out the Azure load balancer idle timeout (4 minutes) so the next
    # request has to establish a fresh connection
    time.sleep(4 * 60 + 10)

    result = prediction_client.score_image("/tmp/share1/shark.jpg")
    assert all([x == y for x, y in zip(np.array(result).shape, [1, 1, 2048])])

    deployment_client.delete_service(service.id)
    deployment_client.delete_model(first_model_id)
    deployment_client.delete_model(second_model_id)
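Note that the `zip`-based shape assertion silently truncates: a result of shape `(1, 1)` would also pass, because `zip` stops at the shorter sequence. A stricter equivalent check:

assert np.array(result).shape == (1, 1, 2048)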
Example #9
import requests
from amlrealtimeai.client import PredictionClient
import argparse

parser = argparse.ArgumentParser(description='AML inferencing client')
parser.add_argument('IP', type=str, help='IP of the FPGA runtime node')
parser.add_argument('path',
                    type=str,
                    help='Path to the image to run inference on')
args = parser.parse_args()
IP = args.IP
path = args.path

client = PredictionClient(IP, 80, False, '')
results = client.score_image(path)
#print(results)
trained_ds = requests.get(
    "https://raw.githubusercontent.com/Lasagne/Recipes/master/examples/resnet50/imagenet_classes.txt"
).text.splitlines()
#for line in trained_ds:
#  print(line)

# map results [class_id] => [confidence]
results = enumerate(results)
#for line in results:
#  print(line)

# sort results by confidence
sorted_results = sorted(results, key=lambda x: x[1], reverse=True)
#for line in sorted_results:
#  print(line)
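As written, the script sorts the results but never reports them; a final loop, mirroring the one in Example #6, would print the five highest-confidence classes:

# print the five highest-confidence classes
for class_id, confidence in sorted_results[:5]:
    print(trained_ds[class_id], 'confidence:', confidence)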