Example #1
import os
import uuid

import tensorflow as tf

# DeploymentClient, PredictionClient, RemoteQuantizedResNet50, ServiceDefinition,
# TensorflowStage, BrainWaveStage, and preprocess_array are assumed to be imported
# from the accompanying SDK package; get_test_config and get_service_principal are
# helpers defined in the surrounding test module (those imports are omitted here).
def test_remote_featurizer_create_package_and_service():
    test_config = get_test_config()

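    # Deployment client used to register models and create/delete services in the
    # test model management account.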
    deployment_client = DeploymentClient(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'],
        service_principal_params=get_service_principal())

    suffix = uuid.uuid4().hex[:5]
    model_name = "int-test-rf-model-" + suffix
    service_name = "int-test-rf-service-" + suffix

    service_def_path = "/tmp/modelrf"

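    # Build the preprocessing graph: a string placeholder for raw image bytes
    # feeding preprocess_array.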
    in_images = tf.placeholder(tf.string)
    image_tensors = preprocess_array(in_images)

    remote_service_name = ("int-test-featurizer-svc-" + str(uuid.uuid4()))[:30]

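    # RemoteQuantizedResNet50 runs the quantized featurizer behind a remote
    # service with this name.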
    model = RemoteQuantizedResNet50(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'],
        os.path.expanduser("~/models"),
        remote_service_name,
        service_principal_params=get_service_principal())
    model.import_graph_def(include_featurizer=True, input_tensor=image_tensors)

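    # Assemble the scoring pipeline: TF preprocessing, Brainwave featurizer stage,
    # TF classifier.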
    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), model.classifier_input,
                        model.classifier_output))
    service_def.save(service_def_path)

    # create service
    model_id = deployment_client.register_model(model_name, service_def_path)
    service = deployment_client.create_service(service_name, model_id)

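    # Score a sample image against the deployed service and check that the
    # top-1 class is 'tiger shark'.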
    prediction_client = PredictionClient(service.ipAddress, service.port)
    top_result = sorted(enumerate(
        prediction_client.score_image("/tmp/share1/shark.jpg")),
                        key=lambda x: x[1],
                        reverse=True)[:1]
    # 'tiger shark' is class 3
    assert top_result[0][0] == 3

    deployment_client.delete_service(service.id)
    deployment_client.delete_model(model_id)
Example #2
def test_remote_featurizer_local_usage():
    test_config = get_test_config()

    in_images = tf.placeholder(tf.string)
    image_tensors = preprocess_array(in_images)

    remote_service_name = ("int-test-featurizer-svc-" + str(uuid.uuid4()))[:30]
    featurizer = RemoteQuantizedResNet50(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'],
        os.path.expanduser("~/models"),
        remote_service_name,
        service_principal_params=get_service_principal())
    featurizer.import_graph_def(include_top=True,
                                include_featurizer=True,
                                input_tensor=image_tensors)

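    # Evaluate the featurizer and classifier outputs locally, checking output
    # shapes and dtypes for batch sizes 1 and 2.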
    try:
        with tf.Session() as sess:
            result = sess.run([featurizer.featurizer_output],
                              feed_dict={in_images: [read_file()]})
            np_result = np.array(result[0])
            assert all(
                [x == y for x, y in zip(np_result.shape, [1, 1, 1, 2048])])
            assert np_result.dtype == np.dtype('float32')

            result = sess.run(
                [featurizer.featurizer_output],
                feed_dict={in_images: [read_file(), read_file()]})
            np_result = np.array(result[0])
            assert all(
                [x == y for x, y in zip(np_result.shape, [2, 1, 1, 2048])])
            assert np_result.dtype == np.dtype('float32')

            result = sess.run([featurizer.classifier_output],
                              feed_dict={in_images: [read_file()]})
            np_result = np.array(result[0])
            assert all([x == y for x, y in zip(np_result.shape, [1, 1000])])
            assert np_result.dtype == np.dtype('float32')

            result = sess.run(
                [featurizer.classifier_output],
                feed_dict={in_images: [read_file(), read_file()]})
            np_result = np.array(result[0])
            assert all([x == y for x, y in zip(np_result.shape, [2, 1000])])
            assert np_result.dtype == np.dtype('float32')
    finally:
        featurizer.cleanup_remote_service()
Example #3
def test_create_update_and_delete_service():
    override_token_funcs()
    test_config = get_test_config()

    deployment_client = DeploymentClient(
        test_config['test_subscription_id'],
        test_config['test_resource_group'],
        test_config['test_model_management_account'])
    cleanup_old_test_services(deployment_client)

    suffix = uuid.uuid4().hex[:5]
    model_name = "int-test-model-" + suffix
    service_name = "int-test-service-" + suffix

    service_def_path = "/tmp/model"

    in_images = tf.placeholder(tf.string)
    image_tensors = preprocess_array(in_images)

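    # Local quantized ResNet50; only the classifier graph is imported
    # (include_featurizer=False).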
    model = LocalQuantizedResNet50(os.path.expanduser("~/models"))
    model.import_graph_def(include_featurizer=False)

    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), model.classifier_input,
                        model.classifier_output))
    service_def.save(service_def_path)

    # create service
    first_model_id = deployment_client.register_model(model_name,
                                                      service_def_path)
    service = deployment_client.create_service(service_name, first_model_id)

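    # The newly created service should appear in the service list.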
    service_list = deployment_client.list_services()
    assert any(x.name == service_name for x in service_list)

    prediction_client = PredictionClient(service.ipAddress, service.port)
    top_result = sorted(enumerate(
        prediction_client.score_image("/tmp/share1/shark.jpg")),
                        key=lambda x: x[1],
                        reverse=True)[:1]
    # 'tiger shark' is class 3
    assert top_result[0][0] == 3

    # update service, remove classifier
    service_def = ServiceDefinition()
    service_def.pipeline.append(
        TensorflowStage(tf.Session(), in_images, image_tensors))
    service_def.pipeline.append(BrainWaveStage(model))
    service_def.save(service_def_path)

    second_model_id = deployment_client.register_model(model_name,
                                                       service_def_path)
    deployment_client.update_service(service.id, second_model_id)

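    # After the update, the service returns the 2048-dimensional featurizer
    # output instead of class scores.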
    result = prediction_client.score_image("/tmp/share1/shark.jpg")
    assert all([x == y for x, y in zip(np.array(result).shape, [1, 1, 2048])])

    # Wait past the Azure load balancer idle timeout, then confirm the service
    # still responds.
    time.sleep(4 * 60 + 10)

    result = prediction_client.score_image("/tmp/share1/shark.jpg")
    assert all([x == y for x, y in zip(np.array(result).shape, [1, 1, 2048])])

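    # Clean up the service and both registered model versions.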
    deployment_client.delete_service(service.id)
    deployment_client.delete_model(first_model_id)
    deployment_client.delete_model(second_model_id)