def start_server_update_flow_latest(request, get_image, get_test_dir,
                                    get_docker_context):
    """Start an OVMS container serving the 'resnet' model from an
    (initially empty) per-test-run update directory, default version
    policy (latest).

    Returns a ``(container, {"grpc_port": ..., "rest_port": ...})`` tuple
    with the host ports published for the container.
    """
    client = get_docker_context
    path_to_mount = get_test_dir + '/saved_models'
    update_test_dir = path_to_mount + '/update-{}/'.format(get_tests_suffix())
    # ensure model dir is empty before starting OVMS
    shutil.rmtree(update_test_dir, ignore_errors=True)

    # Mount the whole saved_models directory read-only at /opt/ml.
    volumes_dict = {'{}'.format(path_to_mount): {'bind': '/opt/ml',
                                                 'mode': 'ro'}}

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="03")

    command = "/ie-serving-py/start_server.sh ie_serving model " \
              "--model_name resnet --model_path /opt/ml/update-{} " \
              "--port {} --rest_port {} --grpc_workers 1 --nireq 1".\
              format(get_tests_suffix(), grpc_port, rest_port)

    container = client.containers.run(image=get_image, detach=True,
                                      name='ie-serving-py-test-update-'
                                      'latest-{}'.
                                      format(get_tests_suffix()),
                                      ports={'{}/tcp'.format(grpc_port):
                                             grpc_port,
                                             '{}/tcp'.format(rest_port):
                                             rest_port},
                                      remove=True, volumes=volumes_dict,
                                      command=command)
    # Kill the container when the requesting test finishes, even on failure.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 2
def start_server_single_model_from_minio(request, get_docker_network,
                                         get_minio_server_s3, get_image,
                                         get_test_dir, get_docker_context):
    """Start an OVMS container serving a single resnet model fetched from
    the local MinIO (S3-compatible) server fixture.

    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """

    network = get_docker_network

    # NOTE(review): these assume MINIO_ACCESS_KEY / MINIO_SECRET_KEY are set;
    # if any is None, the 'envs' concatenation below raises TypeError —
    # confirm the test environment (start_minio_server) guarantees them.
    AWS_ACCESS_KEY_ID = os.getenv('MINIO_ACCESS_KEY')
    AWS_SECRET_ACCESS_KEY = os.getenv('MINIO_SECRET_KEY')
    AWS_REGION = os.getenv('AWS_REGION')

    # The S3 endpoint points at the MinIO container, which listens on the
    # MinIO fixture's "grpc_port".
    _, ports = get_minio_server_s3
    grpc_port = ports["grpc_port"]
    minio_endpoint = 'http://minio.locals3-{}.com:{}'.format(
        get_tests_suffix(), grpc_port)

    envs = [
        'MINIO_ACCESS_KEY=' + AWS_ACCESS_KEY_ID,
        'MINIO_SECRET_KEY=' + AWS_SECRET_ACCESS_KEY,
        'AWS_ACCESS_KEY_ID=' + AWS_ACCESS_KEY_ID,
        'AWS_SECRET_ACCESS_KEY=' + AWS_SECRET_ACCESS_KEY,
        'AWS_REGION=' + AWS_REGION, 'S3_ENDPOINT=' + minio_endpoint
    ]

    client = get_docker_context

    # grpc_port is intentionally rebound here: the value above belonged to
    # MinIO; these are the ports for the OVMS container itself.
    grpc_port, rest_port = get_ports_for_fixture(port_suffix="11")

    command = "/ie-serving-py/start_server.sh ie_serving model " \
              "--model_name resnet " \
              "--model_path s3://inference/resnet_v1_50 " \
              "--port {}".format(grpc_port)

    container = client.containers.run(
        image=get_image,
        detach=True,
        name='ie-serving-test-'
        'single-minio-{}'.format(get_tests_suffix()),
        ports={'{}/tcp'.format(grpc_port): grpc_port},
        remove=True,
        environment=envs,
        command=command,
        network=network.name)

    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)

    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 3
def start_server_single_model_from_gc(request, get_image, get_test_dir,
                                      get_docker_context):
    """Start an OVMS container serving resnet_50_i8 directly from a public
    GCS bucket, with a custom CPU plugin configuration.

    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    docker_client = get_docker_context

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="08")

    # Escaped JSON passed through the shell wrapper to --plugin_config.
    plugin_config = "\"{\\\"CPU_THROUGHPUT_STREAMS\\\": \\\"2\\\", " \
                    "\\\"CPU_THREADS_NUM\\\": \\\"4\\\"}\""
    command = (
        "/ie-serving-py/start_server.sh ie_serving model "
        "--model_name resnet "
        "--model_path "
        "gs://public-artifacts/intelai_public_models/resnet_50_i8/ "
        "--port " + str(grpc_port) + " --target_device CPU --nireq 4"
        " --plugin_config " + plugin_config)

    # Propagate the proxy so the model can be downloaded from GCS.
    environment = ['https_proxy=' + os.getenv('https_proxy', "")]

    container = docker_client.containers.run(
        image=get_image,
        detach=True,
        name='ie-serving-py-test-single-gs-{}'.format(get_tests_suffix()),
        ports={'{}/tcp'.format(grpc_port): grpc_port},
        remove=True,
        environment=environment,
        command=command)

    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    assert wait_endpoint_setup(container) is True, \
        "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 4
def start_server_single_model_from_s3(request, get_image, get_test_dir,
                                      get_docker_context):
    """Start an OVMS container serving resnet from an AWS S3 bucket.

    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    # NOTE(review): assumes AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY /
    # AWS_REGION are set; if any is None the 'envs' concatenation below
    # raises TypeError — confirm CI exports them.
    AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
    AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY')
    AWS_REGION = os.getenv('AWS_REGION')

    client = get_docker_context
    envs = [
        'AWS_ACCESS_KEY_ID=' + AWS_ACCESS_KEY_ID,
        'AWS_SECRET_ACCESS_KEY=' + AWS_SECRET_ACCESS_KEY,
        'AWS_REGION=' + AWS_REGION
    ]

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="09")

    command = "/ie-serving-py/start_server.sh ie_serving model " \
              "--model_name resnet " \
              "--model_path s3://inference-test-aipg/resnet_v1_50 " \
              "--port {}".format(grpc_port)

    container = client.containers.run(
        image=get_image,
        detach=True,
        name='ie-serving-py-test-single-s3-{}'.format(get_tests_suffix()),
        ports={'{}/tcp'.format(grpc_port): grpc_port},
        remove=True,
        environment=envs,
        command=command)
    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
def start_server_model_ver_policy(request, get_image, get_test_dir,
                                  get_docker_context):
    """Start an OVMS container in 'config' mode with a model-version-policy
    configuration file.

    Copies the version-policy config and a mapping config into the mounted
    saved_models directory before launching the server.
    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    shutil.copyfile('tests/functional/model_version_policy_config.json',
                    get_test_dir +
                    '/saved_models/model_ver_policy_config.json')

    shutil.copyfile('tests/functional/mapping_config.json',
                    get_test_dir + '/saved_models/model_ver/3/'
                                   'mapping_config.json')

    client = get_docker_context
    # Mount saved_models read-only at /opt/ml so the copied configs are
    # visible inside the container.
    volumes_dict = {'{}'.format(get_test_dir + '/saved_models/'):
                    {'bind': '/opt/ml', 'mode': 'ro'}}

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="18")

    command = "/ie-serving-py/start_server.sh ie_serving config " \
              "--config_path /opt/ml/model_ver_policy_config.json " \
              "--port {} --rest_port {}".format(grpc_port, rest_port)

    container = client.containers.run(image=get_image, detach=True,
                                      name='ie-serving-py-test-policy-{}'.
                                      format(get_tests_suffix()),
                                      ports={'{}/tcp'.format(grpc_port):
                                             grpc_port,
                                             '{}/tcp'.format(rest_port):
                                             rest_port},
                                      remove=True, volumes=volumes_dict,
                                      command=command)
    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 6
def clean_hanging_docker_resources():
    """Remove leftover test containers and networks tagged with the
    current tests suffix."""
    suffix = get_tests_suffix()
    docker_client = get_docker_client()
    # Include stopped containers as well (all=True).
    clean_hanging_containers(docker_client.containers.list(all=True), suffix)
    clean_hanging_networks(docker_client.networks.list(), suffix)
# Example no. 7
def start_server_batch_model_auto(request, get_image, get_test_dir,
                                  get_docker_context):
    """Start an OVMS container serving resnet_V1_50_batch8 with
    ``--batch_size auto`` (batch size follows the incoming request).

    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    docker_client = get_docker_context
    mount_dir = get_test_dir + '/saved_models/'
    # saved_models mounted read-only at /opt/ml inside the container.
    volumes = {'{}'.format(mount_dir): {'bind': '/opt/ml', 'mode': 'ro'}}

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="14")

    command = "/ie-serving-py/start_server.sh ie_serving model " \
              "--model_name resnet --model_path /opt/ml/resnet_V1_50_batch8 " \
              "--port {} --batch_size auto --rest_port {}".\
              format(grpc_port, rest_port)

    published_ports = {'{}/tcp'.format(grpc_port): grpc_port,
                       '{}/tcp'.format(rest_port): rest_port}
    container = docker_client.containers.run(
        image=get_image,
        detach=True,
        name='ie-serving-py-test-autobatch-{}'.format(get_tests_suffix()),
        ports=published_ports,
        remove=True,
        volumes=volumes,
        command=command)
    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    assert wait_endpoint_setup(container) is True, \
        "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 8
def get_docker_network(request, get_docker_context):
    """Create the per-test-run MinIO docker network, recreating it if a
    leftover from a previous (interrupted) run exists.

    Registers network removal as a test finalizer and returns the created
    docker network object.
    """
    client = get_docker_context
    network_name = "minio-network-{}".format(get_tests_suffix())

    # networks.get raises when the network does not exist, which is the
    # normal case.  docker.errors.NotFound is not imported in this module,
    # hence the broad except; the original also bound the exception to an
    # unused variable, which is dropped here.
    try:
        existing = client.networks.get(network_name)
    except Exception:
        existing = None

    if existing is not None:
        # Remove the stale network so creation below cannot collide.
        existing.remove()

    network = client.networks.create(network_name)

    request.addfinalizer(network.remove)

    return network
def start_server_update_flow_specific(request, get_image, get_test_dir,
                                      get_docker_context):
    """Start an OVMS container serving 'resnet' from an (initially empty)
    update directory with a specific version policy: versions 1, 3 and 4.

    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    client = get_docker_context
    path_to_mount = get_test_dir + '/saved_models'
    update_test_dir = path_to_mount + '/update-{}/'.format(get_tests_suffix())
    # ensure model dir is empty before starting OVMS
    shutil.rmtree(update_test_dir, ignore_errors=True)

    # Mount saved_models read-only at /opt/ml.
    volumes_dict = {'{}'.format(path_to_mount): {'bind': '/opt/ml',
                                                 'mode': 'ro'}}

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="04")

    # The single-quoted JSON survives the shell wrapper intact and selects
    # which model versions the server is allowed to load.
    command = '/ie-serving-py/start_server.sh ie_serving model ' \
              '--model_name resnet --model_path ' \
              '/opt/ml/update-' + get_tests_suffix() +  \
              ' --port ' + str(grpc_port) + ' --model_version_policy' \
              ' \'{"specific": { "versions":[1, 3, 4] }}\' ' \
              '--rest_port ' + str(rest_port)

    container = client.containers.run(image=get_image, detach=True,
                                      name='ie-serving-py-test-'
                                           'update-specific-{}'.
                                      format(get_tests_suffix()),
                                      ports={'{}/tcp'.format(grpc_port):
                                             grpc_port,
                                             '{}/tcp'.format(rest_port):
                                             rest_port},
                                      remove=True, volumes=volumes_dict,
                                      command=command)
    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 10
def start_server_with_mapping(request, get_image, get_test_dir,
                              get_docker_context):
    """Start an OVMS container serving the age-gender model with a custom
    input/output tensor-name mapping file.

    The mapping file is copied into the model's version directory before
    launch and removed again by a finalizer.
    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    shutil.copyfile(
        'tests/functional/mapping_config.json', get_test_dir + '/saved_models/'
        'age-gender-recognition-retail-0013/1/'
        'mapping_config.json')
    client = get_docker_context
    path_to_mount = get_test_dir + '/saved_models/'
    # saved_models mounted read-only at /opt/ml.
    volumes_dict = {
        '{}'.format(path_to_mount): {
            'bind': '/opt/ml',
            'mode': 'ro'
        }
    }

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="06")

    command = "/ie-serving-py/start_server.sh ie_serving model " \
              "--model_name age_gender " \
              "--model_path /opt/ml/age-gender-recognition-retail-0013 " \
              "--port {} --rest_port {}".format(grpc_port, rest_port)

    container = client.containers.run(
        image=get_image,
        detach=True,
        name='ie-serving-py-test-2-out-{}'.format(get_tests_suffix()),
        ports={
            '{}/tcp'.format(grpc_port): grpc_port,
            '{}/tcp'.format(rest_port): rest_port
        },
        remove=True,
        volumes=volumes_dict,
        command=command)

    def delete_mapping_file():
        # Clean up the copied mapping file so other fixtures see the model
        # directory in its original state.
        path = get_test_dir + '/saved_models/' \
                              'age-gender-recognition-retail-0013/1/' \
                              'mapping_config.json'
        if os.path.exists(path):
            os.remove(path)

    # Finalizers run in reverse registration order: the container is
    # killed first, then the mapping file is deleted.
    request.addfinalizer(delete_mapping_file)
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 11
def start_server_single_vehicle_attrib_model(
        request, get_image, get_test_dir, vehicle_attributes_model_downloader,
        get_docker_context):
    """Start an OVMS container serving the vehicle-attributes model with
    CPU_THROUGHPUT_AUTO plugin config and a non-default serialization mode.

    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """

    print("Downloaded model files:", vehicle_attributes_model_downloader)
    client = get_docker_context
    path_to_mount = get_test_dir + '/saved_models/'
    # saved_models mounted read-only at /opt/ml.
    volumes_dict = {
        '{}'.format(path_to_mount): {
            'bind': '/opt/ml',
            'mode': 'ro'
        }
    }

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="05")

    command = "/ie-serving-py/start_server.sh ie_serving model " \
              "--model_name vehicle-attributes " \
              "--model_path /opt/ml/vehicle-attributes-recognition-barrier-0039 " \
              "--port " + str(grpc_port) + " --rest_port " + str(rest_port) + \
              " --plugin_config " \
              "\"{\\\"CPU_THROUGHPUT_STREAMS\\\": " \
              "\\\"CPU_THROUGHPUT_AUTO\\\"}\""

    container = \
        client.containers.run(
            image=get_image,
            detach=True,
            name='ie-serving-py-test-single-{}'.format(get_tests_suffix()),
            ports={'{}/tcp'.format(grpc_port): grpc_port,
                   '{}/tcp'.format(rest_port): rest_port},
            remove=True,
            volumes=volumes_dict,
            # In this case, slower,
            # non-default serialization method is used
            # NOTE(review): 'SERIALIZATON' looks misspelled — confirm the
            # server reads this exact variable name before "fixing" it.
            environment=[
                'SERIALIZATON=_prepare_output_as_AppendArrayToTensorProto'],
            command=command)
    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 12
def convert_model(client, model, output_dir, model_name, input_shape):
    """Convert *model* to OpenVINO IR using the model-optimizer docker image.

    The conversion is skipped when the target .bin/.xml pair already
    exists in *output_dir*.  Returns a ``(bin_path, xml_path)`` tuple.
    """
    base = os.path.join(output_dir, model_name)
    files = (base + '.bin', base + '.xml')

    # Cached result from a previous run — nothing to do.
    if all(os.path.exists(path) for path in files):
        return files

    Path(output_dir).mkdir(parents=True, exist_ok=True)

    input_shape_str = '[{}]'.format(','.join(str(i) for i in input_shape))
    print("Converting {} to IR with input shape {}...".format(
        model, input_shape_str))

    input_dir = os.path.dirname(model)

    image = 'openvino/ubuntu18_dev:latest'
    # Source model mounted read-only; IR is written to the output mount.
    volumes = {
        input_dir: {'bind': '/mnt/input_dir', 'mode': 'ro'},
        output_dir: {'bind': '/mnt/output_dir', 'mode': 'rw'},
    }

    command = ' '.join([
        'python3 deployment_tools/model_optimizer/mo.py',
        '--input_model /mnt/input_dir/' + os.path.basename(model),
        '--model_name ' + model_name, '--output_dir /mnt/output_dir/',
        '--input_shape ' + input_shape_str
    ])

    # Run as the current user so the produced IR files are owned by us.
    client.containers.run(image=image,
                          name='convert-model-{}'.format(get_tests_suffix()),
                          volumes=volumes,
                          user=os.getuid(),
                          command=command,
                          remove=True)
    return files
# Example no. 13
def start_minio_server(request, get_test_dir, get_docker_network,
                       get_docker_context):
    """Start a MinIO container on the per-test-run docker network.

    Equivalent of ``sudo docker run -d -p 9099:9000 minio/minio server /data``.
    When MINIO_ACCESS_KEY / MINIO_SECRET_KEY are unset, dummy credentials
    are used and also exported into ``os.environ`` so dependent fixtures
    pick up the same values.  MinIO listens on the returned "grpc_port".
    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    client = get_docker_context

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="10")

    command = 'server --address ":{}" /data'.format(grpc_port)

    client.images.pull('minio/minio:latest')

    network = get_docker_network

    MINIO_ACCESS_KEY = os.getenv('MINIO_ACCESS_KEY')
    MINIO_SECRET_KEY = os.getenv('MINIO_SECRET_KEY')

    if MINIO_ACCESS_KEY is None or MINIO_SECRET_KEY is None:
        # Fall back to dummy credentials and publish them for the other
        # fixtures that read these variables from the environment.
        MINIO_ACCESS_KEY = "MINIO_A_KEY"
        MINIO_SECRET_KEY = "MINIO_S_KEY"
        os.environ["MINIO_ACCESS_KEY"] = "MINIO_A_KEY"
        os.environ["MINIO_SECRET_KEY"] = "MINIO_S_KEY"

    envs = [
        'MINIO_ACCESS_KEY=' + MINIO_ACCESS_KEY,
        'MINIO_SECRET_KEY=' + MINIO_SECRET_KEY
    ]

    container = client.containers.run(
        image='minio/minio:latest',
        detach=True,
        name='minio.locals3-{}.com'.format(get_tests_suffix()),
        ports={'{}/tcp'.format(grpc_port): grpc_port},
        remove=True,
        environment=envs,
        command=command,
        network=network.name)

    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    # Use the MinIO-specific readiness condition with a 30s budget.
    running = wait_endpoint_setup(container, minio_condition, 30)
    assert running is True, "minio container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 14
def start_server_face_detection_model_named_shape(request, get_image,
                                                  get_test_dir,
                                                  get_docker_context):
    """Start an OVMS container serving the face-detection model with an
    explicitly named input shape override.

    Returns ``(container, {"grpc_port": ..., "rest_port": ...})``.
    """
    client = get_docker_context
    path_to_mount = get_test_dir + '/saved_models/'
    # saved_models mounted read-only at /opt/ml.
    volumes_dict = {
        '{}'.format(path_to_mount): {
            'bind': '/opt/ml',
            'mode': 'ro'
        }
    }

    grpc_port, rest_port = get_ports_for_fixture(port_suffix="01")

    # --shape carries escaped JSON mapping the input tensor name 'data'
    # to the fixed shape (1, 3, 600, 600).
    command = "/ie-serving-py/start_server.sh ie_serving model " \
              "--model_name face_detection --model_path " \
              "/opt/ml/face-detection-retail-0004 " \
              "--port " + str(grpc_port) + " --rest_port " + str(rest_port) + \
              " --shape \"{\\\"data\\\": \\\"(1, 3, 600, 600)\\\"}\" " \
              "--grpc_workers 4 --rest_workers 2 " \
              "--nireq 2"

    container = client.containers.run(
        image=get_image,
        detach=True,
        name='ie-serving-py-test-named-shape-{}'.format(get_tests_suffix()),
        ports={
            '{}/tcp'.format(grpc_port): grpc_port,
            '{}/tcp'.format(rest_port): rest_port
        },
        remove=True,
        volumes=volumes_dict,
        command=command)
    # Kill the container when the requesting test finishes.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
# Example no. 15
    def test_specific_version_rest(self, resnet_multiple_batch_sizes,
                                   get_test_dir,
                                   start_server_update_flow_specific):
        """Verify OVMS model-version updates through the REST API.

        The server runs with a 'specific' version policy {1, 3, 4}.  The
        test adds/removes version directories on disk and checks that the
        metadata and model status reported over REST track the set of
        available versions: {1, 4} -> {1, 3} -> {1, 3, 4}.
        """
        _, ports = start_server_update_flow_specific
        resnet, resnet_bs4, resnet_bs8 = resnet_multiple_batch_sizes
        # renamed from 'dir' to avoid shadowing the builtin
        model_dir = get_test_dir + '/saved_models/' + 'update-{}/'.format(
            get_tests_suffix())
        # ensure model dir is empty at the beginning
        shutil.rmtree(model_dir, ignore_errors=True)
        resnet_copy_dir = copy_model(resnet, 1, model_dir)
        resnet_bs4_copy_dir = copy_model(resnet_bs4, 4, model_dir)
        # give the server time to detect and load the new versions
        time.sleep(8)

        in_name = 'map/TensorArrayStack/TensorArrayGatherV3'
        out_name = 'softmax_tensor'

        # Available versions: 1, 4

        print("Getting info about resnet model")
        model_name = 'resnet'

        expected_input_metadata_v1 = {
            in_name: {
                'dtype': 1,
                'shape': [1, 3, 224, 224]
            }
        }
        expected_output_metadata_v1 = {
            out_name: {
                'dtype': 1,
                'shape': [1, 1001]
            }
        }

        rest_url_latest = 'http://localhost:{}/v1/models/resnet/' \
                          'versions/1/metadata'.format(ports["rest_port"])

        response = get_model_metadata_response_rest(rest_url_latest)
        input_metadata, output_metadata = model_metadata_response(
            response=response)

        print(output_metadata)
        assert model_name == response.model_spec.name
        assert expected_input_metadata_v1 == input_metadata
        assert expected_output_metadata_v1 == output_metadata

        rest_url = 'http://localhost:{}/v1/models/resnet/metadata'.format(
            ports["rest_port"])
        response_latest = get_model_metadata_response_rest(rest_url)
        print("response", response_latest)
        input_metadata_latest, output_metadata_latest = \
            model_metadata_response(response=response_latest)

        rest_url_v4 = 'http://localhost:{}/v1/models/resnet/' \
                      'versions/4/metadata'.format(ports["rest_port"])
        response_v4 = get_model_metadata_response_rest(rest_url_v4)
        print("response", response_v4)
        # BUGFIX: parse response_v4 here; the original parsed
        # response_latest again, which made the comparisons below
        # trivially true instead of checking the v4 endpoint.
        input_metadata_v4, output_metadata_v4 = model_metadata_response(
            response=response_v4)

        # latest == version 4 at this point, so both endpoints must agree
        assert response_v4.model_spec.name == response_latest.model_spec.name
        assert input_metadata_v4 == input_metadata_latest
        assert output_metadata_v4 == output_metadata_latest

        # Model status check
        rest_status_url = 'http://localhost:{}/v1/models/resnet'.format(
            ports["rest_port"])
        status_response = get_model_status_response_rest(rest_status_url)
        versions_statuses = status_response.model_version_status
        assert len(versions_statuses) == 2
        for version_status in versions_statuses:
            assert version_status.version in [1, 4]
            assert version_status.state == ModelVersionState.AVAILABLE
            assert version_status.status.error_code == ErrorCode.OK
            assert version_status.status.error_message == _ERROR_MESSAGE[
                ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Swap version 4 for version 3 on disk.
        shutil.rmtree(resnet_bs4_copy_dir)
        resnet_bs8_copy_dir = copy_model(resnet_bs8, 3, model_dir)
        time.sleep(10)

        # Available versions: 1, 3

        rest_url = 'http://localhost:{}/v1/models/resnet/metadata'.format(
            ports["rest_port"])
        response_latest = get_model_metadata_response_rest(rest_url)
        print("response", response_latest)
        input_metadata_latest, output_metadata_latest = \
            model_metadata_response(response=response_latest)

        rest_url_v3 = 'http://localhost:{}/v1/models/resnet/' \
                      'versions/3/metadata'.format(ports["rest_port"])
        response_v3 = get_model_metadata_response_rest(rest_url_v3)
        print("response", response_v3)
        input_metadata_v3, output_metadata_v3 = model_metadata_response(
            response=response_v3)

        # latest == version 3 now
        assert response_v3.model_spec.name == response_latest.model_spec.name
        assert input_metadata_v3 == input_metadata_latest
        assert output_metadata_v3 == output_metadata_latest

        # Model status check: version 4 should be retired (END), 1 and 3
        # available.
        rest_status_url = 'http://localhost:{}/v1/models/resnet'.format(
            ports["rest_port"])
        status_response = get_model_status_response_rest(rest_status_url)
        versions_statuses = status_response.model_version_status
        assert len(versions_statuses) == 3
        for version_status in versions_statuses:
            assert version_status.version in [1, 3, 4]
            if version_status.version == 4:
                assert version_status.state == ModelVersionState.END
                assert version_status.status.error_code == ErrorCode.OK
                assert version_status.status.error_message == _ERROR_MESSAGE[
                    ModelVersionState.END][ErrorCode.OK]
            elif version_status.version == 1 or version_status.version == 3:
                assert version_status.state == ModelVersionState.AVAILABLE
                assert version_status.status.error_code == ErrorCode.OK
                assert version_status.status.error_message == _ERROR_MESSAGE[
                    ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Available versions: 1, 3, 4

        resnet_bs4_copy_dir = copy_model(resnet_bs4, 4, model_dir)
        time.sleep(10)

        rest_url_v1 = 'http://localhost:{}/v1/models/resnet/' \
                      'versions/1/metadata'.format(ports["rest_port"])
        response_v1 = get_model_metadata_response_rest(rest_url_v1)
        input_metadata_v1, output_metadata_v1 = model_metadata_response(
            response=response_v1)

        assert model_name == response_v1.model_spec.name
        assert expected_input_metadata_v1 == input_metadata_v1
        assert expected_output_metadata_v1 == output_metadata_v1

        # version 3 was copied from the batch-8 model
        expected_input_metadata_v3 = {
            in_name: {
                'dtype': 1,
                'shape': [8, 3, 224, 224]
            }
        }
        expected_output_metadata_v3 = {
            out_name: {
                'dtype': 1,
                'shape': [8, 1001]
            }
        }

        rest_url_v3 = 'http://localhost:{}/v1/models/resnet/' \
                      'versions/3/metadata'.format(ports["rest_port"])
        response_v3 = get_model_metadata_response_rest(rest_url_v3)
        input_metadata_v3, output_metadata_v3 = model_metadata_response(
            response=response_v3)

        assert model_name == response_v3.model_spec.name
        assert expected_input_metadata_v3 == input_metadata_v3
        assert expected_output_metadata_v3 == output_metadata_v3

        # version 4 was copied from the batch-4 model; it is also the
        # latest, so the plain /metadata endpoint (rest_url) must serve it
        expected_input_metadata_v4 = {
            in_name: {
                'dtype': 1,
                'shape': [4, 3, 224, 224]
            }
        }
        expected_output_metadata_v4 = {
            out_name: {
                'dtype': 1,
                'shape': [4, 1001]
            }
        }
        response_v4 = get_model_metadata_response_rest(rest_url)
        input_metadata_v4, output_metadata_v4 = model_metadata_response(
            response=response_v4)

        assert model_name == response_v4.model_spec.name
        assert expected_input_metadata_v4 == input_metadata_v4
        assert expected_output_metadata_v4 == output_metadata_v4

        # Model status check: all three policy versions available.
        rest_status_url = 'http://localhost:{}/v1/models/resnet'.format(
            ports["rest_port"])
        status_response = get_model_status_response_rest(rest_status_url)
        versions_statuses = status_response.model_version_status
        assert len(versions_statuses) == 3
        for version_status in versions_statuses:
            assert version_status.version in [1, 3, 4]
            assert version_status.state == ModelVersionState.AVAILABLE
            assert version_status.status.error_code == ErrorCode.OK
            assert version_status.status.error_message == _ERROR_MESSAGE[
                ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Clean up the copied version directories.
        shutil.rmtree(resnet_copy_dir)
        shutil.rmtree(resnet_bs4_copy_dir)
        shutil.rmtree(resnet_bs8_copy_dir)
        time.sleep(10)
def start_server_multi_model(request, get_docker_network, start_minio_server,
                             get_minio_server_s3, get_image, get_test_dir,
                             get_docker_context):
    """Start an OVMS docker container serving multiple models from a config.

    Copies the functional-test config.json into the mounted model directory,
    points the container at the minio S3 backend started by
    ``start_minio_server``, and waits until the endpoint is up.

    Returns:
        tuple: (container, {"grpc_port": ..., "rest_port": ...}) where the
        ports are the ones exposed by the OVMS container itself.
    """
    shutil.copyfile('tests/functional/config.json',
                    get_test_dir + '/saved_models/config.json')
    # S3 credentials forwarded from the host environment into the container.
    # NOTE(review): these may be None if the variables are unset, which would
    # raise TypeError on concatenation below — assumed always set by CI.
    AWS_ACCESS_KEY_ID = os.getenv('MINIO_ACCESS_KEY')
    AWS_SECRET_ACCESS_KEY = os.getenv('MINIO_SECRET_KEY')
    AWS_REGION = os.getenv('AWS_REGION')

    client = get_docker_context
    network = get_docker_network

    # Only the minio gRPC port is needed to build the S3 endpoint URL.
    # (Previously grpc_port/rest_port were unpacked here and then silently
    # clobbered by get_ports_for_fixture below; use distinct names instead.)
    _, minio_ports = start_minio_server
    minio_endpoint = 'http://minio.locals3-{}.com:{}'.format(
        get_tests_suffix(), minio_ports["grpc_port"])

    envs = [
        'MINIO_ACCESS_KEY=' + AWS_ACCESS_KEY_ID,
        'MINIO_SECRET_KEY=' + AWS_SECRET_ACCESS_KEY,
        'AWS_ACCESS_KEY_ID=' + AWS_ACCESS_KEY_ID,
        'AWS_SECRET_ACCESS_KEY=' + AWS_SECRET_ACCESS_KEY,
        'AWS_REGION=' + AWS_REGION, 'S3_ENDPOINT=' + minio_endpoint,
        'https_proxy=' + os.getenv('https_proxy', ""),
        'no_proxy={}'.format(minio_endpoint)
    ]

    volumes_dict = {
        '{}'.format(get_test_dir + '/saved_models/'): {
            'bind': '/opt/ml',
            'mode': 'ro'
        }
    }

    # Ports for the OVMS container itself (distinct from the minio ports).
    grpc_port, rest_port = get_ports_for_fixture(port_suffix="07")

    command = "/ie-serving-py/start_server.sh ie_serving config " \
              "--config_path /opt/ml/config.json --port {} " \
              "--rest_port {} --grpc_workers 2 --rest_workers 2".\
              format(grpc_port, rest_port)

    container = client.containers.run(
        image=get_image,
        detach=True,
        name='ie-serving-py-test-multi-{}'.format(get_tests_suffix()),
        ports={
            '{}/tcp'.format(grpc_port): grpc_port,
            '{}/tcp'.format(rest_port): rest_port
        },
        remove=True,
        volumes=volumes_dict,
        environment=envs,
        command=command,
        network=network.name)

    # Ensure the container is killed at the end of the test session.
    request.addfinalizer(container.kill)

    running = wait_endpoint_setup(container)
    assert running is True, "docker container was not started successfully"

    return container, {"grpc_port": grpc_port, "rest_port": rest_port}
Ejemplo n.º 17
0
    def test_specific_version(self, resnet_multiple_batch_sizes, get_test_dir,
                              start_server_update_flow_specific,
                              create_grpc_channel):
        """Verify gRPC metadata and status while model versions change.

        Flow: serve versions 1 and 4, replace 4 with 3, then re-add 4.
        After each step the input/output metadata of the served versions
        and the model status (AVAILABLE / END) are checked over gRPC.
        """
        _, ports = start_server_update_flow_specific
        resnet, resnet_bs4, resnet_bs8 = resnet_multiple_batch_sizes
        # Directory watched by the server for version updates.
        model_dir = get_test_dir + '/saved_models/' + 'update-{}/'.format(
            get_tests_suffix())
        # ensure model dir is empty at the beginning
        shutil.rmtree(model_dir, ignore_errors=True)
        stub = create_grpc_channel('localhost:{}'.format(ports["grpc_port"]),
                                   PREDICTION_SERVICE)
        status_stub = create_grpc_channel(
            'localhost:{}'.format(ports["grpc_port"]), MODEL_SERVICE)

        resnet_copy_dir = copy_model(resnet, 1, model_dir)
        resnet_bs4_copy_dir = copy_model(resnet_bs4, 4, model_dir)

        # This could be replaced with status polling
        time.sleep(8)

        # Available versions: 1, 4

        print("Getting info about resnet model")
        model_name = 'resnet'
        in_name = 'map/TensorArrayStack/TensorArrayGatherV3'
        out_name = 'softmax_tensor'
        expected_input_metadata_v1 = {
            in_name: {
                'dtype': 1,
                'shape': [1, 3, 224, 224]
            }
        }
        expected_output_metadata_v1 = {
            out_name: {
                'dtype': 1,
                'shape': [1, 1001]
            }
        }
        request = get_model_metadata(model_name=model_name, version=1)
        response = stub.GetModelMetadata(request, 10)
        input_metadata, output_metadata = model_metadata_response(
            response=response)

        print(output_metadata)
        assert model_name == response.model_spec.name
        assert expected_input_metadata_v1 == input_metadata
        assert expected_output_metadata_v1 == output_metadata

        request_latest = get_model_metadata(model_name=model_name)
        response_latest = stub.GetModelMetadata(request_latest, 10)
        print("response", response_latest)
        input_metadata_latest, output_metadata_latest = \
            model_metadata_response(response=response_latest)

        request_v4 = get_model_metadata(model_name=model_name, version=4)
        response_v4 = stub.GetModelMetadata(request_v4, 10)
        print("response", response_v4)
        # Fixed: parse the v4 response (previously response_latest was
        # parsed, making the latest-vs-v4 comparison below vacuous).
        input_metadata_v4, output_metadata_v4 = model_metadata_response(
            response=response_v4)

        # Latest version is 4, so explicit v4 and 'latest' must match.
        assert response_v4.model_spec.name == response_latest.model_spec.name
        assert input_metadata_v4 == input_metadata_latest
        assert output_metadata_v4 == output_metadata_latest

        # Model status check
        model_name = 'resnet'
        request = get_model_status(model_name=model_name)
        status_response = status_stub.GetModelStatus(request, 10)
        versions_statuses = status_response.model_version_status
        assert len(versions_statuses) == 2
        for version_status in versions_statuses:
            assert version_status.version in [1, 4]
            assert version_status.state == ModelVersionState.AVAILABLE
            assert version_status.status.error_code == ErrorCode.OK
            assert version_status.status.error_message == _ERROR_MESSAGE[
                ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Replace version 4 with version 3 (batch size 8).
        shutil.rmtree(resnet_bs4_copy_dir)
        resnet_bs8_copy_dir = copy_model(resnet_bs8, 3, model_dir)
        time.sleep(10)

        # Available versions: 1, 3

        request_latest = get_model_metadata(model_name=model_name)
        response_latest = stub.GetModelMetadata(request_latest, 10)
        print("response", response_latest)
        input_metadata_latest, output_metadata_latest = \
            model_metadata_response(response=response_latest)

        request_v3 = get_model_metadata(model_name=model_name, version=3)
        response_v3 = stub.GetModelMetadata(request_v3, 10)
        print("response", response_v3)
        input_metadata_v3, output_metadata_v3 = model_metadata_response(
            response=response_v3)

        # Latest is now 3.
        assert response_v3.model_spec.name == response_latest.model_spec.name
        assert input_metadata_v3 == input_metadata_latest
        assert output_metadata_v3 == output_metadata_latest

        # Model status check
        model_name = 'resnet'
        request = get_model_status(model_name=model_name)
        status_response = status_stub.GetModelStatus(request, 10)
        versions_statuses = status_response.model_version_status
        assert len(versions_statuses) == 3
        for version_status in versions_statuses:
            assert version_status.version in [1, 3, 4]
            if version_status.version == 4:
                # Version 4 was removed, so it must be in END state.
                assert version_status.state == ModelVersionState.END
                assert version_status.status.error_code == ErrorCode.OK
                assert version_status.status.error_message == _ERROR_MESSAGE[
                    ModelVersionState.END][ErrorCode.OK]
            elif version_status.version == 1 or version_status.version == 3:
                assert version_status.state == ModelVersionState.AVAILABLE
                assert version_status.status.error_code == ErrorCode.OK
                assert version_status.status.error_message == _ERROR_MESSAGE[
                    ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Available versions: 1, 3, 4

        resnet_bs4_copy_dir = copy_model(resnet_bs4, 4, model_dir)
        time.sleep(10)

        request_v1 = get_model_metadata(model_name=model_name, version=1)
        response_v1 = stub.GetModelMetadata(request_v1, 10)
        input_metadata_v1, output_metadata_v1 = model_metadata_response(
            response=response_v1)

        assert model_name == response_v1.model_spec.name
        assert expected_input_metadata_v1 == input_metadata_v1
        assert expected_output_metadata_v1 == output_metadata_v1

        # Version 3 serves the batch-size-8 model.
        expected_input_metadata_v3 = {
            in_name: {
                'dtype': 1,
                'shape': [8, 3, 224, 224]
            }
        }
        expected_output_metadata_v3 = {
            out_name: {
                'dtype': 1,
                'shape': [8, 1001]
            }
        }

        request_v3 = get_model_metadata(model_name=model_name, version=3)
        response_v3 = stub.GetModelMetadata(request_v3, 10)
        input_metadata_v3, output_metadata_v3 = model_metadata_response(
            response=response_v3)

        assert model_name == response_v3.model_spec.name
        assert expected_input_metadata_v3 == input_metadata_v3
        assert expected_output_metadata_v3 == output_metadata_v3

        # Version 4 (the latest again) serves the batch-size-4 model.
        expected_input_metadata_v4 = {
            in_name: {
                'dtype': 1,
                'shape': [4, 3, 224, 224]
            }
        }
        expected_output_metadata_v4 = {
            out_name: {
                'dtype': 1,
                'shape': [4, 1001]
            }
        }
        request_v4 = get_model_metadata(model_name=model_name)
        response_v4 = stub.GetModelMetadata(request_v4, 10)
        input_metadata_v4, output_metadata_v4 = model_metadata_response(
            response=response_v4)

        assert model_name == response_v4.model_spec.name
        assert expected_input_metadata_v4 == input_metadata_v4
        assert expected_output_metadata_v4 == output_metadata_v4

        # Model status check
        model_name = 'resnet'
        request = get_model_status(model_name=model_name)
        status_response = status_stub.GetModelStatus(request, 10)
        versions_statuses = status_response.model_version_status
        assert len(versions_statuses) == 3
        for version_status in versions_statuses:
            assert version_status.version in [1, 3, 4]
            assert version_status.state == ModelVersionState.AVAILABLE
            assert version_status.status.error_code == ErrorCode.OK
            assert version_status.status.error_message == _ERROR_MESSAGE[
                ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Cleanup copied model dirs; sleep lets the server settle before the
        # next test reuses the watched directory.
        shutil.rmtree(resnet_copy_dir)
        shutil.rmtree(resnet_bs4_copy_dir)
        shutil.rmtree(resnet_bs8_copy_dir)
        time.sleep(10)
Ejemplo n.º 18
0
    def test_update_rest_grpc(self, resnet_multiple_batch_sizes, get_test_dir,
                              start_server_update_flow_specific,
                              create_grpc_channel):
        """Verify model metadata over both gRPC and REST across updates.

        Flow: serve versions 1 and 4, replace 4 with 3, then re-add 4,
        checking metadata through a mix of gRPC calls and REST endpoints.
        """
        _, ports = start_server_update_flow_specific
        resnet, resnet_bs4, resnet_bs8 = resnet_multiple_batch_sizes
        # Directory watched by the server for version updates.
        model_dir = get_test_dir + '/saved_models/' + 'update-{}/'.format(
            get_tests_suffix())
        # ensure model dir is empty at the beginning
        shutil.rmtree(model_dir, ignore_errors=True)
        stub = create_grpc_channel('localhost:{}'.format(ports["grpc_port"]),
                                   PREDICTION_SERVICE)
        resnet_copy_dir = copy_model(resnet, 1, model_dir)
        resnet_bs4_copy_dir = copy_model(resnet_bs4, 4, model_dir)
        time.sleep(8)

        # Available versions: 1, 4

        print("Getting info about resnet model")
        model_name = 'resnet'
        in_name = 'map/TensorArrayStack/TensorArrayGatherV3'
        out_name = 'softmax_tensor'
        expected_input_metadata_v1 = {
            in_name: {
                'dtype': 1,
                'shape': [1, 3, 224, 224]
            }
        }
        expected_output_metadata_v1 = {
            out_name: {
                'dtype': 1,
                'shape': [1, 1001]
            }
        }
        request = get_model_metadata(model_name=model_name, version=1)
        response = stub.GetModelMetadata(request, 10)
        input_metadata, output_metadata = model_metadata_response(
            response=response)

        print(output_metadata)
        assert model_name == response.model_spec.name
        assert expected_input_metadata_v1 == input_metadata
        assert expected_output_metadata_v1 == output_metadata

        # 'latest' metadata fetched over REST for cross-protocol comparison.
        rest_url = 'http://localhost:{}/v1/models/resnet/metadata'.format(
            ports["rest_port"])
        response_latest = get_model_metadata_response_rest(rest_url)
        print("response", response_latest)
        input_metadata_latest, output_metadata_latest = \
            model_metadata_response(response=response_latest)

        request_v4 = get_model_metadata(model_name=model_name, version=4)
        response_v4 = stub.GetModelMetadata(request_v4, 10)
        print("response", response_v4)
        # Fixed: parse the v4 response (previously response_latest was
        # parsed, making the latest-vs-v4 comparison below vacuous).
        input_metadata_v4, output_metadata_v4 = model_metadata_response(
            response=response_v4)

        # Latest version is 4, so gRPC v4 and REST 'latest' must match.
        assert response_v4.model_spec.name == response_latest.model_spec.name
        assert input_metadata_v4 == input_metadata_latest
        assert output_metadata_v4 == output_metadata_latest

        # Replace version 4 with version 3 (batch size 8).
        shutil.rmtree(resnet_bs4_copy_dir)
        resnet_bs8_copy_dir = copy_model(resnet_bs8, 3, model_dir)
        time.sleep(3)

        # Available versions: 1, 3

        request_latest = get_model_metadata(model_name=model_name)
        response_latest = stub.GetModelMetadata(request_latest, 10)
        print("response", response_latest)
        input_metadata_latest, output_metadata_latest = \
            model_metadata_response(response=response_latest)

        rest_url = 'http://localhost:{}/v1/models/resnet/versions/3/metadata'.\
                   format(ports["rest_port"])
        response_v3 = get_model_metadata_response_rest(rest_url)
        print("response", response_v3)
        input_metadata_v3, output_metadata_v3 = model_metadata_response(
            response=response_v3)

        # Latest is now 3, so REST v3 and gRPC 'latest' must match.
        assert response_v3.model_spec.name == response_latest.model_spec.name
        assert input_metadata_v3 == input_metadata_latest
        assert output_metadata_v3 == output_metadata_latest

        # Available versions: 1, 3, 4

        time.sleep(3)
        resnet_bs4_copy_dir = copy_model(resnet_bs4, 4, model_dir)
        time.sleep(3)
        rest_url = 'http://localhost:{}/v1/models/resnet/versions/1/metadata'.\
                   format(ports["rest_port"])
        response_v1 = get_model_metadata_response_rest(rest_url)
        input_metadata_v1, output_metadata_v1 = model_metadata_response(
            response=response_v1)

        # Fixed: assert on response_v1 (previously checked the stale
        # gRPC 'response' object from the start of the test).
        assert model_name == response_v1.model_spec.name
        assert expected_input_metadata_v1 == input_metadata_v1
        assert expected_output_metadata_v1 == output_metadata_v1

        # Version 3 serves the batch-size-8 model.
        expected_input_metadata_v3 = {
            in_name: {
                'dtype': 1,
                'shape': [8, 3, 224, 224]
            }
        }
        expected_output_metadata_v3 = {
            out_name: {
                'dtype': 1,
                'shape': [8, 1001]
            }
        }

        request_v3 = get_model_metadata(model_name=model_name, version=3)
        response_v3 = stub.GetModelMetadata(request_v3, 10)
        input_metadata_v3, output_metadata_v3 = model_metadata_response(
            response=response_v3)

        # Fixed: assert on response_v3 rather than the stale 'response'.
        assert model_name == response_v3.model_spec.name
        assert expected_input_metadata_v3 == input_metadata_v3
        assert expected_output_metadata_v3 == output_metadata_v3

        # Version 4 serves the batch-size-4 model.
        expected_input_metadata_v4 = {
            in_name: {
                'dtype': 1,
                'shape': [4, 3, 224, 224]
            }
        }
        expected_output_metadata_v4 = {
            out_name: {
                'dtype': 1,
                'shape': [4, 1001]
            }
        }
        rest_url = 'http://localhost:{}/v1/models/resnet/' \
                   'versions/4/metadata'.format(ports["rest_port"])
        response_v4 = get_model_metadata_response_rest(rest_url)
        input_metadata_v4, output_metadata_v4 = model_metadata_response(
            response=response_v4)

        assert model_name == response_v4.model_spec.name
        assert expected_input_metadata_v4 == input_metadata_v4
        assert expected_output_metadata_v4 == output_metadata_v4

        # Cleanup copied model dirs.
        shutil.rmtree(resnet_copy_dir)
        shutil.rmtree(resnet_bs4_copy_dir)
        shutil.rmtree(resnet_bs8_copy_dir)
Ejemplo n.º 19
0
    def test_latest_version_rest(self, resnet_multiple_batch_sizes,
                                 get_test_dir,
                                 start_server_update_flow_latest):
        """Check that REST metadata/status follow the latest model version.

        Serves version 1, verifies its metadata and AVAILABLE status over
        REST, then replaces it with a batch-4 version 2 and verifies the
        'latest' metadata endpoint and per-version statuses (v1 END,
        v2 AVAILABLE) reflect the update.
        """
        _, ports = start_server_update_flow_latest
        resnet, resnet_bs4, resnet_bs8 = resnet_multiple_batch_sizes
        # Directory watched by the server for version updates.
        dir = get_test_dir + '/saved_models/' + 'update-{}/'.format(
            get_tests_suffix())
        # ensure model dir is empty at the beginning
        shutil.rmtree(dir, ignore_errors=True)
        resnet_copy_dir = copy_model(resnet, 1, dir)
        # Give the server time to detect and load version 1.
        time.sleep(8)

        print("Getting info about resnet model")
        model_name = 'resnet'
        in_name = 'map/TensorArrayStack/TensorArrayGatherV3'
        out_name = 'softmax_tensor'
        # Version 1 serves the batch-size-1 model.
        expected_input_metadata_v1 = {
            in_name: {
                'dtype': 1,
                'shape': [1, 3, 224, 224]
            }
        }
        expected_output_metadata_v1 = {
            out_name: {
                'dtype': 1,
                'shape': [1, 1001]
            }
        }

        # 'latest' metadata endpoint (no explicit version in the URL).
        rest_url = 'http://localhost:{}/v1/models/resnet/metadata'.format(
            ports["rest_port"])
        response = get_model_metadata_response_rest(rest_url)
        input_metadata, output_metadata = model_metadata_response(
            response=response)

        print(output_metadata)
        assert model_name == response.model_spec.name
        assert expected_input_metadata_v1 == input_metadata
        assert expected_output_metadata_v1 == output_metadata

        # Model status check before update
        rest_status_url = 'http://localhost:{}/v1/models/resnet'.format(
            ports["rest_port"])
        status_response = get_model_status_response_rest(rest_status_url)
        versions_statuses = status_response.model_version_status
        version_status = versions_statuses[0]
        assert len(versions_statuses) == 1
        assert version_status.version == 1
        assert version_status.state == ModelVersionState.AVAILABLE
        assert version_status.status.error_code == ErrorCode.OK
        assert version_status.status.error_message == _ERROR_MESSAGE[
            ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Replace version 1 with version 2 (batch size 4).
        shutil.rmtree(resnet_copy_dir)
        resnet_bs4_copy_dir = copy_model(resnet_bs4, 2, dir)
        # Give the server time to retire v1 and load v2.
        time.sleep(10)

        # 'latest' should now report the batch-size-4 shapes of version 2.
        expected_input_metadata = {
            in_name: {
                'dtype': 1,
                'shape': [4, 3, 224, 224]
            }
        }
        expected_output_metadata = {out_name: {'dtype': 1, 'shape': [4, 1001]}}
        response = get_model_metadata_response_rest(rest_url)
        input_metadata, output_metadata = model_metadata_response(
            response=response)

        print(output_metadata)
        assert model_name == response.model_spec.name
        assert expected_input_metadata == input_metadata
        assert expected_output_metadata == output_metadata

        # Model status check after update
        status_response = get_model_status_response_rest(rest_status_url)
        versions_statuses = status_response.model_version_status
        assert len(versions_statuses) == 2
        for version_status in versions_statuses:
            assert version_status.version in [1, 2]
            if version_status.version == 1:
                # Version 1 was removed, so it must be in END state.
                assert version_status.state == ModelVersionState.END
                assert version_status.status.error_code == ErrorCode.OK
                assert version_status.status.error_message == _ERROR_MESSAGE[
                    ModelVersionState.END][ErrorCode.OK]
            elif version_status.version == 2:
                assert version_status.state == ModelVersionState.AVAILABLE
                assert version_status.status.error_code == ErrorCode.OK
                assert version_status.status.error_message == _ERROR_MESSAGE[
                    ModelVersionState.AVAILABLE][ErrorCode.OK]
        ###

        # Cleanup; sleep lets the server settle before the next test.
        shutil.rmtree(resnet_bs4_copy_dir)
        time.sleep(10)