Example #1
0
def test_tfs_model_with_dependencies(repack_model, model_code_key_prefix,
                                     sagemaker_session,
                                     tensorflow_inference_version):
    """Repacking the model includes the entry point's extra dependency dirs.

    ``prepare_container_def`` should derive the code key prefix from the model's
    own attributes and hand the entry point plus its ``dependencies`` list to
    the repack helper unchanged.
    """
    tfs_model = TensorFlowModel(
        "s3://some/data.tar.gz",
        entry_point="train.py",
        dependencies=["src", "lib"],
        role=ROLE,
        framework_version=tensorflow_inference_version,
        image_uri="my-image",
        sagemaker_session=sagemaker_session,
    )

    tfs_model.prepare_container_def(INSTANCE_TYPE)

    model_code_key_prefix.assert_called_with(
        tfs_model.key_prefix, tfs_model.name, tfs_model.image_uri
    )

    repack_model.assert_called_with(
        "train.py",
        None,
        ["src", "lib"],
        "s3://some/data.tar.gz",
        "s3://my_bucket/key-prefix/model.tar.gz",
        sagemaker_session,
        kms_key=None,
    )
Example #2
0
def test_model_prepare_container_def_no_instance_type_or_image(tensorflow_inference_version):
    """prepare_container_def() with neither an instance type nor an image must fail fast.

    Without an instance type the SDK cannot pick a CPU vs GPU image, and without
    an explicit image URI there is nothing to fall back to, so a ValueError with
    an actionable message is expected.
    """
    model = TensorFlowModel(
        "s3://some/data.tar.gz", role=ROLE, framework_version=tensorflow_inference_version
    )

    with pytest.raises(ValueError) as e:
        model.prepare_container_def()

    expected_msg = "Must supply either an instance type (for choosing CPU vs GPU) or an image URI."
    # Bug fix: ``str(e)`` stringifies pytest's ExceptionInfo wrapper (a repr that
    # may truncate or omit the message); ``e.value`` is the raised exception
    # itself, so its message is checked reliably.
    assert expected_msg in str(e.value)
Example #3
0
def test_model_image_accelerator(sagemaker_session):
    """Requesting an accelerator type selects the EI-enabled CPU image."""
    model = TensorFlowModel(
        MODEL_DATA,
        role=ROLE,
        entry_point=SCRIPT_PATH,
        sagemaker_session=sagemaker_session,
    )
    container_def = model.prepare_container_def(INSTANCE_TYPE, accelerator_type=ACCELERATOR_TYPE)
    assert container_def["Image"] == _get_full_cpu_image_uri_with_ei(defaults.TF_VERSION)
Example #4
0
def test_tfs_model_with_custom_image(sagemaker_session, tensorflow_inference_version):
    """An explicitly supplied image URI is used verbatim in the container def."""
    custom_image = "my-image"
    model = TensorFlowModel(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tensorflow_inference_version,
        image_uri=custom_image,
        sagemaker_session=sagemaker_session,
    )
    container_def = model.prepare_container_def(INSTANCE_TYPE)
    assert container_def["Image"] == custom_image
Example #5
0
def test_tfs_model_with_log_level(sagemaker_session, tensorflow_inference_version):
    """The container log level surfaces as a lower-cased value in Environment."""
    model = TensorFlowModel(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tensorflow_inference_version,
        container_log_level=logging.INFO,
        sagemaker_session=sagemaker_session,
    )
    env = model.prepare_container_def(INSTANCE_TYPE)["Environment"]
    assert env == {TensorFlowModel.LOG_LEVEL_PARAM_NAME: "info"}
Example #6
0
def test_tfs_model_accelerator(retrieve_image_uri, sagemaker_session, tensorflow_eia_version):
    """EIA framework versions resolve the inference image using the accelerator type.

    Also checks that the model still deploys and yields a TensorFlowPredictor.
    """
    model = TensorFlowModel(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tensorflow_eia_version,
        sagemaker_session=sagemaker_session,
    )
    container_def = model.prepare_container_def(INSTANCE_TYPE, accelerator_type=ACCELERATOR_TYPE)

    retrieve_image_uri.assert_called_with(
        "tensorflow",
        REGION,
        version=tensorflow_eia_version,
        instance_type=INSTANCE_TYPE,
        accelerator_type=ACCELERATOR_TYPE,
        image_scope="inference",
    )
    assert container_def["Image"] == IMAGE

    predictor = model.deploy(INSTANCE_COUNT, INSTANCE_TYPE)
    assert isinstance(predictor, TensorFlowPredictor)