# NOTE: assumes `mock` (or unittest.mock) is imported at module level;
# patch targets inferred from the mocked-call assertions below.
@mock.patch("sagemaker.fw_utils.model_code_key_prefix", return_value="key-prefix")
@mock.patch("sagemaker.utils.repack_model")
def test_tfs_model_with_dependencies(repack_model, model_code_key_prefix, sagemaker_session, tf_version):
    model = Model(
        "s3://some/data.tar.gz",
        entry_point="train.py",
        dependencies=["src", "lib"],
        role=ROLE,
        framework_version=tf_version,
        image="my-image",
        sagemaker_session=sagemaker_session,
    )

    model.prepare_container_def(INSTANCE_TYPE)

    model_code_key_prefix.assert_called_with(model.key_prefix, model.name, model.image)
    repack_model.assert_called_with(
        "train.py",
        None,
        ["src", "lib"],
        "s3://some/data.tar.gz",
        "s3://my_bucket/key-prefix/model.tar.gz",
        sagemaker_session,
    )
def test_model_prepare_container_def_no_instance_type_or_image():
    model = Model("s3://some/data.tar.gz", role=ROLE)

    with pytest.raises(ValueError) as e:
        model.prepare_container_def()

    expected_msg = "Must supply either an instance type (for choosing CPU vs GPU) or an image URI."
    assert expected_msg in str(e.value)
def test_tfs_model_with_custom_image(sagemaker_session, tf_version):
    model = Model(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tf_version,
        image="my-image",
        sagemaker_session=sagemaker_session,
    )

    cdef = model.prepare_container_def(INSTANCE_TYPE)

    assert cdef["Image"] == "my-image"
def test_tfs_model_with_log_level(sagemaker_session, tf_version):
    model = Model(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tf_version,
        container_log_level=logging.INFO,
        sagemaker_session=sagemaker_session,
    )

    cdef = model.prepare_container_def(INSTANCE_TYPE)

    assert cdef["Environment"] == {Model.LOG_LEVEL_PARAM_NAME: "info"}
def test_tfs_model(sagemaker_session, tf_version):
    model = Model(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tf_version,
        sagemaker_session=sagemaker_session,
    )

    cdef = model.prepare_container_def(INSTANCE_TYPE)
    assert cdef["Image"].endswith("sagemaker-tensorflow-serving:{}-cpu".format(tf_version))
    assert cdef["Environment"] == {}

    predictor = model.deploy(INSTANCE_COUNT, INSTANCE_TYPE)
    assert isinstance(predictor, Predictor)
@mock.patch("sagemaker.fw_utils.model_code_key_prefix", return_value="key-prefix")
@mock.patch("sagemaker.utils.repack_model")
def test_tfs_model_with_entry_point(repack_model, model_code_key_prefix, sagemaker_session, tf_version):
    model = Model(
        "s3://some/data.tar.gz",
        entry_point="train.py",
        role=ROLE,
        framework_version=tf_version,
        image="my-image",
        sagemaker_session=sagemaker_session,
    )

    model.prepare_container_def(INSTANCE_TYPE)

    model_code_key_prefix.assert_called_with(model.key_prefix, model.name, model.image)
    repack_model.assert_called_with(
        "train.py",
        None,
        [],
        "s3://some/data.tar.gz",
        "s3://my_bucket/key-prefix/model.tar.gz",
        sagemaker_session,
    )
def test_tfs_model_image_accelerator(sagemaker_session, tf_version):
    model = Model(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tf_version,
        sagemaker_session=sagemaker_session,
    )

    cdef = model.prepare_container_def(INSTANCE_TYPE, accelerator_type=ACCELERATOR_TYPE)
    assert cdef["Image"].endswith("sagemaker-tensorflow-serving-eia:{}-cpu".format(tf_version))

    predictor = model.deploy(INSTANCE_COUNT, INSTANCE_TYPE)
    assert isinstance(predictor, Predictor)
def test_tfs_model_image_accelerator_not_supported(sagemaker_session, tf_version):
    # A framework version without an EIA serving image should reject accelerators.
    model = Model(
        "s3://some/data.tar.gz",
        role=ROLE,
        framework_version=tf_version,
        sagemaker_session=sagemaker_session,
    )

    with pytest.raises(ValueError):
        model.prepare_container_def(INSTANCE_TYPE, accelerator_type=ACCELERATOR_TYPE)
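# The tests above rely on a mocked `sagemaker_session` fixture whose default
# bucket is "my_bucket" (see the repacked-model S3 URI assertions). A minimal
# sketch of such a fixture, under those assumptions; the region name here is
# a hypothetical placeholder, not taken from this file.
@pytest.fixture()
def sagemaker_session():
    boto_mock = mock.Mock(name="boto_session", region_name="us-west-2")  # assumed region
    session = mock.Mock(
        name="sagemaker_session",
        boto_session=boto_mock,
        boto_region_name="us-west-2",  # assumed region
        config=None,
        local_mode=False,
    )
    # Bucket name matches the "s3://my_bucket/..." URIs asserted above.
    session.default_bucket = mock.Mock(name="default_bucket", return_value="my_bucket")
    return session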