コード例 #1
0
def test_transformer_creation_without_endpoint_type(create_model,
                                                    sagemaker_session):
    """With no endpoint type given, ``transformer()`` must pass ``None`` for
    every optional argument to both ``create_model`` and ``Model.transformer``.
    """
    model_mock = Mock()
    create_model.return_value = model_mock

    estimator = TensorFlow(
        entry_point=SCRIPT_PATH,
        role=ROLE,
        sagemaker_session=sagemaker_session,
        train_instance_count=INSTANCE_COUNT,
        train_instance_type=INSTANCE_TYPE,
    )
    estimator.transformer(INSTANCE_COUNT, INSTANCE_TYPE)

    # Model creation receives only the defaults.
    create_model.assert_called_with(
        endpoint_type=None,
        model_server_workers=None,
        role=ROLE,
        vpc_config_override="VPC_CONFIG_DEFAULT",
    )

    # Every optional batch-transform argument defaults to None.
    none_defaults = {
        "accept": None,
        "assemble_with": None,
        "env": None,
        "max_concurrent_transforms": None,
        "max_payload": None,
        "output_kms_key": None,
        "output_path": None,
        "strategy": None,
        "tags": None,
        "volume_kms_key": None,
    }
    model_mock.transformer.assert_called_with(
        INSTANCE_COUNT, INSTANCE_TYPE, **none_defaults
    )
コード例 #2
0
def test_transformer_creation_without_optional_args(
    name_from_base,
    create_model,
    sagemaker_session,
    tensorflow_inference_version,
    tensorflow_inference_py_version,
):
    """Omitting every optional argument should forward the documented
    defaults to ``create_model`` and ``None`` for each ``Model.transformer``
    option; the model name is derived from the estimator's base job name.
    """
    if version.Version(tensorflow_inference_version) < version.Version("1.11"):
        pytest.skip(
            "Legacy TF version requires explicit image URI, and "
            "this logic is tested in test_create_model_with_custom_image.")

    model_name = "generated-model-name"
    name_from_base.return_value = model_name

    model_mock = Mock()
    create_model.return_value = model_mock

    base_job_name = "tensorflow"
    estimator = TensorFlow(
        entry_point=SCRIPT_PATH,
        framework_version=tensorflow_inference_version,
        py_version=tensorflow_inference_py_version,
        role=ROLE,
        sagemaker_session=sagemaker_session,
        instance_count=INSTANCE_COUNT,
        instance_type=INSTANCE_TYPE,
        base_job_name=base_job_name,
    )
    estimator.latest_training_job = _TrainingJob(sagemaker_session,
                                                 "some-job-name")
    estimator.transformer(INSTANCE_COUNT, INSTANCE_TYPE)

    # The generated model name comes from the base job name.
    name_from_base.assert_called_with(base_job_name)

    create_model.assert_called_with(
        role=ROLE,
        vpc_config_override="VPC_CONFIG_DEFAULT",
        entry_point=None,
        enable_network_isolation=False,
        name=model_name,
    )

    # All optional transformer parameters pass through as None.
    none_defaults = {
        "accept": None,
        "assemble_with": None,
        "env": None,
        "max_concurrent_transforms": None,
        "max_payload": None,
        "output_kms_key": None,
        "output_path": None,
        "strategy": None,
        "tags": None,
        "volume_kms_key": None,
    }
    model_mock.transformer.assert_called_with(
        INSTANCE_COUNT, INSTANCE_TYPE, **none_defaults
    )
def main():
    """Train the California-housing TF2 model in SageMaker local mode, run a
    local-mode batch transform over the test data, and print the transform
    output file.
    """
    download_training_and_eval_data()

    print('Starting model training.')
    print(
        'Note: if launching for the first time in local mode, container image download might take a few minutes to complete.')
    california_housing_estimator = TensorFlow(entry_point='california_housing_tf2.py',
                                              source_dir='code',
                                              role=DUMMY_IAM_ROLE,
                                              instance_count=1,
                                              instance_type='local',
                                              framework_version='2.4.1',
                                              py_version='py37')

    # Local-mode channels read training data straight from disk.
    inputs = {'train': 'file://./data/train', 'test': 'file://./data/test'}
    california_housing_estimator.fit(inputs)
    print('Completed model training')

    print('Running Batch Transform in local mode')
    tensorflow_serving_transformer = california_housing_estimator.transformer(
        instance_count=1,
        instance_type='local',
        # NOTE(review): 'file:./data/output' (no '//') is inconsistent with
        # the other local-mode URIs in this function — confirm whether the
        # local session requires the 'file://' form before normalizing it.
        output_path='file:./data/output',
    )

    tensorflow_serving_transformer.transform('file://./data/input',
                                             split_type='Line',
                                             content_type='text/csv')

    print('Printing Batch Transform output file content')
    # Fix: use a context manager so the output-file handle is closed
    # deterministically (the original `open(...).read()` leaked the handle).
    with open('./data/output/x_test.csv.out', 'r') as f:
        print(f.read())
コード例 #4
0
def test_transformer_creation_with_optional_args(
        create_model, sagemaker_session, tensorflow_inference_version,
        tensorflow_inference_py_version):
    """Every optional argument supplied to ``transformer()`` should be
    forwarded unchanged: model-creation args to ``create_model`` and
    batch-transform args to ``Model.transformer``.
    """
    if version.Version(tensorflow_inference_version) < version.Version("1.11"):
        pytest.skip(
            "Legacy TF version requires explicit image URI, and "
            "this logic is tested in test_create_model_with_custom_image.")

    model_mock = Mock()
    create_model.return_value = model_mock

    estimator = TensorFlow(
        entry_point=SCRIPT_PATH,
        framework_version=tensorflow_inference_version,
        py_version=tensorflow_inference_py_version,
        role=ROLE,
        sagemaker_session=sagemaker_session,
        instance_count=INSTANCE_COUNT,
        instance_type=INSTANCE_TYPE,
    )
    estimator.latest_training_job = _TrainingJob(sagemaker_session,
                                                 "some-job-name")

    kms_key = "kms"
    new_role = "role"
    vpc_config = {"Subnets": ["1234"], "SecurityGroupIds": ["5678"]}
    model_name = "model-name"
    # Arguments that must reach Model.transformer untouched.
    transform_opts = {
        "strategy": "SingleRecord",
        "assemble_with": "Line",
        "output_path": "s3://{}/batch-output".format(BUCKET_NAME),
        "output_kms_key": kms_key,
        "accept": "text/bytes",
        "env": {"foo": "bar"},
        "max_concurrent_transforms": 3,
        "max_payload": 100,
        "tags": {"Key": "foo", "Value": "bar"},
        "volume_kms_key": kms_key,
    }

    estimator.transformer(
        INSTANCE_COUNT,
        INSTANCE_TYPE,
        role=new_role,
        entry_point=SERVING_SCRIPT_FILE,
        vpc_config_override=vpc_config,
        enable_network_isolation=True,
        model_name=model_name,
        **transform_opts,
    )

    # Model-creation arguments flow to create_model ...
    create_model.assert_called_with(
        role=new_role,
        vpc_config_override=vpc_config,
        entry_point=SERVING_SCRIPT_FILE,
        enable_network_isolation=True,
        name=model_name,
    )
    # ... and the batch-transform arguments reach Model.transformer as-is.
    model_mock.transformer.assert_called_with(
        INSTANCE_COUNT, INSTANCE_TYPE, **transform_opts
    )
コード例 #5
0
def test_transform_tf_kms_network_isolation(sagemaker_session,
                                            cpu_instance_type, tmpdir,
                                            tf_full_version, py_version):
    """Integration test: train a TF MNIST model, then run a batch transform
    with KMS-encrypted output/volume and network isolation enabled, and
    verify the model description, the transform-job description, and the
    decrypted prediction output.
    """
    data_path = os.path.join(DATA_DIR, "tensorflow_mnist")

    tf = TensorFlow(
        entry_point=os.path.join(data_path, "mnist.py"),
        role="SageMakerRole",
        train_instance_count=1,
        train_instance_type=cpu_instance_type,
        framework_version=tf_full_version,
        script_mode=True,
        py_version=py_version,
        sagemaker_session=sagemaker_session,
    )

    # Stage training and transform inputs under a shared S3 prefix.
    s3_prefix = "integ-test-data/tf-scriptmode/mnist"
    training_input = sagemaker_session.upload_data(
        path=os.path.join(data_path, "data"),
        key_prefix="{}/training".format(s3_prefix))

    job_name = unique_name_from_base("test-tf-transform")
    tf.fit(inputs=training_input, job_name=job_name)

    transform_input = sagemaker_session.upload_data(
        path=os.path.join(data_path, "transform"),
        key_prefix="{}/transform".format(s3_prefix))

    # The context manager provisions a KMS-encrypted bucket and yields its
    # URI plus the KMS key used for both output and volume encryption.
    with bucket_with_encryption(sagemaker_session,
                                "SageMakerRole") as (bucket_with_kms, kms_key):
        output_path = "{}/{}/output".format(bucket_with_kms, job_name)

        transformer = tf.transformer(
            instance_count=1,
            instance_type=cpu_instance_type,
            output_path=output_path,
            output_kms_key=kms_key,
            volume_kms_key=kms_key,
            enable_network_isolation=True,
        )

        # Ensure the model and transformer are cleaned up even on timeout.
        with timeout_and_delete_model_with_transformer(
                transformer,
                sagemaker_session,
                minutes=TRANSFORM_DEFAULT_TIMEOUT_MINUTES):
            transformer.transform(transform_input,
                                  job_name=job_name,
                                  content_type="text/csv",
                                  wait=True)

            # Network isolation must be recorded on the created model.
            model_desc = sagemaker_session.sagemaker_client.describe_model(
                ModelName=transformer.model_name)
            assert model_desc["EnableNetworkIsolation"]

        # The transform job must report the requested output path and keys.
        job_desc = sagemaker_session.describe_transform_job(job_name=job_name)
        assert job_desc["TransformOutput"]["S3OutputPath"] == output_path
        assert job_desc["TransformOutput"]["KmsKeyId"] == kms_key
        assert job_desc["TransformResources"]["VolumeKmsKeyId"] == kms_key

        # Download the encrypted output and sanity-check the predictions
        # (10 class probabilities per MNIST sample).
        s3.S3Downloader.download(
            s3_uri=output_path,
            local_path=os.path.join(tmpdir, "tf-batch-output"),
            session=sagemaker_session,
        )

        with open(os.path.join(tmpdir, "tf-batch-output",
                               "data.csv.out")) as f:
            result = json.load(f)
            assert len(result["predictions"][0]["probabilities"]) == 10
            assert result["predictions"][0]["classes"] == 1
コード例 #6
0
def test_transformer_creation_with_optional_args(create_model, sagemaker_session):
    """Every optional argument supplied to ``transformer()`` should be
    forwarded unchanged: model-creation args (including endpoint type and
    server workers) to ``create_model`` and batch-transform args to
    ``Model.transformer``.
    """
    model_mock = Mock()
    create_model.return_value = model_mock

    estimator = TensorFlow(
        entry_point=SCRIPT_PATH,
        role=ROLE,
        sagemaker_session=sagemaker_session,
        train_instance_count=INSTANCE_COUNT,
        train_instance_type=INSTANCE_TYPE,
    )
    estimator.latest_training_job = _TrainingJob(sagemaker_session,
                                                 "some-job-name")

    kms_key = "kms"
    new_role = "role"
    model_server_workers = 2
    vpc_config = {"Subnets": ["1234"], "SecurityGroupIds": ["5678"]}
    # Arguments that must reach Model.transformer untouched.
    transform_opts = {
        "strategy": "SingleRecord",
        "assemble_with": "Line",
        "output_path": "s3://{}/batch-output".format(BUCKET_NAME),
        "output_kms_key": kms_key,
        "accept": "text/bytes",
        "env": {"foo": "bar"},
        "max_concurrent_transforms": 3,
        "max_payload": 100,
        "tags": {"Key": "foo", "Value": "bar"},
        "volume_kms_key": kms_key,
    }

    estimator.transformer(
        INSTANCE_COUNT,
        INSTANCE_TYPE,
        role=new_role,
        model_server_workers=model_server_workers,
        endpoint_type="tensorflow-serving",
        entry_point=SERVING_SCRIPT_FILE,
        vpc_config_override=vpc_config,
        enable_network_isolation=True,
        **transform_opts,
    )

    # Model-creation arguments flow to create_model ...
    create_model.assert_called_with(
        model_server_workers=model_server_workers,
        role=new_role,
        vpc_config_override=vpc_config,
        endpoint_type="tensorflow-serving",
        entry_point=SERVING_SCRIPT_FILE,
        enable_network_isolation=True,
    )
    # ... and the batch-transform arguments reach Model.transformer as-is.
    model_mock.transformer.assert_called_with(
        INSTANCE_COUNT, INSTANCE_TYPE, **transform_opts
    )