def test_server_side_encryption(sagemaker_session):
    """Train a TF py3 job whose code location and model artifacts live in a
    KMS-encrypted bucket, exercising SSE-KMS on the training output path.
    """
    boto_session = sagemaker_session.boto_session
    with kms_utils.bucket_with_encryption(boto_session, ROLE) as (bucket_with_kms, kms_key):
        # Timestamp the S3 prefix so repeated runs do not collide in the bucket.
        run_prefix = time.strftime('%y%m%d-%H%M')
        output_path = os.path.join(bucket_with_kms, 'test-server-side-encryption', run_prefix)

        estimator = TensorFlow(
            entry_point=SCRIPT,
            role=ROLE,
            train_instance_count=1,
            train_instance_type='ml.c5.xlarge',
            sagemaker_session=sagemaker_session,
            py_version='py3',
            framework_version='1.11',
            base_job_name=unique_name_from_base('test-server-side-encryption'),
            # Both code and artifacts go to the KMS-encrypted bucket.
            code_location=output_path,
            output_path=output_path,
            model_dir='/opt/ml/model',
            output_kms_key=kms_key,
        )

        # Stage the MNIST fixtures through the estimator's own session.
        inputs = estimator.sagemaker_session.upload_data(
            path=os.path.join(RESOURCE_PATH, 'data'),
            key_prefix='scriptmode/mnist')

        with timeout.timeout(minutes=integ.TRAINING_DEFAULT_TIMEOUT_MINUTES):
            estimator.fit(inputs)
def test_server_side_encryption(sagemaker_session, tf_full_version,
                                py_version):
    """Train and deploy a script-mode TF job whose code, artifacts, and
    endpoint all use a KMS-encrypted bucket (SSE-KMS end to end).
    """
    with kms_utils.bucket_with_encryption(sagemaker_session,
                                          ROLE) as (bucket_with_kms, kms_key):
        # Timestamped prefix keeps repeated runs from colliding.
        run_stamp = time.strftime("%y%m%d-%H%M")
        output_path = os.path.join(
            bucket_with_kms, "test-server-side-encryption", run_stamp)

        estimator = TensorFlow(
            entry_point="training.py",
            source_dir=TFS_RESOURCE_PATH,
            role=ROLE,
            train_instance_count=1,
            train_instance_type="ml.c5.xlarge",
            sagemaker_session=sagemaker_session,
            script_mode=True,
            framework_version=tf_full_version,
            py_version=py_version,
            # Code and artifacts both land under the encrypted bucket.
            code_location=output_path,
            output_path=output_path,
            model_dir="/opt/ml/model",
            output_kms_key=kms_key,
        )

        # Stage the MNIST fixtures via the estimator's own session.
        inputs = estimator.sagemaker_session.upload_data(
            path=os.path.join(MNIST_RESOURCE_PATH, "data"),
            key_prefix="scriptmode/mnist")

        training_minutes = tests.integ.TRAINING_DEFAULT_TIMEOUT_MINUTES
        with tests.integ.timeout.timeout(minutes=training_minutes):
            estimator.fit(
                inputs=inputs,
                job_name=unique_name_from_base("test-server-side-encryption"))

        # Deploy behind a guard that tears the endpoint down on exit/timeout.
        endpoint_name = unique_name_from_base("test-server-side-encryption")
        with timeout.timeout_and_delete_endpoint_by_name(
                endpoint_name, sagemaker_session):
            estimator.deploy(
                initial_instance_count=1,
                instance_type="ml.c5.xlarge",
                endpoint_name=endpoint_name,
                entry_point=os.path.join(TFS_RESOURCE_PATH, "inference.py"),
            )
# Example n. 3
def test_transform_tf_kms_network_isolation(sagemaker_session,
                                            cpu_instance_type, tmpdir,
                                            tf_full_version, py_version):
    """Train a TF MNIST model, then run a network-isolated batch transform
    whose output and volume are KMS-encrypted, and verify the job metadata
    plus the downloaded predictions.
    """
    data_path = os.path.join(DATA_DIR, "tensorflow_mnist")

    estimator = TensorFlow(
        entry_point=os.path.join(data_path, "mnist.py"),
        role="SageMakerRole",
        train_instance_count=1,
        train_instance_type=cpu_instance_type,
        framework_version=tf_full_version,
        script_mode=True,
        py_version=py_version,
        sagemaker_session=sagemaker_session,
    )

    # Stage training and transform inputs under a shared S3 prefix.
    s3_prefix = "integ-test-data/tf-scriptmode/mnist"
    training_input = sagemaker_session.upload_data(
        path=os.path.join(data_path, "data"),
        key_prefix="{}/training".format(s3_prefix))

    job_name = unique_name_from_base("test-tf-transform")
    estimator.fit(inputs=training_input, job_name=job_name)

    transform_input = sagemaker_session.upload_data(
        path=os.path.join(data_path, "transform"),
        key_prefix="{}/transform".format(s3_prefix))

    with bucket_with_encryption(sagemaker_session,
                                "SageMakerRole") as (bucket_with_kms, kms_key):
        output_path = "{}/{}/output".format(bucket_with_kms, job_name)

        # Encrypt both the output objects and the attached volume, and cut
        # off network access from the transform containers.
        transformer = estimator.transformer(
            instance_count=1,
            instance_type=cpu_instance_type,
            output_path=output_path,
            output_kms_key=kms_key,
            volume_kms_key=kms_key,
            enable_network_isolation=True,
        )

        with timeout_and_delete_model_with_transformer(
                transformer,
                sagemaker_session,
                minutes=TRANSFORM_DEFAULT_TIMEOUT_MINUTES):
            transformer.transform(transform_input,
                                  job_name=job_name,
                                  content_type="text/csv",
                                  wait=True)

            # The backing model must carry the isolation flag.
            model_desc = sagemaker_session.sagemaker_client.describe_model(
                ModelName=transformer.model_name)
            assert model_desc["EnableNetworkIsolation"]

        # Confirm the job recorded the encrypted output location and keys.
        job_desc = sagemaker_session.describe_transform_job(job_name=job_name)
        assert job_desc["TransformOutput"]["S3OutputPath"] == output_path
        assert job_desc["TransformOutput"]["KmsKeyId"] == kms_key
        assert job_desc["TransformResources"]["VolumeKmsKeyId"] == kms_key

        # Pull the predictions down and sanity-check the JSON payload.
        local_dir = os.path.join(tmpdir, "tf-batch-output")
        s3.S3Downloader.download(
            s3_uri=output_path,
            local_path=local_dir,
            session=sagemaker_session,
        )

        with open(os.path.join(local_dir, "data.csv.out")) as f:
            result = json.load(f)
            first = result["predictions"][0]
            assert len(first["probabilities"]) == 10
            assert first["classes"] == 1