def custom_bucket_name(boto_session):
    """Return the custom bucket name ``<prefix>-<region>-<account>`` for this session."""
    region = boto_session.region_name
    sts = boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account_id = sts.get_caller_identity()["Account"]
    return "-".join((CUSTOM_BUCKET_NAME_PREFIX, region, account_id))
def sagemaker_session_with_custom_bucket(
    boto_config, sagemaker_client_config, sagemaker_runtime_config
):
    """Build a sagemaker ``Session`` whose default bucket is ``<prefix>-<region>-<account>``.

    Args:
        boto_config: kwargs for ``boto3.Session``; falls back to ``DEFAULT_REGION`` when falsy.
        sagemaker_client_config: kwargs for the ``sagemaker`` client, or ``None`` to skip
            creating one.
        sagemaker_runtime_config: kwargs for the ``sagemaker-runtime`` client, or ``None``
            to skip creating one.

    Returns:
        A ``Session`` wired to the (possibly ``None``) clients and the custom default bucket.
    """
    boto_session = (
        boto3.Session(**boto_config) if boto_config else boto3.Session(region_name=DEFAULT_REGION)
    )

    # Only touch the client config when one was actually supplied. The previous code
    # called setdefault() unconditionally, which raises AttributeError on None even
    # though the conditional below explicitly supports a missing config.
    if sagemaker_client_config:
        sagemaker_client_config.setdefault("config", Config(retries=dict(max_attempts=10)))
        sagemaker_client = boto_session.client("sagemaker", **sagemaker_client_config)
    else:
        sagemaker_client = None

    runtime_client = (
        boto_session.client("sagemaker-runtime", **sagemaker_runtime_config)
        if sagemaker_runtime_config
        else None
    )

    region = boto_session.region_name
    account = boto_session.client(
        "sts", region_name=region, endpoint_url=sts_regional_endpoint(region)
    ).get_caller_identity()["Account"]
    custom_default_bucket = "{}-{}-{}".format(CUSTOM_BUCKET_PATH_PREFIX, region, account)

    return Session(
        boto_session=boto_session,
        sagemaker_client=sagemaker_client,
        sagemaker_runtime_client=runtime_client,
        default_bucket=custom_default_bucket,
    )
def _ecr_image_uri(sagemaker_session, algorithm_name):
    """Return the ``:latest`` ECR image URI for *algorithm_name* in this session's account."""
    region = sagemaker_session.boto_region_name
    sts = sagemaker_session.boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account = sts.get_caller_identity()["Account"]
    # Resolve the regional ECR hostname through botocore rather than hard-coding it.
    ecr_endpoint = utils._botocore_resolver().construct_endpoint("ecr", region)
    return "{}.dkr.{}/{}:latest".format(account, ecr_endpoint["hostname"], algorithm_name)
def bucket_with_encryption(boto_session, sagemaker_role):
    """Yield ``(s3_uri, kms_key_arn)`` for a KMS-encrypted bucket.

    Creates (or reuses) the bucket, enables default KMS encryption, attaches the
    KMS bucket policy, and schedules the key for deletion on teardown.
    """
    region = boto_session.region_name
    sts_client = boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account = sts_client.get_caller_identity()["Account"]
    role_arn = sts_client.get_caller_identity()["Arn"]

    kms_client = boto_session.client("kms")
    kms_key_arn = _create_kms_key(kms_client, account, role_arn, sagemaker_role, None)

    bucket_name = "sagemaker-{}-{}-with-kms".format(region, account)
    s3 = boto_session.client("s3")
    try:
        # 'us-east-1' cannot be passed as a LocationConstraint because it is the
        # default region: https://github.com/boto/boto3/issues/125
        if region == "us-east-1":
            s3.create_bucket(Bucket=bucket_name)
        else:
            s3.create_bucket(
                Bucket=bucket_name,
                CreateBucketConfiguration={"LocationConstraint": region},
            )
    except exceptions.ClientError as error:
        # Re-creating a bucket we already own is fine; anything else is a real failure.
        if error.response["Error"]["Code"] != "BucketAlreadyOwnedByYou":
            raise

    default_encryption_rule = {
        "ApplyServerSideEncryptionByDefault": {
            "SSEAlgorithm": "aws:kms",
            "KMSMasterKeyID": kms_key_arn,
        }
    }
    s3.put_bucket_encryption(
        Bucket=bucket_name,
        ServerSideEncryptionConfiguration={"Rules": [default_encryption_rule]},
    )
    s3.put_bucket_policy(
        Bucket=bucket_name, Policy=KMS_BUCKET_POLICY % (bucket_name, bucket_name)
    )

    yield "s3://" + bucket_name, kms_key_arn

    # Teardown: the key cannot be deleted immediately, only scheduled.
    kms_client.schedule_key_deletion(KeyId=kms_key_arn, PendingWindowInDays=7)
def container_image(sagemaker_session):
    """Build, push, and yield a multi-model container image; deletes the ECR repo on teardown.

    1P containers supporting multiple models are not available yet, so the
    integration tests build their own image.
    """
    region = sagemaker_session.boto_region_name
    boto_session = sagemaker_session.boto_session
    ecr_client = boto_session.client("ecr", region_name=region)
    sts_client = boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account_id = sts_client.get_caller_identity()["Account"]

    algorithm_name = "sagemaker-multimodel-integ-test-{}".format(sagemaker_timestamp())
    repo_uri_prefix = get_ecr_image_uri_prefix(account=account_id, region=region)
    ecr_image = "{prefix}/{algorithm_name}:latest".format(
        prefix=repo_uri_prefix, algorithm_name=algorithm_name
    )

    # Build the image locally and tag it with its ECR destination.
    docker_client = docker.from_env()
    image, _ = docker_client.images.build(
        path=os.path.join(DATA_DIR, "multimodel", "container"),
        tag=algorithm_name,
        rm=True,
    )
    image.tag(ecr_image, tag="latest")

    # Create the ECR repository and push the local image to it.
    _create_repository(ecr_client, algorithm_name)
    username, password = _ecr_login(ecr_client)
    for _ in retries(3, "Upload docker image to ECR repo", seconds_to_sleep=10):
        try:
            docker_client.images.push(
                ecr_image, auth_config={"username": username, "password": password}
            )
            break
        except requests.exceptions.ConnectionError:
            # This can happen when we try to create multiple repositories in
            # parallel, so we retry.
            pass

    yield ecr_image

    # Delete repository after the multi model integration tests complete.
    _delete_repository(ecr_client, algorithm_name)
def bucket_with_encryption(sagemaker_session, sagemaker_role):
    """Yield ``(s3_uri, kms_key_arn)`` for a KMS-encrypted, partition-aware bucket.

    Creates the bucket through the session helper, enables default KMS
    encryption, attaches the KMS bucket policy, and schedules the key for
    deletion on teardown.
    """
    boto_session = sagemaker_session.boto_session
    region = boto_session.region_name
    sts_client = boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account = sts_client.get_caller_identity()["Account"]
    role_arn = sts_client.get_caller_identity()["Arn"]

    kms_client = boto_session.client("kms")
    kms_key_arn = _create_kms_key(kms_client, account, region, role_arn, sagemaker_role, None)

    bucket_name = "sagemaker-{}-{}-with-kms".format(region, account)
    sagemaker_session._create_s3_bucket_if_it_does_not_exist(
        bucket_name=bucket_name, region=region
    )

    s3_client = boto_session.client("s3", region_name=region)
    # The SSE algorithm identifier is partition-prefixed (e.g. "aws:kms").
    default_encryption_rule = {
        "ApplyServerSideEncryptionByDefault": {
            "SSEAlgorithm": "{partition}:kms".format(
                partition=utils._aws_partition(region)
            ),
            "KMSMasterKeyID": kms_key_arn,
        }
    }
    s3_client.put_bucket_encryption(
        Bucket=bucket_name,
        ServerSideEncryptionConfiguration={"Rules": [default_encryption_rule]},
    )
    s3_client.put_bucket_policy(
        Bucket=bucket_name,
        Policy=KMS_BUCKET_POLICY.format(
            partition=utils._aws_partition(region), bucket_name=bucket_name
        ),
    )

    yield "s3://" + bucket_name, kms_key_arn

    # Teardown: the key cannot be deleted immediately, only scheduled.
    kms_client.schedule_key_deletion(KeyId=kms_key_arn, PendingWindowInDays=7)
def get_or_create_kms_key(
    sagemaker_session, role_arn=None, alias=KEY_ALIAS, sagemaker_role="SageMakerRole"
):
    """Return the KMS key ARN for *alias*, creating the key when it does not exist.

    When an existing key is found and *role_arn* is supplied, the role is added
    to the key policy before the ARN is returned.
    """
    kms_client = sagemaker_session.boto_session.client("kms")
    existing_arn = _get_kms_key_arn(kms_client, alias)

    region = sagemaker_session.boto_region_name
    sts_client = sagemaker_session.boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account_id = sts_client.get_caller_identity()["Account"]

    if existing_arn is None:
        return _create_kms_key(kms_client, account_id, role_arn, sagemaker_role, alias)
    if role_arn:
        _add_role_to_policy(kms_client, account_id, role_arn, alias, sagemaker_role)
    return existing_arn
def container_image(sagemaker_session):
    """Build, push, and yield a multi-model container image; deletes the ECR repo on teardown.

    1P containers supporting multiple models are not available yet, so the
    integration tests build their own image.
    """
    region = sagemaker_session.boto_region_name
    ecr_client = sagemaker_session.boto_session.client("ecr", region_name=region)
    sts_client = sagemaker_session.boto_session.client(
        "sts", region_name=region, endpoint_url=utils.sts_regional_endpoint(region)
    )
    account_id = sts_client.get_caller_identity()["Account"]

    # Resolve the regional ECR hostname instead of hard-coding ".amazonaws.com",
    # so the URI is also correct in non-standard partitions (aws-cn, aws-us-gov).
    # This matches how _ecr_image_uri builds its URI; in the standard partition
    # the resulting string is identical to the previous hard-coded form.
    ecr_endpoint = utils._botocore_resolver().construct_endpoint("ecr", region)
    ecr_image = "{account}.dkr.{hostname}/{algorithm_name}:latest".format(
        account=account_id, hostname=ecr_endpoint["hostname"], algorithm_name=ALGORITHM_NAME
    )

    # Build and tag docker image locally.
    docker_client = docker.from_env()
    image, _ = docker_client.images.build(
        path=os.path.join(DATA_DIR, "multimodel", "container"),
        tag=ALGORITHM_NAME,
        rm=True,
    )
    image.tag(ecr_image, tag="latest")

    # Create AWS ECR repository and push the local docker image to it.
    _create_repository(ecr_client, ALGORITHM_NAME)
    username, password = _ecr_login(ecr_client)
    docker_client.images.push(
        ecr_image, auth_config={"username": username, "password": password}
    )

    yield ecr_image

    # Delete repository after the multi model integration tests complete.
    repo = ecr_client.describe_repositories(repositoryNames=[ALGORITHM_NAME])
    if "repositories" in repo:
        ecr_client.delete_repository(repositoryName=ALGORITHM_NAME, force=True)