Example #1
def _get_sagemaker_resource_names(deployment_pb):
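    # (value, max_length) pairs: generate_aws_compatible_string presumably
    # truncates each part so the joined name fits AWS resource-name limits
    # (an assumption; the helper itself is not shown on this page)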
    sagemaker_model_name = generate_aws_compatible_string(
        (deployment_pb.namespace, 10),
        (deployment_pb.name, 12),
        (deployment_pb.spec.bento_name, 20),
        (deployment_pb.spec.bento_version, 18),
    )
    sagemaker_endpoint_config_name = generate_aws_compatible_string(
        (deployment_pb.namespace, 10),
        (deployment_pb.name, 12),
        (deployment_pb.spec.bento_name, 20),
        (deployment_pb.spec.bento_version, 18),
    )
    sagemaker_endpoint_name = generate_aws_compatible_string(
        deployment_pb.namespace, deployment_pb.name)
    return sagemaker_model_name, sagemaker_endpoint_config_name, sagemaker_endpoint_name
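generate_aws_compatible_string itself is not shown on this page. A minimal sketch of what it might look like, assuming it truncates each (value, max_length) pair, joins the parts with hyphens, and caps the result at AWS's usual 63-character name limit (all assumptions; illustrative only, not BentoML's actual implementation):

import re

def generate_aws_compatible_string(*items, max_length=63):
    # Hypothetical reimplementation for illustration only
    parts = []
    for item in items:
        if isinstance(item, tuple):
            value, length = item
            parts.append(value[:length])  # truncate this part to its budget
        else:
            parts.append(item)
    name = '-'.join(parts)
    # Replace characters AWS resource names disallow, then cap total length
    name = re.sub(r'[^a-zA-Z0-9-]', '-', name)
    return name[:max_length]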
Example #2
    def _add(self, deployment_pb, bento_pb, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._add(deployment_pb, bento_pb, local_path)

        deployment_spec = deployment_pb.spec
        lambda_deployment_config = deployment_spec.aws_lambda_operator_config
        bento_service_metadata = bento_pb.bento.bento_service_metadata
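        # Bucket name embeds a random suffix so repeated deployments do not
        # collide in S3's globally unique bucket namespace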
        lambda_s3_bucket = generate_aws_compatible_string(
            'btml-{namespace}-{name}-{random_string}'.format(
                namespace=deployment_pb.namespace,
                name=deployment_pb.name,
                random_string=uuid.uuid4().hex[:6].lower(),
            )
        )
        try:
            create_s3_bucket_if_not_exists(
                lambda_s3_bucket, lambda_deployment_config.region
            )
            _deploy_lambda_function(
                deployment_pb=deployment_pb,
                bento_service_metadata=bento_service_metadata,
                deployment_spec=deployment_spec,
                lambda_s3_bucket=lambda_s3_bucket,
                lambda_deployment_config=lambda_deployment_config,
                bento_path=bento_path,
            )
            return ApplyDeploymentResponse(status=Status.OK(), deployment=deployment_pb)
        except BentoMLException as error:
            if lambda_s3_bucket and lambda_deployment_config:
                _cleanup_s3_bucket_if_exist(
                    lambda_s3_bucket, lambda_deployment_config.region
                )
            raise error
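create_s3_bucket_if_not_exists is a helper defined elsewhere in BentoML. A plausible sketch, assuming it simply tolerates a bucket the account already owns (illustrative only, not the project's actual code):

import boto3
from botocore.exceptions import ClientError

def create_s3_bucket_if_not_exists(bucket_name, region):
    # Hypothetical sketch for illustration
    s3_client = boto3.client('s3', region_name=region)
    try:
        if region == 'us-east-1':
            # us-east-1 rejects an explicit LocationConstraint
            s3_client.create_bucket(Bucket=bucket_name)
        else:
            s3_client.create_bucket(
                Bucket=bucket_name,
                CreateBucketConfiguration={'LocationConstraint': region},
            )
    except ClientError as error:
        # Re-raise anything other than "bucket already owned by you"
        if error.response['Error']['Code'] != 'BucketAlreadyOwnedByYou':
            raise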
Example #3
    def delete(self, deployment_pb):
        try:
            logger.debug('Deleting AWS Lambda deployment')

            deployment_spec = deployment_pb.spec
            lambda_deployment_config = deployment_spec.aws_lambda_operator_config
            lambda_deployment_config.region = (
                lambda_deployment_config.region or get_default_aws_region()
            )
            if not lambda_deployment_config.region:
                raise InvalidArgument('AWS region is missing')

            cf_client = boto3.client('cloudformation', lambda_deployment_config.region)
            stack_name = generate_aws_compatible_string(
                deployment_pb.namespace, deployment_pb.name
            )
            if deployment_pb.state.info_json:
                deployment_info_json = json.loads(deployment_pb.state.info_json)
                bucket_name = deployment_info_json.get('s3_bucket')
                if bucket_name:
                    _cleanup_s3_bucket_if_exist(
                        bucket_name, lambda_deployment_config.region
                    )

            logger.debug(
                'Deleting AWS CloudFormation: %s that includes Lambda function '
                'and related resources',
                stack_name,
            )
            cf_client.delete_stack(StackName=stack_name)
            return DeleteDeploymentResponse(status=Status.OK())

        except BentoMLException as error:
            return DeleteDeploymentResponse(status=error.status_proto)
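_cleanup_s3_bucket_if_exist is also defined elsewhere in BentoML. A plausible sketch, assuming it empties the bucket first (S3 refuses to delete non-empty buckets) and ignores a bucket that never got created (illustrative only):

import boto3
from botocore.exceptions import ClientError

def _cleanup_s3_bucket_if_exist(bucket_name, region):
    # Hypothetical sketch for illustration
    s3 = boto3.resource('s3', region_name=region)
    bucket = s3.Bucket(bucket_name)
    try:
        bucket.objects.all().delete()  # bucket must be empty before delete
        bucket.delete()
    except ClientError as error:
        # A bucket that does not exist needs no cleanup
        if error.response['Error']['Code'] != 'NoSuchBucket':
            raise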
Example #4
def _deploy_lambda_function(
    deployment_pb,
    bento_service_metadata,
    deployment_spec,
    lambda_s3_bucket,
    lambda_deployment_config,
    bento_path,
):
    deployment_path_prefix = os.path.join(deployment_pb.namespace, deployment_pb.name)

    py_major, py_minor, _ = bento_service_metadata.env.python_version.split('.')
    if py_major != '3':
        raise BentoMLException('Python 2 is not supported for Lambda Deployment')
    python_runtime = 'python{}.{}'.format(py_major, py_minor)

    artifact_types = [item.artifact_type for item in bento_service_metadata.artifacts]
    if any(
        i in ['TensorflowSavedModelArtifact', 'KerasModelArtifact']
        for i in artifact_types
    ) and (py_major, py_minor) != ('3', '6'):
        raise BentoMLException(
            'AWS Lambda Deployment only supports BentoML services '
            'built with Python 3.6.x. To fix this, repack your '
            'service with the right Python version '
            '(hint: pyenv/anaconda) and try again'
        )

    api_names = (
        [lambda_deployment_config.api_name]
        if lambda_deployment_config.api_name
        else [api.name for api in bento_service_metadata.apis]
    )

    raise_if_api_names_not_found_in_bento_service_metadata(
        bento_service_metadata, api_names
    )

    with TempDirectory() as lambda_project_dir:
        logger.debug(
            'Generating cloudformation template.yaml for lambda project at %s',
            lambda_project_dir,
        )
        template_file_path = _create_aws_lambda_cloudformation_template_file(
            project_dir=lambda_project_dir,
            namespace=deployment_pb.namespace,
            deployment_name=deployment_pb.name,
            deployment_path_prefix=deployment_path_prefix,
            api_names=api_names,
            bento_service_name=deployment_spec.bento_name,
            s3_bucket_name=lambda_s3_bucket,
            py_runtime=python_runtime,
            memory_size=lambda_deployment_config.memory_size,
            timeout=lambda_deployment_config.timeout,
        )
        logger.debug('Validating generated template.yaml')
        validate_lambda_template(
            template_file_path, lambda_deployment_config.region, lambda_project_dir,
        )
        logger.debug(
            'Initializing lambda project in directory: %s ...', lambda_project_dir,
        )
        init_sam_project(
            lambda_project_dir,
            bento_path,
            deployment_pb.name,
            deployment_spec.bento_name,
            api_names,
            aws_region=lambda_deployment_config.region,
        )
        for api_name in api_names:
            build_directory = os.path.join(
                lambda_project_dir, '.aws-sam', 'build', api_name
            )
            logger.debug(
                'Checking if function "%s" bundle is under the Lambda size limit',
                api_name,
            )
            # The Lambda runtime already provides boto3/botocore, and the
            # function only needs S3 GetObject, so drop those modules from
            # the function bundle directory to save space
            delete_list = ['boto3', 'botocore']
            for name in delete_list:
                module_path = os.path.join(build_directory, name)
                if os.path.isdir(module_path):
                    logger.debug('Remove module "%s" from build directory', name)
                    shutil.rmtree(module_path)
            total_build_dir_size = total_file_or_directory_size(build_directory)
            if total_build_dir_size > LAMBDA_FUNCTION_MAX_LIMIT:
                raise BentoMLException(
                    'Build function size is over 700MB, the max size '
                    'supported for an AWS Lambda function'
                )
            if total_build_dir_size >= LAMBDA_FUNCTION_LIMIT:
                logger.debug(
                    'Function %s is over the Lambda size limit, attempting to reduce it',
                    api_name,
                )
                reduce_bundle_size_and_upload_extra_resources_to_s3(
                    build_directory=build_directory,
                    region=lambda_deployment_config.region,
                    s3_bucket=lambda_s3_bucket,
                    deployment_prefix=deployment_path_prefix,
                    function_name=api_name,
                    lambda_project_dir=lambda_project_dir,
                )
            else:
                logger.debug(
                    'Function bundle is within Lambda limit, removing '
                    'download_extra_resources.py file from function bundle'
                )
                os.remove(os.path.join(build_directory, 'download_extra_resources.py'))
        logger.info('Packaging AWS Lambda project at %s ...', lambda_project_dir)
        lambda_package(
            lambda_project_dir,
            lambda_deployment_config.region,
            lambda_s3_bucket,
            deployment_path_prefix,
        )
        logger.info('Deploying lambda project')
        stack_name = generate_aws_compatible_string(
            deployment_pb.namespace + '-' + deployment_pb.name
        )
        lambda_deploy(
            lambda_project_dir, lambda_deployment_config.region, stack_name=stack_name,
        )
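Most helpers used above (init_sam_project, lambda_package, lambda_deploy, ...) are part of BentoML's AWS Lambda operator and are not shown here. As one small illustration, total_file_or_directory_size might look roughly like this (an assumption, not the project's actual code):

import os

def total_file_or_directory_size(path):
    # Hypothetical sketch: a file reports its own size, a directory
    # reports the recursive sum of its files' sizes
    if os.path.isfile(path):
        return os.path.getsize(path)
    total = 0
    for dirpath, _, filenames in os.walk(path):
        for filename in filenames:
            total += os.path.getsize(os.path.join(dirpath, filename))
    return total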