Example #1
def _deploy_lambda_function(
    deployment_pb,
    bento_service_metadata,
    deployment_spec,
    lambda_s3_bucket,
    lambda_deployment_config,
    bento_path,
):
    deployment_path_prefix = os.path.join(deployment_pb.namespace,
                                          deployment_pb.name)

    py_major, py_minor, _ = bento_service_metadata.env.python_version.split(
        '.')
    if py_major != '3':
        raise BentoMLException(
            'Python 2 is not supported for Lambda Deployment')
    python_runtime = 'python{}.{}'.format(py_major, py_minor)

    artifact_types = [
        item.artifact_type for item in bento_service_metadata.artifacts
    ]
    if any(i in ['TensorflowSavedModelArtifact', 'KerasModelArtifact']
           for i in artifact_types) and (py_major, py_minor) != ('3', '6'):
        raise BentoMLException(
            'AWS Lambda Deployment only supports BentoML services '
            'built with Python 3.6.x. To fix this, repack your '
            'service with the right Python version '
            '(hint: pyenv/anaconda) and try again')

    api_names = ([lambda_deployment_config.api_name]
                 if lambda_deployment_config.api_name else
                 [api.name for api in bento_service_metadata.apis])

    raise_if_api_names_not_found_in_bento_service_metadata(
        bento_service_metadata, api_names)

    with TempDirectory() as lambda_project_dir:
        logger.debug(
            'Generating cloudformation template.yaml for lambda project at %s',
            lambda_project_dir,
        )
        template_file_path = _create_aws_lambda_cloudformation_template_file(
            project_dir=lambda_project_dir,
            namespace=deployment_pb.namespace,
            deployment_name=deployment_pb.name,
            deployment_path_prefix=deployment_path_prefix,
            api_names=api_names,
            bento_service_name=deployment_spec.bento_name,
            s3_bucket_name=lambda_s3_bucket,
            py_runtime=python_runtime,
            memory_size=lambda_deployment_config.memory_size,
            timeout=lambda_deployment_config.timeout,
        )
        logger.debug('Validating generated template.yaml')
        validate_sam_template(
            template_file_path,
            lambda_deployment_config.region,
            lambda_project_dir,
        )
        logger.debug(
            'Initializing lambda project in directory: %s ...',
            lambda_project_dir,
        )
        init_sam_project(
            lambda_project_dir,
            bento_path,
            deployment_pb.name,
            deployment_spec.bento_name,
            api_names,
            aws_region=lambda_deployment_config.region,
        )
        for api_name in api_names:
            build_directory = os.path.join(lambda_project_dir, '.aws-sam',
                                           'build', api_name)
            logger.debug(
                'Checking if function "%s" bundle is under the Lambda '
                'size limit',
                api_name,
            )
            # The Lambda runtime already bundles its own boto3/botocore
            # modules, and this function only uses S3 get_object, so we
            # can safely delete those modules from the function bundle
            # directory to reclaim space
            delete_list = ['boto3', 'botocore']
            for name in delete_list:
                logger.debug('Removing module "%s" from build directory', name)
                shutil.rmtree(os.path.join(build_directory, name))
            total_build_dir_size = total_file_or_directory_size(
                build_directory)
            if total_build_dir_size > LAMBDA_FUNCTION_MAX_LIMIT:
                raise BentoMLException(
                    'Build function size is over 700MB, the maximum '
                    'size allowed for an AWS Lambda function')
            if total_build_dir_size >= LAMBDA_FUNCTION_LIMIT:
                logger.debug(
                    'Function %s is over the Lambda size limit, '
                    'attempting to reduce it',
                    api_name,
                )
                reduce_bundle_size_and_upload_extra_resources_to_s3(
                    build_directory=build_directory,
                    region=lambda_deployment_config.region,
                    s3_bucket=lambda_s3_bucket,
                    deployment_prefix=deployment_path_prefix,
                    function_name=api_name,
                    lambda_project_dir=lambda_project_dir,
                )
            else:
                logger.debug(
                    'Function bundle is within the Lambda size limit, '
                    'removing download_extra_resources.py from the bundle')
                os.remove(
                    os.path.join(build_directory,
                                 'download_extra_resources.py'))
        logger.info('Packaging AWS Lambda project at %s ...',
                    lambda_project_dir)
        lambda_package(
            lambda_project_dir,
            lambda_deployment_config.region,
            lambda_s3_bucket,
            deployment_path_prefix,
        )
        logger.info('Deploying lambda project')
        stack_name = generate_aws_compatible_string(deployment_pb.namespace +
                                                    '-' + deployment_pb.name)
        lambda_deploy(
            lambda_project_dir,
            lambda_deployment_config.region,
            stack_name=stack_name,
        )
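
For context, here is a minimal sketch of how this function might be invoked. The SimpleNamespace stand-ins are hypothetical and only illustrate the attribute shape each argument is read for; real callers pass BentoML protobuf messages and loaded bundle metadata, and an end-to-end run would also need AWS credentials and the SAM CLI installed.

from types import SimpleNamespace

# Hypothetical stand-ins showing the attributes the function reads;
# real callers pass protobuf messages, not SimpleNamespace objects.
deployment_pb = SimpleNamespace(namespace='dev', name='my-classifier')
deployment_spec = SimpleNamespace(bento_name='IrisClassifier')
bento_service_metadata = SimpleNamespace(
    env=SimpleNamespace(python_version='3.6.10'),
    artifacts=[SimpleNamespace(artifact_type='SklearnModelArtifact')],
    apis=[SimpleNamespace(name='predict')],
)
lambda_deployment_config = SimpleNamespace(
    api_name=None,      # falsy -> deploy every API the service exposes
    memory_size=1024,   # MB allocated to each Lambda function
    timeout=60,         # seconds before Lambda terminates an invocation
    region='us-west-2',
)

_deploy_lambda_function(
    deployment_pb=deployment_pb,
    bento_service_metadata=bento_service_metadata,
    deployment_spec=deployment_spec,
    lambda_s3_bucket='my-bentoml-artifacts',  # assumed pre-existing bucket
    lambda_deployment_config=lambda_deployment_config,
    bento_path='/path/to/saved/bundle',
)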
Example #2
def deploy_ec2_service(
    deployment_pb,
    deployment_spec,
    bento_path,
    aws_ec2_deployment_config,
    s3_bucket_name,
    region,
):
    (
        sam_template_name,
        deployment_stack_name,
        repo_name,
        elb_name,
    ) = generate_ec2_resource_names(deployment_pb.namespace,
                                    deployment_pb.name)

    with TempDirectory() as project_path:
        repository_id = create_ecr_repository_if_not_exists(region, repo_name)
        registry_url, username, password = get_ecr_login_info(
            region, repository_id)
        ecr_tag = generate_docker_image_tag(repo_name,
                                            deployment_spec.bento_version,
                                            registry_url)
        build_docker_image(context_path=bento_path, image_tag=ecr_tag)
        push_docker_image_to_repository(repository=ecr_tag,
                                        username=username,
                                        password=password)

        logger.info("Generating user data")
        encoded_user_data = _make_user_data(registry_url, ecr_tag, region)

        logger.info("Making template")
        template_file_path = _make_cloudformation_template(
            project_path,
            encoded_user_data,
            s3_bucket_name,
            sam_template_name,
            elb_name,
            aws_ec2_deployment_config.ami_id,
            aws_ec2_deployment_config.instance_type,
            aws_ec2_deployment_config.autoscale_min_size,
            aws_ec2_deployment_config.autoscale_desired_capacity,
            aws_ec2_deployment_config.autoscale_max_size,
        )
        validate_sam_template(sam_template_name,
                              aws_ec2_deployment_config.region, project_path)

        logger.info("Building service")
        build_template(template_file_path, project_path,
                       aws_ec2_deployment_config.region)

        logger.info("Packaging service")
        package_template(s3_bucket_name, project_path,
                         aws_ec2_deployment_config.region)

        logger.info("Deploying service")
        deploy_template(
            deployment_stack_name,
            s3_bucket_name,
            project_path,
            aws_ec2_deployment_config.region,
        )
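
As with the Lambda example, a hedged sketch of the arguments this variant expects; the values below are made up, and the real aws_ec2_deployment_config is a protobuf config message rather than a SimpleNamespace.

from types import SimpleNamespace

# Made-up values illustrating the fields deploy_ec2_service reads
# from its config argument; the real object is a protobuf message.
aws_ec2_deployment_config = SimpleNamespace(
    region="us-west-2",
    ami_id="ami-0123456789abcdef0",  # assumed Docker-capable base image
    instance_type="t2.micro",
    autoscale_min_size=1,
    autoscale_desired_capacity=1,
    autoscale_max_size=2,
)

deploy_ec2_service(
    deployment_pb=SimpleNamespace(namespace="dev", name="my-classifier"),
    deployment_spec=SimpleNamespace(
        bento_name="IrisClassifier",
        bento_version="20210101120000_ABCDEF",
    ),
    bento_path="/path/to/saved/bundle",
    aws_ec2_deployment_config=aws_ec2_deployment_config,
    s3_bucket_name="my-bentoml-artifacts",
    region="us-west-2",
)

Note that the function receives the region twice: the bare region argument drives the ECR calls, while aws_ec2_deployment_config.region drives the SAM validate/build/package/deploy steps, so the two are presumably expected to match.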
Example #3
    def deploy_service(
        self,
        deployment_pb,
        deployment_spec,
        bento_path,
        aws_ec2_deployment_config,
        s3_bucket_name,
        region,
    ):
        sam_template_name = generate_aws_compatible_string(
            "btml-template-{namespace}-{name}".format(
                namespace=deployment_pb.namespace, name=deployment_pb.name))

        deployment_stack_name = generate_aws_compatible_string(
            "btml-stack-{namespace}-{name}".format(
                namespace=deployment_pb.namespace, name=deployment_pb.name))

        repo_name = generate_aws_compatible_string(
            "btml-repo-{namespace}-{name}".format(
                namespace=deployment_pb.namespace, name=deployment_pb.name))

        elb_name = generate_aws_compatible_string(
            "{namespace}-{name}".format(namespace=deployment_pb.namespace,
                                        name=deployment_pb.name),
            max_length=32,
        )

        with TempDirectory() as project_path:
            registry_id = _create_ecr_repo(repo_name, region)
            registry_token, registry_url = _get_ecr_password(
                registry_id, region)
            registry_username, registry_password = _get_creds_from_token(
                registry_token)

            registry_domain = registry_url.replace("https://", "")
            push_tag = f"{registry_domain}/{repo_name}"
            pull_tag = push_tag + f":{deployment_spec.bento_version}"

            logger.info("Containerizing service")
            containerize_bento_service(
                bento_name=deployment_spec.bento_name,
                bento_version=deployment_spec.bento_version,
                saved_bundle_path=bento_path,
                push=True,
                tag=push_tag,
                build_arg={},
                username=registry_username,
                password=registry_password,
            )

            logger.info("Generating user data")
            encoded_user_data = _make_user_data(registry_url, pull_tag, region)

            logger.info("Making template")
            template_file_path = _make_cloudformation_template(
                project_path,
                encoded_user_data,
                s3_bucket_name,
                sam_template_name,
                elb_name,
                aws_ec2_deployment_config.ami_id,
                aws_ec2_deployment_config.instance_type,
                aws_ec2_deployment_config.autoscale_min_size,
                aws_ec2_deployment_config.autoscale_desired_capacity,
                aws_ec2_deployment_config.autoscale_max_size,
            )
            validate_sam_template(sam_template_name,
                                  aws_ec2_deployment_config.region,
                                  project_path)

            logger.info("Building service")
            build_template(template_file_path, project_path,
                           aws_ec2_deployment_config.region)

            logger.info("Packaging service")
            package_template(s3_bucket_name, project_path,
                             aws_ec2_deployment_config.region)

            logger.info("Deploying service")
            deploy_template(
                deployment_stack_name,
                s3_bucket_name,
                project_path,
                aws_ec2_deployment_config.region,
            )
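
The inline naming above is what generate_ec2_resource_names in Examples #2 and #4 factors out. Here is a plausible reconstruction of that helper, mirroring the strings in this method; the real implementation lives in BentoML's EC2 deployment utilities and may differ in detail.

def generate_ec2_resource_names(namespace, name):
    # Mirrors the inline naming in Example #3; returned in the order
    # the callers unpack: SAM template, stack, ECR repo, load balancer.
    sam_template_name = generate_aws_compatible_string(
        "btml-template-{}-{}".format(namespace, name))
    deployment_stack_name = generate_aws_compatible_string(
        "btml-stack-{}-{}".format(namespace, name))
    repo_name = generate_aws_compatible_string(
        "btml-repo-{}-{}".format(namespace, name))
    # AWS caps classic ELB names at 32 characters
    elb_name = generate_aws_compatible_string(
        "{}-{}".format(namespace, name), max_length=32)
    return sam_template_name, deployment_stack_name, repo_name, elb_name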
Example #4
def deploy_ec2_service(
    deployment_pb,
    deployment_spec,
    bento_path,
    aws_ec2_deployment_config,
    s3_bucket_name,
    region,
):
    (
        sam_template_name,
        deployment_stack_name,
        repo_name,
        elb_name,
    ) = generate_ec2_resource_names(deployment_pb.namespace,
                                    deployment_pb.name)

    with TempDirectory() as project_path:
        repository_id = create_ecr_repository_if_not_exists(region, repo_name)
        repository_url, username, password = get_ecr_login_info(
            region, repository_id)

        registry_domain = repository_url.replace("https://", "")
        push_tag = f"{registry_domain}/{repo_name}"
        pull_tag = push_tag + f":{deployment_spec.bento_version}"

        logger.info("Containerizing service")
        containerize_bento_service(
            bento_name=deployment_spec.bento_name,
            bento_version=deployment_spec.bento_version,
            saved_bundle_path=bento_path,
            push=True,
            tag=push_tag,
            build_arg={},
            username=username,
            password=password,
        )

        logger.info("Generating user data")
        encoded_user_data = _make_user_data(repository_url, pull_tag, region)

        logger.info("Making template")
        template_file_path = _make_cloudformation_template(
            project_path,
            encoded_user_data,
            s3_bucket_name,
            sam_template_name,
            elb_name,
            aws_ec2_deployment_config.ami_id,
            aws_ec2_deployment_config.instance_type,
            aws_ec2_deployment_config.autoscale_min_size,
            aws_ec2_deployment_config.autoscale_desired_capacity,
            aws_ec2_deployment_config.autoscale_max_size,
        )
        validate_sam_template(sam_template_name,
                              aws_ec2_deployment_config.region, project_path)

        logger.info("Building service")
        build_template(template_file_path, project_path,
                       aws_ec2_deployment_config.region)

        logger.info("Packaging service")
        package_template(s3_bucket_name, project_path,
                         aws_ec2_deployment_config.region)

        logger.info("Deploying service")
        deploy_template(
            deployment_stack_name,
            s3_bucket_name,
            project_path,
            aws_ec2_deployment_config.region,
        )
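
The image-tag arithmetic shared by Examples #3 and #4 is easy to verify in isolation; a standalone snippet with made-up registry values:

# Made-up ECR values, purely to demonstrate the tag composition above
repository_url = "https://123456789012.dkr.ecr.us-west-2.amazonaws.com"
repo_name = "btml-repo-dev-my-classifier"
bento_version = "20210101120000_ABCDEF"

registry_domain = repository_url.replace("https://", "")
push_tag = "{}/{}".format(registry_domain, repo_name)
pull_tag = push_tag + ":" + bento_version

print(push_tag)
# 123456789012.dkr.ecr.us-west-2.amazonaws.com/btml-repo-dev-my-classifier
print(pull_tag)
# 123456789012.dkr.ecr.us-west-2.amazonaws.com/btml-repo-dev-my-classifier:20210101120000_ABCDEF

push_tag carries no version (containerize_bento_service presumably appends one when tagging the pushed image), while pull_tag pins the exact bento version that ends up baked into the EC2 user data.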