def test_init_sam_project(mock_call_sam, tmpdir):
    """Verify init_sam_project populates the SAM function directory.

    Sets up a stub bento bundle (containing only a requirements.txt),
    mocks every `sam` CLI invocation to succeed, then asserts that the
    generated function directory contains app.py, requirements.txt and
    __init__.py.
    """
    sam_project_dir = os.path.join(tmpdir, 'mock_sam_project')
    bundle_dir = os.path.join(tmpdir, 'mock_bento_service')
    deployment_name = 'mock_deployment'

    # Pretend every `sam` call exits 0 with canned output.
    mock_call_sam.return_value = 0, 'stdout', 'stderr'

    os.mkdir(sam_project_dir)
    os.mkdir(bundle_dir)
    # Create an empty requirements.txt so the bundle looks valid.
    with open(os.path.join(bundle_dir, 'requirements.txt'), 'w'):
        pass

    init_sam_project(
        sam_project_dir,
        bundle_dir,
        deployment_name,
        'mock_bento_name',
        ['predict'],
        'us-west-2',
    )

    function_dir = os.path.join(sam_project_dir, deployment_name)
    for expected_file in ('app.py', 'requirements.txt', '__init__.py'):
        assert os.path.isfile(os.path.join(function_dir, expected_file))
def _deploy_lambda_function(
    deployment_pb,
    bento_service_metadata,
    deployment_spec,
    lambda_s3_bucket,
    lambda_deployment_config,
    bento_path,
):
    """Build, package, and deploy a BentoML service as AWS Lambda functions.

    Generates a CloudFormation template for the deployment, initializes a
    SAM project from the bento bundle, trims/uploads each function bundle
    to fit Lambda size limits, then packages and deploys the stack.

    Args:
        deployment_pb: Deployment protobuf; `namespace` and `name` are used
            to build the S3 path prefix and stack name.
        bento_service_metadata: Bento metadata; provides python version,
            artifacts, and API list.
        deployment_spec: Deployment spec; `bento_name` is used for template
            generation and SAM project init.
        lambda_s3_bucket: S3 bucket used for packaging and extra resources.
        lambda_deployment_config: Lambda operator config (region, api_name,
            memory_size, timeout).
        bento_path: Local path to the bento bundle.

    Raises:
        BentoMLException: on Python 2 bundles, TF/Keras artifacts built with
            a Python other than 3.6, unknown API names, or a function bundle
            exceeding the hard Lambda size cap.
    """
    deployment_path_prefix = os.path.join(
        deployment_pb.namespace, deployment_pb.name
    )

    py_major, py_minor, _ = bento_service_metadata.env.python_version.split('.')
    if py_major != '3':
        raise BentoMLException(
            'Python 2 is not supported for Lambda Deployment')
    python_runtime = 'python{}.{}'.format(py_major, py_minor)

    artifact_types = [
        item.artifact_type for item in bento_service_metadata.artifacts
    ]
    if any(
        i in ['TensorflowSavedModelArtifact', 'KerasModelArtifact']
        for i in artifact_types
    ) and (py_major, py_minor) != ('3', '6'):
        # BUG FIX: the adjacent string literals previously joined without
        # separating spaces, yielding e.g. "servicesbuilt" and "yourservice"
        # in the user-facing error message.
        raise BentoMLException(
            'AWS Lambda Deployment only supports BentoML services '
            'built with Python 3.6.x. To fix this, repack your '
            'service with the right Python version '
            '(hint: pyenv/anaconda) and try again'
        )

    # Deploy either the single configured API or every API on the service.
    api_names = (
        [lambda_deployment_config.api_name]
        if lambda_deployment_config.api_name
        else [api.name for api in bento_service_metadata.apis]
    )
    raise_if_api_names_not_found_in_bento_service_metadata(
        bento_service_metadata, api_names
    )

    with TempDirectory() as lambda_project_dir:
        logger.debug(
            'Generating cloudformation template.yaml for lambda project at %s',
            lambda_project_dir,
        )
        template_file_path = _create_aws_lambda_cloudformation_template_file(
            project_dir=lambda_project_dir,
            namespace=deployment_pb.namespace,
            deployment_name=deployment_pb.name,
            deployment_path_prefix=deployment_path_prefix,
            api_names=api_names,
            bento_service_name=deployment_spec.bento_name,
            s3_bucket_name=lambda_s3_bucket,
            py_runtime=python_runtime,
            memory_size=lambda_deployment_config.memory_size,
            timeout=lambda_deployment_config.timeout,
        )
        logger.debug('Validating generated template.yaml')
        validate_sam_template(
            template_file_path,
            lambda_deployment_config.region,
            lambda_project_dir,
        )
        logger.debug(
            'Initializing lambda project in directory: %s ...',
            lambda_project_dir,
        )
        init_sam_project(
            lambda_project_dir,
            bento_path,
            deployment_pb.name,
            deployment_spec.bento_name,
            api_names,
            aws_region=lambda_deployment_config.region,
        )
        for api_name in api_names:
            build_directory = os.path.join(
                lambda_project_dir, '.aws-sam', 'build', api_name
            )
            logger.debug(
                'Checking is function "%s" bundle under lambda size '
                'limit',
                api_name,
            )
            # Since we only use s3 get object in lambda function, and
            # lambda function pack their own boto3/botocore modules,
            # we will just delete those modules from function bundle
            # directory
            delete_list = ['boto3', 'botocore']
            for name in delete_list:
                logger.debug('Remove module "%s" from build directory', name)
                shutil.rmtree(os.path.join(build_directory, name))
            total_build_dir_size = total_file_or_directory_size(
                build_directory
            )
            # Hard cap: no workaround exists above this size.
            if total_build_dir_size > LAMBDA_FUNCTION_MAX_LIMIT:
                raise BentoMLException(
                    'Build function size is over 700MB, max size '
                    'capable for AWS Lambda function'
                )
            if total_build_dir_size >= LAMBDA_FUNCTION_LIMIT:
                # Over the normal limit but under the cap: move extra
                # resources to S3 and download them at cold start.
                logger.debug(
                    'Function %s is over lambda size limit, attempting '
                    'reduce it',
                    api_name,
                )
                reduce_bundle_size_and_upload_extra_resources_to_s3(
                    build_directory=build_directory,
                    region=lambda_deployment_config.region,
                    s3_bucket=lambda_s3_bucket,
                    deployment_prefix=deployment_path_prefix,
                    function_name=api_name,
                    lambda_project_dir=lambda_project_dir,
                )
            else:
                # Within limit: the S3 download shim is unnecessary.
                logger.debug(
                    'Function bundle is within Lambda limit, removing '
                    'download_extra_resources.py file from function bundle')
                os.remove(
                    os.path.join(
                        build_directory, 'download_extra_resources.py'
                    )
                )
        logger.info(
            'Packaging AWS Lambda project at %s ...', lambda_project_dir
        )
        lambda_package(
            lambda_project_dir,
            lambda_deployment_config.region,
            lambda_s3_bucket,
            deployment_path_prefix,
        )
        logger.info('Deploying lambda project')
        stack_name = generate_aws_compatible_string(
            deployment_pb.namespace + '-' + deployment_pb.name
        )
        lambda_deploy(
            lambda_project_dir,
            lambda_deployment_config.region,
            stack_name=stack_name,
        )