def create_lambda(self, envs: EnvSettings):
    """Create the image-resize Lambda function and its REST API.

    :param envs: Environment settings providing the project name.
    :return: Tuple of (function, code, api_gateway) so callers can wire
        deployments against the CfnParameters-based code object.
    """
    # In "app only" mode the packaged artifact exists locally; otherwise the
    # code is supplied later through CloudFormation parameters.
    if self.node.try_get_context("is_app_only") == "true":
        code = Code.from_asset(path="../backend/functions/image_resize/.serverless/main.zip")
    else:
        code = Code.from_cfn_parameters()

    resource_name = f"{envs.project_name}-image-resize"
    function = Function(
        self,
        "image-resize-lambda",
        function_name=resource_name,
        code=code,
        handler="index.handler",
        runtime=Runtime.NODEJS_12_X,
        memory_size=512,
        timeout=Duration.seconds(30),
        tracing=Tracing.ACTIVE,
    )
    api_gateway = LambdaRestApi(
        self,
        "ImageResizeLambdaApi",
        rest_api_name=resource_name,
        handler=function,
    )
    return function, code, api_gateway
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """Wire a source Lambda to a target Lambda through a custom EventBridge bus."""
    super().__init__(scope, construct_id, **kwargs)

    self.event_bus = EventBus(scope=self, id='CustomEventBus', event_bus_name='CustomEventBus')

    # Source function: allowed to publish events onto the custom bus.
    # (The original used f-strings with no placeholders; plain literals suffice.)
    self.source = Function(
        scope=self,
        id='SourceFunction',
        function_name='SourceFunction',
        code=Code.from_asset(path='./code_source/'),
        handler='index.handler',
        runtime=Runtime.PYTHON_3_6,
    )
    self.source.add_to_role_policy(statement=PolicyStatement(
        actions=['events:PutEvents'],
        resources=[self.event_bus.event_bus_arn]))

    """ Define rule. """
    self.rule = Rule(
        scope=self,
        id='EventBusRule',
        description='Sample description.',
        enabled=True,
        event_bus=self.event_bus,
        event_pattern=EventPattern(detail={
            'Domain': ["MedInfo"],
            'Reason': ["InvokeTarget"]
        }),
        rule_name='EventBusRule',
    )

    """ Add target. """
    # Bind the handler to a local first: the original rebound ``self.target``
    # from a Function to a LambdaFunction, which was confusing and hid the
    # handler function behind the rule-target wrapper.
    target_function = Function(
        scope=self,
        id='TargetFunction',
        function_name='TargetFunction',
        code=Code.from_asset(path='./code_target/'),
        handler='index.handler',
        runtime=Runtime.PYTHON_3_6,
    )
    self.target: Union[IRuleTarget, LambdaFunction] = LambdaFunction(handler=target_function)
    self.rule.add_target(target=self.target)
def _create_layers(self):
    """Build each layer under LAYERS_DIR into BUILD_FOLDER and register a LayerVersion."""
    # Always start from a clean build directory.
    if os.path.isdir(BUILD_FOLDER):
        shutil.rmtree(BUILD_FOLDER)
    os.mkdir(BUILD_FOLDER)

    for layer_name in os.listdir(LAYERS_DIR):
        source_dir = os.path.join(LAYERS_DIR, layer_name)
        target_dir = os.path.join(BUILD_FOLDER, layer_name)
        shutil.copytree(source_dir, target_dir)

        requirements_path = os.path.join(target_dir, "requirements.txt")
        if os.path.isfile(requirements_path):
            # Install third-party packages into the directory layout the
            # Lambda Python runtime expects for layers.
            site_packages = os.path.join(target_dir, "python", "lib", "python3.8", "site-packages")
            subprocess.check_output(["pip", "install", "-r", requirements_path, "-t", site_packages])

        clean_pycache()

        self.layers[layer_name] = LayerVersion(
            self,
            layer_name,
            layer_version_name=f"movies-{layer_name}",
            code=Code.from_asset(path=target_dir),
            compatible_runtimes=[Runtime.PYTHON_3_8],
        )
def init_lambda(self):
    """Package the Lambda@Edge function and return a published Version."""
    code_dir = install_lambda_code_requirements()
    # Keep the bundle lean: strip local tooling and packaging artifacts.
    excluded = [
        ".env",
        "__main*",
        "*.dist-info",
        "bin",
        "requirements.txt",
    ]
    lambda_code = Code.from_asset(str(code_dir), exclude=excluded)

    lambda_function = Function(
        self,
        "lambda",
        code=lambda_code,
        handler="main.handler",
        runtime=Runtime.PYTHON_3_8,
    )
    # Lambda@Edge additionally requires the edgelambda service principal
    # to be able to assume the execution role.
    edge_statement = PolicyStatement(
        actions=["sts:AssumeRole"],
        principals=[ServicePrincipal("edgelambda.amazonaws.com")],
    )
    lambda_function.role.assume_role_policy.add_statements(edge_statement)

    version = Version(self, "version", lambda_=lambda_function)
    apply_removal_policy(lambda_function, version, lambda_function.role)
    return version
def __init__(self, scope: cdk.Construct, construct_id: str, lambda_context: str, **kwargs) -> None:
    """Create a Lambda function configured entirely from CDK context values.

    :param lambda_context: Context key whose dict holds the fn_* settings.
    """
    super().__init__(scope, construct_id)

    cfg = dict(self.node.try_get_context(lambda_context))

    fn = Function(
        self,
        cfg["fn_name"],
        function_name=cfg["fn_name"],
        runtime=Runtime.PYTHON_3_8,
        handler=cfg["fn_handler"],
        code=Code.from_asset(cfg["fn_path"]),
        tracing=Tracing.ACTIVE,
        # Retain old versions so the alias can roll back safely.
        current_version_options={"removal_policy": cdk.RemovalPolicy.RETAIN},
        retry_attempts=cfg["fn_retry_attempts"],
        timeout=Duration.seconds(cfg["fn_timeout"]),
        reserved_concurrent_executions=cfg["fn_reserved_concurrency"],
    )
    alias = fn.current_version.add_alias(cfg["fn_alias"])

    # Outputs
    cdk.CfnOutput(self, cfg["fn_name"] + 'Arn', value=fn.function_arn)

    self._function = fn
    self._function_alias = alias
def __init__(self, scope: Stack):
    """Lambda function bundled with a layer carrying unit-testing dependencies."""
    prefix = TestingStack.global_prefix()
    # These dependencies are required for running unit tests inside lambda functions.
    test_dependencies = {
        # Pytest is used for running actual unit tests.
        'pytest': PackageVersion.from_string_version('6.2.5'),
        # Pook is used for HTTP mocking, therefore it is also needed here.
        'pook': PackageVersion.from_string_version('1.0.1'),
        # Not sure about this dependency. Lambda runtime throws errors if its missing.
        'aws-cdk.core': PackageVersion.from_string_version('1.99.0'),
        # This dependency should be installed with 'pook' since it depends on
        # 'jsonschema' which depends on this. For some reason it doesn't.
        # Tests would fail with import error otherwise.
        'importlib-resources': PackageVersion.from_string_version('5.4.0'),
    }
    super().__init__(
        scope=scope,
        id=f'{prefix}FunctionWithUnitTests',
        code=Code.from_asset(root),
        handler='handler.handler',
        runtime=Runtime.PYTHON_3_8,
        timeout=Duration.minutes(5),
        memory_size=512,
        layers=[
            Layer(
                scope=scope,
                name=f'{prefix}TestingLayerWithUnitTests',
                dependencies=test_dependencies,
            )
        ],
    )
def __init__(self, scope: Construct, id: str, *, deployment: Deployment, **kwargs) -> None:
    """Preview site: S3 + CloudFront fronted by an index-redirect Lambda@Edge."""
    super().__init__(scope, id, **kwargs)

    Tags.of(self).add("Application", self.application_name)
    Tags.of(self).add("Deployment", deployment.value)

    redirect_func = lambda_edge.create_function(
        self,
        f"Preview{deployment.value}IndexRedirect",
        runtime=Runtime.NODEJS_10_X,
        handler="index.handler",
        code=Code.from_asset("./lambdas/preview-redirect"),
    )

    # Run the redirect on every origin request.
    associations = [
        LambdaFunctionAssociation(
            event_type=LambdaEdgeEventType.ORIGIN_REQUEST,
            lambda_function=redirect_func,
        ),
    ]
    s3_cloud_front = S3CloudFront(
        self,
        "S3CloudFront",
        subdomain_name=self.subdomain_name,
        error_folder="/errors",
        lambda_function_associations=associations,
    )
    S3CloudFrontPolicy(
        self,
        "S3cloudFrontPolicy",
        s3_cloud_front=s3_cloud_front,
    )
def __init__(self, scope: Stack):
    """Testing function that reports boto3/botocore versions via TestingLayer1."""
    # Inline handler source: imports the layer modules and returns SDK versions.
    inline_source = (
        'from b_lambda_layer_common import api_gateway\n'
        'from b_lambda_layer_common import exceptions\n'
        'from b_lambda_layer_common import ssm\n'
        'from b_lambda_layer_common import util\n'
        'import boto3\n'
        'import botocore\n'
        '\n\n'
        'def handler(*args, **kwargs):\n'
        ' return dict(\n'
        ' Boto3Version=boto3.__version__,\n'
        ' BotocoreVersion=botocore.__version__,\n'
        ' )'
        '\n'
    )
    super().__init__(
        scope=scope,
        id=f'{TestingStack.global_prefix()}TestingFunction1',
        code=Code.from_inline(inline_source),
        handler='index.handler',
        # Ensure Python 3.8 matches everywhere.
        runtime=Runtime.PYTHON_3_8,
        layers=[
            Layer(
                scope=scope,
                name=f'{TestingStack.global_prefix()}TestingLayer1',
                dependencies={
                    'boto3': PackageVersion.from_string_version('1.16.35'),
                    'botocore': PackageVersion.from_string_version('1.19.35'),
                },
            )
        ],
    )
def __init__(self, app: App, id: str, txt: str, env: dict, policies: PolicyStatement,
             domain: str, hosted_zone_id: str) -> None:
    """Singleton Lambda plus a one-shot EventBridge rule firing ~3 minutes from now.

    :param txt: Inline Python source for the Lambda handler.
    :param env: Environment variables (HOSTED_ZONE_ID is injected here).
    :param policies: Statements attached to the function role via an inline policy.
    """
    super().__init__(app, id)
    env['HOSTED_ZONE_ID'] = hosted_zone_id
    self.function = SingletonFunction(
        self,
        '{}Function'.format(id),  # redundant nested .format() collapsed; same id
        uuid=str(uuid4()),
        code=Code.inline(txt),
        runtime=Runtime('python3.7', supports_inline_code=True),
        handler='index.handler',
        environment=env)
    policy = Policy(self, '{}Policy'.format(id))
    self.function.role.attach_inline_policy(policy)
    policy.add_statements(policies)

    rule_target = LambdaFunction(self.function)
    # BUG FIX: EventBridge cron expressions are evaluated in UTC. The original
    # used datetime.now() (local time), so on any non-UTC host the fully-dated
    # cron below would fire at the wrong time — or never, since it only
    # matches a single calendar minute.
    run_time = datetime.utcnow() + timedelta(minutes=3)
    run_schedule = Schedule.cron(
        year=str(run_time.year),
        month=str(run_time.month),
        day=str(run_time.day),
        hour=str(run_time.hour),
        minute=str(run_time.minute))
    self.rule = Rule(
        self,
        '{}Rule'.format(id),
        enabled=True,
        schedule=run_schedule,
        targets=[rule_target])
def __init__(self, scope: Construct, construct_id: str, env: Environment) -> None:
    """Container-image API Lambda in the core VPC, backed by the Smol table."""
    super().__init__(scope, construct_id, env=env)

    smol_table = SmolTable(self, "SmolTable", table_name=TABLE_NAME)

    vpc = Vpc.from_lookup(self, "CoreVPC", vpc_name=VPC_NAME)
    subnets = SubnetSelection(
        one_per_az=True,
        subnet_type=SubnetType.PRIVATE,
    )

    api_function = Function(
        self,
        "SmolAPI",
        code=Code.from_asset_image(directory=abspath("./")),
        environment={
            "CAPTCHA_KEY": environ["CAPTCHA_KEY"],
            "SAFE_BROWSING_KEY": environ["SAFE_BROWSING_KEY"],
        },
        function_name=FUNCTION_NAME,
        handler=Handler.FROM_IMAGE,
        log_retention=RetentionDays.ONE_WEEK,
        memory_size=MEMORY_ALLOCATION,
        reserved_concurrent_executions=RESERVED_CONCURRENCY,
        runtime=Runtime.FROM_IMAGE,
        timeout=Duration.seconds(TIMEOUT_SEC),
        tracing=Tracing.ACTIVE,
        vpc=vpc,
        vpc_subnets=subnets,
    )

    # Least-privilege table access: describe plus single-item get/put.
    for action in ("dynamodb:DescribeTable", "dynamodb:GetItem", "dynamodb:PutItem"):
        smol_table.table.grant(api_function, action)

    SmolTarget(self, "SmolTarget", api_function, API_HOST)
def __init__(self, scope: Stack):
    """Testing function exercising jose/urllib3 and an SSM parameter from the layer."""
    # Inline handler source: touches the layer's SSM helper and reports versions.
    inline_source = (
        'import urllib3\n'
        'from jose import jwk, jwt\n'
        'from jose.utils import base64url_decode\n'
        'from b_lambda_layer_common import api_gateway\n'
        'from b_lambda_layer_common import exceptions\n'
        'from b_lambda_layer_common import util\n'
        'from b_lambda_layer_common.ssm.ssm_parameter import SSMParameter\n'
        'import boto3\n'
        'import botocore\n'
        '\n\n'
        'def handler(*args, **kwargs):\n'
        ' SSMParameter("TestSSMParam")\n\n'
        ' return dict(\n'
        ' Boto3Version=boto3.__version__,\n'
        ' BotocoreVersion=botocore.__version__,\n'
        ' )'
        '\n'
    )
    super().__init__(
        scope=scope,
        id=f'{TestingStack.global_prefix()}TestingFunction3',
        code=Code.from_inline(inline_source),
        handler='index.handler',
        # Ensure Python 3.8 matches everywhere.
        runtime=Runtime.PYTHON_3_8,
        layers=[
            Layer(
                scope=scope,
                name=f'{TestingStack.global_prefix()}TestingLayer3',
                dependencies={
                    'python-jose': PackageVersion.from_string_version('3.3.0')
                },
            )
        ],
    )
def api_lambda_function(scope, name, handler, apigw, path, method, layer, tables, code="./backend"):
    """Create an API-backed Lambda and register it as an HTTP API route.

    :param layer: List of Lambda layers attached to the function.
    :param tables: DynamoDB tables; tables[0] is exported as POLL_TABLE.
    :param code: Path to the function source asset.
    :return: The created Function.
    """
    _lambda = Function(
        scope,
        name,
        handler=handler,
        runtime=Runtime.PYTHON_3_8,
        # Code.asset() is deprecated; use from_asset() as the rest of the
        # codebase already does.
        code=Code.from_asset(code),
        tracing=Tracing.ACTIVE,
        layers=layer,
    )
    _lambda.add_environment("POLL_TABLE", tables[0].table_name)
    _lambda.add_environment("MAIN_PAGE_GSI", "main_page_gsi")

    apigw.add_routes(
        path=path,
        methods=[method],
        integration=LambdaProxyIntegration(handler=_lambda),
    )
    return _lambda
def __init__(
        self,
        app: App,
        id: str,
        code_txt: str,
        runtime: str,
        handler: str,
        env: dict,
        policy: str) -> None:
    """Lambda function with an eagerly-created ("non-lazy") execution role.

    :param code_txt: Inline Python source for the handler.
    :param env: Environment variables for the function.
    :param policy: Added to initial_policy.
        NOTE(review): annotated ``str`` but passed where a PolicyStatement
        is expected — confirm the annotation.
    """
    super().__init__(app, id)
    function_role = Role(
        self,
        'NonLazyRole',
        assumed_by=ServicePrincipal('lambda.amazonaws.com'))
    self.function = Function(
        self,
        # BUG FIX: the original wrote 'Function'.format('{}'.format(id)) —
        # the outer format string has no placeholder, so the id was silently
        # discarded and every instance got the literal construct id 'Function'
        # (compare the working '{}Function'.format(...) pattern elsewhere in
        # this file). Note this changes the CloudFormation logical id.
        '{}Function'.format(id),
        code=Code.inline(code_txt),
        # NOTE(review): the ``runtime`` and ``handler`` parameters are
        # accepted but unused (python3.7 / index.create are hard-coded) —
        # confirm whether that is intentional.
        runtime=Runtime('python3.7', supports_inline_code=True),
        handler='index.create',
        environment=env,
        initial_policy=[policy],
        tracing=Tracing.ACTIVE,
        role=function_role
    )
def __init__(self, scope: Construct, id: str, *, deployment: Deployment, **kwargs) -> None:
    """Redirect sites: one CloudFront distribution per subdomain with its own edge lambda."""
    super().__init__(scope, id, **kwargs)

    Tags.of(self).add("Application", self.application_name)
    Tags.of(self).add("Deployment", deployment.value)

    site_bucket = Bucket(
        self,
        "Site",
        block_public_access=BlockPublicAccess.BLOCK_ALL,
    )
    logs_bucket = Bucket(
        self,
        "AccessLogs",
        encryption=BucketEncryption.S3_MANAGED,
        block_public_access=BlockPublicAccess.BLOCK_ALL,
    )

    for subdomain in self.subdomain_names:
        version = lambda_edge.create_function(
            self,
            f"Redirect-{subdomain}-{deployment.value}",
            runtime=Runtime.NODEJS_10_X,
            handler="index.handler",
            code=Code.from_asset(f"./lambdas/redirect-{subdomain}"),
        )
        if subdomain == "grfsearch":
            # grfsearch uses the V2 distribution and forwards its query string.
            S3CloudFrontV2(
                self,
                f"S3CloudFront-{subdomain}",
                subdomain_name=subdomain,
                bucket_site=site_bucket,
                bucket_access_logs=logs_bucket,
                edge_lambdas=[
                    EdgeLambda(
                        event_type=LambdaEdgeEventType.ORIGIN_REQUEST,
                        function_version=version,
                    ),
                ],
                forward_query_string=True,
                forward_query_string_cache_keys=["do", "q"],
            )
        else:
            S3CloudFront(
                self,
                f"S3CloudFront-{subdomain}",
                subdomain_name=subdomain,
                bucket_site=site_bucket,
                bucket_access_logs=logs_bucket,
                lambda_function_associations=[
                    LambdaFunctionAssociation(
                        event_type=LambdaEdgeEventType.ORIGIN_REQUEST,
                        lambda_function=version,
                    ),
                ],
            )
def __init__(self, scope: Construct):
    """Stack with two testing functions: one using the common layer, one bare."""
    super().__init__(scope=scope)

    # Inline handler: imports the layer modules and reports SDK versions.
    inline_source = (
        'from b_lambda_layer_common import api_gateway\n'
        'from b_lambda_layer_common import exceptions\n'
        'from b_lambda_layer_common import ssm\n'
        'from b_lambda_layer_common import util\n'
        'import boto3\n'
        'import botocore\n'
        '\n\n'
        'def handler(*args, **kwargs):\n'
        ' return dict(\n'
        ' Boto3Version=boto3.__version__,\n'
        ' BotocoreVersion=botocore.__version__,\n'
        ' )'
        '\n'
    )
    function = Function(
        scope=self,
        id=f'{self.global_prefix()}TestingFunction',
        code=Code.from_inline(inline_source),
        handler='index.handler',
        runtime=Runtime.PYTHON_3_6,
        layers=[
            Layer(
                scope=self,
                name=f'{self.global_prefix()}TestingLayer',
                dependencies={
                    'boto3': PackageVersion.from_string_version('1.16.35'),
                    'botocore': PackageVersion.from_string_version('1.19.35'),
                },
            )
        ],
    )
    self.add_output(self.LAMBDA_FUNCTION_NAME_KEY, value=function.function_name)

    # Create another function that is not using boto3.
    Function(
        scope=self,
        id=f'{self.global_prefix()}TestingFunction2',
        code=Code.from_inline('def handler(*args, **kwargs): return 200'),
        handler='index.handler',
        runtime=Runtime.PYTHON_3_6,
        layers=[Layer(scope=self, name=f'{self.global_prefix()}TestingLayer2')],
    )
def __code(self) -> Code:
    """
    Gets source code for the lambda function.

    NOTE(review): the original docstring said "(and caches)", but no caching
    is visible in this method — confirm whether a caller memoizes the result.

    :return: Lambda function source code (as an asset).
    """
    # Function-scope import, as in the original; resolves the asset root lazily.
    from .source import root
    return Code.from_asset(root)
def __init__(self, scope: Stack):
    """Minimal testing function with an empty (dependency-free) layer."""
    prefix = TestingStack.global_prefix()
    super().__init__(
        scope=scope,
        id=f'{prefix}TestingFunction2',
        code=Code.from_inline('def handler(*args, **kwargs): return 200'),
        handler='index.handler',
        runtime=Runtime.PYTHON_3_6,
        layers=[Layer(scope=scope, name=f'{prefix}TestingLayer2')],
    )
def layer_for(self, name: str, base: str, runtimes: List[Runtime]):
    """Build and return a LayerVersion for *base* using the configured bundling."""
    bundling = self._get_bundling(
        base,
        source_path="python/lib/python3.8/site-packages",
    )
    asset = Code.from_asset(str(self.source_path), bundling=bundling)
    return LayerVersion(self, name, code=asset, compatible_runtimes=runtimes)
def _create_lambdas(self):
    """Create every Lambda under LAMBDAS_DIR plus its event sources and cron rules.

    Walks LAMBDAS_DIR for Python packages (folders containing __init__.py);
    each becomes a function named "<parent>-<folder>" configured from
    self.lambdas_config, stored in self.lambdas.
    """
    clean_pycache()
    for root, dirs, files in os.walk(LAMBDAS_DIR):
        for f in files:
            # Only folders that are Python packages count as lambdas.
            if f != "__init__.py":
                continue
            parent_folder = os.path.basename(os.path.dirname(root))
            lambda_folder = os.path.basename(root)
            name = f"{parent_folder}-{lambda_folder}"
            lambda_config = self.lambdas_config[name]
            # Resolve configured layer names to the LayerVersion objects.
            layers = []
            for layer_name in lambda_config["layers"]:
                layers.append(self.layers[layer_name])
            # Per-function role: configured policies + basic execution (logs).
            lambda_role = Role(
                self,
                f"{name}_role",
                assumed_by=ServicePrincipal(service="lambda.amazonaws.com")
            )
            for policy in lambda_config["policies"]:
                lambda_role.add_to_policy(policy)
            lambda_role.add_managed_policy(
                ManagedPolicy.from_aws_managed_policy_name("service-role/AWSLambdaBasicExecutionRole"))
            lambda_args = {
                "code": Code.from_asset(root),
                "handler": "__init__.handle",
                "runtime": Runtime.PYTHON_3_8,
                "layers": layers,
                "function_name": name,
                "environment": lambda_config["variables"],
                "role": lambda_role,
                "timeout": Duration.seconds(lambda_config["timeout"]),
                "memory_size": lambda_config["memory"],
            }
            # Reserved concurrency is optional per function.
            if "concurrent_executions" in lambda_config:
                lambda_args["reserved_concurrent_executions"] = lambda_config["concurrent_executions"]
            self.lambdas[name] = Function(self, name, **lambda_args)
    # Queue-driven handler plus two scheduled updater crons.
    self.lambdas["sqs_handlers-post_anime"].add_event_source(SqsEventSource(self.post_anime_queue))
    Rule(
        self,
        "titles_updater",
        schedule=Schedule.cron(hour="2", minute="10"),
        targets=[LambdaFunction(self.lambdas["crons-titles_updater"])]
    )
    Rule(
        self,
        "episodes_updater",
        schedule=Schedule.cron(hour="4", minute="10"),
        targets=[LambdaFunction(self.lambdas["crons-episodes_updater"])]
    )
def __init__(self, scope: Stack, name: str):
    """Layer whose code asset is the shared ``root`` directory."""
    self.__scope = scope
    super().__init__(
        scope=scope,
        id=name,
        layer_version_name=name,
        code=Code.from_asset(root),
        compatible_runtimes=self.runtimes(),
    )
def mock_layer_init(
        self,
        scope: constructs.Construct,
        id: builtins.str,
        *,
        code: Code,
        **kwargs
) -> None:
    """Patched LayerVersion.__init__ that swaps the real code asset for an empty dir.

    Overriding the layer code prevents building with docker (a long running
    operation); the runtime list is also pinned to Python 3.7 to match.
    """
    with TemporaryDirectory() as tmpdirname:
        kwargs.update(
            code=Code.from_asset(path=tmpdirname),
            compatible_runtimes=[Runtime.PYTHON_3_7],
        )
        props = LayerVersionProps(**kwargs)
        jsii.create(LayerVersion, self, [scope, id, props])
def _create_lambda_fn(self, envs: EnvSettings, memory_size: int, queue: Queue):
    """Create one data-processing worker (sized by *memory_size*) fed from *queue*.

    :return: Tuple of (function, code) so deployments can target the code object.
    """
    # Packaged artifact in app-only mode; CloudFormation parameters otherwise.
    if self.node.try_get_context("is_app_only") == "true":
        code = Code.from_asset(path="../backend/functions/worker/.serverless/main.zip")
    else:
        code = Code.from_cfn_parameters()

    env_vars = {
        "AWS_STORAGE_BUCKET_NAME": self.app_bucket.bucket_name,
        "IMAGE_SCRAPING_FETCH_TIMEOUT": "15",
        "AWS_IMAGE_STORAGE_BUCKET_NAME": self.resize_lambda_image_bucket.bucket_name,
        "AWS_IMAGE_STATIC_URL": self.resize_lambda_image_bucket.bucket_website_url,
        "BACKEND_URL": self.backend_url,
        "LAMBDA_AUTH_TOKEN": self.lambda_auth_token.secret_value.to_string(),
    }
    function = Function(
        self,
        f"data-processing-worker-{memory_size}",
        function_name=f"{envs.project_name}-data-processing-{memory_size}",
        code=code,
        runtime=Runtime.PYTHON_3_8,
        handler="handler.main",
        environment=env_vars,
        memory_size=memory_size,
        timeout=Duration.seconds(300),
        tracing=Tracing.ACTIVE,
    )

    # One message per invocation; the role needs read/write on both buckets.
    function.add_event_source(SqsEventSource(queue, batch_size=1))
    self.app_bucket.grant_read_write(function.role)
    self.resize_lambda_image_bucket.grant_read_write(function.role)
    return function, code
def __init__(self, scope: Stack):
    """Stack containing a hello-world function backed by the Twilio layer."""
    super().__init__(scope=scope, id='TestingStack', stack_name='TestingStack')

    Function(
        scope=self,
        id='TestingFunction',
        code=Code.from_inline('def handler(): return "Hello World!"'),
        handler='index.handler',
        runtime=Runtime.PYTHON_3_6,
        layers=[TwilioLayer(self, 'TestingTwilioLayer')],
    )
def deploy_aws_ecs_public_dns(self):
    """Deploy the Lambda that keeps public DNS records in sync with ECS tasks."""
    zip_path = join(dirname(dirname(__file__)), 'build', 'aws-ecs-public-dns.zip')
    func = Function(
        self._stack,
        'public_dns',
        runtime=Runtime.NODEJS_12_X,
        handler='src/update-task-dns.handler',
        memory_size=128,
        code=Code.from_asset(path=zip_path),
    )
    self._tag_it(func)
    # Grant the permissions needed to update the DNS records.
    func.add_to_role_policy(statement=self.get_public_dns_policy_statement())
    self.create_event_rule(func)
def __init__(self, scope: Stack):
    """Testing function demonstrating the layer's skip_invocation decorator."""
    inline_source = (
        'from b_lambda_layer_common.util.skip_invocation import skip_invocation\n'
        '\n\n'
        '@skip_invocation(determinator="heartbeat")\n'
        'def handler(event, context):\n'
        ' return event'
    )
    super().__init__(
        scope=scope,
        id=f'{TestingStack.global_prefix()}TestingFunction4',
        code=Code.from_inline(inline_source),
        handler='index.handler',
        runtime=Runtime.PYTHON_3_6,
        layers=[Layer(scope=scope, name=f'{TestingStack.global_prefix()}TestingLayer4')],
    )
def create_dependencies_layer(self) -> LayerVersion:
    """
    Creates a lambda layer containing the external packages (pyotp, requests)
    which are required for the secret rotation.

    :return: The LayerVersion wrapping the installed dependencies.
    """
    requirements_file = 'lambda_layers/external_dependencies/requirements.txt'
    output_dir = 'lambda_layers/external_dependencies'
    # Pass an explicit argv list instead of f'...'.split(): splitting on
    # whitespace breaks if either path ever contains a space, and the list
    # form is the recommended, quoting-safe way to invoke subprocesses.
    subprocess.check_call([
        'pip3', 'install', '--upgrade',
        '-r', requirements_file,
        '-t', f'{output_dir}/python',
    ])
    layer_id = 'external-dependencies'
    layer_code = Code.from_asset(output_dir)
    return LayerVersion(self, layer_id, code=layer_code)
def __init__(
    self,
    scope: Construct,
    id: str,
    *,
    deployment: Deployment,
    additional_fqdns: Optional[List[str]] = None,
    **kwargs,
) -> None:
    """Bananas CDN: CloudFront over S3 with an origin-request redirect lambda."""
    super().__init__(scope, id, **kwargs)

    Tags.of(self).add("Application", self.application_name)
    Tags.of(self).add("Deployment", deployment.value)

    redirect = lambda_edge.create_function(
        self,
        f"BananasCdnRedirect{deployment.value}",
        runtime=Runtime.NODEJS_10_X,
        handler="index.handler",
        code=Code.from_asset("./lambdas/bananas-cdn"),
    )

    cdn = S3CloudFront(
        self,
        "S3CloudFront",
        subdomain_name=self.subdomain_name,
        error_folder="/errors",
        lambda_function_associations=[
            LambdaFunctionAssociation(
                event_type=LambdaEdgeEventType.ORIGIN_REQUEST,
                lambda_function=redirect,
            ),
        ],
        price_class=PriceClass.PRICE_CLASS_ALL,
        additional_fqdns=additional_fqdns,
        # OpenTTD client doesn't support HTTPS
        viewer_protocol_policy=ViewerProtocolPolicy.ALLOW_ALL,
    )
    self.bucket = cdn.bucket_site

    S3CloudFrontPolicy(
        self,
        "S3cloudFrontPolicy",
        s3_cloud_front=cdn,
        with_s3_get_object_access=True,
    )
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """Bucket that invokes a Lambda for every object-created event."""
    super().__init__(scope, construct_id, **kwargs)

    workshop_bucket = Bucket(
        scope=self,
        id='WorkshopBucketWithCDK',
        bucket_name='cloudvisor-workshop-bucket-with-cdk',
    )
    workshop_function = Function(
        scope=self,
        id='WorkshopFunctionWithCDK',
        function_name='WorkshopFunctionWithCDK',
        runtime=Runtime.PYTHON_3_6,
        handler='index.handler',
        code=Code.from_inline('def handler(*args, **kwargs): print(args); return 200'),
    )
    workshop_bucket.add_object_created_notification(LambdaDestination(workshop_function))
def mock_lambda_init(
        self,
        scope: constructs.Construct,
        id: builtins.str,
        *,
        code: Code,
        handler: builtins.str,
        runtime: Runtime,
        **kwargs
) -> None:
    """Patched Function.__init__ replacing the code asset with an inline stub.

    Overriding the code prevents building with docker (a long running
    operation); the runtime is pinned to one that supports inline code.
    """
    stub_props = FunctionProps(
        code=Code.from_inline("return"),
        handler=handler,
        runtime=Runtime.PYTHON_3_7,
        **kwargs
    )
    jsii.create(Function, self, [scope, id, stub_props])
def _create_lambdas(self):
    """Create every configured Lambda found under LAMBDAS_DIR, plus the cron rule."""
    for root, dirs, files in os.walk(LAMBDAS_DIR):
        for file_name in files:
            # A folder is a lambda only if it is a Python package.
            if file_name != "__init__.py":
                continue

            parent = os.path.basename(os.path.dirname(root))
            folder = os.path.basename(root)
            name = f"{parent}-{folder}"
            config = self.lambdas_config[name]

            layers = [self.layers[layer_name] for layer_name in config["layers"]]

            # Per-function role: configured policies + basic execution (logs).
            role = Role(
                self,
                f"{name}_role",
                assumed_by=ServicePrincipal(service="lambda.amazonaws.com"),
            )
            for statement in config["policies"]:
                role.add_to_policy(statement)
            role.add_managed_policy(
                ManagedPolicy.from_aws_managed_policy_name(
                    "service-role/AWSLambdaBasicExecutionRole"))

            self.lambdas[name] = Function(
                self,
                name,
                code=Code.from_asset(root),
                handler="__init__.handle",
                runtime=Runtime.PYTHON_3_8,
                layers=layers,
                function_name=name,
                environment=config["variables"],
                role=role,
                timeout=Duration.seconds(config["timeout"]),
                memory_size=config["memory"],
            )

    Rule(
        self,
        "update_eps",
        schedule=Schedule.cron(hour="2", minute="10"),
        targets=[LambdaFunction(self.lambdas["cron-update_eps"])],
    )