def create_deployment_group(self, group_name, handler):
    version = handler.add_version(datetime.now().isoformat())
    alias = lambda_.Alias(self, f'{group_name}LambdaAlias',
                          alias_name="Dev",
                          version=version)
    codedeploy.LambdaDeploymentGroup(
        self, f'Deploy{group_name}Lambda',
        alias=alias,
        deployment_config=codedeploy.LambdaDeploymentConfig.ALL_AT_ONCE,
    )
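A minimal sketch of how the helper above might be called from the owning stack. The stack class name, construct ids, and the way the handler function is built are assumptions for illustration only, and the usual aws_cdk imports (core, lambda_, codedeploy, datetime) are taken as given.

# Illustrative only: assumes create_deployment_group is defined as a method on
# this stack class (as in the snippet above) and the usual aws_cdk imports are in scope.
class ApiStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        orders_handler = lambda_.Function(
            self, 'OrdersHandler',                     # hypothetical construct id
            runtime=lambda_.Runtime.PYTHON_3_7,
            handler='orders.handler',                  # hypothetical handler module
            code=lambda_.Code.from_asset('lambda'),
        )
        # Publishes a new version, points the "Dev" alias at it, and wires up
        # an ALL_AT_ONCE CodeDeploy deployment group for that alias.
        self.create_deployment_group('Orders', orders_handler)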
def __init__(self, scope: core.Construct, id: str, demo_table: dynamodb.Table, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    bundling_options = core.BundlingOptions(
        image=_lambda.Runtime.NODEJS_12_X.bundling_docker_image,
        user="******",
        command=[
            'bash', '-c',
            'cp /asset-input/* /asset-output/ && cd /asset-output && npm test'
        ]
    )

    source_code = _lambda.Code.from_asset(
        './lambda',
        bundling=bundling_options
    )

    # create lambda function
    web_lambda = _lambda.Function(
        self, "dynamo-lambda-function",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="dynamoFunction.handler",
        code=source_code,
        environment=dict(
            TABLE_NAME=demo_table.table_name
        )
    )

    # grant the lambda full access to the demo table
    demo_table.grant_full_access(web_lambda)

    codedeploy.LambdaDeploymentGroup(
        self, "web-lambda-deployment",
        alias=web_lambda.current_version.add_alias("live"),
        deployment_config=codedeploy.LambdaDeploymentConfig.ALL_AT_ONCE
    )

    gw = _apigw.LambdaRestApi(
        self, "Gateway",
        handler=web_lambda,
        description="Endpoint for a simple Lambda-powered web service"
    )

    # add an output with a well-known name to read it from the integ tests
    self.gw_url = gw.url
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    role_arn = 'arn:aws:iam::315207712355:role/lbrole'
    role = iam.Role.from_role_arn(self, id='role_id', role_arn=role_arn)

    # The code that defines your stack goes here
    this_dir = path.dirname(__file__)

    handler = lmb.Function(self, 'Handler',
                           runtime=lmb.Runtime.PYTHON_3_7,
                           role=role,
                           handler='handler.handler',
                           code=lmb.Code.from_asset(path.join(this_dir, 'lambda')))
    alias = lmb.Alias(self, 'HandlerAlias',
                      alias_name='Current',
                      version=handler.current_version)
    gw = apigw.LambdaRestApi(
        self, 'Gateway',
        description='Endpoint for a simple Lambda-powered web service',
        handler=alias)
    failure_alarm = cloudwatch.Alarm(
        self, 'FailureAlarm',
        metric=cloudwatch.Metric(
            metric_name='5XXError',
            namespace='AWS/ApiGateway',
            dimensions={
                'ApiName': 'Gateway',
            },
            statistic='Sum',
            period=core.Duration.minutes(1)),
        threshold=1,
        evaluation_periods=1)
    codedeploy.LambdaDeploymentGroup(
        self, 'DeploymentGroup',
        alias=alias,
        deployment_config=codedeploy.LambdaDeploymentConfig.CANARY_10_PERCENT_10_MINUTES,
        alarms=[failure_alarm])

    self.url_output = core.CfnOutput(self, 'Url', value=gw.url)
def __init__(self, app: core.App, id: str, **kwargs):
    super().__init__(app, id, **kwargs)

    # [ Lambda: Code ]
    #
    # This represents the code to be supplied by the pipeline.
    self.lambda_code = lambda_.Code.from_cfn_parameters()

    # [ Lambda: Function ]
    #
    # Creates the Lambda.
    func = lambda_.Function(
        self, "Lambda",
        code=self.lambda_code,
        handler="index.handler",
        runtime=lambda_.Runtime.PYTHON_3_7,
    )

    # [ Lambda: Version ]
    #
    # Adds a version to the Lambda, named with the date the code was deployed.
    version = func.add_version(datetime.now().isoformat())

    # [ Lambda: Alias ]
    #
    # Adds an alias to the Lambda version to allow for blue-green deployment.
    alias = lambda_.Alias(self, "LambdaAlias",
                          alias_name="Prod",
                          version=version)

    # [ CodeDeploy: Deployment Group ]
    #
    # Creates the deployment group that shifts traffic to the alias.
    codedeploy.LambdaDeploymentGroup(
        self, "DeploymentGroup",
        alias=alias,
        deployment_config=codedeploy.LambdaDeploymentConfig.LINEAR_10_PERCENT_EVERY_1_MINUTE)
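For context on the from_cfn_parameters() placeholder above: the pipeline stack typically resolves it by assigning the build artifact's S3 location as CloudFormation parameter overrides on the deploy action. The sketch below is one plausible wiring under that assumption and is not part of the original example; lambda_stack, cdk_build_output, lambda_build_output, and the action/stack names are all illustrative.

# Sketch only: assumes aws_codepipeline_actions is imported as codepipeline_actions,
# lambda_stack is an instance of the stack above, and cdk_build_output /
# lambda_build_output are CodeBuild artifacts holding the synthesized template
# and the packaged Lambda code respectively.
deploy_action = codepipeline_actions.CloudFormationCreateUpdateStackAction(
    action_name="Lambda_CFN_Deploy",
    template_path=cdk_build_output.at_path("LambdaStack.template.json"),
    stack_name="LambdaDeploymentStack",
    admin_permissions=True,
    # Resolve the CfnParametersCode placeholders to the built code's S3 location.
    parameter_overrides=lambda_stack.lambda_code.assign(
        bucket_name=lambda_build_output.bucket_name,
        object_key=lambda_build_output.object_key),
    extra_inputs=[lambda_build_output])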
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    this_dir = path.dirname(__file__)

    handler = lmb.Function(self, 'Handler',
                           runtime=lmb.Runtime.PYTHON_3_7,
                           handler='handler.handler',
                           code=lmb.Code.from_asset(path.join(this_dir, 'lambda')))

    # For canary deployments
    alias = lmb.Alias(self, 'HandlerAlias',
                      alias_name='Current',
                      version=handler.current_version)

    gw = apigw.LambdaRestApi(self, 'Gateway',
                             description='Endpoint for app',
                             handler=alias)

    failure_alarm = cloudwatch.Alarm(
        self, 'FailureAlarm',
        metric=cloudwatch.Metric(
            metric_name='5XXError',
            namespace='AWS/ApiGateway',
            dimensions={
                'ApiName': 'Gateway'
            },
            statistic='Sum',
            period=core.Duration.minutes(1)),
        threshold=1,
        evaluation_periods=1)

    codedeploy.LambdaDeploymentGroup(
        self, 'Deploy',
        alias=alias,
        deployment_config=codedeploy.LambdaDeploymentConfig.CANARY_10_PERCENT_10_MINUTES,
        alarms=[failure_alarm])

    self.url_output = core.CfnOutput(self, 'Url', value=gw.url)
def __init__(self, app: core.App, id: str, config, **kwargs):
    super().__init__(app, id, **kwargs)

    self.lambda_code = lambda_.Code.from_cfn_parameters()

    func = lambda_.Function(
        self, "Lambda",
        function_name=config["Default"]["project"] + "-lambda-" + randomness,
        code=self.lambda_code,
        handler="index.handler",
        runtime=lambda_.Runtime.NODEJS_12_X,
        tracing=lambda_.Tracing.ACTIVE)

    api = apigateway.RestApi(
        self, "lambda-service",
        rest_api_name=config["Default"]["project"] + "-api-" + randomness,
        description="This service serves the lambda.",
        deploy_options={
            "logging_level": apigateway.MethodLoggingLevel.INFO,
            "tracing_enabled": True
        })

    get_lambda_integration = apigateway.LambdaIntegration(
        func, request_templates={"text/html": '{ "statusCode": "200" }'})
    api.root.add_method("GET", get_lambda_integration)  # GET /

    version = func.latest_version
    alias = lambda_.Alias(self, "LambdaAlias",
                          alias_name="Prod",
                          version=version)

    codedeploy.LambdaDeploymentGroup(
        self, "DeploymentGroup",
        alias=alias,
        deployment_config=codedeploy.LambdaDeploymentConfig.LINEAR_10_PERCENT_EVERY_1_MINUTE)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    # myDateTimeFunction lambda function
    my_datetime_lambda = _lambda.Function(
        self, "my-datetime",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="myDateTimeFunction.handler",
        code=_lambda.Code.asset("./lambda"),
        current_version_options=_lambda.VersionOptions(
            removal_policy=core.RemovalPolicy.RETAIN,
            retry_attempts=1
        )
    )

    my_datetime_lambda.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=["lambda:InvokeFunction"],
            resources=["*"]
        )
    )

    codedeploy.LambdaDeploymentGroup(
        self, "datetime-lambda-deployment",
        alias=my_datetime_lambda.current_version.add_alias("live"),
        deployment_config=codedeploy.LambdaDeploymentConfig.ALL_AT_ONCE
    )

    gw = _apigw.LambdaRestApi(
        self, "Gateway",
        handler=my_datetime_lambda,
        description="Endpoint for a simple Lambda-powered web service"
    )

    # add an output with a well-known name to read it from the integ tests
    self.gw_url = gw.url
def __init__(self, stack: core.Construct, lambda_app: codedeploy.LambdaApplication, name: str):
    """Creates the underlying Lambda, Lambda Alias, and Deployment Group
    necessary to run this Lambda in the SpamDetectionPipeline stack.

    This code assumes the Lambda's handler can be invoked by the snake case
    version of the name with `.handler` appended.

    :param stack: The stack.
    :param lambda_app: The Lambda Application that will control the deployments.
    :param name: The camel case name for this Lambda.
    """
    self.__name = name

    # Create the underlying Lambda function on the stack.
    self.__lambda = _lambda.Function(
        stack, name,
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.asset('lambda'),
        handler=_convert_camel_case_to_snake_case(name) + '.handler',
    )

    # Create the production alias to use when we want to refer to this Lambda.
    version = self.__lambda.add_version(_get_pipeline_lambda_version())
    self.__lambda_alias = _lambda.Alias(stack, name + 'Prod',
                                        version=version,
                                        alias_name='prod')

    # Create the deployment group that will be used to update the Lambda
    # based on the alias.
    codedeploy.LambdaDeploymentGroup(
        stack, name + 'DN',
        alias=self.__lambda_alias,
        application=lambda_app,
        deployment_config=codedeploy.LambdaDeploymentConfig.ALL_AT_ONCE,
    )
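The example above relies on a module-level helper _convert_camel_case_to_snake_case that is not shown. A minimal sketch of one plausible implementation, offered as an assumption rather than the original code:

import re

def _convert_camel_case_to_snake_case(name: str) -> str:
    # Hypothetical implementation: insert "_" before each interior capital and
    # lowercase the result, e.g. "SpamScorer" -> "spam_scorer", so the handler
    # string built above becomes "spam_scorer.handler".
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()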
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    # The code that defines your stack goes here
    lambda_policies = [
        iam.PolicyStatement(
            actions=[
                "logs:CreateLogStream", "logs:PutLogEvents", "logs:CreateLogGroup"
            ],
            effect=iam.Effect.ALLOW,
            resources=[
                "arn:aws:logs:" + core.Aws.REGION + ":" + core.Aws.ACCOUNT_ID + ":*"
            ]),
        iam.PolicyStatement(
            actions=["dynamodb:*"],
            effect=iam.Effect.ALLOW,
            resources=[
                "arn:aws:dynamodb:" + core.Aws.REGION + ":" + core.Aws.ACCOUNT_ID + ":*"
            ])
    ]

    table = _dynamodb.Table(
        self, 'VisitTable',
        partition_key={
            'name': 'id',
            'type': _dynamodb.AttributeType.STRING
        },
        removal_policy=core.RemovalPolicy.DESTROY,
        read_capacity=5,
        write_capacity=5,
    )

    # Modify the config.js with a CloudFormation custom resource
    modify_policy = [
        iam.PolicyStatement(
            actions=["dynamodb:*"],
            effect=iam.Effect.ALLOW,
            resources=[
                "arn:aws:dynamodb:" + core.Aws.REGION + ":" + core.Aws.ACCOUNT_ID + ":*"
            ])
    ]

    resource = _cfn.CustomResource(
        self, "VisitDataImportCustomResource",
        provider=_cfn.CustomResourceProvider.lambda_(
            _lambda.SingletonFunction(
                self, "CustomResourceSingleton",
                uuid="f7d4f730-4ee1-11e8-9c2d-fa7ae01bbebc",
                code=self.custom_resource,
                handler="index.handler",
                timeout=core.Duration.seconds(300),
                runtime=_lambda.Runtime.PYTHON_3_7,
                initial_policy=modify_policy)),
        properties={"DynamoDBTable": table.table_name})

    base_lambda = _lambda.Function(
        self, 'ApiPetclinicVisitLambda',
        handler='org.springframework.samples.petclinic.visits.StreamLambdaHandler::handleRequest',
        runtime=_lambda.Runtime.JAVA_8,
        code=self.lambda_code,
        memory_size=1024,
        timeout=core.Duration.seconds(300),
        initial_policy=lambda_policies,
        environment={
            "DYNAMODB_TABLE_NAME": table.table_name,
            "SERVER_SERVLET_CONTEXT_PATH": "/api/visit"
        })

    version = base_lambda.add_version(str(round(time.time())))
    alias = _lambda.Alias(self, 'ApiPetclinicVisitLambdaAlias',
                          alias_name='Prod',
                          version=version,
                          provisioned_concurrent_executions=5)

    _deploy.LambdaDeploymentGroup(
        self, 'ApiPetclinicVisitDeploymentGroup',
        alias=alias,
        deployment_config=_deploy.LambdaDeploymentConfig.LINEAR_10_PERCENT_EVERY_1_MINUTE)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    env = kwargs['env']
    work_dir = pathlib.Path(__file__).parents[1]

    # The steps below reuse the ECS cluster that was already created by the shared stack.
    # Get cluster name from ssm parameter
    cluster_name = ssm.StringParameter.from_string_parameter_name(
        self, "GetClusterName",
        string_parameter_name="/dev/compute/container/ecs-cluster-name"
    ).string_value

    vpc_az = ssm.StringListParameter.from_string_list_parameter_name(
        self, "GetVpcAz",
        string_list_parameter_name="/dev/network/vpc/vpc-az"
    ).string_list_value

    # using string instead of stringlist because of subnets parsing issue
    vpc_public_subnets_1 = ssm.StringParameter.from_string_parameter_name(
        self, "GetVpcPublicSubnets1",
        string_parameter_name="/dev/network/vpc/vpc-public-subnets-1"
    ).string_value
    vpc_public_subnets_2 = ssm.StringParameter.from_string_parameter_name(
        self, "GetVpcPublicSubnets2",
        string_parameter_name="/dev/network/vpc/vpc-public-subnets-2"
    ).string_value

    vpc_id = ssm.StringParameter.from_string_parameter_name(
        self, "GetVpcId",
        string_parameter_name="/dev/network/vpc/vpc-id").string_value

    ec2_vpc = ec2.Vpc.from_vpc_attributes(
        self, "GetVpc",
        availability_zones=vpc_az,
        vpc_id=vpc_id,
        public_subnet_ids=[vpc_public_subnets_1, vpc_public_subnets_2])

    # Get security group id from ssm parameter
    security_group_id = ssm.StringParameter.from_string_parameter_name(
        self, "GetSgId",
        string_parameter_name="/dev/network/vpc/security-group-id"
    ).string_value

    # Get security group from lookup
    ec2_sgp = ec2.SecurityGroup.from_security_group_id(
        self, "GetSgp",
        security_group_id=security_group_id)

    # myDateTimeFunction lambda function
    my_datetime_lambda = _lambda.Function(
        self, "my-datetime",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="myDateTimeFunction.handler",
        code=_lambda.Code.asset("./lambda"),
        current_version_options=_lambda.VersionOptions(
            removal_policy=core.RemovalPolicy.RETAIN,
            retry_attempts=1))

    my_datetime_lambda.add_to_role_policy(
        iam.PolicyStatement(effect=iam.Effect.ALLOW,
                            actions=["lambda:InvokeFunction"],
                            resources=["*"]))

    # beforeAllowTraffic lambda function
    pre_traffic_lambda = _lambda.Function(
        self, "pre-traffic",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="beforeAllowTraffic.handler",
        code=_lambda.Code.asset("./lambda"),
        environment=dict(
            NewVersion=my_datetime_lambda.current_version.function_arn))

    pre_traffic_lambda.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=["codedeploy:PutLifecycleEventHookExecutionStatus"],
            resources=["*"]))

    pre_traffic_lambda.add_to_role_policy(
        iam.PolicyStatement(effect=iam.Effect.ALLOW,
                            actions=["lambda:InvokeFunction"],
                            resources=["*"]))

    # afterAllowTraffic lambda function
    post_traffic_lambda = _lambda.Function(
        self, "post-traffic",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="afterAllowTraffic.handler",
        code=_lambda.Code.asset("./lambda"),
        environment=dict(
            NewVersion=my_datetime_lambda.current_version.function_arn))

    post_traffic_lambda.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=["codedeploy:PutLifecycleEventHookExecutionStatus"],
            resources=["*"]))

    post_traffic_lambda.add_to_role_policy(
        iam.PolicyStatement(effect=iam.Effect.ALLOW,
                            actions=["lambda:InvokeFunction"],
                            resources=["*"]))

    # create a cloudwatch event rule that invokes the canary on a schedule
    rule = events.Rule(
        self, "CanaryRule",
        schedule=events.Schedule.expression("rate(10 minutes)"),
        targets=[
            events_targets.LambdaFunction(my_datetime_lambda.current_version)
        ],
    )

    # create a cloudwatch alarm on the new version's invocations metric
    alarm = cloudwatch.Alarm(
        self, "CanaryAlarm",
        metric=my_datetime_lambda.current_version.metric_invocations(),
        threshold=0,
        evaluation_periods=2,
        datapoints_to_alarm=2,
        treat_missing_data=cloudwatch.TreatMissingData.IGNORE,
        period=core.Duration.minutes(5),
        alarm_name="CanaryAlarm")

    lambda_deployment_group = codedeploy.LambdaDeploymentGroup(
        self, "datetime-lambda-deployment",
        alias=my_datetime_lambda.current_version.add_alias("live"),
        deployment_config=codedeploy.LambdaDeploymentConfig.ALL_AT_ONCE,
        alarms=[alarm],
        auto_rollback=codedeploy.AutoRollbackConfig(deployment_in_alarm=True),
        pre_hook=pre_traffic_lambda,
        post_hook=post_traffic_lambda)

    # Pass vpc, sgp and ecs cluster name to get ecs cluster info
    ecs_cluster = ecs.Cluster.from_cluster_attributes(
        self, "GetEcsCluster",
        cluster_name=cluster_name,
        vpc=ec2_vpc,
        security_groups=[ec2_sgp])

    # Fargate Service
    task_definition = ecs.FargateTaskDefinition(
        self, "TaskDef",
        memory_limit_mib=512,
        cpu=256,
    )

    container = task_definition.add_container(
        "web",
        image=ecs.ContainerImage.from_asset(os.path.join(work_dir, "container")),
        # Build a custom health check specific to your application and add it
        # here, e.g. ping check, database check, etc.
        health_check=ecs.HealthCheck(command=["CMD-SHELL", "echo"]),
        # environment=dict(name="latest")
    )

    port_mapping = ecs.PortMapping(container_port=8000,
                                   protocol=ecs.Protocol.TCP)
    container.add_port_mappings(port_mapping)

    # Create Fargate Service
    # Current limitation: Blue/Green deployment
    # https://github.com/aws/aws-cdk/issues/1559
    service = ecs.FargateService(
        self, "Service",
        cluster=ecs_cluster,
        task_definition=task_definition,
        assign_public_ip=True,
        deployment_controller=ecs.DeploymentController(
            type=ecs.DeploymentControllerType.ECS),
        desired_count=2,
        min_healthy_percent=50)

    # Create Application LoadBalancer
    lb = elbv2.ApplicationLoadBalancer(self, "LB",
                                       vpc=ec2_vpc,
                                       internet_facing=True)

    # Add listener to the LB
    listener = lb.add_listener("Listener", port=80, open=True)

    # Default to Lambda
    listener.add_targets(
        "Lambda",
        targets=[elb_targets.LambdaTarget(my_datetime_lambda)])

    # Additionally route to container
    listener.add_targets("Fargate",
                         port=8000,
                         path_pattern="/container",
                         priority=10,
                         targets=[service])

    # add an output with a well-known name to read it from the integ tests
    self.load_balancer_dns_name = lb.load_balancer_dns_name
def __init__(self, scope: core.Construct, id: str, **kwargs):
    super().__init__(scope, id, **kwargs)

    # The code that defines your stack goes here
    this_dir = path.dirname(__file__)

    handler = lmb.Function(self, 'Handler',
                           runtime=lmb.Runtime.PYTHON_3_7,
                           handler='handler.handler',
                           code=lmb.Code.from_asset(path.join(this_dir, 'lambda')))

    alias = lmb.Alias(self, "HandlerAlias",
                      alias_name="Current",
                      version=handler.current_version)

    gw = apigw.LambdaRestApi(
        self, 'Gateway',
        description='Endpoint for a simple Lambda-powered web service',
        handler=alias,
        endpoint_types=[EndpointType.REGIONAL])

    failure_alarm = cloudwatch.Alarm(
        self, "FailureAlarm",
        alarm_name=self.stack_name + '-' + '500Alarm',
        metric=cloudwatch.Metric(
            metric_name="5XXError",
            namespace="AWS/ApiGateway",
            dimensions={
                "ApiName": "Gateway",
            },
            statistic="Sum",
            period=core.Duration.minutes(1)),
        threshold=1,
        evaluation_periods=1)

    alarm500topic = sns.Topic(self, "Alarm500Topic",
                              topic_name=self.stack_name + '-' + 'Alarm500TopicSNS')
    alarm500topic.add_subscription(
        subscriptions.EmailSubscription("*****@*****.**"))
    failure_alarm.add_alarm_action(cw_actions.SnsAction(alarm500topic))

    codedeploy.LambdaDeploymentGroup(
        self, "DeploymentGroup",
        alias=alias,
        deployment_config=codedeploy.LambdaDeploymentConfig.CANARY_10_PERCENT_10_MINUTES,
        alarms=[failure_alarm])

    # Create a dynamodb table
    table_name = self.stack_name + '-' + 'HelloCdkTable'
    table = dynamodb.Table(self, "TestTable",
                           table_name=table_name,
                           partition_key=Attribute(
                               name="id",
                               type=dynamodb.AttributeType.STRING))

    table_name_id = cr.PhysicalResourceId.of(table.table_name)

    on_create_action = AwsSdkCall(
        action='putItem',
        service='DynamoDB',
        physical_resource_id=table_name_id,
        parameters={
            'Item': {
                'id': {'S': 'HOLA_CREATE'},
                'date': {'S': datetime.today().strftime('%Y-%m-%d')},
                'epoch': {'N': str(int(time.time()))}
            },
            'TableName': table_name
        })

    on_update_action = AwsSdkCall(
        action='putItem',
        service='DynamoDB',
        physical_resource_id=table_name_id,
        parameters={
            'Item': {
                'id': {'S': 'HOLA_UPDATE'},
                'date': {'S': datetime.today().strftime('%Y-%m-%d')},
                'epoch': {'N': str(int(time.time()))}
            },
            'TableName': table_name
        })

    cr.AwsCustomResource(
        self, "TestTableCustomResource",
        on_create=on_create_action,
        on_update=on_update_action,
        policy=cr.AwsCustomResourcePolicy.from_sdk_calls(
            resources=cr.AwsCustomResourcePolicy.ANY_RESOURCE))

    # OUTPUT
    self.url_output = core.CfnOutput(self, 'Url', value=gw.url)