def __init__(self, scope: core.Construct, id: str, sns_topic_arn: str, **kwargs) -> None:
    """Wire an XRay-traced publisher/subscriber Lambda pair around SNS topics.

    :param sns_topic_arn: ARN of an existing, externally-owned topic that
        triggers the publisher Lambda.
    """
    super().__init__(scope, id, **kwargs)

    # Topic owned by this stack; the publisher Lambda pushes messages onto it.
    topic = sns.Topic(self, 'TheXRayTracerSnsTopic',
                      display_name='The XRay Tracer CDK Pattern Topic')

    # Publisher: receives events and publishes to the local topic.
    publisher_fn = _lambda.Function(
        self, "snsLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="sns_publish.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE,
        environment={"TOPIC_ARN": topic.topic_arn})
    topic.grant_publish(publisher_fn)

    # Hook the publisher up to the externally-owned topic passed in by ARN.
    apigw_topic = sns.Topic.from_topic_arn(self, 'SNSTopic', sns_topic_arn)
    apigw_topic.add_subscription(subscriptions.LambdaSubscription(publisher_fn))

    # Subscriber: consumes whatever lands on the local topic.
    subscriber_fn = _lambda.Function(
        self, "snsSubscriptionLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="sns_subscribe.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE)
    topic.add_subscription(subscriptions.LambdaSubscription(subscriber_fn))
def __init__(self, scope: core.Construct, id: str, *, prefix: str, environment: str, configuration, **kwargs):
    """
    :param scope: Stack class, used by CDK.
    :param id: ID of the construct, used by CDK.
    :param prefix: Prefix of the construct, used for naming purposes.
    :param environment: Environment of the construct, used for naming purposes.
    :param configuration: Configuration of the construct. In this case SNS_CONFIG_SCHEMA.
    :param kwargs: Other parameters that could be used by the construct.
    """
    super().__init__(scope, id, **kwargs)
    self.prefix = prefix
    self.environment_ = environment
    self._configuration = configuration

    # Validating that the payload passed is correct
    validate_configuration(configuration_schema=SNS_CONFIG_SCHEMA,
                           configuration_received=self._configuration)

    # Defining SNS Topic (deepcopy so base_topic cannot mutate the caller's config dict)
    topic_data = deepcopy(self._configuration["topic"])
    self._sns_topic = base_topic(self, **topic_data)

    # Define every configured Lambda handler and subscribe EACH one to the
    # SNS Topic specified in the cdk.json file. Subscribing inside the loop
    # fixes the defect where only the last handler (via the leaked loop
    # variable) ended up subscribed.
    functions_data = self._configuration["lambda_handlers"]
    self._lambda_functions = []
    for lambda_function in functions_data:
        _lambda_function = base_lambda_function(self, **lambda_function)
        self._lambda_functions.append(_lambda_function)
        self._sns_topic.add_subscription(
            sns_subs.LambdaSubscription(fn=_lambda_function))
def __init__(self, scope: core.Construct, id: str, sns_topic_arn: str, **kwargs) -> None:
    """SNS-fed producer Lambda pushing onto an SQS queue drained by a consumer Lambda.

    :param sns_topic_arn: ARN of an existing topic that triggers the producer.
    """
    super().__init__(scope, id, **kwargs)

    # Queue with a 300-second visibility timeout.
    sqs_queue = sqs.Queue(self, 'RDSPublishQueue',
                          visibility_timeout=core.Duration.seconds(300))

    # Producer: subscribed to the external topic, forwards messages to the queue.
    producer_fn = _lambda.Function(
        self, "sqsLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="sqs.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE,
        environment={"SQS_URL": sqs_queue.queue_url})
    sqs_queue.grant_send_messages(producer_fn)

    topic = sns.Topic.from_topic_arn(self, 'SNSTopic', sns_topic_arn)
    topic.add_subscription(subscriptions.LambdaSubscription(producer_fn))

    # Consumer: event-sourced off the queue.
    consumer_fn = _lambda.Function(
        self, "sqsSubscribeLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="sqs_subscribe.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE)
    sqs_queue.grant_consume_messages(consumer_fn)
    consumer_fn.add_event_source(lambda_event.SqsEventSource(sqs_queue))
def __init__(self, scope: core.Construct, id: str, props, **kwargs) -> None:
    """Slack notifier for UMCCRise CodeBuild state changes.

    Wires CodeBuild state-change events -> SNS -> Lambda -> Slack.

    :param props: dict providing 'slack_channel', 'ecr_name', 'aws_account'
        and 'codebuild_project_name'.
    """
    super().__init__(scope, id, **kwargs)

    ################################################################################
    # Create a Lambda function to process the CodeBuild state change events
    # and send out appropriate Slack messages

    # Permissions for the Lambda
    lambda_role = _iam.Role(
        self,
        id='UmccriseCodeBuildSlackLambdaRole',
        assumed_by=_iam.ServicePrincipal('lambda.amazonaws.com'),
        managed_policies=[
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonSSMReadOnlyAccess'),
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AWSLambdaBasicExecutionRole'),
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonEC2ContainerRegistryReadOnly')
        ])

    # The Lambda function itself.
    # Code.from_asset replaces the deprecated Code.asset (same behavior).
    function = _lambda.Function(
        self,
        id='UmccriseCodeBuildSlackLambda',
        handler='notify_slack.lambda_handler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambdas/slack'),
        environment={
            'SLACK_HOST': 'hooks.slack.com',
            'SLACK_CHANNEL': props['slack_channel'],
            'ECR_NAME': props['ecr_name'],
            'AWS_ACCOUNT': props['aws_account']  # TODO: get from kwargs (env)
        },
        role=lambda_role)

    ################################################################################
    # Create a reference to the UMCCRise CodeBuild project
    # TODO: should probably use cross-stack resource references
    cb_project = cb.Project.from_project_name(
        self,
        id='UmccriseCodeBuildProject',
        project_name=props['codebuild_project_name'])

    ################################################################################
    # Create an SNS topic to receive CodeBuild state change events
    sns_topic = _sns.Topic(self,
                           id='UmccriseCodeBuildSnsTopic',
                           display_name='UmccriseCodeBuildSnsTopic',
                           topic_name='UmccriseCodeBuildSnsTopic')
    sns_topic.grant_publish(cb_project)
    sns_topic.add_subscription(_sns_subs.LambdaSubscription(function))

    # Send state change events to SNS topic
    cb_project.on_state_change(
        id='UmccriseCodebuildStateChangeRule',
        rule_name='UmccriseCodebuildStateChangeRule',
        target=targets.SnsTopic(sns_topic))
def __init__(self, app: App, id: str) -> None:
    """SNS topic fanning into a Lambda, with one-day logs and stack outputs."""
    super().__init__(app, id)

    # Handler invoked for every message published to the topic.
    handler_fn = _lambda.Function(self, "SNSEventHandler",
                                  runtime=_lambda.Runtime.PYTHON_3_9,
                                  code=_lambda.Code.from_asset("lambda"),
                                  handler="handler.main",
                                  timeout=Duration.seconds(10))

    # Keep logs for a single day and delete the group with the stack.
    logs.LogGroup(self, 'logs',
                  log_group_name=f"/aws/lambda/{handler_fn.function_name}",
                  removal_policy=RemovalPolicy.DESTROY,
                  retention=logs.RetentionDays.ONE_DAY)

    # SNS topic
    topic = sns.Topic(self, 'sns-to-lambda-topic-test',
                      display_name='My SNS topic')

    # subscribe Lambda to SNS topic
    topic.add_subscription(subs.LambdaSubscription(handler_fn))

    # Surface the topic ARN and function name as stack outputs.
    CfnOutput(self, 'snsTopicArn',
              value=topic.topic_arn,
              description='The arn of the SNS topic')
    CfnOutput(self, 'functionName',
              value=handler_fn.function_name,
              description='The name of the handler function')
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Lambda invoked daily at 05:00 UTC and also subscribable via an SNS topic.

    Cleanup: removed unused context lookups (prj_name/env_name), removed a
    commented-out duplicate rule target, and replaced the deprecated
    Code.asset with the equivalent Code.from_asset.
    """
    super().__init__(scope, id, **kwargs)

    lambda_function = lb.Function(self, "notifiaction_lambda",
                                  runtime=lb.Runtime.PYTHON_3_8,
                                  code=lb.Code.from_asset('lambda'),
                                  handler='hello.handler')

    # Fire every day at 05:00 UTC.
    cw_rule = events.Rule(self, 'cwrule',
                          schedule=events.Schedule.cron(minute='0',
                                                        hour='5',
                                                        month='*',
                                                        week_day='*',
                                                        year='*'))
    cw_rule.add_target(targets.LambdaFunction(lambda_function))

    # Topic allowing other producers to trigger the same function.
    lambda_topic = sns.Topic(self, 'lambdatopic',
                             topic_name='serverless-lambda-topic')
    lambda_topic.add_subscription(subs.LambdaSubscription(lambda_function))
def __init__(self, scope: core.Construct, id: str, slack_channel: str, **kwargs) -> None:
    """SNS topic, publishable by the IAP account, fanning into a Slack-notifier Lambda.

    :param slack_channel: Slack channel name passed to the handler via env vars.
    """
    super().__init__(scope, id, **kwargs)

    # Execution role: basic logging plus read-only SSM access.
    lambda_role = _iam.Role(
        self, 'SlackLambdaRole',
        assumed_by=_iam.ServicePrincipal('lambda.amazonaws.com'),
        managed_policies=[
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonSSMReadOnlyAccess'),
            _iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AWSLambdaBasicExecutionRole')
        ])

    function = _lambda.Function(self, 'IapSlackLambda',
                                handler='notify_slack.lambda_handler',
                                runtime=_lambda.Runtime.PYTHON_3_7,
                                code=_lambda.Code.asset('lambdas/iap'),
                                environment={
                                    "SLACK_HOST": "hooks.slack.com",
                                    "SLACK_CHANNEL": slack_channel
                                },
                                role=lambda_role)

    sns_topic = _sns.Topic(self, 'IapSnsTopic',
                           display_name='IapSnsTopic',
                           topic_name='IapSnsTopic')
    # NOTE(review): self.illumina_iap_account is not set in this method —
    # presumably assigned elsewhere on the class before this runs; confirm.
    sns_topic.grant_publish(
        _iam.AccountPrincipal(self.illumina_iap_account))
    sns_topic.add_subscription(_sns_subs.LambdaSubscription(function))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """SNS-triggered log-fetch Lambda plus EC2 instances bootstrapped from a script."""
    super().__init__(scope, id, **kwargs)

    def get_userdata():
        # Bootstrap script shipped alongside the stack definition.
        with open('bootstrap.sh', 'r') as userdata:
            return userdata.read()

    # Pre-existing roles referenced by ARN.
    kratos_role = aws_iam.Role.from_role_arn(
        self, 'KratosXL',
        role_arn="arn:aws:iam::88888888:role/KratosRole")
    lambda_role = aws_iam.Role.from_role_arn(
        self, 'LambdaXL',
        role_arn="arn:aws:iam::999999999:role/Lambda_Kratos")

    sns_topic = aws_sns.Topic(self, "Topic", display_name="cdk-sns-trigger")

    lambda_function = aws_lambda.Function(
        self, "FetchAtopLogs",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        role=lambda_role,
        handler="lambda_handler.lambda_handler",
        code=aws_lambda.Code.from_asset('myfunc'))
    lambda_function.add_event_source(
        aws_lambda_event_sources.SnsEventSource(sns_topic))
    # NOTE(review): this subscription object is built but never attached to
    # the topic; the SnsEventSource above already performs the wiring.
    sns_subscription = aws_sns_subscriptions.LambdaSubscription(
        lambda_function)

    def generate_instances(count=1):
        """Create `count` t2.micro instances running the bootstrap script."""
        amazon_linux_2 = aws_ec2.GenericLinuxImage(
            {"us-east-1": "ami-0fc61db8544a617ed"})
        ec2_objects = []
        for i in range(count):
            # NOTE(review): Instance.security_group conventionally takes an
            # ISecurityGroup; a raw CfnSecurityGroup is passed here — confirm.
            ec2_instance = aws_ec2.Instance(
                self, f"CDK-Instance-{i + 1}",
                instance_type=aws_ec2.InstanceType('t2.micro'),
                role=kratos_role,
                machine_image=amazon_linux_2,
                security_group=aws_ec2.CfnSecurityGroup(
                    self, id=f"SG{i + 1}",
                    group_description=f"SG-CDK-{i}"),
                vpc=aws_ec2.Vpc.from_lookup(self, f'CDK-VPC-{i + 1}',
                                            vpc_id="vpc-eeeee3"),
                user_data=aws_ec2.UserData.custom(get_userdata()),
                key_name="covidQuarantine")
            ec2_objects.append(ec2_instance)
        return ec2_objects

    generate_instances()
def add_sns_subscription(self, lambda_function: _lambda.Function, squid_alarm_topic: sns.Topic):
    """Subscribe `lambda_function` to the alarm topic and let SNS invoke it."""
    # Expose the topic ARN to the handler code.
    lambda_function.add_environment(key="TOPIC_ARN",
                                    value=squid_alarm_topic.topic_arn)
    # Resource policy: only this topic may invoke the function.
    lambda_function.add_permission(
        "squid-lambda-permission",
        principal=iam.ServicePrincipal("sns.amazonaws.com"),
        action='lambda:InvokeFunction',
        source_arn=squid_alarm_topic.topic_arn)
    # Finally wire up the subscription itself.
    squid_alarm_topic.add_subscription(
        sns_subscriptions.LambdaSubscription(lambda_function))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Slack alert for failed CodePipeline executions, via CloudWatch Events + SNS + Lambda."""
    super().__init__(scope, id, **kwargs)

    # Notification handler.
    lambda_pipeline_alerts = _lambda.Function(
        self, id="lambda_pipeline_alerts_asset",
        function_name='lambda_pipeline_alerts_asset',
        code=_lambda.Code.asset("lambda_pipeline_alerts_asset"),
        runtime=_lambda.Runtime.PYTHON_3_7,
        handler="index.lambda_handler")

    # NOTE(review/security): a live Slack webhook URL is hard-coded here —
    # it is effectively a credential and should live in SSM Parameter Store
    # or Secrets Manager and be rotated.
    lambda_pipeline_alerts.add_environment(
        'SLACK_WEB_HOOK_URL',
        'https://hooks.slack.com/services/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'
    )
    lambda_pipeline_alerts.add_environment('SLACK_CHANNEL', '#tech-pay-deploys')

    # Topic that fans pipeline events into the Lambda.
    sns_topic_pipeline_alerts = _sns.Topic(
        self, id='sns_pipeline_alerts',
        display_name='pipelines-events',
        topic_name='pipelines-events')
    sns_topic_pipeline_alerts.add_subscription(
        _sns_subscription.LambdaSubscription(lambda_pipeline_alerts))

    # Rule matching failed CodePipeline executions.
    event_rule = _events.Rule(
        self, id='pipeline_alerts',
        rule_name='pipeline_alerts',
        description='Cloud Watch Event Rule to check pipeline events')
    event_rule.add_event_pattern(
        detail={"state": ["FAILED"]},
        detail_type=["CodePipeline Pipeline Execution State Change"],
        source=["aws.codepipeline"])

    # Target: publish a Slack-formatted message carrying the pipeline name.
    pipeline_name = _events.EventField.from_path('$.detail.pipeline')
    event_rule.add_target(
        _targets.SnsTopic(
            sns_topic_pipeline_alerts,
            message=_events.RuleTargetInput.from_text(
                f':rotating_light:The Pipeline `{pipeline_name}` has failed.:rotating_light:'
            )))
def __init__(self, scope: core.Construct, id: str, sns_topic_arn: str, **kwargs) -> None:
    """Subscribe an XRay-traced HTTP Lambda to an existing SNS topic.

    :param sns_topic_arn: ARN of the topic that triggers the handler.
    """
    super().__init__(scope, id, **kwargs)

    http_lambda = _lambda.Function(
        self, "httpLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="http.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE)

    # Attach to the externally-owned topic identified by ARN.
    existing_topic = sns.Topic.from_topic_arn(self, 'SNSTopic', sns_topic_arn)
    existing_topic.add_subscription(subscriptions.LambdaSubscription(http_lambda))
def test_subscriber(self, distribution_topics):
    """Deploy a catch-all subscriber Lambda and hook it to every distribution topic."""
    subscriber_fn = _lambda.Function(
        self,
        "TestSubscriber",
        handler='lambda_function.lambda_handler',
        # https://github.com/aws/aws-cdk/issues/5491
        # pylint: disable=no-value-for-parameter
        code=_lambda.Code.asset('src/test_subscriber'),
        runtime=_lambda.Runtime.PYTHON_3_7,
        log_retention=_logs.RetentionDays.ONE_MONTH)
    # One subscription per topic; the same function receives them all.
    for topic in distribution_topics:
        topic.add_subscription(
            _sns_subscriptions.LambdaSubscription(subscriber_fn))
def __init__(self, scope: core.Construct, id: str, *, prefix: str, environment: str, configuration, **kwargs):
    """
    :param scope: Stack class, used by CDK.
    :param id: ID of the construct, used by CDK.
    :param prefix: Prefix of the construct, used for naming purposes.
    :param environment: Environment of the construct, used for naming purposes.
    :param configuration: Configuration of the construct. In this case IOT_SNS_CONFIG_SCHEMA.
    :param kwargs: Other parameters that could be used by the construct.
    """
    super().__init__(scope, id, **kwargs)
    self.prefix = prefix
    self.environment_ = environment
    self._configuration = configuration

    # Validating that the payload passed is correct
    validate_configuration(configuration_schema=IOT_SNS_CONFIG_SCHEMA,
                           configuration_received=self._configuration)

    # Defining SNS Topic (deepcopy so base_topic cannot mutate the caller's config dict)
    topic_data = deepcopy(self._configuration["topic"])
    self._sns_topic = base_topic(self, **topic_data)

    # Defining IAM Role assumed by IoT to publish to the topic
    role = base_sns_role(self, resource_name=topic_data["topic_name"],
                         principal_resource="iot")

    # Define every configured Lambda handler and subscribe EACH one to the
    # SNS Topic specified in the cdk.json file. Subscribing inside the loop
    # fixes the defect where only the last handler (via the leaked loop
    # variable) ended up subscribed.
    functions_data = self._configuration["lambda_handlers"]
    self._lambda_functions = []
    for lambda_function in functions_data:
        _lambda_function = base_lambda_function(self, **lambda_function)
        self._lambda_functions.append(_lambda_function)
        self._sns_topic.add_subscription(
            sns_subs.LambdaSubscription(fn=_lambda_function))

    # Defining Topic Rule properties: route matching IoT messages to the topic.
    action = iot.CfnTopicRule.SnsActionProperty(
        target_arn=self._sns_topic.topic_arn, role_arn=role.role_arn)
    action_property = iot.CfnTopicRule.ActionProperty(sns=action)
    rule_data = self._configuration["iot_rule"]
    self._iot_rule = base_iot_rule(self, action_property=action_property,
                                   **rule_data)
def __init__(self, scope: core.Construct, id: str, squid_alarm_topic: sns.Topic) -> None:
    """Lambda triggered by the Squid alarm topic, permitted to repoint VPC routes."""
    super().__init__(scope, id)

    # Execution role with basic logging.
    lambda_iam_role = iam.Role(
        self, "lambda-role",
        assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "service-role/AWSLambdaBasicExecutionRole")
        ])

    # Allow the function to update route tables of the VPC to point to a
    # healthy Squid instance ENI, and to manage ASG lifecycle/health.
    lambda_iam_role.add_to_policy(statement=iam.PolicyStatement(
        effect=iam.Effect.ALLOW,
        actions=[
            'ec2:ModifyInstanceAttribute',
            'autoscaling:Describe*',
            'autoscaling:CompleteLifecycleAction',
            'autoscaling:SetInstanceHealth',
            'cloudwatch:Describe*',
            'ec2:CreateRoute',
            'ec2:CreateTags',
            'ec2:ReplaceRoute',
            'ec2:Describe*',
        ],
        resources=['*']))

    # Handler invoked when the Squid alarm changes state.
    squid_alarm_lambda = _lambda.Function(
        self, "alarm-function",
        runtime=_lambda.Runtime.PYTHON_3_8,
        handler="lambda-handler.handler",
        code=_lambda.Code.asset("./squid_app/squid_config_files/lambda"),
        role=lambda_iam_role,
        environment={"TOPIC_ARN": squid_alarm_topic.topic_arn},
        timeout=core.Duration.seconds(60))

    # Restrict invocation to the alarm topic, then subscribe to it.
    squid_alarm_lambda.add_permission(
        "squid-lambda-permission",
        principal=iam.ServicePrincipal("sns.amazonaws.com"),
        action='lambda:InvokeFunction',
        source_arn=squid_alarm_topic.topic_arn)
    squid_alarm_topic.add_subscription(
        sns_subscriptions.LambdaSubscription(squid_alarm_lambda))
def build_lambda(name):
    """Builder function for aws_lambda.Function objects."""
    name = name.lower()
    # Forward only env vars relevant to this service (its own prefix,
    # shared LAMBDA_FUNCTIONS_ settings, and GitHub credentials).
    prefixes = (name.upper(), "LAMBDA_FUNCTIONS_", "GITHUB_")
    service_env = {var: value
                   for var, value in environ.items()
                   if var.startswith(prefixes)}
    service_fn = get_lambda(
        self,
        "{}-{}".format(id, name),
        code=code,
        handler='services.{}.handler'.format(name),
        layers=[lambda_layers["requests_oauthlib"]],
        environment=service_env,
        on_success=aws_lambda_destinations.LambdaDestination(create_report_lambda))
    # Every built service is driven off the shared topic.
    topic.add_subscription(aws_sns_subscriptions.LambdaSubscription(service_fn))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """S3 object-created events -> SNS topic -> fan-out to SQS queue and Lambda."""
    super().__init__(scope, id, **kwargs)

    # Source bucket; notifications restricted to keys under home/.
    my_bucket = s3.Bucket(self, "ssl-s3-sns-event-raw")
    home_prefix_filter = s3.NotificationKeyFilter(prefix="home/")

    # Topic receiving the bucket notifications.
    my_sns = sns.Topic(self, id="my-sns-topic", display_name="my-sns-topic")

    # Route OBJECT_CREATED events from the bucket into the topic.
    my_bucket.add_event_notification(s3.EventType.OBJECT_CREATED,
                                     notifications.SnsDestination(my_sns),
                                     home_prefix_filter)

    # Fan-out target 1: SQS queue.
    my_sqs = sqs.Queue(self, id="my-queue")
    my_sns.add_subscription(aws_sns_subscriptions.SqsSubscription(my_sqs))

    # Fan-out target 2: Lambda function.
    my_lambda = _lambda.Function(self, "HelloHandler",
                                 runtime=_lambda.Runtime.PYTHON_3_7,
                                 handler="hello.handler",
                                 code=_lambda.Code.asset('lambda'))
    my_sns.add_subscription(aws_sns_subscriptions.LambdaSubscription(my_lambda))
def __init__(self, scope: core.Construct, id: str, sns_topic_arn: str, **kwargs) -> None:
    """SNS-triggered, XRay-traced Lambda backed by a DynamoDB hit-counter table.

    :param sns_topic_arn: ARN of the existing topic that triggers the handler.
    """
    super().__init__(scope, id, **kwargs)

    # DynamoDB table keyed by request path.
    table = dynamo_db.Table(
        self, "Hits",
        partition_key=dynamo_db.Attribute(name="path",
                                          type=dynamo_db.AttributeType.STRING))

    dynamo_lambda = _lambda.Function(
        self, "DynamoLambdaHandler",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="dynamo.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        tracing=_lambda.Tracing.ACTIVE,
        environment={"HITS_TABLE_NAME": table.table_name})

    # grant the lambda role read/write permissions to our table
    table.grant_read_write_data(dynamo_lambda)

    # Subscribe the handler to the externally-owned topic.
    existing_topic = sns.Topic.from_topic_arn(self, 'SNSTopic', sns_topic_arn)
    existing_topic.add_subscription(
        subscriptions.LambdaSubscription(dynamo_lambda))
def __init__(self, app: App, id: str, **kwargs) -> None:
    """Telegram-bot stack: Lambda + SNS alarm topic + SQS queue-depth alarm."""
    super().__init__(app, id, **kwargs)

    # ---- Lambda ------------------------------------------------------------
    app_code = aws_lambda.Code.asset("./src")
    lambda_fn = aws_lambda.Function(
        self, "Lambda",
        function_name="telegram-bot-function",
        code=app_code,
        environment={
            'TELEGRAM_BOT_ID': bot_id,
            'TELEGRAM_TOKEN': token,
            'CHAT_ID': chat_id,
            'BASE_URL': f"https://api.telegram.org/{bot_id}:{token}",
            'PYTHONPATH': './packages'
        },
        handler="handler.lambda_handler",
        runtime=aws_lambda.Runtime.PYTHON_3_7,
        timeout=Duration.seconds(60))

    # One-week log retention; log group removed together with the stack.
    aws_logs.LogGroup(
        self, "LambdaFunctionLog",
        log_group_name=f"/aws/lambda/{lambda_fn.function_name}",
        removal_policy=RemovalPolicy.DESTROY,
        retention=aws_logs.RetentionDays.ONE_WEEK)

    # ---- SNS ---------------------------------------------------------------
    topic = aws_sns.Topic(self, "Topic",
                          topic_name="telegram-bot-alarm-topic",
                          display_name="telegram-bot-alarm-topic")
    topic.add_subscription(
        aws_sns_subscriptions.LambdaSubscription(lambda_fn))

    # ---- SQS ---------------------------------------------------------------
    queue = aws_sqs.Queue(self, "Queue", queue_name="telegram-bot-queue")
    metric = queue.metric_approximate_number_of_messages_visible(
        label="Number of messages visible",
        period=Duration.seconds(60))

    # ---- CloudWatch alarm --------------------------------------------------
    alarm = metric.create_alarm(
        self, "AlarmTooManyMessages",
        alarm_name="telegram-alarm-bot-TooManyMessages",
        comparison_operator=cw.ComparisonOperator.GREATER_THAN_THRESHOLD,
        evaluation_periods=1,
        threshold=1,
        treat_missing_data=cw.TreatMissingData.MISSING,
    )
    # Notify the topic on both alarm and recovery.
    alarm.add_alarm_action(cw_actions.SnsAction(topic))
    alarm.add_ok_action(cw_actions.SnsAction(topic))
def __init__(self, scope: core.Construct, id: str, *, email: str, **kwargs) -> None:
    """Daily price checker: invoker Lambda -> SNS -> checker Lambda -> email.

    :param email: address subscribed to the output (notification) topic.
    """
    super().__init__(scope, id, **kwargs)

    # Shared code asset for both Lambdas.
    lamba_code = lambda_.AssetCode("./assets/")

    # Table of products to check, keyed by timestamp.
    dynamo_store_db = dynamo.Table(
        self, "products_to_check_db",
        partition_key=dynamo.Attribute(name="ProductTs",
                                       type=dynamo.AttributeType.NUMBER))

    # Input topic feeds the checker; output topic carries email notifications.
    sns_input_topic = sns.Topic(self, "checker_url_topic")
    sns_output_topic = sns.Topic(self, "email_topic")

    # Lambda function that scrapes the pages & emails
    lambda_checker = lambda_.Function(
        self, "lambda_checker",
        code=lamba_code,
        handler="checker.handler",
        timeout=core.Duration.seconds(60),
        runtime=lambda_.Runtime.NODEJS_12_X,
        environment={
            "TOPIC_ARN": sns_output_topic.topic_arn,
            "DYNAMO_TABLE": dynamo_store_db.table_name
        })

    # Subscriptions: checker listens on input, email address on output.
    sns_input_topic.add_subscription(subs.LambdaSubscription(lambda_checker))
    sns_output_topic.add_subscription(subs.EmailSubscription(email))

    # Lambda function that populates SNS
    lambda_invoker = lambda_.Function(
        self, "lambda_invoker",
        code=lamba_code,
        handler="invoker.handler",
        timeout=core.Duration.seconds(300),
        runtime=lambda_.Runtime.NODEJS_12_X,
        environment={
            "TOPIC_ARN": sns_input_topic.topic_arn,
            "DYNAMO_TABLE": dynamo_store_db.table_name
        })

    # Publish rights for each producer.
    sns_input_topic.grant_publish(lambda_invoker)
    sns_output_topic.grant_publish(lambda_checker)

    # Table access: invoker reads, checker reads and writes.
    dynamo_store_db.grant_read_data(lambda_invoker)
    dynamo_store_db.grant_read_write_data(lambda_checker)

    # Run every day at 05:00 UTC
    # See https://docs.aws.amazon.com/lambda/latest/dg/tutorial-scheduled-events-schedule-expressions.html
    rule = events.Rule(
        self, "runEveryDayAt5AM",
        schedule=events.Schedule.cron(
            minute='0', hour='5', month='*', week_day='*', year='*'),
    )
    rule.add_target(targets.LambdaFunction(lambda_invoker))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """The Destined Lambda pattern.

    An SNS-invoked Lambda routes its async success/failure results through
    EventBridge destinations, and an API Gateway publishes straight to SNS
    with no Lambda in between.
    """
    super().__init__(scope, id, **kwargs)

    # Dedicated event bus rather than the account default.
    bus = events.EventBus(self, 'DestinedEventBus',
                          event_bus_name='the-destined-lambda')

    # Destinations only fire on async invocation, so the Lambda is driven
    # by an SNS subscription.
    topic = sns.Topic(self, 'theDestinedLambdaTopic',
                      display_name='The Destined Lambda CDK Pattern Topic')

    # Lambda with success and failure destinations; the handler itself
    # contains no EventBridge code.
    destined_lambda = _lambda.Function(
        self, "destinedLambda",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="destinedLambda.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        retry_attempts=0,
        on_success=destinations.EventBridgeDestination(event_bus=bus),
        on_failure=destinations.EventBridgeDestination(event_bus=bus))
    topic.add_subscription(
        subscriptions.LambdaSubscription(destined_lambda))

    # Called via onSuccess; just logs the event it receives.
    success_lambda = _lambda.Function(
        self, "successLambda",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="success.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        timeout=core.Duration.seconds(3))

    # Rule filtering success events on the JSON payload the destined
    # lambda returned.
    success_rule = events.Rule(
        self, 'successRule',
        event_bus=bus,
        description='all success events are caught here and logged centrally',
        event_pattern=events.EventPattern(
            detail={
                "requestContext": {
                    "condition": ["Success"]
                },
                "responsePayload": {
                    "source": ["cdkpatterns.the-destined-lambda"],
                    "action": ["message"]
                }
            }))
    success_rule.add_target(targets.LambdaFunction(success_lambda))

    # Called via onFailure; the event includes the original message, so
    # retries or manual investigation have everything they need.
    failure_lambda = _lambda.Function(
        self, "failureLambda",
        runtime=_lambda.Runtime.NODEJS_12_X,
        handler="failure.handler",
        code=_lambda.Code.from_asset("lambda_fns"),
        timeout=core.Duration.seconds(3))

    failure_rule = events.Rule(
        self, 'failureRule',
        event_bus=bus,
        description='all failure events are caught here and logged centrally',
        event_pattern=events.EventPattern(
            detail={"responsePayload": {
                "errorType": ["Error"]
            }}))
    failure_rule.add_target(targets.LambdaFunction(failure_lambda))

    # API Gateway: transforms the incoming JSON payload into a
    # form-encoded SNS Publish call — no intermediate Lambda.
    gateway = api_gw.RestApi(
        self, 'theDestinedLambdaAPI',
        deploy_options=api_gw.StageOptions(
            metrics_enabled=True,
            logging_level=api_gw.MethodLoggingLevel.INFO,
            data_trace_enabled=True,
            stage_name='prod'))

    # Role the gateway assumes to publish to the topic.
    api_gw_sns_role = iam.Role(
        self, 'ApiGatewaySNSRole',
        assumed_by=iam.ServicePrincipal('apigateway.amazonaws.com'))
    topic.grant_publish(api_gw_sns_role)

    # Shorthand aliases for the schema classes.
    schema = api_gw.JsonSchema
    schema_type = api_gw.JsonSchemaType

    # Non-proxy integration, so response models must be declared explicitly.
    response_model = gateway.add_model(
        'ResponseModel',
        content_type='application/json',
        model_name='ResponseModel',
        schema=schema(
            schema=api_gw.JsonSchemaVersion.DRAFT4,
            title='pollResponse',
            type=schema_type.OBJECT,
            properties={'message': schema(type=schema_type.STRING)}))

    error_response_model = gateway.add_model(
        'ErrorResponseModel',
        content_type='application/json',
        model_name='ErrorResponseModel',
        schema=schema(
            schema=api_gw.JsonSchemaVersion.DRAFT4,
            title='errorResponse',
            type=schema_type.OBJECT,
            properties={
                'state': schema(type=schema_type.STRING),
                'message': schema(type=schema_type.STRING)
            }))

    # VTL mapping: turn the request into an SNS Publish query string.
    request_template = "Action=Publish&" + \
        "TargetArn=$util.urlEncode('" + topic.topic_arn + "')&" + \
        "Message=please $input.params().querystring.get('mode')&" + \
        "Version=2010-03-31"

    # VTL mapping for transforming error responses.
    error_template = {
        "state": 'error',
        "message": "$util.escapeJavaScript($input.path('$.errorMessage'))"
    }
    error_template_string = json.dumps(error_template,
                                       separators=(',', ':'))

    # selection_pattern decides which response mapping fires.
    integration_options = api_gw.IntegrationOptions(
        credentials_role=api_gw_sns_role,
        request_parameters={
            'integration.request.header.Content-Type':
            "'application/x-www-form-urlencoded'"
        },
        request_templates={"application/json": request_template},
        passthrough_behavior=api_gw.PassthroughBehavior.NEVER,
        integration_responses=[
            api_gw.IntegrationResponse(
                status_code='200',
                response_templates={
                    "application/json":
                    json.dumps({"message": 'Message added to SNS topic'})
                }),
            api_gw.IntegrationResponse(
                selection_pattern="^\[Error\].*",
                status_code='400',
                response_templates={
                    "application/json": error_template_string
                },
                response_parameters={
                    'method.response.header.Content-Type':
                    "'application/json'",
                    'method.response.header.Access-Control-Allow-Origin':
                    "'*'",
                    'method.response.header.Access-Control-Allow-Credentials':
                    "'true'"
                })
        ])

    # GET /SendEvent -> SNS Publish.
    gateway.root.add_resource('SendEvent') \
        .add_method('GET',
                    api_gw.Integration(
                        type=api_gw.IntegrationType.AWS,
                        integration_http_method='POST',
                        uri='arn:aws:apigateway:us-east-1:sns:path//',
                        options=integration_options),
                    method_responses=[
                        api_gw.MethodResponse(
                            status_code='200',
                            response_parameters={
                                'method.response.header.Content-Type': True,
                                'method.response.header.Access-Control-Allow-Origin': True,
                                'method.response.header.Access-Control-Allow-Credentials': True
                            },
                            response_models={
                                'application/json': response_model
                            }),
                        api_gw.MethodResponse(
                            status_code='400',
                            response_parameters={
                                'method.response.header.Content-Type': True,
                                'method.response.header.Access-Control-Allow-Origin': True,
                                'method.response.header.Access-Control-Allow-Credentials': True
                            },
                            response_models={
                                'application/json': error_response_model
                            }),
                    ])
def __init__(self, scope: core.Construct, id: str, active_table: dynamodb.Table,
             person_table: dynamodb.Table, failedlogins_table: dynamodb.Table,
             loginevents_table: dynamodb.Table, logoutevents_table: dynamodb.Table,
             **kwargs) -> None:
    """Provision the face-recognition access-control stack.

    Creates the S3 buckets, SNS topics, Lambda functions and the Step
    Functions state machine implementing the RFID-tag login/logout flow
    backed by the supplied DynamoDB tables.

    NOTE: all ``_lambda.Code.asset`` calls were replaced by the
    non-deprecated, behaviorally identical ``Code.from_asset`` alias, for
    consistency with the other stacks in this file.

    :param scope: Parent construct (CDK).
    :param id: Construct id (CDK).
    :param active_table: Table of currently logged-in users.
    :param person_table: Table of registered users.
    :param failedlogins_table: Audit table for denied logins.
    :param loginevents_table: Audit table for successful logins.
    :param logoutevents_table: Audit table for logouts.
    """
    super().__init__(scope, id, **kwargs)

    # set pseudo parameters to use as environment variables
    region = core.Aws.REGION
    accountid = core.Aws.ACCOUNT_ID

    # create a bucket "AccessProjectBucket"
    self._bucket = s3.Bucket(
        self, "AccessProjectBucket",
        versioned=True
    )

    # create a lifecycle rule for "AccessProjectCaptureBucket" to remove object after one day from creation
    lifecycle_rule = s3.LifecycleRule(
        expiration=core.Duration.days(1)
    )

    # create a bucket "AccessProjectCaptureBucket"
    self._capture_bucket = s3.Bucket(
        self, "AccessProjectCaptureBucket",
        lifecycle_rules=[lifecycle_rule]
    )

    # create a topic "WriteTag"
    self._write_topic = sns.Topic(
        self, "WriteTag"
    )

    # create a topic "SendUserDataToRaspberryPi"
    # NOTE(review): not referenced again in this method — presumably consumed
    # elsewhere (or dead); kept so the synthesized template is unchanged.
    send_topic = sns.Topic(
        self, "SendUserDataToRaspberryPi"
    )

    # create an iam policy statement to allow lambda function to write to dynamodb active table
    write_to_activetable_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:PutItem"],
        resources=[active_table.table_arn]
    )

    # create a lambda function "create_active_user"
    activetable_put_user = _lambda.Function(
        self, 'ActivetablePutUserHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='activetable_put_user.create_active_user',
        initial_policy=[write_to_activetable_policy_statement],
        environment={"active_table": active_table.table_name, "region": region}
    )

    # create an iam policy statement to allow lambda function to remove user from dynamodb active table
    delete_user_from_activetable_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:DeleteItem"],
        resources=[active_table.table_arn]
    )

    # create a lambda function "remove_user_from_active_table"
    activetable_remove_user = _lambda.Function(
        self, 'ActivetableRemoveUserHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='activetable_remove_user.remove_user_from_active_table',
        initial_policy=[delete_user_from_activetable_policy_statement],
        environment={"active_table": active_table.table_name, "region": region}
    )

    # create an iam policy statement to allow lambda function to check for concurrent users from active table
    check_for_concurrent_users_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:Scan"],
        resources=[active_table.table_arn],
    )

    # create a lambda function "check_for_concurrent_users"
    check_for_concurrent_users = _lambda.Function(
        self, 'CheckForConcurrentUsersHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='check_for_concurrent_users.check_for_concurrent_users',
        initial_policy=[check_for_concurrent_users_policy_statement],
        environment={"active_table": active_table.table_name, "region": region}
    )

    # create an iam policy statement to allow lambda function to check if person exists in person table
    check_for_user_in_persontable_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:GetItem"],
        resources=[person_table.table_arn]
    )

    # create a lambda function "get_user"
    check_for_user_in_persontable = _lambda.Function(
        self, 'CheckForUserInPersontableHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='check_for_user_in_persontable.get_user',
        initial_policy=[check_for_user_in_persontable_policy_statement],
        environment={"person_table": person_table.table_name, "region": region}
    )

    # create an iam policy statement to allow lambda function to check if user exists in active table
    check_if_user_is_active_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:GetItem"],
        resources=[active_table.table_arn]
    )

    # create a lambda function "check_if_user_is_active"
    check_if_user_is_active = _lambda.Function(
        self, 'CheckIfUserIsActiveHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='check_if_user_is_active.check_if_user_is_active',
        initial_policy=[check_if_user_is_active_policy_statement],
        environment={"active_table": active_table.table_name, "region": region}
    )

    # create an iam policy statement to allow lambda function to get object from access project bucket
    compare_faces_bucket_policy_statement = iam.PolicyStatement(
        actions=["s3:GetObject"],
        resources=[self._bucket.bucket_arn + '/*', self._capture_bucket.bucket_arn + '/*']
    )

    # create an iam policy statement to allow lambda function to use rekognition
    rekognition_policy_statement = iam.PolicyStatement(
        actions=["rekognition:CompareFaces"],
        resources=["*"]
    )

    # create a lambda function "compare_faces"
    compare_faces = _lambda.Function(
        self, 'CompareFacesHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='compare_faces.compare_faces',
        initial_policy=[compare_faces_bucket_policy_statement, rekognition_policy_statement],
        environment={"original_photo_bucket": self._bucket.bucket_name,
                     "capture_photo_bucket": self._capture_bucket.bucket_name,
                     "region": region}
    )

    # create a lambda function "generate_rekognition_response"
    generate_rekognition_response = _lambda.Function(
        self, 'GenerateRekognitionResponse',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='generate_rekognition_response.generate_rekognition_response'
    )

    # create a lambda function "evaluate_authentication_response"
    evaluate_authentication_response = _lambda.Function(
        self, 'EvaluateAuthenticationResponseHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='evaluate_authentication_response.evaluate_authentication_response'
    )

    # create a lambda function "evaluate_initial_authentication"
    evaluate_initial_authentication = _lambda.Function(
        self, 'EvaluateInitialAuthenticationHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='evaluate_initial_authentication.evaluate_initial_authentication'
    )

    # create a lambda function "generate_db_response"
    generate_db_response = _lambda.Function(
        self, 'GenerateDbResponseHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='generate_db_response.generate_db_response'
    )

    # create a lambda function "parse_rekognition_response"
    parse_rekognition_response = _lambda.Function(
        self, 'ParseRekognitionResponseHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='parse_rekognition_response.parse_rekognition_response'
    )

    # create an iam policy statement to allow lambda function to create user to person table
    person_table_put_user_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:PutItem"],
        resources=[person_table.table_arn]
    )

    # create a lambda function "create_new_user"
    persontable_put_user = _lambda.Function(
        self, 'PersontablePutUserHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='persontable_put_user.create_new_user',
        initial_policy=[person_table_put_user_policy_statement],
        environment={"person_table": person_table.table_name, "region": region}
    )

    # create a lambda subscription for "WriteTag" topic
    self._write_topic.add_subscription(subscriptions.LambdaSubscription(persontable_put_user))

    # create an iam policy statement to allow lambda function to publish to iot topic
    publish_to_iot_policy_statement = iam.PolicyStatement(
        actions=["iot:Publish"],
        resources=["*"]
    )

    # create a lambda function "publish_to_iot"
    publish_to_iot_topic = _lambda.Function(
        self, 'PublishToIoTTopicHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='publish_to_iot_topic.publish_to_iot',
        initial_policy=[publish_to_iot_policy_statement],
        environment={"region": region}
    )

    # create a lamdba function "stream_delete_event_to_s3"
    stream_delete_event_to_s3 = _lambda.Function(
        self, 'StreamDeleteEventToS3',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='stream_delete_event_to_s3.stream_delete_event_to_s3',
        environment={"original_photo_bucket": self._bucket.bucket_name}
    )

    # create an iam policy statement to allow lambda function to write to failedloginevents table
    write_to_failed_login_table_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:PutItem"],
        resources=[failedlogins_table.table_arn]
    )

    # create a lambda function "write_to_failed_login_table"
    write_to_failedlogins = _lambda.Function(
        self, 'WriteToFailedloginsHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='write_to_failedlogins.write_to_failed_login_table',
        initial_policy=[write_to_failed_login_table_policy_statement],
        environment={"failedlogins_table": failedlogins_table.table_name, "region": region}
    )

    # create an iam policy statement to allow lambda function to write to loginevents table
    write_to_login_events_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:PutItem"],
        resources=[loginevents_table.table_arn]
    )

    # create a lambda function "write_to_login_events"
    write_to_login_events = _lambda.Function(
        self, 'WriteToLoginEventsHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='write_to_login_events.write_to_login_events',
        initial_policy=[write_to_login_events_policy_statement],
        environment={"loginevents_table": loginevents_table.table_name, "region": region}
    )

    # create an iam policy statement to allow lambda function to write to logoutevents table
    write_to_logout_events_policy_statement = iam.PolicyStatement(
        actions=["dynamodb:PutItem"],
        resources=[logoutevents_table.table_arn]
    )

    # create a lambda function "write_to_logout_events"
    write_to_logout_events = _lambda.Function(
        self, 'WriteToLogoutEventsHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='write_to_logout_events.write_to_logout_events',
        initial_policy=[write_to_logout_events_policy_statement],
        environment={"logoutevents_table": logoutevents_table.table_name, "region": region}
    )

    # creates policy statement to allow state machine Lambda invocation
    inline_state_machine_statement = iam.PolicyStatement(
        actions=['lambda:InvokeFunction'],
        effect=iam.Effect.ALLOW,
        resources=['*']
    )

    # creates a new policy document and attaches the above statement to it
    inline_state_machine_policy = iam.PolicyDocument()
    inline_state_machine_policy.add_statements(inline_state_machine_statement)

    # creates state machine role which can be assumed by the states service principal
    state_machine_role = iam.Role(
        self, 'StateMachineRole',
        assumed_by=iam.ServicePrincipal('states.amazonaws.com'),
        inline_policies=[inline_state_machine_policy]
    )

    # create a state machine with the provided definition. Lambda functions are referred with string literals
    state_machine = sf.CfnStateMachine(
        self, 'AccessControlStateMachine',
        role_arn=state_machine_role.role_arn,
        state_machine_name='AccessControlStateMachineCDK',
        definition_string='''{ "Comment":"RFID tag read state machine", "StartAt":"StartUserAuthentication", "States":{ "StartUserAuthentication":{ "Type":"Parallel", "Next":"EvaluateInitialAuthentication", "Branches":[ { "StartAt":"CheckForUserInPersonTable", "States":{ "CheckForUserInPersonTable":{ "Type":"Task", "Resource":"%s", "End":true } } }, { "StartAt":"CompareFaces", "States":{ "CompareFaces":{ "Type":"Task", "Resource":"%s", "Next":"IsFaceInS3?" }, "IsFaceInS3?":{ "Type":"Choice", "Choices":[ { "Not":{ "Variable":"$.face", "StringEquals":"notavailable" }, "Next":"ParseRekognitionResponse" }, { "Variable":"$.face", "StringEquals":"notavailable", "Next":"GenerateRekognitionResponse" } ], "Default":"ChoiceErrorState1" }, "ParseRekognitionResponse":{ "Type":"Task", "Resource":"%s", "Next":"GenerateRekognitionResponse" }, "GenerateRekognitionResponse":{ "Type":"Task", "Resource":"%s", "End":true }, "ChoiceErrorState1":{ "Type":"Fail", "Cause":"No Matches!"
} } } ] }, "EvaluateInitialAuthentication":{ "Type":"Task", "Resource":"%s", "Next":"LoginSuccessful?" }, "LoginSuccessful?":{ "Type":"Choice", "Choices":[ { "Variable":"$.state", "StringEquals":"continue", "Next":"CheckIfUserIsActive" }, { "Variable":"$.state", "StringEquals":"failed", "Next":"GenerateDBResponse" } ], "Default":"ChoiceErrorState2" }, "CheckIfUserIsActive":{ "Type":"Task", "Resource":"%s", "Next":"IsUserActive?" }, "IsUserActive?":{ "Type":"Choice", "Choices":[ { "Variable":"$.logout", "IsPresent":true, "Next":"RemoveUserFromActiveTable" }, { "Variable":"$.login", "IsPresent":true, "Next":"CheckForConcurrentUsers" } ], "Default":"ChoiceErrorState2" }, "CheckForConcurrentUsers":{ "Type":"Task", "Resource":"%s", "Next":"CheckUserCount" }, "CheckUserCount":{ "Type":"Choice", "Choices":[ { "Variable":"$.usercount", "NumericLessThan":10, "Next":"AddUserToActiveTable" }, { "Variable":"$.usercount", "NumericGreaterThanEquals":10, "Next":"GenerateDBResponse" } ], "Default":"ChoiceErrorState2" }, "RemoveUserFromActiveTable":{ "Type":"Task", "Resource":"%s", "Next":"GenerateDBResponse" }, "AddUserToActiveTable":{ "Type":"Task", "Resource":"%s", "Next":"GenerateDBResponse" }, "GenerateDBResponse":{ "Type":"Task", "Resource":"%s", "Next":"EvaluateAuthenticationResponse" }, "ChoiceErrorState2":{ "Type":"Fail", "Cause":"No Matches!" }, "EvaluateAuthenticationResponse":{ "Type":"Task", "Resource":"%s", "Next": "WhichEventDBToWrite?"
}, "WhichEventDBToWrite?":{ "Type":"Choice", "Choices":[ { "And": [ { "Variable":"$.state", "StringEquals":"login" }, { "Variable":"$.response.access", "StringEquals":"allow" } ], "Next":"WriteToLoginEvents" }, { "And": [ { "Variable":"$.state", "StringEquals":"logout" }, { "Variable":"$.response.access", "StringEquals":"allow" } ], "Next":"WriteToLogoutEvents" }, { "Variable":"$.response.access", "StringEquals":"deny", "Next":"WriteToDeniedLoginTable" } ] }, "WriteToLoginEvents":{ "Type":"Task", "Resource":"%s", "Next": "PublishToIoTTopic" }, "WriteToLogoutEvents":{ "Type":"Task", "Resource":"%s", "Next": "PublishToIoTTopic" }, "WriteToDeniedLoginTable":{ "Type":"Task", "Resource":"%s", "Next": "PublishToIoTTopic" }, "PublishToIoTTopic":{ "Type":"Task", "Resource":"%s", "End": true } } }''' % (
            # The 15 %s placeholders are filled in state-machine order:
            check_for_user_in_persontable.function_arn,
            compare_faces.function_arn,
            parse_rekognition_response.function_arn,
            generate_rekognition_response.function_arn,
            evaluate_initial_authentication.function_arn,
            check_if_user_is_active.function_arn,
            check_for_concurrent_users.function_arn,
            activetable_remove_user.function_arn,
            activetable_put_user.function_arn,
            generate_db_response.function_arn,
            evaluate_authentication_response.function_arn,
            write_to_login_events.function_arn,
            write_to_logout_events.function_arn,
            write_to_failedlogins.function_arn,
            publish_to_iot_topic.function_arn
        )
    )

    # create an iam policy statement to allow lambda function to step functions state machine execution
    start_state_machine_policy_statement = iam.PolicyStatement(
        actions=["states:StartExecution"],
        resources=["*"]
    )

    # create a lambda function "start_state_machine"
    start_state_machine = _lambda.Function(
        self, 'StartStateMachineHandler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('lambda'),
        handler='start_dbcheck_state_machine.start_state_machine',
        initial_policy=[start_state_machine_policy_statement],
        # ARN is assembled by hand because CfnStateMachine exposes only the name here
        environment={"state_machine": "arn:aws:states:%s:%s:stateMachine:%s" % (region, accountid, state_machine.state_machine_name),
                     "region": region}
    )

    # every photo captured into the capture bucket kicks off the state machine
    self._capture_bucket.add_event_notification(
        s3.EventType.OBJECT_CREATED_PUT,
        notifications.LambdaDestination(start_state_machine))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision the audiobook-generation stack.

    Builds a DynamoDB table, the book/Polly Lambda handlers, four S3
    buckets, an SNS topic, and a Fargate cluster + task definition for
    video processing, then wires permissions and environment variables.

    Improvements over the previous revision (behavior unchanged):
    local variables renamed to PEP 8 snake_case (construct IDs are
    untouched, so the synthesized template is identical), and the
    redundant ``str()`` around ``vpc.vpc_id`` (already a ``str``) dropped.

    :param scope: Parent construct (CDK).
    :param id: Construct id (CDK).
    """
    super().__init__(scope, id, **kwargs)

    # ******* Database table
    audiobooks_db = aws_dynamodb.Table(
        self, "audiobooksDB",
        partition_key=aws_dynamodb.Attribute(
            name="id", type=aws_dynamodb.AttributeType.STRING),
        read_capacity=2,
        write_capacity=2,
        billing_mode=aws_dynamodb.BillingMode.PROVISIONED)

    # ******* Lambda functions
    book_upload_lambda_function = aws_lambda.Function(
        self, "HandleBookUploadLambda",
        handler='app.lambda_handler',
        runtime=aws_lambda.Runtime.PYTHON_3_8,
        code=aws_lambda.Code.from_asset(
            '../Functions/handlers/handle_book_upload'))
    polly_audio_lambda_function = aws_lambda.Function(
        self, "HandlePollyAudioLambda",
        handler='app.lambda_handler',
        runtime=aws_lambda.Runtime.PYTHON_3_8,
        code=aws_lambda.Code.from_asset(
            '../Functions/handlers/handle_polly_audio'),
        # Polly synthesis can be slow; allow up to two minutes
        timeout=core.Duration.seconds(120))

    # ******* S3 upload buckets
    book_upload_bucket = aws_s3.Bucket(self, "BookUploadBucket")
    audio_upload_bucket = aws_s3.Bucket(self, "AudioUploadBucket")
    video_upload_bucket = aws_s3.Bucket(self, "VideoUploadBucket")
    image_upload_bucket = aws_s3.Bucket(self, "ImageUploadBucket")

    # ******* Create S3 event source: new .txt books trigger the upload handler
    book_upload_lambda_function.add_event_source(
        S3EventSource(book_upload_bucket,
                      events=[aws_s3.EventType.OBJECT_CREATED],
                      filters=[{
                          "suffix": '.txt'
                      }]))

    # ******* Create SNS topic
    polly_sns_topic = aws_sns.Topic(self, "PollySNSTopic")
    polly_sns_topic.add_subscription(
        aws_sns_subscriptions.LambdaSubscription(
            polly_audio_lambda_function))

    # ******* Book function environment variables
    book_upload_lambda_function.add_environment("TABLE_NAME",
                                                audiobooks_db.table_name)
    book_upload_lambda_function.add_environment(
        "AUDIO_S3_BUCKET", audio_upload_bucket.bucket_name)
    book_upload_lambda_function.add_environment("SNS_TOPIC",
                                                polly_sns_topic.topic_arn)

    # ******* Book function permissions
    audiobooks_db.grant_write_data(book_upload_lambda_function)
    book_upload_bucket.grant_read(book_upload_lambda_function)
    audio_upload_bucket.grant_write(book_upload_lambda_function)
    polly_sns_topic.grant_publish(book_upload_lambda_function)
    book_upload_lambda_function.add_to_role_policy(
        aws_iam.PolicyStatement(actions=["polly:*"], resources=["*"]))

    # ******* Fargate container permissions
    role = aws_iam.Role(
        self, "FargateContainerRole",
        assumed_by=aws_iam.ServicePrincipal("ecs-tasks.amazonaws.com"))
    role.add_to_policy(
        aws_iam.PolicyStatement(
            actions=["s3:PutObject"],
            resources=[video_upload_bucket.bucket_arn + "/*"]))
    role.add_to_policy(
        aws_iam.PolicyStatement(
            actions=["s3:GetObject"],
            resources=[audio_upload_bucket.bucket_arn + "/*"]))
    role.add_to_policy(
        aws_iam.PolicyStatement(
            actions=["s3:GetObject"],
            resources=[image_upload_bucket.bucket_arn + "/*"]))

    # ******* Fargate container
    vpc = aws_ec2.Vpc(self, "CdkFargateVpc", max_azs=2)
    cluster = aws_ecs.Cluster(self, 'FargateCluster', vpc=vpc)
    image = aws_ecs.ContainerImage.from_asset(
        "../Functions/ECSContainerFiles")
    task_definition = aws_ecs.FargateTaskDefinition(
        self, "FargateContainerTaskDefinition",
        execution_role=role,
        task_role=role,
        cpu=1024,
        memory_limit_mib=3072)
    port_mapping = aws_ecs.PortMapping(container_port=80, host_port=80)
    container = task_definition.add_container(
        "Container",
        image=image,
        logging=aws_ecs.AwsLogDriver(
            stream_prefix="videoProcessingContainer"))
    container.add_port_mappings(port_mapping)

    # ******* Audio function environment variables
    polly_audio_lambda_function.add_environment(
        "VIDEO_S3_BUCKET", video_upload_bucket.bucket_name)
    polly_audio_lambda_function.add_environment(
        "TASK_DEFINITION_ARN", task_definition.task_definition_arn)
    polly_audio_lambda_function.add_environment("CLUSTER_ARN",
                                                cluster.cluster_arn)
    polly_audio_lambda_function.add_environment("TABLE_NAME",
                                                audiobooks_db.table_name)
    polly_audio_lambda_function.add_environment("CONTAINER_NAME",
                                                container.container_name)
    # vpc_id is already a str, no conversion needed
    polly_audio_lambda_function.add_environment("VPC_ID", vpc.vpc_id)

    # ******* Audio function permissions
    audiobooks_db.grant_read_write_data(polly_audio_lambda_function)
    polly_audio_lambda_function.add_to_role_policy(
        aws_iam.PolicyStatement(actions=["ecs:RunTask"], resources=["*"]))
    polly_audio_lambda_function.add_to_role_policy(
        aws_iam.PolicyStatement(actions=["iam:PassRole"], resources=["*"]))
    polly_audio_lambda_function.add_to_role_policy(
        aws_iam.PolicyStatement(actions=["ec2:DescribeSubnets"],
                                resources=["*"]))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Set up an Amazon Inspector assessment whose findings feed a reporting Lambda.

    Region-specific SNS principal and rules-package ARNs are held in
    CloudFormation mappings; an Inspector template is created over a
    tag-selected resource group, its findings topic is wired to a Lambda,
    and a custom resource subscribes the template to the topic.

    :param scope: Parent construct (CDK).
    :param id: Construct id (CDK).
    """
    super().__init__(scope, id, **kwargs)

    # Per-region account that Amazon Inspector publishes from.
    principal_arns_by_region = {
        "eu-central-1": {"ARN": "arn:aws:iam::537503971621:root"},
        "us-east-1": {"ARN": "arn:aws:iam::316112463485:root"},
        "eu-west-1": {"ARN": "arn:aws:iam::357557129151:root"},
        "us-east-2": {"ARN": "arn:aws:iam::646659390643:root"},
    }
    principals_map = core.CfnMapping(
        scope=self,
        id="Inspector SNS Principals",
        mapping=principal_arns_by_region,
    )

    # Per-region ARNs of the four Inspector rules packages we assess with.
    rules_by_region = {
        "eu-central-1": {
            "CVE": "arn:aws:inspector:eu-central-1:537503971621:rulespackage/0-wNqHa8M9",
            "CIS": "arn:aws:inspector:eu-central-1:537503971621:rulespackage/0-nZrAVuv8",
            "securityBestPractices": "arn:aws:inspector:eu-central-1:537503971621:rulespackage/0-ZujVHEPB",
            "runtimeBehaviorAnalysis": "arn:aws:inspector:eu-central-1:537503971621:rulespackage/0-0GMUM6fg",
        },
        "eu-west-1": {
            "CVE": "arn:aws:inspector:eu-west-1:357557129151:rulespackage/0-ubA5XvBh",
            "CIS": "arn:aws:inspector:eu-west-1:357557129151:rulespackage/0-sJBhCr0F",
            "securityBestPractices": "arn:aws:inspector:eu-west-1:357557129151:rulespackage/0-SnojL3Z6",
            "runtimeBehaviorAnalysis": "arn:aws:inspector:eu-west-1:357557129151:rulespackage/0-lLmwe1zd",
        },
        "us-east-1": {
            "CVE": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-gEjTy7T7",
            "CIS": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-rExsr2X8",
            "securityBestPractices": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-R01qwB5Q",
            "runtimeBehaviorAnalysis": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-gBONHN9h",
        },
        "us-east-2": {
            "CVE": "arn:aws:inspector:us-east-2:646659390643:rulespackage/0-JnA8Zp85",
            "CIS": "arn:aws:inspector:us-east-2:646659390643:rulespackage/0-m8r61nnh",
            "securityBestPractices": "arn:aws:inspector:us-east-2:646659390643:rulespackage/0-AxKmMHPX",
            "runtimeBehaviorAnalysis": "arn:aws:inspector:us-east-2:646659390643:rulespackage/0-UCYZFKPV",
        },
    }
    rules_map = core.CfnMapping(
        scope=self,
        id="Inspector Rule packages",
        mapping=rules_by_region,
    )

    # Assessment target: every resource tagged Inspector=true.
    resource_group = inspector.CfnResourceGroup(
        scope=self,
        id="CDK test resource group",
        resource_group_tags=[core.CfnTag(key="Inspector", value="true")],
    )
    assessment_target = inspector.CfnAssessmentTarget(
        scope=self,
        id="CDK test assessment target",
        resource_group_arn=resource_group.attr_arn,
    )

    package_keys = ("CVE", "CIS", "securityBestPractices", "runtimeBehaviorAnalysis")
    assessment_template = inspector.CfnAssessmentTemplate(
        scope=self,
        id="CDK test assessment template",
        assessment_target_arn=assessment_target.attr_arn,
        duration_in_seconds=300,
        rules_package_arns=[
            rules_map.find_in_map(self.region, key) for key in package_keys
        ],
    )

    # Lambda that digests Inspector findings delivered through SNS.
    report_function = aws_lambda.Function(
        scope=self,
        id="CDK Inspector test report processor",
        code=aws_lambda.Code.from_asset("report_function"),
        handler="report.lambda_handler",
        runtime=aws_lambda.Runtime.PYTHON_3_7,
    )

    findings_topic = sns.Topic(
        scope=self,
        id="CDK Inspector topic",
    )
    # Let this region's Inspector service account publish to the topic.
    findings_topic.add_to_resource_policy(
        statement=iam.PolicyStatement(
            actions=["SNS:Publish"],
            principals=[iam.ArnPrincipal(arn=principals_map.find_in_map(self.region, "ARN"))],
            resources=[findings_topic.topic_arn],
        )
    )
    findings_topic.add_subscription(
        subscription=sns_subs.LambdaSubscription(fn=report_function)
    )

    # Custom resource that subscribes the assessment template to the topic.
    InspectorSubscriberCustomResource(
        scope=self,
        id="Inspector SNS Subscriber",
        Template=assessment_template.attr_arn,
        Topic=findings_topic.topic_arn,
    )
def __init__(self, scope: core.Construct, id_: str,
             imported_assets_config: ImportedAssetsConfiguration,
             mongodb_config: MongoDBConfiguration,
             access_keys_config: AccessKeysConfiguration, **kwargs):
    """Build the property-processing stack.

    Three Lambdas (stream dispatcher, geocoder, GraphQL fetcher) with their
    library layers, an SNS topic for new properties, a GraphQL REST API, and
    a DynamoDB stream source; permissions, layers and environment variables
    are wired at the end.

    :param scope: Parent construct (CDK).
    :param id_: Construct id (CDK).
    :param imported_assets_config: Holds the existing property table.
    :param mongodb_config: MongoDB connection/collection settings.
    :param access_keys_config: Third-party API access tokens.
    """
    super().__init__(scope, id_, **kwargs)

    def _pipeline_function(name, asset_path, timeout_seconds, with_dlq):
        # All three Lambdas share every setting except name, asset,
        # timeout and dead-letter queue.
        return lambda_.Function(
            self, name,
            code=lambda_.AssetCode(asset_path),
            timeout=core.Duration.seconds(timeout_seconds),
            description='',
            function_name=name,
            reserved_concurrent_executions=10,
            handler=DEFAULT_LAMBDA_HANDLER,
            runtime=DEFAULT_LAMBDA_RUNTIME,
            log_retention=DEFAULT_LAMBDA_LOG_RETENTION,
            memory_size=128,
            retry_attempts=0,
            dead_letter_queue_enabled=with_dlq)

    def _libs_layer(name, asset_path):
        # Each Lambda has a matching layer carrying its dependencies.
        return lambda_.LayerVersion(
            self, name,
            code=lambda_.Code.from_asset(asset_path),
            description='',
            layer_version_name=name,
            compatible_runtimes=[DEFAULT_LAMBDA_RUNTIME])

    # LAMBDAS DEFINITIONS
    lambda_dispatch_stream = _pipeline_function(
        'DispatchStream',
        'stack/lambda/dispatch_stream/1.0.0/python/dispatch_stream',
        10, False)
    lambda_geocode_property = _pipeline_function(
        'GeocodeProperty',
        'stack/lambda/geocode_property/1.1.3/python/geocode_property',
        15, True)
    lambda_fetch_properties = _pipeline_function(
        'FetchProperties',
        'stack/lambda/fetch_properties/1.4.0/python/fetch_properties',
        10, True)

    # LAYERS DEFINITIONS
    layer_dispatch_stream = _libs_layer(
        'DispatchStreamLibs', 'stack/lambda/dispatch_stream/1.0.0/')
    layer_geocode_property = _libs_layer(
        'GeocodePropertyLibs', 'stack/lambda/geocode_property/1.1.3/')
    layer_fetch_properties = _libs_layer(
        'FetchPropertiesLibs', 'stack/lambda/fetch_properties/1.4.0/')

    # CLOUDWATCH RULES DEFINITIONS
    # -
    # SQS QUEUES DEFINITIONS
    # -
    # SNS TOPICS DEFINITIONS
    topic_new_properties = sns.Topic(
        self, 'NewProperties', display_name='', topic_name='NewProperties')

    # API GATEWAYS
    api_gateway_graphql = api_gateway.LambdaRestApi(
        self, 'GraphQLApi',
        handler=lambda_fetch_properties,
        rest_api_name='GraphQLApi',
        description='GraphQL API',
        cloud_watch_role=True)
    graphql_resource = api_gateway_graphql.root.add_resource('graphql')
    graphql_resource.add_method('GET', api_key_required=False)
    api_gateway_graphql.add_usage_plan(
        'GraphQLUsagePlan',
        name='GraphQLUsagePlan',
        throttle=api_gateway.ThrottleSettings(rate_limit=1, burst_limit=1))

    # DYNAMODB PERMISSIONS
    lambda_dispatch_stream.add_event_source(
        event_sources.DynamoEventSource(
            table=imported_assets_config.table_property,
            starting_position=lambda_.StartingPosition.LATEST,
            batch_size=10,
            max_batching_window=core.Duration.seconds(30),
            parallelization_factor=10,
            retry_attempts=0))

    # CLOUDWATCH SCHEDULING RULES
    # -
    # SQS PERMISSIONS
    # -
    # SNS PERMISSIONS
    topic_new_properties.grant_publish(lambda_dispatch_stream)
    topic_new_properties.add_subscription(
        sns_subscriptions.LambdaSubscription(lambda_geocode_property))

    # LAYERS ASSIGNMENTS
    lambda_dispatch_stream.add_layers(layer_dispatch_stream)
    lambda_geocode_property.add_layers(layer_geocode_property)
    lambda_fetch_properties.add_layers(layer_fetch_properties)

    # ENVIRONMENT VARIABLES
    mongo_env = {
        'MONGODB_URI': mongodb_config.uri,
        'MONGODB_DATABASE': mongodb_config.database,
        'MONGODB_COLLECTION': mongodb_config.collection,
    }
    geocode_env = dict(mongo_env,
                       API_ACCESS_TOKEN_GEOCODING=access_keys_config.geocoding)
    for env_key, env_value in geocode_env.items():
        lambda_geocode_property.add_environment(key=env_key, value=env_value)
    fetch_env = dict(mongo_env,
                     MONGODB_MAX_PAGE_SIZE=mongodb_config.max_page_size)
    for env_key, env_value in fetch_env.items():
        lambda_fetch_properties.add_environment(key=env_key, value=env_value)
def __init__(self, scope: core.Construct, stack_id: str, **kwargs) -> None:
    """Assemble the spam-detection Lambda pipeline.

    Five CodeDeploy-managed Lambdas behind one API Gateway, connected by
    two SNS topics: AnalyzeImage fans out to the Detect* Lambdas, which
    fan in to UpdateSpamScore.
    """
    super().__init__(scope, stack_id, **kwargs)
    # A reference to the Layer containing the Image Hash python libraries.
    # NOTE(review): reuses stack_id as the construct id for the layer —
    # works, but any other construct with the same id would collide; confirm intentional.
    self.__image_hash_layer = _lambda.LayerVersion.from_layer_version_arn(
        self, stack_id, IMAGE_HASH_LAYER_ARN)
    # Single CodeDeploy application shared by all pipeline Lambdas.
    lambda_app = codedeploy.LambdaApplication(
        self,
        'spam-detection-pipeline-lambda-app',
        application_name='SpamDetectionPipelineLambda',
    )
    # Pipeline stages; PipelineLambda wraps a Function + deployment alias.
    self.__analyze_image = PipelineLambda(self, lambda_app, 'AnalyzeImage')
    self.__detect_known_bad_content = PipelineLambda(
        self, lambda_app, 'DetectKnownBadContent')
    self.__detect_spammy_words = PipelineLambda(self, lambda_app,
                                                'DetectSpammyWords')
    self.__detect_adult_content = PipelineLambda(self, lambda_app,
                                                 'DetectAdultContent')
    self.__update_spam_score = PipelineLambda(self, lambda_app,
                                              'UpdateSpamScore')
    # Only the DetectKnownBadContent needs the ImageHash layer.
    self.__detect_known_bad_content.function.add_layers(
        self.__image_hash_layer)
    all_lambdas = [
        self.__analyze_image,
        self.__detect_known_bad_content,
        self.__detect_spammy_words,
        self.__detect_adult_content,
        self.__update_spam_score,
    ]
    # Define an API gateway and map the initial and final Lambda
    self.__api = apigw.LambdaRestApi(self, 'spam_detection_api',
                                     handler=self.__analyze_image.alias,
                                     proxy=False)
    # Expose a POST route for every pipeline Lambda (helper defined on this class).
    for pipeline_lambda in all_lambdas:
        self.__map_post_to_lambda_alias(pipeline_lambda)
    # Create an SNS topic to use for fan-out from the initial Lambda
    self.__analyze_requests_topic = sns.Topic(self, "analyze_requests")
    # Create an SNS topic to use for fan-in from the detection Lambdas
    self.__update_spam_score_topic = sns.Topic(self, "update_spam_score")
    # AnalyzeImage publishes work items to the fan-out topic; the env var
    # name tells the handler which topic ARN to use.
    self.__enable_publish_from_lambda(
        self.__analyze_requests_topic,
        self.__analyze_image,
        'SNS_ANALYZE_IMAGE_TOPIC_ARN',
    )
    # noinspection PyTypeChecker
    self.__update_spam_score_topic.add_subscription(
        sns_subscriptions.LambdaSubscription(
            self.__update_spam_score.alias))
    # For each detection Lambda:
    # - Allow it to invoke the UpdateSpamScore Lambda to report results
    # - Add a subscription to the SNS Topic so it receives processing requests
    # - Allow it to invoke AWS Rekognition via AWS Managed IAM Policy
    # - Add a PolicyStatement for access to the S3 bucket
    for aws_lambda in all_lambdas:
        # Detection stages are identified by their 'Detect' name prefix.
        if aws_lambda.name.startswith('Detect'):
            # noinspection PyTypeChecker
            self.__analyze_requests_topic.add_subscription(
                sns_subscriptions.LambdaSubscription(aws_lambda.alias))
            self.__enable_publish_from_lambda(
                self.__update_spam_score_topic,
                aws_lambda,
                'SNS_UPDATE_SPAM_SCORE_TOPIC_ARN',
            )
            aws_lambda.function.add_environment(
                'IMAGE_CONFIDENCE_THRESHOLD', '0.6')
            aws_lambda.function.role.add_managed_policy(
                _iam.ManagedPolicy.from_aws_managed_policy_name(
                    'AmazonRekognitionFullAccess'))
            # NOTE(review): actions=['*'] grants every S3 (and non-S3)
            # action on these objects — consider narrowing to s3:GetObject.
            aws_lambda.function.role.add_to_policy(
                _iam.PolicyStatement(
                    actions=['*'],
                    resources=['arn:aws:s3:::scalyr-serverless-demo/*'],
                ))