def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Notifications stack.

    Exposes two delivery Lambdas for other stacks to invoke:
      - ``self.pushover``: sends push notifications via the Pushover API.
      - ``self.mailjet``: sends email via the Mailjet API; a dedicated IAM
        user is also granted invoke rights on it.
    """
    super().__init__(scope, id, **kwargs)

    # CONSISTENCY FIX: this block used '%'-style formatting while the rest
    # of the file uses f-strings; unified to f-strings (same values).
    self.pushover = get_lambda(
        self,
        f"{id}-lambda-pushover",
        code=f"lib/stacks/{id}/lambdas",
        handler="send_to_pushover.handler",
        environment={
            "PUSHOVER_TOKEN": env["PUSHOVER_TOKEN"],
            "PUSHOVER_USERKEY": env["PUSHOVER_USERKEY"],
            "LAMBDA_FUNCTIONS_LOG_LEVEL": "INFO",
        })

    self.mailjet = get_lambda(
        self,
        f"{id}-lambda-mailjet",
        code=f"lib/stacks/{id}/lambdas",
        handler="send_to_mailjet.handler",
        environment={
            "MAILJET_API_KEY": env["MAILJET_API_KEY"],
            "MAILJET_API_SECRET": env["MAILJET_API_SECRET"],
            "MAILJET_DEFAULT_TO_ADDRESS": env["MAILJET_DEFAULT_TO_ADDRESS"],
            "MAILJET_FROM_ADDRESS": env["MAILJET_FROM_ADDRESS"],
        })

    # Dedicated IAM user allowed to invoke the mail-sending Lambda directly.
    self.mailjet.grant_invoke(aws_iam.User(self, f"{id}-mail-sender"))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """PageSpeed stack.

    Creates a DynamoDB table keyed by URL, a poller Lambda (with
    read/write access to the table) that queries the Google PageSpeed
    API, and a cron rule firing at minute 30 of each hour 06:00-16:00 UTC.
    """
    super().__init__(scope, id, **kwargs)

    url_key = aws_dynamodb.Attribute(
        name="url", type=aws_dynamodb.AttributeType.STRING)
    self.table = aws_dynamodb.Table(
        self, f"{id}-table", partition_key=url_key)

    poller_environment = {
        "DYNAMODB_TABLE": self.table.table_name,
        "GOOGLE_PAGESPEED_API_KEY": env["GOOGLE_PAGESPEED_API_KEY"],
        "GOOGLE_PAGESPEED_TARGET_URLS": env["GOOGLE_PAGESPEED_TARGET_URLS"],
    }
    poller = get_lambda(
        self,
        f"{id}-lambda-poller",
        code=f"lib/stacks/{id}/lambdas",
        handler="pagespeed_poller.handler",
        layers=[get_layer(self, "requests_oauthlib", id)],
        environment=poller_environment,
    )
    self.table.grant_read_write_data(poller)

    # Poll at minute 30 of every hour in the 06:00-16:00 UTC window.
    schedule = aws_events.Schedule.cron(hour="6-16", minute="30")  # pylint: disable=no-value-for-parameter
    rule = aws_events.Rule(
        self, f"{id}-scheduled-event", enabled=True, schedule=schedule)
    rule.add_target(aws_events_targets.LambdaFunction(handler=poller))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Deployer stack.

    Creates a DynamoDB table keyed by URL, a deployer Lambda (feedparser
    layer, read/write on the table) configured with feed URLs and a
    Netlify hook, and a cron rule firing on the hour 06:00-16:00 UTC.
    """
    super().__init__(scope, id, **kwargs)

    partition_key = aws_dynamodb.Attribute(
        name="url", type=aws_dynamodb.AttributeType.STRING)
    self.table = aws_dynamodb.Table(
        self, f"{id}-table", partition_key=partition_key)

    deployer = get_lambda(
        self,
        f"{id}-lambda-deployer",
        code=f"lib/stacks/{id}/lambdas",
        handler="deployer.handler",
        layers=[get_layer(self, "feedparser", id)],
        environment={
            "DEPLOYER_FEED_URLS": env["DEPLOYER_FEED_URLS"],
            "DYNAMODB_TABLE": self.table.table_name,
            "NETLIFY_HOOK": env["NETLIFY_HOOK"],
        },
    )
    self.table.grant_read_write_data(deployer)

    # Fire at minute 0 of every hour in the 06:00-16:00 UTC window.
    schedule = aws_events.Schedule.cron(hour="6-16", minute="0")  # pylint: disable=no-value-for-parameter
    rule = aws_events.Rule(
        self, f"{id}-scheduled-event", enabled=True, schedule=schedule)
    rule.add_target(aws_events_targets.LambdaFunction(handler=deployer))
def __init__(
        self,
        scope: core.Construct,
        id: str,  # pylint: disable=redefined-builtin
        lambda_notifications: aws_lambda.IFunction,
        social_log_group: aws_logs.ILogGroup,
        pagespeed_table: aws_dynamodb.ITable,
        **kwargs) -> None:
    """API stack: a Lambda-backed API Gateway served on a custom domain.

    The API Lambda is granted: invoke on the shared notifications Lambda,
    read access (GetLogEvents/DescribeLogStreams) on the social report log
    group, and read access on the PageSpeed DynamoDB table.
    """
    super().__init__(scope, id, **kwargs)

    # BUG FIX: env['CORS_ALLOW_ORIGIN'] was read unconditionally in the
    # Lambda environment below, so the `"CORS_ALLOW_ORIGIN" in env`
    # fallback further down was dead code — a missing key raised KeyError
    # first. Read once with a '*' default, matching the Cors.ALL_ORIGINS
    # gateway fallback.
    cors_allow_origin = env.get('CORS_ALLOW_ORIGIN', '*')

    api_lambda = get_lambda(
        self,
        id,
        code='lib/stacks/{id}/{id}'.format(id=id),
        handler='main.handler',
        environment={
            'CORS_ALLOW_ORIGIN': cors_allow_origin,
            'PUSHOVER_TOKEN': env['PUSHOVER_TOKEN'],
            'PUSHOVER_USERKEY': env['PUSHOVER_USERKEY'],
            'LAMBDA_FUNCTIONS_LOG_LEVEL': 'INFO',
            'LAMBDA_NOTIFICATIONS': lambda_notifications.function_name,
            'PAGESPEED_TABLE': pagespeed_table.table_name,
            'REPORT_LOG_GROUP_NAME': social_log_group.log_group_name,
        },
    )
    lambda_notifications.grant_invoke(api_lambda)
    social_log_group.grant(api_lambda, "logs:GetLogEvents",
                           "logs:DescribeLogStreams")
    pagespeed_table.grant_read_data(api_lambda)

    # Custom domain, backed by an ACM certificate for API_DOMAIN.
    cert = aws_certificatemanager.Certificate(
        self,
        '{}-certificate'.format(id),
        domain_name=env['API_DOMAIN'],
    )
    domain = aws_apigateway.DomainNameOptions(
        certificate=cert,
        domain_name=env['API_DOMAIN'],
    )
    # Restrict pre-flight to the configured origin when one is set,
    # otherwise allow all origins (same effect as the '*' default above).
    cors = aws_apigateway.CorsOptions(
        allow_methods=['POST'],
        allow_origins=[cors_allow_origin] if "CORS_ALLOW_ORIGIN" in env
        else aws_apigateway.Cors.ALL_ORIGINS)
    aws_apigateway.LambdaRestApi(
        self,
        '%s-gateway' % id,
        handler=api_lambda,
        domain_name=domain,
        default_cors_preflight_options=cors,
    )
def __init__(
        self,
        scope: core.Construct,
        id: str,  # pylint: disable=redefined-builtin
        lambda_notifications: aws_lambda.IFunction,
        **kwargs) -> None:
    """Report stack.

    A daily (midnight UTC) Lambda that runs CloudWatch Logs queries
    account-wide and delivers its report through the shared
    notifications Lambda.
    """
    super().__init__(scope, id, **kwargs)

    report_lambda = get_lambda(
        self,
        f"{id}-lambda",
        code=f"lib/stacks/{id}/lambdas",
        handler="send_report.handler",
        environment={
            "LAMBDA_FUNCTIONS_LOG_LEVEL": "INFO",
            "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
        },
        timeout=core.Duration.minutes(15),  # pylint: disable=no-value-for-parameter
    )
    lambda_notifications.grant_invoke(report_lambda)

    # Permissions to run CloudWatch Logs Insights queries on any log
    # group in this account/region.
    logs_statement = aws_iam.PolicyStatement(
        actions=[
            "logs:DescribeLogGroups",
            "logs:GetQueryResults",
            "logs:StartQuery",
        ],
        resources=[
            f"arn:aws:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:*"
        ],
    )
    aws_iam.Policy(
        self,
        f"{id}-iam-policy-logs",
        roles=[report_lambda.role],
        statements=[logs_statement],
    )

    # Run once a day at 00:00 UTC.
    midnight = aws_events.Schedule.cron(hour="0", minute="0")  # pylint: disable=no-value-for-parameter
    cronjob = aws_events.Rule(
        self, f"{id}-scheduled-event", enabled=True, schedule=midnight)
    cronjob.add_target(
        aws_events_targets.LambdaFunction(handler=report_lambda))
def build_lambda(name):
    """Builder function for aws_lambda.Function objects.

    Creates one per-service subscriber Lambda (closing over the enclosing
    scope's `self`, `id`, `code`, `lambda_layers`, `create_report_lambda`
    and `topic`) and subscribes it to the SNS topic. On success the
    Lambda reports to `create_report_lambda` via a Lambda destination.
    """
    name = name.lower()
    # Forward only the env vars this service needs: its own NAME_*
    # settings plus the shared LAMBDA_FUNCTIONS_* / GITHUB_* ones.
    # str.startswith accepts a tuple, equivalent to the chained `or`s.
    prefixes = (name.upper(), "LAMBDA_FUNCTIONS_", "GITHUB_")
    service_env = {
        var: value
        for var, value in environ.items()
        if var.startswith(prefixes)
    }
    fn = get_lambda(
        self,
        "{}-{}".format(id, name),
        code=code,
        handler='services.{}.handler'.format(name),
        layers=[lambda_layers["requests_oauthlib"]],
        environment=service_env,
        on_success=aws_lambda_destinations.LambdaDestination(create_report_lambda))
    topic.add_subscription(aws_sns_subscriptions.LambdaSubscription(fn))
def __init__(
        self,
        scope: core.Construct,
        id: str,  # pylint: disable=redefined-builtin
        lambda_notifications: aws_lambda.IFunction,
        **kwargs) -> None:
    """Backups-monitor stack.

    A daily (06:00 UTC) Lambda that lists the configured S3 buckets and
    reports through the shared notifications Lambda.
    """
    super().__init__(scope, id, **kwargs)

    monitor = get_lambda(
        self,
        f"{id}-lambda",
        code=f"lib/stacks/{id.replace('-', '_')}/lambdas",
        handler="backups_monitor.handler",
        environment={
            "BUCKETS_TO_MONITOR": env["BUCKETS_TO_MONITOR"],
            "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
        })
    lambda_notifications.grant_invoke(monitor)

    # BUCKETS_TO_MONITOR is ';'-separated; the first ','-field of each
    # entry is the bucket name. Grant ListBucket on exactly those buckets.
    bucket_arns = [
        f"arn:aws:s3:::{line.split(',')[0]}"
        for line in env["BUCKETS_TO_MONITOR"].split(";")
    ]
    aws_iam.Policy(
        self,
        f"{id.replace('-', '_')}-iam-policy",
        roles=[monitor.role],
        statements=[
            aws_iam.PolicyStatement(
                actions=["s3:ListBucket"], resources=bucket_arns)
        ],
    )

    # Run every day at 06:00 UTC.
    daily = aws_events.Schedule.cron(minute="0", hour="6")  # pylint: disable=no-value-for-parameter
    cronjob = aws_events.Rule(
        self, f"{id}-scheduled-event", enabled=True, schedule=daily)
    cronjob.add_target(aws_events_targets.LambdaFunction(monitor))
def __init__(self, scope: core.Construct, id: str,
             lambda_notifications: aws_lambda.IFunction,
             **kwargs) -> None:
    """WHOIS stack.

    A nightly (23:30 UTC) Lambda that polls WHOIS data for the configured
    domains via the WhoisXML API and reports through the shared
    notifications Lambda.
    """
    super().__init__(scope, id, **kwargs)

    poller_environment = {
        "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
        "WHOIS_DOMAINS": env["WHOIS_DOMAINS"],
        "WHOISXMLAPI_KEY": env["WHOISXMLAPI_KEY"],
    }
    poller = get_lambda(
        self,
        f"{id}-lambda-poller",
        code=f"lib/stacks/{id}/lambdas",
        handler="whois_poller.handler",
        environment=poller_environment,
    )
    lambda_notifications.grant_invoke(poller)

    # Poll once a day at 23:30 UTC.
    nightly = aws_events.Schedule.cron(hour="23", minute="30")  # pylint: disable=no-value-for-parameter
    cronjob = aws_events.Rule(
        self, f"{id}-scheduled-event", enabled=True, schedule=nightly)
    cronjob.add_target(aws_events_targets.LambdaFunction(handler=poller))
def __init__(
        self,
        scope: core.Construct,
        id: str,  # pylint: disable=redefined-builtin
        lambda_notifications: aws_lambda.IFunction,
        **kwargs) -> None:
    """Pocket-to-Kindle stack.

    Pipeline (all wiring below):
      1. ``reader`` runs hourly, tracks a 'since' timestamp in a CloudWatch
         log stream, and fans out to ``create_epub``.
      2. ``create_epub`` builds an EPUB and writes it into ``epub_bucket``.
      3. An S3 PUT on ``epub_bucket`` invokes ``trigger_ecs_task``, which
         starts a Fargate task converting EPUB -> MOBI into ``mobi_bucket``.
      4. An S3 PUT on ``mobi_bucket`` invokes ``send_to_kindle``, which
         delivers the file via the shared ``lambda_notifications`` function
         (granted s3:GetObject on the MOBI bucket so it can fetch the
         attachment).
    """
    super().__init__(scope, id, **kwargs)
    # CloudWatch LogGroup and Stream to store 'since' timestamp value
    since_log_group = aws_logs.LogGroup(
        self,
        f"{id}-log-group",
        log_group_name=f"{id}-timestamps",
        retention=DEFAULT_LOG_RETENTION,
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    since_log_group.add_stream(
        f"{id}-log-stream",
        log_stream_name=since_log_group.log_group_name,
    )
    # Lambda shared code
    lambda_code = code_from_path(path=f"lib/stacks/{id}/lambdas")
    # Lambda create_epub (and layers): build epub file and store to S3 bucket
    epub_bucket = get_bucket(self, f"{id}-epub-bucket")
    lambda_create_epub = get_lambda(
        self,
        id + "-create-epub",
        code=lambda_code,
        handler="create_epub.handler",
        environment={
            "EPUB_BUCKET": epub_bucket.bucket_name,
        },
        layers=[
            get_layer(self, layer_name=layer, prefix=id)
            for layer in ("pandoc", "html2text", "requests_oauthlib")
        ],
        # EPUB generation can be slow; allow up to 5 minutes.
        timeout=core.Duration.minutes(5),  # pylint: disable=no-value-for-parameter
    )
    epub_bucket.grant_write(lambda_create_epub)
    # Lambda send_to_kindle: invoked when new MOBI dropped into S3 bucket, deliver MOBI as
    # email attachment via lambda_notifications
    mobi_bucket = get_bucket(self, f"{id}-mobi-bucket")
    lambda_send_to_kindle = get_lambda(
        self,
        id + "-send-to-kindle",
        code=lambda_code,
        handler="send_to_kindle.handler",
        environment={
            "KINDLE_EMAIL": env["KINDLE_EMAIL"],
            "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
            "MOBI_SRC_BUCKET": mobi_bucket.bucket_name,
            "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
            "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
        })
    mobi_bucket.add_event_notification(
        event=aws_s3.EventType.OBJECT_CREATED_PUT,
        dest=aws_s3_notifications.LambdaDestination(lambda_send_to_kindle),
    )
    lambda_notifications.grant_invoke(lambda_send_to_kindle)
    # Let the notifications Lambda fetch the MOBI object it has to attach.
    aws_iam.Policy(
        self,
        f"{id}-mail-attachment-policy",
        roles=[lambda_notifications.role],
        statements=[
            aws_iam.PolicyStatement(
                actions=["s3:GetObject"],
                resources=[f"{mobi_bucket.bucket_arn}/*"])
        ],
    )
    # Lambda reader: fetch new articles from Pocket and fan-out trigger create_epub Lambda
    lambda_reader = get_lambda(
        self,
        id + "-reader",
        code=lambda_code,
        handler="reader.handler",
        environment={
            "LAMBDA_PUBLISHER": lambda_create_epub.function_name,
            "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
            "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
            "SINCE_LOG_GROUP": since_log_group.log_group_name,
        },
    )
    # Reader both reads and advances the 'since' timestamp stream.
    since_log_group.grant(
        lambda_reader,
        "logs:GetLogEvents",
        "logs:PutLogEvents",
    )
    lambda_create_epub.grant_invoke(lambda_reader)
    # Fargate task: run dockerized `kindlegen` to parse EPUB to MOBI,
    # triggered by trigger_ecs_task Lambda
    # https://medium.com/@piyalikamra/s3-event-based-trigger-mechanism-to-start-ecs-far-gate-tasks-without-lambda-32f57ed10b0d
    cluster, vpc = get_fargate_cluster(self, id)
    mem_limit = "512"
    task = get_fargate_task(self, id, mem_limit)
    # Task role: read EPUB input, write MOBI output — nothing broader.
    aws_iam.Policy(
        self,
        f"{id}-bucket-policy",
        roles=[task.task_role],
        statements=[
            aws_iam.PolicyStatement(
                actions=["s3:GetObject"],
                resources=[f"{epub_bucket.bucket_arn}/*"]),
            aws_iam.PolicyStatement(
                actions=["s3:PutObject"],
                resources=[f"{mobi_bucket.bucket_arn}/*"]),
        ],
    )
    container = get_fargate_container(self, id, task, mem_limit)
    # Lambda trigger_ecs_task: trigger Fargate task when new EPUB file is dropped into epub_bucket
    lambda_trigger_ecs_task = get_lambda(
        self,
        f"{id}-trigger-ecs-task",
        code=lambda_code,
        handler="trigger_ecs_task.handler",
        environment={
            "ECS_CLUSTER": cluster.cluster_arn,
            "ECS_CLUSTER_SECURITY_GROUP": vpc.vpc_default_security_group,
            # NOTE(review): assumes the cluster VPC has at least one
            # public subnet — confirm get_fargate_cluster guarantees this.
            "ECS_CLUSTER_SUBNET": vpc.public_subnets[0].subnet_id,
            "ECS_CONTAINER": container.container_name,
            "ECS_TASK": task.task_definition_arn,
            "MOBI_DEST_BUCKET": mobi_bucket.bucket_name,
        },
    )
    epub_bucket.add_event_notification(
        event=aws_s3.EventType.OBJECT_CREATED_PUT,
        dest=aws_s3_notifications.LambdaDestination(
            lambda_trigger_ecs_task),
    )
    # Trigger Lambda may run the task and must pass both task roles to ECS.
    aws_iam.Policy(
        self,
        f"{id}-lambda-trigger-policy",
        roles=[lambda_trigger_ecs_task.role],
        statements=[
            aws_iam.PolicyStatement(
                actions=["ecs:RunTask"],
                resources=[task.task_definition_arn],
            ),
            aws_iam.PolicyStatement(
                actions=["iam:PassRole"],
                resources=[
                    task.execution_role.role_arn,
                    task.task_role.role_arn,
                ],
            )
        ],
    )
    # Cloudwatch cronjob event to check for new articles every hour
    cronjob = aws_events.Rule(
        self,
        f"{id}-scheduled-event",
        enabled=True,
        schedule=aws_events.Schedule.cron(minute="0"),  # pylint: disable=no-value-for-parameter
    )
    cronjob.add_target(
        aws_events_targets.LambdaFunction(handler=lambda_reader))
def __init__(self, scope: core.Construct, id: str,  # pylint: disable=redefined-builtin
             lambda_notifications: aws_lambda.IFunction, **kwargs) -> None:
    """Pocket-to-document stack (simpler sibling of the Kindle/EPUB stack).

    Pipeline: an hourly ``reader`` Lambda tracks a 'since' timestamp in a
    CloudWatch log stream and fans out to ``create_doc``, which renders a
    document into the S3 bucket; an S3 PUT on the bucket invokes
    ``send_to_kindle``, which delivers the file via the shared
    ``lambda_notifications`` function (granted s3:GetObject on the bucket
    so it can fetch the attachment).
    """
    super().__init__(scope, id, **kwargs)
    # CloudWatch LogGroup and Stream to store 'since' timestamp value
    since_log_group = aws_logs.LogGroup(
        self,
        f"{id}-log-group",
        log_group_name=f"{id}-timestamps",
        retention=DEFAULT_LOG_RETENTION,
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    since_log_group.add_stream(
        f"{id}-log-stream",
        log_stream_name=since_log_group.log_group_name,
    )
    # Lambda shared code
    lambda_code = code_from_path(path=f"lib/stacks/{id}/lambdas")
    # Lambda create_doc (and layers): build document file and store to S3 bucket
    bucket = get_bucket(self, f"{id}-bucket")
    lambda_create_doc = get_lambda(
        self,
        id + "-create-document",
        code=lambda_code,
        handler="create_doc.handler",
        environment={
            "DOCUMENT_BUCKET": bucket.bucket_name,
        },
        layers=[get_layer(self, layer_name=layer, prefix=id)
                for layer in ("readability", "requests_oauthlib")],
        # Document generation can be slow; allow up to 5 minutes.
        timeout=core.Duration.minutes(5),  # pylint: disable=no-value-for-parameter
    )
    bucket.grant_write(lambda_create_doc)
    # Lambda send_to_kindle: invoked when new documents dropped into S3 bucket,
    # deliver document as email attachment via lambda_notifications
    lambda_send_to_kindle = get_lambda(
        self,
        id + "-send-to-kindle",
        code=lambda_code,
        handler="send_to_kindle.handler",
        environment={
            "KINDLE_EMAIL": env["KINDLE_EMAIL"],
            "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
            "DOCUMENT_SRC_BUCKET": bucket.bucket_name,
            "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
            "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
        }
    )
    bucket.add_event_notification(
        event=aws_s3.EventType.OBJECT_CREATED_PUT,
        dest=aws_s3_notifications.LambdaDestination(lambda_send_to_kindle),
    )
    lambda_notifications.grant_invoke(lambda_send_to_kindle)
    # Let the notifications Lambda fetch the document it has to attach.
    aws_iam.Policy(
        self,
        f"{id}-mail-attachment-policy",
        roles=[lambda_notifications.role],
        statements=[
            aws_iam.PolicyStatement(
                actions=["s3:GetObject"],
                resources=[f"{bucket.bucket_arn}/*"]
            )
        ],
    )
    # Lambda reader: fetch new articles from Pocket and fan-out trigger create_doc Lambda
    lambda_reader = get_lambda(
        self,
        id + "-reader",
        code=lambda_code,
        handler="reader.handler",
        environment={
            "LAMBDA_PUBLISHER": lambda_create_doc.function_name,
            "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
            "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
            "SINCE_LOG_GROUP": since_log_group.log_group_name,
        },
    )
    # Reader both reads and advances the 'since' timestamp stream.
    since_log_group.grant(
        lambda_reader,
        "logs:GetLogEvents",
        "logs:PutLogEvents",
    )
    lambda_create_doc.grant_invoke(lambda_reader)
    # Cloudwatch cronjob event to check for new articles every hour
    cronjob = aws_events.Rule(
        self,
        f"{id}-scheduled-event",
        enabled=True,
        schedule=aws_events.Schedule.cron(minute="0"),  # pylint: disable=no-value-for-parameter
    )
    cronjob.add_target(aws_events_targets.LambdaFunction(handler=lambda_reader))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:  # pylint: disable=redefined-builtin
    """Publish-to-social stack.

    SNS fan-out: a daily ``feed_poller`` Lambda invokes the publisher
    Lambda, which posts to an SNS topic; one subscriber Lambda per
    service named in the LAMBDA_FUNCTIONS env var handles delivery, and
    on success each reports to ``create_report_lambda``, which writes to
    the per-service streams of ``self.log_group`` (exposed for other
    stacks, e.g. the API stack).
    """
    super().__init__(scope, id, **kwargs)
    topic = aws_sns.Topic(
        self,
        "topic-{}".format(id),
    )
    code = code_from_path(path='lib/stacks/{}/lambda'.format(id))
    # Lambda Layers
    lambda_layers = {layer: get_layer(self, layer_name=layer, prefix=id)
                     for layer in ("bs4", "requests_oauthlib")}
    # PUBLISH lambda
    lambda_publish_to_social = get_lambda(
        self,
        id,
        code=code,
        handler='{}.handler'.format(id.replace("-", "_")),
        layers=[
            lambda_layers["bs4"],
            lambda_layers["requests_oauthlib"],
        ],
        environment={
            'SNS_TOPIC': topic.topic_arn,
            'LAMBDA_FUNCTIONS_LOG_LEVEL':
                environ.get("LAMBDA_FUNCTIONS_LOG_LEVEL", "INFO"),
        },
    )
    topic.grant_publish(lambda_publish_to_social)
    # REPORT lambdas and CloudWatch resources
    report_log_group_name = "%s-reports" % id
    self.log_group = aws_logs.LogGroup(
        self,
        "%s-report-log-group" % id,
        log_group_name=report_log_group_name,
        retention=aws_logs.RetentionDays.INFINITE,
    )
    create_report_lambda = get_lambda(
        self,
        "%s-create-report" % id,
        code=code,
        handler="send_report.handler",
        environment={
            "REPORT_LOG_GROUP_NAME": report_log_group_name,
        }
    )
    self.log_group.grant_write(create_report_lambda)
    # SUBSCRIBE lambdas
    # LAMBDA_FUNCTIONS is a comma-separated (optionally spaced) list of
    # service names; empty entries are dropped.
    social_lambdas = [social.lower()
                      for social in environ.get("LAMBDA_FUNCTIONS", "")
                      .replace(" ", "")
                      .split(",")
                      if social]
    # One log stream per service for its delivery reports.
    for social in social_lambdas:
        self.log_group.add_stream(
            "%s-%s-report-log-stream" % (id, social),
            log_stream_name=social)

    def build_lambda(name):
        """Builder function for aws_lambda.Function objects."""
        name = name.lower()
        _lambda = get_lambda(
            self,
            "{}-{}".format(id, name),
            code=code,
            handler='services.{}.handler'.format(name),
            layers=[lambda_layers["requests_oauthlib"]],
            # Forward only the env vars this service needs: its own
            # NAME_* settings plus shared LAMBDA_FUNCTIONS_*/GITHUB_* ones.
            environment={var: value for var, value in environ.items()
                         if var.startswith(name.upper())
                         or var.startswith("LAMBDA_FUNCTIONS_")
                         or var.startswith("GITHUB_")},
            # On success, report delivery via the shared report Lambda.
            on_success=aws_lambda_destinations.LambdaDestination(create_report_lambda))
        topic.add_subscription(aws_sns_subscriptions.LambdaSubscription(_lambda))

    for social in social_lambdas:
        build_lambda(name=social)
    # POLLER lambda
    lambda_poll = get_lambda(
        self,
        f"{id}-poller",
        code=code,
        handler="feed_poller.handler",
        layers=[get_layer(self, "feedparser", id)],
        environment={
            "BLOG_FEED_URL": environ["BLOG_FEED_URL"],
            "LAMBDA_PUBLISH": lambda_publish_to_social.function_name,
        },
        # No automatic retries: avoid double-posting to social services.
        retry_attempts=0,
    )
    lambda_publish_to_social.grant_invoke(lambda_poll)
    # CRONJOB
    # Check the blog feed once a day at 06:00 UTC.
    cronjob = aws_events.Rule(
        self,
        f"{id}-scheduled-event",
        enabled=True,
        schedule=aws_events.Schedule.cron(hour="6", minute="0"),  # pylint: disable=no-value-for-parameter
    )
    cronjob.add_target(aws_events_targets.LambdaFunction(handler=lambda_poll))