Example #1
    def __init__(self, scope: core.Construct, id: str,
                 downstream: _lambda.IFunction, **kwargs):
        super().__init__(scope, id, **kwargs)

        self._table = ddb.Table(self,
                                "Hits",
                                partition_key={
                                    "name": "path",
                                    "type": ddb.AttributeType.STRING
                                })

        self._handler = _lambda.Function(
            self,
            "HitCountHandler",
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler='hitcount.handler',
            code=_lambda.Code.asset('lambda'),
            environment={
                "DOWNSTREAM_FUNCTION_NAME":
                downstream.function_name,  # resolve at run time
                "HITS_TABLE_NAME": self._table.table_name,  # same a table_name
            })

        # give the hit counter Lambda read/write permissions on the DynamoDB table
        self._table.grant_read_write_data(self.handler)
        # give the HitCounter permission to invoke the downstream Lambda function
        downstream.grant_invoke(self.handler)
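A note on `self.handler` (and `self.table` in a later example): the excerpts assign the resources to the private attributes `self._handler` and `self._table`, so the grant calls presumably rely on read-only properties defined elsewhere in the construct class. A minimal sketch of what those accessors would look like, assuming that is the case:

    # assumed accessors, not shown in the excerpts above
    @property
    def handler(self) -> _lambda.IFunction:
        return self._handler

    @property
    def table(self) -> ddb.Table:
        return self._table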
Example #2
    def __init__(self, scope: core.Construct, id: str,
                 downstream: _lambda.IFunction, **kwargs):
        super().__init__(scope, id, **kwargs)

        self._table = ddb.Table(self,
                                'Hits',
                                partition_key={
                                    'name': 'path',
                                    'type': ddb.AttributeType.STRING
                                })

        self._handler = _lambda.Function(self,
                                         'HitCountHandler',
                                         runtime=_lambda.Runtime.PYTHON_3_7,
                                         handler='hitcount.handler',
                                         code=_lambda.Code.asset('lambda'),
                                         environment={
                                             'DOWNSTREAM_FUNCTION_NAME':
                                             downstream.function_name,
                                             'HITS_TABLE_NAME':
                                             self._table.table_name,
                                         })

        self._table.grant_read_write_data(self.handler)
        downstream.grant_invoke(self.handler)
Example #3
    def __init__(self, scope: core.Construct, id: str,
                 downstream: _lambda.IFunction, **kwargs):
        super().__init__(scope, id, **kwargs)

        self._table = ddb.Table(
            self,
            "Hits",
            partition_key={
                "name": "path",
                "type": ddb.AttributeType.STRING
            },
        )

        self._handler = _lambda.Function(
            self,
            "HitCountHandler",
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="hitcount.handler",
            code=_lambda.Code.asset(
                "/Users/vincent/Workspace/python_lambda_iac_deployment/python_lambda_iac_deployment/lambda_function"
            ),
            environment={
                "DOWNSTREAM_FUNCTION_NAME": downstream.function_name,
                "HITS_TABLE_NAME": self._table.table_name,
            },
        )

        self._table.grant_read_write_data(self.handler)
        downstream.grant_invoke(self.handler)
Example #4
    def __init__(self, scope: core.Construct, id: str,
                 downstream: _lambda.IFunction, **kwargs):
        super().__init__(scope, id, **kwargs)

        # define a DynamoDB table with 'path' as the partition key
        # (every DynamoDB table must have a single partition key)
        table = ddb.Table(self,
                          'Hits',
                          table_name="dyna-HitCountDB",
                          partition_key={
                              'name': 'path',
                              'type': ddb.AttributeType.STRING
                          })

        # define a Lambda function
        self._handler = _lambda.Function(
            self,
            'HitCountHandler',
            function_name="dyna-HitCount",
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler='hitcount.handler',
            code=_lambda.Code.asset('lambda'),

            # wire the Lambda's environment variables
            # to the function_name and table_name of our resources
            environment={
                'DOWNSTREAM_FUNCTION_NAME': downstream.function_name,
                'HITS_TABLE_NAME': table.table_name,
            })
        # allow lambda to read/write the DynamoDB table
        table.grant_read_write_data(self.handler)
        # Grant invoke permissions
        downstream.grant_invoke(self.handler)
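These constructs only deploy the hit-counting function; the `hitcount.handler` code itself lives in the `lambda/` asset directory and is not part of the excerpts. As a rough sketch, assuming the usual hit-counter pattern, such a handler increments the per-path counter in the table and then proxies the event to the downstream function using the two environment variables:

# hitcount.py -- illustrative sketch only; the real asset code is not shown here
import json
import os

import boto3

table = boto3.resource("dynamodb").Table(os.environ["HITS_TABLE_NAME"])
lambda_client = boto3.client("lambda")


def handler(event, context):
    # atomically increment the hit counter for the requested path
    table.update_item(
        Key={"path": event["path"]},
        UpdateExpression="ADD hits :incr",
        ExpressionAttributeValues={":incr": 1},
    )

    # forward the original event to the downstream function and relay its response
    resp = lambda_client.invoke(
        FunctionName=os.environ["DOWNSTREAM_FUNCTION_NAME"],
        Payload=json.dumps(event),
    )
    return json.loads(resp["Payload"].read())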
Example #5
    def __init__(self, scope: core.Construct, id: str, downstream: lambda_function.IFunction) -> None:
        super().__init__(scope, id)

        self._table = dynamodb.Table(
            self,
            "Hits",
            table_name="Samples_CDK_HitCountTable",
            partition_key=dynamodb.Attribute(
                name="path",
                type=dynamodb.AttributeType.STRING
            )
        )

        self._handler = lambda_function.Function(
            self,
            "HitCountHandler",
            function_name="Samples_CDK_HitCounter",
            runtime=lambda_function.Runtime.PYTHON_3_7,
            code=lambda_function.Code.asset("lambda"),
            handler="hitcount.handler",
            environment={
                "DOWNSTREAM_FUNCTION_NAME": downstream.function_name,
                "HITS_TABLE_NAME": self._table.table_name
            }
        )

        self.table.grant_read_write_data(self.handler)

        downstream.grant_invoke(self.handler)
Example #6
    def __init__(self, scope: core.Construct, id: str,
                 downstream: _lambda.IFunction, **kwargs):
        super().__init__(scope, id, **kwargs)

        self._table = ddb.Table(
            self,
            "Hits",
            partition_key={
                "name": "path",
                "type": ddb.AttributeType.STRING
            },
        )

        self._handler = _lambda.Function(
            self,
            "HitCountHandler",
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="hitcount.handler",
            code=_lambda.Code.from_asset(os.path.join(os.getcwd(), "lambda")),
            environment={
                "DOWNSTREAM_FUNCTION_NAME": downstream.function_name,
                "HITS_TABLE_NAME": self._table.table_name,
            },
        )

        self._table.grant_read_write_data(self.handler)
        downstream.grant_invoke(self.handler)
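For context, a construct like the ones above is typically instantiated from a stack that owns the downstream function and then exposed behind an API. A minimal usage sketch, assuming the construct class is called HitCounter and exposes the handler property discussed earlier (class and file names are assumptions, not from the excerpts):

# app_stack.py -- illustrative usage sketch
from aws_cdk import core
from aws_cdk import aws_apigateway as apigw
from aws_cdk import aws_lambda as _lambda

from hitcounter import HitCounter  # a construct defined by an __init__ like those above


class AppStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        downstream = _lambda.Function(
            self,
            "HelloHandler",
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler="hello.handler",
            code=_lambda.Code.asset("lambda"),
        )

        hit_counter = HitCounter(self, "HelloHitCounter", downstream=downstream)

        # route all requests through the hit-counting Lambda
        apigw.LambdaRestApi(self, "Endpoint", handler=hit_counter.handler)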
Example #7
    def __init__(self, scope: core.Construct, id: str,
                 downstream: _lambda.IFunction, **kwargs):
        super().__init__(scope, id, **kwargs)

        # Define the DynamoDB table to store hit counts
        table = ddb.Table(self,
                          'Hits',
                          partition_key={
                              'name': 'path',
                              'type': ddb.AttributeType.STRING
                          })

        with open("lambda_src/request_processor.py", encoding="utf-8") as fp:
            request_processor_handler_code = fp.read()

        # Define the Lambda function for counting hits
        self._handler = _lambda.Function(
            self,
            'requestProcessorId',
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler='index.handler',
            code=_lambda.Code.inline(request_processor_handler_code),
            environment={
                'DOWNSTREAM_FUNCTION_NAME': downstream.function_name,
                'HITS_TABLE_NAME': table.table_name,
            })

        # Grant the Lambda read/write access to the table
        table.grant_read_write_data(self._handler)

        # Allow this Lambda to invoke the downstream Lambda
        downstream.grant_invoke(self._handler)
Example #8
    def stream_lambda_source(self, table: dynamo.ITable,
                             function: _lambda.IFunction):
        dynamodb_stream_source = event_source.DynamoEventSource(
            table=table,
            starting_position=_lambda.StartingPosition.LATEST,
            batch_size=1,
            retry_attempts=1)
        function.add_event_source(dynamodb_stream_source)
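DynamoEventSource requires the source table to have a stream enabled, which the helper above does not do itself. A hedged sketch of how it might be called from the same construct (module aliases dynamo, _lambda and event_source as in the excerpt; resource names are assumptions):

# illustrative sketch: wiring a stream-enabled table to a consumer function
table = dynamo.Table(
    self,
    "EventsTable",
    partition_key=dynamo.Attribute(name="pk", type=dynamo.AttributeType.STRING),
    stream=dynamo.StreamViewType.NEW_AND_OLD_IMAGES,  # a stream is required by the event source
)

consumer = _lambda.Function(
    self,
    "StreamConsumer",
    runtime=_lambda.Runtime.PYTHON_3_7,
    handler="consumer.handler",
    code=_lambda.Code.from_asset("lambda"),
)

self.stream_lambda_source(table=table, function=consumer)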
Example #9
    def __init__(
            self,
            scope: core.Construct,
            id: str,  # pylint: disable=redefined-builtin
            lambda_notifications: aws_lambda.IFunction,
            social_log_group: aws_logs.ILogGroup,
            pagespeed_table: aws_dynamodb.ITable,
            **kwargs) -> None:

        super().__init__(scope, id, **kwargs)

        api_lambda = get_lambda(
            self,
            id,
            code='lib/stacks/{id}/{id}'.format(id=id),
            handler='main.handler',
            environment={
                'CORS_ALLOW_ORIGIN': env['CORS_ALLOW_ORIGIN'],
                'PUSHOVER_TOKEN': env['PUSHOVER_TOKEN'],
                'PUSHOVER_USERKEY': env['PUSHOVER_USERKEY'],
                'LAMBDA_FUNCTIONS_LOG_LEVEL': 'INFO',
                'LAMBDA_NOTIFICATIONS': lambda_notifications.function_name,
                'PAGESPEED_TABLE': pagespeed_table.table_name,
                'REPORT_LOG_GROUP_NAME': social_log_group.log_group_name,
            },
        )
        lambda_notifications.grant_invoke(api_lambda)
        social_log_group.grant(api_lambda, "logs:GetLogEvents",
                               "logs:DescribeLogStreams")
        pagespeed_table.grant_read_data(api_lambda)

        cert = aws_certificatemanager.Certificate(
            self,
            '{}-certificate'.format(id),
            domain_name=env['API_DOMAIN'],
        )

        domain = aws_apigateway.DomainNameOptions(
            certificate=cert,
            domain_name=env['API_DOMAIN'],
        )

        cors = aws_apigateway.CorsOptions(
            allow_methods=['POST'],
            allow_origins=[env['CORS_ALLOW_ORIGIN']]
            if "CORS_ALLOW_ORIGIN" in env else aws_apigateway.Cors.ALL_ORIGINS)

        aws_apigateway.LambdaRestApi(
            self,
            '%s-gateway' % id,
            handler=api_lambda,
            domain_name=domain,
            default_cors_preflight_options=cors,
        )
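Several examples here call a project-specific get_lambda helper (and read configuration from an env mapping) that is not included in the excerpts. A plausible minimal sketch, assuming the helper simply wraps aws_lambda.Function with project defaults:

# helpers.py -- assumed shape of the shared helper; the real implementation is not shown
import os

from aws_cdk import aws_lambda, core

# configuration values referenced as env["..."] in the excerpts
env = dict(os.environ)


def get_lambda(scope, id, code, handler, environment=None, timeout=None, layers=None):
    """Return an aws_lambda.Function with project-wide defaults."""
    if not isinstance(code, aws_lambda.Code):
        # some call sites pass an asset path, others a prebuilt Code object
        code = aws_lambda.Code.from_asset(code)
    return aws_lambda.Function(
        scope,
        id,
        runtime=aws_lambda.Runtime.PYTHON_3_7,
        code=code,
        handler=handler,
        environment=environment or {},
        timeout=timeout or core.Duration.seconds(30),
        layers=layers or [],
    )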
Example #10
    def set_forecast_etl_permissions(
        self,
        function: IFunction,
        database: CfnResource,
        workgroup: CfnResource,
        quicksight_principal: CfnParameter,
        quicksight_source: CfnMapping,
        athena_bucket: IBucket,
        data_bucket_name_resource: CfnResource,
    ):
        function.role.attach_inline_policy(
            self.policies.athena_access(workgroup.ref))
        function.role.attach_inline_policy(
            self.policies.glue_access(
                catalog=database,
                athena_bucket=athena_bucket,
                data_bucket_name_resource=data_bucket_name_resource,
            ))
        function.role.attach_inline_policy(self.policies.quicksight_access())
        function.add_environment("SCHEMA_NAME", database.ref)
        function.add_environment("WORKGROUP_NAME", workgroup.ref)
        function.add_environment("QUICKSIGHT_PRINCIPAL",
                                 quicksight_principal.value_as_string)
        function.add_environment(
            "QUICKSIGHT_SOURCE",
            quicksight_source.find_in_map("General",
                                          "QuickSightSourceTemplateArn"),
        )
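The self.policies factory used above is not part of the excerpt. Purely as an illustration of its likely shape, an Athena access policy scoped to the given workgroup might look roughly like this (action list and scoping are assumptions, not the actual solution code):

# illustrative sketch of a policies.athena_access-style factory
from aws_cdk import core
from aws_cdk import aws_iam as iam


def athena_access(scope, workgroup_name: str) -> iam.Policy:
    return iam.Policy(
        scope,
        "AthenaAccess",
        statements=[
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=[
                    "athena:StartQueryExecution",
                    "athena:GetQueryExecution",
                    "athena:GetQueryResults",
                ],
                resources=[
                    f"arn:aws:athena:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:workgroup/{workgroup_name}"
                ],
            )
        ],
    )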
Example #11
    def __init__(
            self,
            scope: core.Construct,
            id: str,  # pylint: disable=redefined-builtin
            lambda_notifications: aws_lambda.IFunction,
            **kwargs) -> None:

        super().__init__(scope, id, **kwargs)

        _lambda = get_lambda(
            self,
            f"{id}-lambda",
            code=f"lib/stacks/{id}/lambdas",
            handler="send_report.handler",
            environment={
                "LAMBDA_FUNCTIONS_LOG_LEVEL": "INFO",
                "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
            },
            timeout=core.Duration.minutes(15),  # pylint: disable=no-value-for-parameter
        )

        lambda_notifications.grant_invoke(_lambda)

        aws_iam.Policy(
            self,
            f"{id}-iam-policy-logs",
            roles=[_lambda.role],
            statements=[
                aws_iam.PolicyStatement(
                    actions=[
                        "logs:DescribeLogGroups",
                        "logs:GetQueryResults",
                        "logs:StartQuery",
                    ],
                    resources=[
                        f"arn:aws:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:*"
                    ],
                )
            ],
        )

        cronjob = aws_events.Rule(
            self,
            f"{id}-scheduled-event",
            enabled=True,
            schedule=aws_events.Schedule.cron(hour="0", minute="0"),  # pylint: disable=no-value-for-parameter
        )
        cronjob.add_target(aws_events_targets.LambdaFunction(handler=_lambda))
Example #12
    def set_forecast_s3_access_permissions(
            self, name, function: IFunction,
            data_bucket_name_resource: CfnResource):
        forecast_s3_access_role = self.policies.forecast_s3_access_role(
            name=name, data_bucket_name_resource=data_bucket_name_resource)
        function.role.attach_inline_policy(
            iam.Policy(
                self,
                f"{function.node.id}ForecastPassRolePolicy",
                statements=[
                    iam.PolicyStatement(
                        effect=iam.Effect.ALLOW,
                        actions=["iam:PassRole"],
                        resources=[forecast_s3_access_role.role_arn],
                    )
                ],
            ))
        function.add_environment("FORECAST_ROLE",
                                 forecast_s3_access_role.role_arn)
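The role returned by self.policies.forecast_s3_access_role is what Amazon Forecast itself assumes to read and write the data bucket, which is why the Lambda only needs iam:PassRole on it. A hedged sketch of such a role factory (the real helper is not shown; names and the exact S3 actions are assumptions):

# illustrative sketch of a Forecast S3 access role
from aws_cdk import aws_iam as iam


def forecast_s3_access_role(scope, name: str, bucket_arn: str) -> iam.Role:
    role = iam.Role(
        scope,
        f"{name}ForecastS3AccessRole",
        assumed_by=iam.ServicePrincipal("forecast.amazonaws.com"),
    )
    role.add_to_policy(
        iam.PolicyStatement(
            actions=["s3:GetObject", "s3:PutObject", "s3:ListBucket"],
            resources=[bucket_arn, f"{bucket_arn}/*"],
        )
    )
    return role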
Example #13
    def __init__(
            self,
            scope: core.Construct,
            id: str,  # pylint: disable=redefined-builtin
            lambda_notifications: aws_lambda.IFunction,
            **kwargs) -> None:

        super().__init__(scope, id, **kwargs)

        function = get_lambda(
            self,
            f"{id}-lambda",
            code=f"lib/stacks/{id.replace('-', '_')}/lambdas",
            handler="backups_monitor.handler",
            environment={
                "BUCKETS_TO_MONITOR": env["BUCKETS_TO_MONITOR"],
                "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
            })
        lambda_notifications.grant_invoke(function)

        aws_iam.Policy(
            self,
            f"{id.replace('-', '_')}-iam-policy",
            roles=[function.role],
            statements=[
                aws_iam.PolicyStatement(
                    actions=["s3:ListBucket"],
                    resources=[
                        f"arn:aws:s3:::{line.split(',')[0]}"
                        for line in env["BUCKETS_TO_MONITOR"].split(";")
                    ],
                )
            ],
        )

        cronjob = aws_events.Rule(
            self,
            f"{id}-scheduled-event",
            enabled=True,
            schedule=aws_events.Schedule.cron(minute="0", hour="6"),  # pylint: disable=no-value-for-parameter
        )
        cronjob.add_target(aws_events_targets.LambdaFunction(function))
Example #14
    def __init__(self, scope: core.Construct, id: str,
                 lambda_notifications: aws_lambda.IFunction, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        poller = get_lambda(self,
                            f"{id}-lambda-poller",
                            code=f"lib/stacks/{id}/lambdas",
                            handler="whois_poller.handler",
                            environment={
                                "LAMBDA_NOTIFICATIONS":
                                lambda_notifications.function_name,
                                "WHOIS_DOMAINS": env["WHOIS_DOMAINS"],
                                "WHOISXMLAPI_KEY": env["WHOISXMLAPI_KEY"],
                            })
        lambda_notifications.grant_invoke(poller)

        cronjob = aws_events.Rule(
            self,
            f"{id}-scheduled-event",
            enabled=True,
            schedule=aws_events.Schedule.cron(hour="23", minute="30"),  # pylint: disable=no-value-for-parameter
        )
        cronjob.add_target(aws_events_targets.LambdaFunction(handler=poller))
Example #15
    def __init__(self, scope: core.Construct, id: str,
                 downstream_function: _lambda.IFunction) -> None:
        super().__init__(scope, id)

        self._table = _ddb.Table(self,
                                 'HitsTable',
                                 partition_key=_ddb.Attribute(
                                     name='path',
                                     type=_ddb.AttributeType.STRING))

        functions_path = os.path.join(os.path.dirname(__file__), '../lambda')
        self._handler = _lambda.Function(
            self,
            'HitCountHandler',
            runtime=_lambda.Runtime.PYTHON_3_7,
            handler='hitcount.handler',
            code=_lambda.Code.from_asset(functions_path),
            environment={
                'DOWNSTREAM_FUNCTION': downstream_function.function_name,
                'HITS_TABLE_NAME': self._table.table_name,
            })
        downstream_function.grant_invoke(self.handler)
        self._table.grant_read_write_data(self.handler)
Example #16
    def create_version(self, lambda_function: aws_lambda.IFunction,
                       version: str):
        # MARK: this feature is deprecated and each stack deploy operation
        # can create only a single version. We might have to deploy lambda stack twice
        # to add version 2 that is required by the tests. See
        # https://github.com/aws/aws-cdk/issues/5334
        # https://github.com/aws/aws-cdk/commit/c94ce62bc71387d031cf291dbce40243feb50e83
        replace_in_file(
            "lambda/echo.py",
            r"ECHO_EVENT_VERSION = [0-9]*",
            "ECHO_EVENT_VERSION = {}".format(version),
        )

        current_version = lambda_function.add_version(name=version)
        self.attach_alias_to_version(version_obj=current_version,
                                     alias="Version{}Alias".format(version))
Example #17
    def __init__(self, scope: core.Construct, id: str, stage: str,
                 api: _api_gw.IRestApi, fn: _lambda.IFunction,
                 table: _ddb.ITable, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        gw = dict(self.node.try_get_context("gateway"))

        ###
        # Custom Metrics
        ###

        # Gather the % of lambda invocations that error in past 5 mins
        lambda_error_perc = cloud_watch.MathExpression(
            expression="e / i * 100",
            label="% of invocations that errored, last 5 mins",
            using_metrics={
                "i": fn.metric(metric_name="Invocations", statistic="sum"),
                "e": fn.metric(metric_name="Errors", statistic="sum"),
            },
            period=core.Duration.minutes(5))

        # note: throttled requests are not counted in total num of invocations
        lambda_throttled_perc = cloud_watch.MathExpression(
            expression="t / (i + t) * 100",
            label="% of throttled requests, last 30 mins",
            using_metrics={
                "i": fn.metric(metric_name="Invocations", statistic="sum"),
                "t": fn.metric(metric_name="Throttles", statistic="sum"),
            },
            period=core.Duration.minutes(5))

        dashboard = cloud_watch.Dashboard(self,
                                          id="CloudWatchDashBoard",
                                          dashboard_name="Serverlesslens")

        dashboard.add_widgets(
            cloud_watch.GraphWidget(title="Requests",
                                    width=8,
                                    left=[
                                        self.metric_for_api_gw(
                                            api_name=gw["gw_name"],
                                            stage=stage,
                                            metric_name="Count",
                                            label="# Requests",
                                            stat="sum")
                                    ]),
            cloud_watch.GraphWidget(
                title="API GW Latency",
                width=8,
                stacked=True,
                left=[
                    self.metric_for_api_gw(api_name=gw["gw_name"],
                                           stage=stage,
                                           metric_name="Latency",
                                           label="API Latency p50",
                                           stat="p50"),
                    self.metric_for_api_gw(api_name=gw["gw_name"],
                                           stage=stage,
                                           metric_name="Latency",
                                           label="API Latency p90",
                                           stat="p90"),
                    self.metric_for_api_gw(api_name=gw["gw_name"],
                                           stage=stage,
                                           metric_name="Latency",
                                           label="API Latency p99",
                                           stat="p99")
                ]),
            cloud_watch.GraphWidget(
                title="API GW Errors",
                width=8,
                stacked=True,
                left=[
                    self.metric_for_api_gw(api_name=gw["gw_name"],
                                           stage=stage,
                                           metric_name="4XXError",
                                           label="4XX Errors",
                                           stat="sum"),
                    self.metric_for_api_gw(api_name=gw["gw_name"],
                                           stage=stage,
                                           metric_name="5XXError",
                                           label="5XX Errors",
                                           stat="sum")
                ]),
            cloud_watch.GraphWidget(title="Dynamo Lambda Error %",
                                    width=8,
                                    left=[lambda_error_perc]),
            cloud_watch.GraphWidget(title="Dynamo Lambda Duration",
                                    width=8,
                                    stacked=True,
                                    left=[
                                        fn.metric_duration(statistic="p50"),
                                        fn.metric_duration(statistic="p90"),
                                        fn.metric_duration(statistic="p99")
                                    ]),
            cloud_watch.GraphWidget(title="Dynamo Lambda Throttle %",
                                    width=8,
                                    left=[lambda_throttled_perc]),
            cloud_watch.GraphWidget(
                title="DynamoDB Latency",
                width=8,
                stacked=True,
                left=[
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "GetItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "UpdateItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "PutItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "DeleteItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "Query"
                        }),
                ]),
            cloud_watch.GraphWidget(
                title="DynamoDB Consumed Read/Write Units",
                width=8,
                stacked=False,
                left=[
                    table.metric(metric_name="ConsumedReadCapacityUnits"),
                    table.metric(metric_name="ConsumedWriteCapacityUnits")
                ]),
            cloud_watch.GraphWidget(
                title="DynamoDB Throttles",
                width=8,
                stacked=True,
                left=[
                    table.metric(metric_name="ReadThrottleEvents",
                                 statistic="sum"),
                    table.metric(metric_name="WriteThrottleEvents",
                                 statistic="sum")
                ]),
        )
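The dashboard above builds its API Gateway widgets through a metric_for_api_gw helper that is not part of the excerpt. A plausible sketch, assuming it wraps a standard AWS/ApiGateway metric with the API name and stage as dimensions:

    # illustrative sketch of the metric_for_api_gw helper used by the dashboard above
    def metric_for_api_gw(self, api_name: str, stage: str, metric_name: str,
                          label: str, stat: str = "sum") -> cloud_watch.Metric:
        return cloud_watch.Metric(
            namespace="AWS/ApiGateway",
            metric_name=metric_name,
            dimensions={"ApiName": api_name, "Stage": stage},
            label=label,
            statistic=stat,
            period=core.Duration.minutes(5),
        )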
Example #18
    def __init__(self, scope: core.Construct, id: str,  # pylint: disable=redefined-builtin
                 lambda_notifications: aws_lambda.IFunction, **kwargs) -> None:

        super().__init__(scope, id, **kwargs)

        # CloudWatch LogGroup and Stream to store 'since' timestamp value
        since_log_group = aws_logs.LogGroup(
            self,
            f"{id}-log-group",
            log_group_name=f"{id}-timestamps",
            retention=DEFAULT_LOG_RETENTION,
            removal_policy=core.RemovalPolicy.DESTROY,
        )
        since_log_group.add_stream(
            f"{id}-log-stream",
            log_stream_name=since_log_group.log_group_name,
        )

        # Lambda shared code
        lambda_code = code_from_path(path=f"lib/stacks/{id}/lambdas")

        # Lambda create_doc (and layers): build document file and store to S3 bucket
        bucket = get_bucket(self, f"{id}-bucket")

        lambda_create_doc = get_lambda(
            self,
            id + "-create-document",
            code=lambda_code,
            handler="create_doc.handler",
            environment={
                "DOCUMENT_BUCKET": bucket.bucket_name,
            },
            layers=[get_layer(self, layer_name=layer, prefix=id)
                    for layer in ("readability", "requests_oauthlib")],
            timeout=core.Duration.minutes(5),  # pylint: disable=no-value-for-parameter
        )
        bucket.grant_write(lambda_create_doc)

        # Lambda send_to_kindle: invoked when new documents dropped into S3 bucket,
        # deliver document as email attachment via lambda_notifications
        lambda_send_to_kindle = get_lambda(
            self,
            id + "-send-to-kindle",
            code=lambda_code,
            handler="send_to_kindle.handler",
            environment={
                "KINDLE_EMAIL": env["KINDLE_EMAIL"],
                "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
                "DOCUMENT_SRC_BUCKET": bucket.bucket_name,
                "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
                "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
            }
        )
        bucket.add_event_notification(
            event=aws_s3.EventType.OBJECT_CREATED_PUT,
            dest=aws_s3_notifications.LambdaDestination(lambda_send_to_kindle),
        )
        lambda_notifications.grant_invoke(lambda_send_to_kindle)
        aws_iam.Policy(
            self,
            f"{id}-mail-attachment-policy",
            roles=[lambda_notifications.role],
            statements=[
                aws_iam.PolicyStatement(
                    actions=["s3:GetObject"],
                    resources=[f"{bucket.bucket_arn}/*"]
                )
            ],
        )

        # Lambda reader: fetch new articles from Pocket and fan-out trigger create_doc Lambda
        lambda_reader = get_lambda(
            self,
            id + "-reader",
            code=lambda_code,
            handler="reader.handler",
            environment={
                "LAMBDA_PUBLISHER": lambda_create_doc.function_name,
                "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
                "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
                "SINCE_LOG_GROUP": since_log_group.log_group_name,
            },
        )
        since_log_group.grant(
            lambda_reader,
            "logs:GetLogEvents",
            "logs:PutLogEvents",
        )
        lambda_create_doc.grant_invoke(lambda_reader)

        # Cloudwatch cronjob event to check for new articles every hour
        cronjob = aws_events.Rule(
            self,
            f"{id}-scheduled-event",
            enabled=True,
            schedule=aws_events.Schedule.cron(minute="0"),  # pylint: disable=no-value-for-parameter
        )
        cronjob.add_target(aws_events_targets.LambdaFunction(handler=lambda_reader))
Example #19
    def __init__(
            self,
            scope: core.Construct,
            id: str,  # pylint: disable=redefined-builtin
            lambda_notifications: aws_lambda.IFunction,
            **kwargs) -> None:

        super().__init__(scope, id, **kwargs)

        # CloudWatch LogGroup and Stream to store 'since' timestamp value
        since_log_group = aws_logs.LogGroup(
            self,
            f"{id}-log-group",
            log_group_name=f"{id}-timestamps",
            retention=DEFAULT_LOG_RETENTION,
            removal_policy=core.RemovalPolicy.DESTROY,
        )
        since_log_group.add_stream(
            f"{id}-log-stream",
            log_stream_name=since_log_group.log_group_name,
        )

        # Lambda shared code
        lambda_code = code_from_path(path=f"lib/stacks/{id}/lambdas")

        # Lambda create_epub (and layers): build epub file and store to S3 bucket
        epub_bucket = get_bucket(self, f"{id}-epub-bucket")

        lambda_create_epub = get_lambda(
            self,
            id + "-create-epub",
            code=lambda_code,
            handler="create_epub.handler",
            environment={
                "EPUB_BUCKET": epub_bucket.bucket_name,
            },
            layers=[
                get_layer(self, layer_name=layer, prefix=id)
                for layer in ("pandoc", "html2text", "requests_oauthlib")
            ],
            timeout=core.Duration.minutes(5),  # pylint: disable=no-value-for-parameter
        )
        epub_bucket.grant_write(lambda_create_epub)

        # Lambda send_to_kindle: invoked when new MOBI dropped into S3 bucket, deliver MOBI as
        # email attachment via lambda_notifications
        mobi_bucket = get_bucket(self, f"{id}-mobi-bucket")

        lambda_send_to_kindle = get_lambda(
            self,
            id + "-send-to-kindle",
            code=lambda_code,
            handler="send_to_kindle.handler",
            environment={
                "KINDLE_EMAIL": env["KINDLE_EMAIL"],
                "LAMBDA_NOTIFICATIONS": lambda_notifications.function_name,
                "MOBI_SRC_BUCKET": mobi_bucket.bucket_name,
                "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
                "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
            })
        mobi_bucket.add_event_notification(
            event=aws_s3.EventType.OBJECT_CREATED_PUT,
            dest=aws_s3_notifications.LambdaDestination(lambda_send_to_kindle),
        )
        lambda_notifications.grant_invoke(lambda_send_to_kindle)
        aws_iam.Policy(
            self,
            f"{id}-mail-attachment-policy",
            roles=[lambda_notifications.role],
            statements=[
                aws_iam.PolicyStatement(
                    actions=["s3:GetObject"],
                    resources=[f"{mobi_bucket.bucket_arn}/*"])
            ],
        )

        # Lambda reader: fetch new articles from Pocket and fan-out trigger create_epub Lambda
        lambda_reader = get_lambda(
            self,
            id + "-reader",
            code=lambda_code,
            handler="reader.handler",
            environment={
                "LAMBDA_PUBLISHER": lambda_create_epub.function_name,
                "POCKET_CONSUMER_KEY": env["POCKET_CONSUMER_KEY"],
                "POCKET_SECRET_TOKEN": env["POCKET_SECRET_TOKEN"],
                "SINCE_LOG_GROUP": since_log_group.log_group_name,
            },
        )
        since_log_group.grant(
            lambda_reader,
            "logs:GetLogEvents",
            "logs:PutLogEvents",
        )
        lambda_create_epub.grant_invoke(lambda_reader)

        # Fargate task: run dockerized `kindlegen` to parse EPUB to MOBI,
        # triggered by trigger_ecs_task Lambda
        # https://medium.com/@piyalikamra/s3-event-based-trigger-mechanism-to-start-ecs-far-gate-tasks-without-lambda-32f57ed10b0d
        cluster, vpc = get_fargate_cluster(self, id)

        mem_limit = "512"
        task = get_fargate_task(self, id, mem_limit)
        aws_iam.Policy(
            self,
            f"{id}-bucket-policy",
            roles=[task.task_role],
            statements=[
                aws_iam.PolicyStatement(
                    actions=["s3:GetObject"],
                    resources=[f"{epub_bucket.bucket_arn}/*"]),
                aws_iam.PolicyStatement(
                    actions=["s3:PutObject"],
                    resources=[f"{mobi_bucket.bucket_arn}/*"]),
            ],
        )

        container = get_fargate_container(self, id, task, mem_limit)

        # Lambda trigger_ecs_task: trigger Fargate task when new EPUB file is dropped into epub_bucket
        lambda_trigger_ecs_task = get_lambda(
            self,
            f"{id}-trigger-ecs-task",
            code=lambda_code,
            handler="trigger_ecs_task.handler",
            environment={
                "ECS_CLUSTER": cluster.cluster_arn,
                "ECS_CLUSTER_SECURITY_GROUP": vpc.vpc_default_security_group,
                "ECS_CLUSTER_SUBNET": vpc.public_subnets[0].subnet_id,
                "ECS_CONTAINER": container.container_name,
                "ECS_TASK": task.task_definition_arn,
                "MOBI_DEST_BUCKET": mobi_bucket.bucket_name,
            },
        )
        epub_bucket.add_event_notification(
            event=aws_s3.EventType.OBJECT_CREATED_PUT,
            dest=aws_s3_notifications.LambdaDestination(
                lambda_trigger_ecs_task),
        )
        aws_iam.Policy(
            self,
            f"{id}-lambda-trigger-policy",
            roles=[lambda_trigger_ecs_task.role],
            statements=[
                aws_iam.PolicyStatement(
                    actions=["ecs:RunTask"],
                    resources=[task.task_definition_arn],
                ),
                aws_iam.PolicyStatement(
                    actions=["iam:PassRole"],
                    resources=[
                        task.execution_role.role_arn,
                        task.task_role.role_arn,
                    ],
                )
            ],
        )

        # Cloudwatch cronjob event to check for new articles every hour
        cronjob = aws_events.Rule(
            self,
            f"{id}-scheduled-event",
            enabled=True,
            schedule=aws_events.Schedule.cron(minute="0"),  # pylint: disable=no-value-for-parameter
        )
        cronjob.add_target(
            aws_events_targets.LambdaFunction(handler=lambda_reader))