Example #1
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # DynamoDB Table
        table = dynamo_db.Table(self,
                                "Hits",
                                partition_key=dynamo_db.Attribute(
                                    name="path",
                                    type=dynamo_db.AttributeType.STRING))

        # defines an AWS Lambda resource
        dynamo_lambda = _lambda.Function(
            self,
            "DynamoLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,  # execution environment
            handler="lambda.handler",  # file is "lambda", function is "handler"
            code=_lambda.Code.from_asset(
                "lambda_fns"),  # Code loaded from the lambda_fns dir
            environment={'HITS_TABLE_NAME': table.table_name})

        # grant the lambda role read/write permissions to our table
        table.grant_read_write_data(dynamo_lambda)

        # defines an API Gateway Http API resource backed by our "dynamoLambda" function.
        api = api_gw.HttpApi(self,
                             'Endpoint',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=dynamo_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example #2
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Lambda Function that takes in text and returns a polly voice synthesis
        polly_lambda = _lambda.Function(self,
                                        'pollyHandler',
                                        runtime=_lambda.Runtime.PYTHON_3_8,
                                        code=_lambda.Code.from_asset('lambda_fns'),
                                        handler='polly.handler')

        # https://docs.aws.amazon.com/polly/latest/dg/api-permissions-reference.html
        # https://docs.aws.amazon.com/translate/latest/dg/translate-api-permissions-ref.html
        polly_policy = iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            resources=['*'],
            actions=['translate:TranslateText', 'polly:SynthesizeSpeech'])
        polly_lambda.add_to_role_policy(polly_policy)

        # defines an API Gateway Http API resource backed by our "polly_lambda" function.
        api = api_gw.HttpApi(self,
                             'Polly',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=polly_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example #3
    def add_endpoint(self, bucket: s3.Bucket, fn: Function):
        # create the queue
        queue = sqs.Queue(self,
                          f'{fn.id_prefix}Queue',
                          dead_letter_queue=sqs.DeadLetterQueue(
                              max_receive_count=5,
                              queue=sqs.Queue(
                                  self,
                                  f'{fn.id_prefix}DLQ',
                                  queue_name=f'{fn.queue_name}-dlq')),
                          queue_name=fn.queue_name)

        # create the receiver function
        # add the queue url as an environment variable
        receiver_function = lambda_.Function(
            self,
            f'{fn.id_prefix}ReceiverFunction',
            code=fn.function_code,
            environment={'QUEUE_URL': queue.queue_url},
            function_name=f'{fn.function_name_prefix}-receiver',
            handler=fn.receiver_function_handler,
            layers=[fn.function_dependencies_layer],
            # memory_size=256,
            runtime=lambda_.Runtime.PYTHON_3_8)

        # allow the receiver function to enqueue messages
        queue.grant_send_messages(receiver_function)

        # route requests to the receiver lambda
        self.api.add_routes(integration=apigw.LambdaProxyIntegration(
            handler=receiver_function),
                            methods=[fn.api_method],
                            path=fn.api_path)

        # create the handler function
        # add the bucket name as an environment variable
        handler_function = lambda_.Function(
            self,
            f'{fn.id_prefix}HandlerFunction',
            code=fn.function_code,
            environment={'BUCKET_NAME': bucket.bucket_name},
            function_name=f'{fn.function_name_prefix}-handler',
            handler=fn.handler_function_handler,
            layers=[fn.function_dependencies_layer],
            # memory_size=256,
            runtime=lambda_.Runtime.PYTHON_3_8)

        # add the queue as a trigger for the handler function
        handler_function.add_event_source(SqsEventSource(queue))

        # allow the handler function to access the bucket
        bucket.grant_read_write(handler_function)
Example #4
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        memory: int = 1024,
        timeout: int = 30,
        concurrent: Optional[int] = None,
        permissions: Optional[List[iam.PolicyStatement]] = None,
        layer_arn: Optional[str] = None,
        env: dict = {},
        code_dir: str = "./",
        **kwargs: Any,
    ) -> None:
        """Define stack."""
        super().__init__(scope, id, **kwargs)

        permissions = permissions or []

        lambda_env = DEFAULT_ENV.copy()
        lambda_env.update(env)

        lambda_function = aws_lambda.Function(
            self,
            f"{id}-lambda",
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=self.create_package(code_dir),
            handler="handler.handler",
            memory_size=memory,
            reserved_concurrent_executions=concurrent,
            timeout=core.Duration.seconds(timeout),
            environment=lambda_env,
        )

        for perm in permissions:
            lambda_function.add_to_role_policy(perm)

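        # the second-to-last segment of the layer ARN is the layer name; use it as the construct id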
        if layer_arn:
            lambda_function.add_layers(
                aws_lambda.LayerVersion.from_layer_version_arn(
                    self,
                    layer_arn.split(":")[-2], layer_arn))

        api = apigw.HttpApi(
            self,
            f"{id}-endpoint",
            default_integration=apigw.LambdaProxyIntegration(
                handler=lambda_function),
        )
        core.CfnOutput(self, "Endpoint", value=api.url)
Example #5
    def __init__(self, scope: core.Construct, stack_id: str, *, api_name: str,
                 domain_name: str, functions_stacks: List[FunctionsStack],
                 subdomain: str, **kwargs):
        super().__init__(scope, stack_id, **kwargs)

        hosted_zone = route53.HostedZone.from_lookup(self,
                                                     'HostedZone',
                                                     domain_name=domain_name)

        subdomain = f'{subdomain}.{hosted_zone.zone_name}'

        certificate = acm.DnsValidatedCertificate(self,
                                                  'Certificate',
                                                  domain_name=subdomain,
                                                  hosted_zone=hosted_zone)

        self.api = apigw.HttpApi(self, 'HttpApi', api_name=api_name)

        domain_name = apigw.CfnDomainName(
            self,
            'DomainName',
            domain_name=subdomain,
            domain_name_configurations=[
                apigw.CfnDomainName.DomainNameConfigurationProperty(
                    certificate_arn=certificate.certificate_arn)
            ])

        # add an alias to the hosted zone
        route53.ARecord(self,
                        'ARecord',
                        record_name=subdomain,
                        target=route53.RecordTarget.from_alias(
                            ApiGatewayV2Domain(domain_name)),
                        zone=hosted_zone)

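        # map the HTTP API's $default stage onto the custom domain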
        mapping = apigw.CfnApiMapping(self,
                                      'ApiMapping',
                                      api_id=self.api.http_api_id,
                                      domain_name=domain_name.ref,
                                      stage='$default')

        mapping.add_depends_on(domain_name)

        for functions_stack in functions_stacks:
            self.api.add_routes(integration=apigw.LambdaProxyIntegration(
                handler=functions_stack.receiver_function),
                                methods=[functions_stack.api_method],
                                path=functions_stack.api_path)
Example #6
def add_lambda(self, name: str, table, apigw, method, authorizer):
    mylambda = _lambda.Function(
        self,
        f'{name}-ApiLambda',
        handler=f'{name}.handler',
        runtime=_lambda.Runtime.PYTHON_3_7,
        code=_lambda.Code.from_asset('backend/lambda'),
    )
    mylambda.add_environment("TABLE_NAME", table.table_name)
    table.grant_read_write_data(mylambda)
    lambda_integration = _apigw2.LambdaProxyIntegration(handler=mylambda)
    routes = apigw.add_routes(
        path=f'/note/{name}',
        methods=[method],
        integration=lambda_integration,
    )
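    # attach the JWT authorizer by overriding the underlying CfnRoute for each route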
    for route in routes:
        route_cfn = route.node.default_child
        route_cfn.authorizer_id = authorizer.ref
        route_cfn.authorization_type = 'JWT'
Example #7
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # DynamoDB Table
        table = dynamo_db.Table(self,
                                "CircuitBreakerTable",
                                partition_key=dynamo_db.Attribute(
                                    name="id",
                                    type=dynamo_db.AttributeType.STRING),
                                removal_policy=core.RemovalPolicy.DESTROY)

        # install node dependencies for lambdas
        lambda_folder = os.path.dirname(
            os.path.realpath(__file__)) + "/../lambda_fns"
        subprocess.check_call("npm i".split(), cwd=lambda_folder)
        subprocess.check_call("npm run build".split(), cwd=lambda_folder)

        # defines an AWS Lambda resource with unreliable code
        unreliable_lambda = _lambda.Function(
            self,
            "UnreliableLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,
            handler="unreliable.handler",
            code=_lambda.Code.from_asset("lambda_fns"),
            # Code loaded from the lambda_fns dir
            environment={'CIRCUITBREAKER_TABLE': table.table_name})

        # grant the lambda role read/write permissions to our table
        table.grant_read_write_data(unreliable_lambda)

        # defines an API Gateway Http API resource backed by our "unreliable_lambda" function.
        api = api_gw.HttpApi(self,
                             'CircuitBreakerGateway',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=unreliable_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example #8
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # EFS needs to be setup in a VPC
        vpc = ec2.Vpc(self, 'Vpc', max_azs=2)

        # Create a file system in EFS to store information
        fs = efs.FileSystem(self,
                            'FileSystem',
                            vpc=vpc,
                            removal_policy=core.RemovalPolicy.DESTROY)

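        # access point the Lambda function will mount at /mnt/msg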
        access_point = fs.add_access_point(
            'AccessPoint',
            create_acl=efs.Acl(owner_gid='1001',
                               owner_uid='1001',
                               permissions='750'),
            path="/export/lambda",
            posix_user=efs.PosixUser(gid="1001", uid="1001"))

        efs_lambda = _lambda.Function(
            self,
            'rdsProxyHandler',
            runtime=_lambda.Runtime.PYTHON_3_8,
            code=_lambda.Code.from_asset('lambdas'),
            handler='message_wall.lambda_handler',
            vpc=vpc,
            filesystem=_lambda.FileSystem.from_efs_access_point(
                access_point, '/mnt/msg'))

        # defines an API Gateway Http API resource backed by our "efs_lambda" function.
        api = api_gw.HttpApi(self,
                             'EFS Lambda',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=efs_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example #9
    def __init__(self, scope: core.Construct, id: str,
            secret_key: str,
            custom_domain: Optional[str] = None,
            hosted_zone_id: Optional[str] = None,
            hosted_zone_name: Optional[str] = None,
            **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        self.files_bucket = s3.Bucket(self, 'files-bucket',
            bucket_name='once-shared-files',
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
            encryption=s3.BucketEncryption.S3_MANAGED,
            removal_policy=core.RemovalPolicy.DESTROY)

        self.files_table = dynamodb.Table(self, 'once-files-table',
            table_name='once-files',
            partition_key=dynamodb.Attribute(name='id', type=dynamodb.AttributeType.STRING),
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            removal_policy=core.RemovalPolicy.DESTROY)

        self.api = apigw.HttpApi(self, 'once-api', api_name='once-api')

        api_url = self.api.url
        if custom_domain is not None:
            api_url = f'https://{custom_domain}/'

        core.CfnOutput(self, 'base-url', value=api_url)

        self.get_upload_ticket_function = lambda_.Function(self, 'get-upload-ticket-function',
            function_name='once-get-upload-ticket',
            description='Returns a pre-signed request to share a file',
            runtime=lambda_.Runtime.PYTHON_3_7,
            code=make_python_zip_bundle(os.path.join(BASE_PATH, 'get-upload-ticket')),
            handler='handler.on_event',
            log_retention=LOG_RETENTION,
            environment={
                'APP_URL': api_url,
                'FILES_TABLE_NAME': self.files_table.table_name,
                'FILES_BUCKET': self.files_bucket.bucket_name,
                'SECRET_KEY': secret_key
            })

        self.files_bucket.grant_put(self.get_upload_ticket_function)
        self.files_table.grant_read_write_data(self.get_upload_ticket_function)

        self.download_and_delete_function = lambda_.Function(self, 'download-and-delete-function',
            function_name='once-download-and-delete',
            description='Serves a file from S3 and deletes it as soon as it has been successfully transferred',
            runtime=lambda_.Runtime.PYTHON_3_7,
            code=lambda_.Code.from_asset(os.path.join(BASE_PATH, 'download-and-delete')),
            handler='handler.on_event',
            log_retention=LOG_RETENTION,
            environment={
                'FILES_BUCKET': self.files_bucket.bucket_name,
                'FILES_TABLE_NAME': self.files_table.table_name
            })

        self.files_bucket.grant_read(self.download_and_delete_function)
        self.files_bucket.grant_delete(self.download_and_delete_function)
        self.files_table.grant_read_write_data(self.download_and_delete_function)

        get_upload_ticket_integration = apigw.LambdaProxyIntegration(handler=self.get_upload_ticket_function)
        self.api.add_routes(
            path='/',
            methods=[apigw.HttpMethod.GET],
            integration=get_upload_ticket_integration)

        download_and_delete_integration = apigw.LambdaProxyIntegration(handler=self.download_and_delete_function)
        self.api.add_routes(
            path='/{entry_id}/{filename}',
            methods=[apigw.HttpMethod.GET],
            integration=download_and_delete_integration)

        self.cleanup_function = lambda_.Function(self, 'delete-served-files-function',
            function_name='once-delete-served-files',
            description='Deletes files from S3 once they have been marked as deleted in DynamoDB',
            runtime=lambda_.Runtime.PYTHON_3_7,
            code=lambda_.Code.from_asset(os.path.join(BASE_PATH, 'delete-served-files')),
            handler='handler.on_event',
            log_retention=LOG_RETENTION,
            environment={
                'FILES_BUCKET': self.files_bucket.bucket_name,
                'FILES_TABLE_NAME': self.files_table.table_name
            })

        self.files_bucket.grant_delete(self.cleanup_function)
        self.files_table.grant_read_write_data(self.cleanup_function)

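        # run the cleanup function on a daily schedule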
        events.Rule(self, 'once-delete-served-files-rule',
            schedule=events.Schedule.rate(core.Duration.hours(24)),
            targets=[targets.LambdaFunction(self.cleanup_function)])

        if custom_domain is not None:
            self.custom_domain_stack = CustomDomainStack(self, 'custom-domain',
                api=self.api,
                domain_name=custom_domain,
                hosted_zone_id=hosted_zone_id,
                hosted_zone_name=hosted_zone_name)
Example #10
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # -----------------------------------------------------------------------------------------------------------
        # The Simple Webservice Logic - This is what we will be monitoring
        #
        # API GW HTTP API, Lambda Fn and DynamoDB
        # https://github.com/cdk-patterns/serverless/tree/master/the-simple-webservice
        # -----------------------------------------------------------------------------------------------------------

        # DynamoDB Table
        table = dynamo_db.Table(
            self,
            "Hits",
            partition_key=dynamo_db.Attribute(
                name="path", type=dynamo_db.AttributeType.STRING),
            billing_mode=dynamo_db.BillingMode.PAY_PER_REQUEST)

        # defines an AWS Lambda resource
        dynamo_lambda = _lambda.Function(
            self,
            "DynamoLambdaHandler",
            runtime=_lambda.Runtime.NODEJS_12_X,  # execution environment
            handler="lambda.handler",  # file is "lambda", function is "handler"
            code=_lambda.Code.from_asset(
                "lambda_fns"),  # Code loaded from the lambda_fns dir
            environment={'HITS_TABLE_NAME': table.table_name})

        # grant the lambda role read/write permissions to our table
        table.grant_read_write_data(dynamo_lambda)

        # defines an API Gateway Http API resource backed by our "dynamoLambda" function.
        api = api_gw.HttpApi(self,
                             'HttpAPI',
                             default_integration=api_gw.LambdaProxyIntegration(
                                 handler=dynamo_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)

        # -----------------------------------------------------------------------------------------------------------
        # Monitoring Logic Starts Here
        #
        # This is everything we need to understand the state of our system:
        # - custom metrics
        # - cloudwatch alarms
        # - custom cloudwatch dashboard
        # -----------------------------------------------------------------------------------------------------------

        # SNS Topic so we can hook things into our alerts e.g. email
        error_topic = sns.Topic(self, 'theBigFanTopic')

        ###
        # Custom Metrics
        ###

        api_gw_4xx_error_percentage = cloud_watch.MathExpression(
            expression="m1/m2*100",
            label="% API Gateway 4xx Errors",
            using_metrics={
                "m1":
                self.metric_for_api_gw(api.http_api_id, '4XXError',
                                       '4XX Errors', 'sum'),
                "m2":
                self.metric_for_api_gw(api.http_api_id, 'Count', '# Requests',
                                       'sum'),
            },
            period=core.Duration.minutes(5))

        # Gather the % of lambda invocations that error in past 5 mins
        lambda_error_perc = cloud_watch.MathExpression(
            expression="e / i * 100",
            label="% of invocations that errored, last 5 mins",
            using_metrics={
                "i":
                dynamo_lambda.metric(metric_name="Invocations",
                                     statistic="sum"),
                "e":
                dynamo_lambda.metric(metric_name="Errors", statistic="sum"),
            },
            period=core.Duration.minutes(5))

        # note: throttled requests are not counted in total num of invocations
        lambda_throttled_perc = cloud_watch.MathExpression(
            expression="t / (i + t) * 100",
            label="% of throttled requests, last 30 mins",
            using_metrics={
                "i":
                dynamo_lambda.metric(metric_name="Invocations",
                                     statistic="sum"),
                "t":
                dynamo_lambda.metric(metric_name="Throttles", statistic="sum"),
            },
            period=core.Duration.minutes(5))

        # UserErrors appears to be reported at the account level rather than per table, so it
        # will likely show as 0 when scoped to a table; merging it with SystemErrors keeps
        # this effectively a system errors count
        dynamo_db_total_errors = cloud_watch.MathExpression(
            expression="m1 + m2",
            label="DynamoDB Errors",
            using_metrics={
                "m1": table.metric_user_errors(),
                "m2": table.metric_system_errors(),
            },
            period=core.Duration.minutes(5))

        # Rather than have 2 alerts, let's create one aggregate metric
        dynamo_db_throttles = cloud_watch.MathExpression(
            expression="m1 + m2",
            label="DynamoDB Throttles",
            using_metrics={
                "m1":
                table.metric(metric_name="ReadThrottleEvents",
                             statistic="sum"),
                "m2":
                table.metric(metric_name="WriteThrottleEvents",
                             statistic="sum"),
            },
            period=core.Duration.minutes(5))
        ###
        # Alarms
        ###

        # Api Gateway

        # 4xx are user errors so a large volume indicates a problem
        cloud_watch.Alarm(self,
                          id="API Gateway 4XX Errors > 1%",
                          metric=api_gw_4xx_error_percentage,
                          threshold=1,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # 5xx are internal server errors so we want 0 of these
        cloud_watch.Alarm(self,
                          id="API Gateway 5XX Errors > 0",
                          metric=self.metric_for_api_gw(api_id=api.http_api_id,
                                                        metric_name="5XXError",
                                                        label="5XX Errors",
                                                        stat="p99"),
                          threshold=0,
                          period=core.Duration.minutes(5),
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        cloud_watch.Alarm(self,
                          id="API p99 latency alarm >= 1s",
                          metric=self.metric_for_api_gw(api_id=api.http_api_id,
                                                        metric_name="Latency",
                                                        label="API GW Latency",
                                                        stat="p99"),
                          threshold=1000,
                          period=core.Duration.minutes(5),
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # Lambda

        # 2% of Dynamo Lambda invocations erroring
        cloud_watch.Alarm(self,
                          id="Dynamo Lambda 2% Error",
                          metric=lambda_error_perc,
                          threshold=2,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # 1% of Lambda invocations taking longer than 1 second
        cloud_watch.Alarm(self,
                          id="Dynamo Lambda p99 Long Duration (>1s)",
                          metric=dynamo_lambda.metric_duration(),
                          period=core.Duration.minutes(5),
                          threshold=1000,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          statistic="p99",
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # 2% of our lambda invocations are throttled
        cloud_watch.Alarm(self,
                          id="Dynamo Lambda 2% Throttled",
                          metric=lambda_throttled_perc,
                          threshold=2,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # DynamoDB

        # DynamoDB interactions are being throttled - indicates under-provisioned capacity
        cloud_watch.Alarm(self,
                          id="DynamoDB Table Reads/Writes Throttled",
                          metric=dynamo_db_throttles,
                          threshold=1,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

        # There should be 0 DynamoDB errors
        cloud_watch.Alarm(self,
                          id="DynamoDB Errors > 0",
                          metric=dynamo_db_total_errors,
                          threshold=0,
                          evaluation_periods=6,
                          datapoints_to_alarm=1,
                          treat_missing_data=cloud_watch.TreatMissingData.NOT_BREACHING) \
            .add_alarm_action(actions.SnsAction(error_topic))

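        # dashboard pulling the API Gateway, Lambda and DynamoDB metrics into a single view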
        dashboard = cloud_watch.Dashboard(self, id="CloudWatchDashBoard")
        dashboard.add_widgets(
            cloud_watch.GraphWidget(title="Requests",
                                    width=8,
                                    left=[
                                        self.metric_for_api_gw(
                                            api_id=api.http_api_id,
                                            metric_name="Count",
                                            label="# Requests",
                                            stat="sum")
                                    ]),
            cloud_watch.GraphWidget(
                title="API GW Latency",
                width=8,
                stacked=True,
                left=[
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="Latency",
                                           label="API Latency p50",
                                           stat="p50"),
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="Latency",
                                           label="API Latency p90",
                                           stat="p90"),
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="Latency",
                                           label="API Latency p99",
                                           stat="p99")
                ]),
            cloud_watch.GraphWidget(
                title="API GW Errors",
                width=8,
                stacked=True,
                left=[
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="4XXError",
                                           label="4XX Errors",
                                           stat="sum"),
                    self.metric_for_api_gw(api_id=api.http_api_id,
                                           metric_name="5XXError",
                                           label="5XX Errors",
                                           stat="sum")
                ]),
            cloud_watch.GraphWidget(title="Dynamo Lambda Error %",
                                    width=8,
                                    left=[lambda_error_perc]),
            cloud_watch.GraphWidget(
                title="Dynamo Lambda Duration",
                width=8,
                stacked=True,
                left=[
                    dynamo_lambda.metric_duration(statistic="p50"),
                    dynamo_lambda.metric_duration(statistic="p90"),
                    dynamo_lambda.metric_duration(statistic="p99")
                ]),
            cloud_watch.GraphWidget(title="Dynamo Lambda Throttle %",
                                    width=8,
                                    left=[lambda_throttled_perc]),
            cloud_watch.GraphWidget(
                title="DynamoDB Latency",
                width=8,
                stacked=True,
                left=[
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "GetItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "UpdateItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "PutItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "DeleteItem"
                        }),
                    table.metric_successful_request_latency(
                        dimensions={
                            "TableName": table.table_name,
                            "Operation": "Query"
                        }),
                ]),
            cloud_watch.GraphWidget(
                title="DynamoDB Consumed Read/Write Units",
                width=8,
                stacked=False,
                left=[
                    table.metric(metric_name="ConsumedReadCapacityUnits"),
                    table.metric(metric_name="ConsumedWriteCapacityUnits")
                ]),
            cloud_watch.GraphWidget(
                title="DynamoDB Throttles",
                width=8,
                stacked=True,
                left=[
                    table.metric(metric_name="ReadThrottleEvents",
                                 statistic="sum"),
                    table.metric(metric_name="WriteThrottleEvents",
                                 statistic="sum")
                ]),
        )
Example #11
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # RDS needs to be setup in a VPC
        vpc = ec2.Vpc(self, 'Vpc', max_azs=2)

        # We need this security group to add an ingress rule and allow our lambda to query the proxy
        lambda_to_proxy_group = ec2.SecurityGroup(self, 'Lambda to RDS Proxy Connection', vpc=vpc)

        # We need this security group to allow our proxy to query our MySQL Instance
        db_connection_group = ec2.SecurityGroup(self, 'Proxy to DB Connection', vpc=vpc)
        db_connection_group.add_ingress_rule(db_connection_group,ec2.Port.tcp(3306), 'allow db connection')
        db_connection_group.add_ingress_rule(lambda_to_proxy_group, ec2.Port.tcp(3306), 'allow lambda connection')

        db_credentials_secret = secrets.Secret(self, 'DBCredentialsSecret',
                                               secret_name=id+'-rds-credentials',
                                               generate_secret_string=secrets.SecretStringGenerator(
                                                   secret_string_template="{\"username\":\"syscdk\"}",
                                                   exclude_punctuation=True,
                                                   include_space=False,
                                                   generate_string_key="password"
                                               ))

        ssm.StringParameter(self, 'DBCredentialsArn',
                            parameter_name='rds-credentials-arn',
                            string_value=db_credentials_secret.secret_arn)

        # MySQL DB Instance (delete protection turned off because pattern is for learning.)
        # re-enable delete protection for a real implementation
        rds_instance = rds.DatabaseInstance(self,
                                            'DBInstance',
                                            engine=rds.DatabaseInstanceEngine.MYSQL,
                                            master_username=
                                            db_credentials_secret.secret_value_from_json('username').to_string(),
                                            master_user_password=
                                            db_credentials_secret.secret_value_from_json('password'),
                                            instance_type=
                                            ec2.InstanceType.of(ec2.InstanceClass.BURSTABLE2, ec2.InstanceSize.SMALL),
                                            vpc=vpc,
                                            removal_policy=core.RemovalPolicy.DESTROY,
                                            deletion_protection=False,
                                            security_groups=[db_connection_group])

        # Create an RDS proxy
        proxy = rds_instance.add_proxy(id+'-proxy',
                                       secret=db_credentials_secret,
                                       debug_logging=True,
                                       vpc=vpc,
                                       security_groups=[db_connection_group])

        # Workaround for bug where TargetGroupName is not set but required
        target_group = proxy.node.find_child('ProxyTargetGroup')
        target_group.add_property_override('TargetGroupName', 'default')

        rds_lambda = _lambda.Function(self, 'rdsProxyHandler',
                                      runtime=_lambda.Runtime.NODEJS_12_X,
                                      code=_lambda.Code.from_asset('lambda_fns/rds'),
                                      handler='rdsLambda.handler',
                                      vpc=vpc,
                                      security_groups=[lambda_to_proxy_group],
                                      environment={
                                          "PROXY_ENDPOINT": proxy.endpoint,
                                          "RDS_SECRET_NAME": id+'-rds-credentials'
                                      })

        db_credentials_secret.grant_read(rds_lambda)

        # defines an API Gateway Http API resource backed by our "rds_lambda" function.
        api = api_gw.HttpApi(self, 'Endpoint',
                             default_integration=api_gw.LambdaProxyIntegration(handler=rds_lambda))

        core.CfnOutput(self, 'HTTP API Url', value=api.url)
Example #12
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        diaries_dynamodb_table = aws_dynamodb.Table(self, 'Diaries',
            partition_key=aws_dynamodb.Attribute(
                name='id',
                type=aws_dynamodb.AttributeType.STRING)
        )

        api = aws_apigatewayv2.HttpApi(
            self, 'HttpApi', api_name='AutomaticTranslationDiary',
            cors_preflight=aws_apigatewayv2.CorsPreflightOptions(
                allow_headers=['Content-Type'],
                allow_methods=[
                    aws_apigatewayv2.HttpMethod.GET,
                    aws_apigatewayv2.HttpMethod.POST,
                    aws_apigatewayv2.HttpMethod.OPTIONS
                ],
                allow_origins=['*'],
            ),
        )

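        # helper that creates a Lambda function with table access and Translate/Polly permissions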
        def create_function(handler: str):
            function = aws_lambda.Function(
                self, handler.replace('.', '-'),
                function_name=handler.replace('.', '-'),
                runtime=aws_lambda.Runtime.PYTHON_3_8,
                code=aws_lambda.Code.from_asset('lambda/src'),
                handler=handler)

            diaries_dynamodb_table.grant_read_write_data(function)
            function.add_environment(
                'DYNAMODB_NAME_DIARIES',
                diaries_dynamodb_table.table_name)
            function.add_to_role_policy(
                aws_iam.PolicyStatement(
                    resources=['*'],
                    actions=['translate:TranslateText', 'polly:SynthesizeSpeech']))

            return function

        api.add_routes(
            path='/diaries/{lang}',
            methods=[aws_apigatewayv2.HttpMethod.POST],
            integration=aws_apigatewayv2.LambdaProxyIntegration(
                handler=create_function('diary_handler.save')
            ))

        api.add_routes(
            path='/diaries',
            methods=[aws_apigatewayv2.HttpMethod.GET],
            integration=aws_apigatewayv2.LambdaProxyIntegration(
                handler=create_function('diary_handler.diaries')
            ))

        api.add_routes(
            path='/diaries/{diaryId}/speech/{lang}',
            methods=[aws_apigatewayv2.HttpMethod.GET],
            integration=aws_apigatewayv2.LambdaProxyIntegration(
                handler=create_function('diary_handler.speech')
            ))
Example #13
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        memory: int = 1024,
        timeout: int = 30,
        concurrent: Optional[int] = None,
        permissions: Optional[List[iam.PolicyStatement]] = None,
        layer_arn: Optional[str] = None,
        env: dict = {},
        code_dir: str = "./",
        **kwargs: Any,
    ) -> None:
        """Define stack."""
        super().__init__(scope, id, **kwargs)

        permissions = permissions or []

        lambda_env = DEFAULT_ENV.copy()
        lambda_env.update(env)

        lambda_function = aws_lambda.Function(
            self,
            f"{id}-lambda",
            runtime=aws_lambda.Runtime.PYTHON_3_7,
            code=self.create_package(code_dir),
            handler="handler.handler",
            memory_size=memory,
            reserved_concurrent_executions=concurrent,
            timeout=core.Duration.seconds(timeout),
            environment=lambda_env,
        )

        # # If you use dynamodb mosaic backend you should add IAM roles to read/put Item and maybe create Table
        # permissions.append(
        #     iam.PolicyStatement(
        #         actions=[
        #             "dynamodb:GetItem",
        #             "dynamodb:PutItem",
        #             "dynamodb:CreateTable",
        #             "dynamodb:Scan",
        #             "dynamodb:BatchWriteItem",
        #         ],
        #         resources=[f"arn:aws:dynamodb:{self.region}:{self.account}:table/*"],
        #     )
        # )

        for perm in permissions:
            lambda_function.add_to_role_policy(perm)

        if layer_arn:
            lambda_function.add_layers(
                aws_lambda.LayerVersion.from_layer_version_arn(
                    self,
                    layer_arn.split(":")[-2], layer_arn))

        api = apigw.HttpApi(
            self,
            f"{id}-endpoint",
            default_integration=apigw.LambdaProxyIntegration(
                handler=lambda_function),
        )
        core.CfnOutput(self, "Endpoint", value=api.url)
Example #14
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # The code that defines your stack goes here

        wsgi_function = lmb_py.PythonFunction(self,
                                              "wsgi-function",
                                              entry="./lambdas/wsgi")

        wsgi_integration = apigw_v2.LambdaProxyIntegration(
            handler=wsgi_function,
            payload_format_version=apigw_v2.PayloadFormatVersion.VERSION_1_0,
        )

        asgi_function = lmb_py.PythonFunction(
            self,
            "asgi-function",
            entry="./lambdas/asgi",
        )

        asgi_integration = apigw_v2.LambdaProxyIntegration(
            handler=asgi_function)

        self.http_api = apigw_v2.HttpApi(self,
                                         "http-api",
                                         default_integration=asgi_integration)

        self.http_api.add_routes(
            path="/wsgi",
            methods=[apigw_v2.HttpMethod.GET],
            integration=wsgi_integration,
        )

        self.http_api.add_routes(
            path="/wsgi/{proxy+}",
            methods=[apigw_v2.HttpMethod.GET],
            integration=wsgi_integration,
        )

        self.http_api_url = core.CfnOutput(self,
                                           "RestApiUrl",
                                           value=self.http_api.url)

        self.graphql_api = appsync.GraphqlApi(
            self,
            "graphql-api",
            name="notes-example-api",
            schema=appsync.Schema.from_asset("./graphql/schema.graphql"),
        )

        core.CfnOutput(self, "GraphQLUrl", value=self.graphql_api.graphql_url)

        core.CfnOutput(self, "GraphQlApiKey", value=self.graphql_api.api_key)

        graphql_handler = lmb_py.PythonFunction(
            self,
            "graphql-handler",
            entry="./lambdas/graphql",
            runtime=lmb.Runtime.PYTHON_3_8,
        )

        data_source = self.graphql_api.add_lambda_data_source(
            "lambdaDatasource", graphql_handler)

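        # attach resolvers for the schema's Query and Mutation fields to the Lambda data source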
        data_source.create_resolver(type_name="Query",
                                    field_name="getNoteById")

        data_source.create_resolver(type_name="Query", field_name="listNotes")

        data_source.create_resolver(type_name="Mutation",
                                    field_name="createNote")

        data_source.create_resolver(type_name="Mutation",
                                    field_name="deleteNote")

        dynamo_table = dynamodb.Table(
            self,
            "notes-table",
            billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
            partition_key=dynamodb.Attribute(
                name="id", type=dynamodb.AttributeType.STRING),
        )

        dynamo_table.grant_read_write_data(graphql_handler)

        graphql_handler.add_environment("NOTES_TABLE", dynamo_table.table_name)